@@ -22,6 +22,7 @@ import static org.junit.Assert.*;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
@@ -38,9 +39,13 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
+import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
+import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
+import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.ipc.RemoteException;
@@ -184,12 +189,14 @@ public class TestGetBlocks {
     CONF.setLong(DFSConfigKeys.DFS_BALANCER_GETBLOCKS_MIN_BLOCK_SIZE_KEY,
         DEFAULT_BLOCK_SIZE);
 
-    MiniDFSCluster cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(
-        REPLICATION_FACTOR).build();
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(CONF)
+        .numDataNodes(REPLICATION_FACTOR)
+        .storagesPerDatanode(4)
+        .build();
     try {
       cluster.waitActive();
-      // the third block will not be visible to getBlocks
-      long fileLen = 2 * DEFAULT_BLOCK_SIZE + 1;
+      // the last (13th) block will not be visible to getBlocks
+      long fileLen = 12 * DEFAULT_BLOCK_SIZE + 1;
       DFSTestUtil.createFile(cluster.getFileSystem(), new Path("/tmp.txt"),
           fileLen, REPLICATION_FACTOR, 0L);
 
@@ -197,12 +204,12 @@ public class TestGetBlocks {
       List<LocatedBlock> locatedBlocks;
       DatanodeInfo[] dataNodes = null;
       boolean notWritten;
+      final DFSClient dfsclient = new DFSClient(
+          DFSUtilClient.getNNAddress(CONF), CONF);
       do {
-        final DFSClient dfsclient = new DFSClient(
-            DFSUtilClient.getNNAddress(CONF), CONF);
         locatedBlocks = dfsclient.getNamenode()
             .getBlockLocations("/tmp.txt", 0, fileLen).getLocatedBlocks();
-        assertEquals(3, locatedBlocks.size());
+        assertEquals(13, locatedBlocks.size());
         notWritten = false;
         for (int i = 0; i < 2; i++) {
           dataNodes = locatedBlocks.get(i).getLocations();
@@ -216,6 +223,7 @@ public class TestGetBlocks {
           }
         }
       } while (notWritten);
+      dfsclient.close();
 
       // get RPC client to namenode
       InetSocketAddress addr = new InetSocketAddress("localhost",
@@ -226,7 +234,7 @@ public class TestGetBlocks {
       // get blocks of size fileLen from dataNodes[0]
       BlockWithLocations[] locs;
       locs = namenode.getBlocks(dataNodes[0], fileLen).getBlocks();
-      assertEquals(locs.length, 2);
+      assertEquals(locs.length, 12);
       assertEquals(locs[0].getStorageIDs().length, 2);
       assertEquals(locs[1].getStorageIDs().length, 2);
 
@@ -249,6 +257,8 @@ public class TestGetBlocks {
       // get blocks of size BlockSize from a non-existent datanode
       DatanodeInfo info = DFSTestUtil.getDatanodeInfo("1.2.3.4");
       getBlocksWithException(namenode, info, 2);
+
+      testBlockIterator(cluster);
     } finally {
       cluster.shutdown();
     }
@@ -266,6 +276,59 @@ public class TestGetBlocks {
     assertTrue(getException);
   }
 
+  /**
+   * BlockIterator iterates over all blocks belonging to DatanodeDescriptor
+   * through multiple storages.
+   * The test verifies that BlockIterator can be set to start iterating from
+   * a particular starting block index.
+   */
+  void testBlockIterator(MiniDFSCluster cluster) {
+    FSNamesystem ns = cluster.getNamesystem();
+    String dId = cluster.getDataNodes().get(0).getDatanodeUuid();
+    DatanodeDescriptor dnd = BlockManagerTestUtil.getDatanode(ns, dId);
+    DatanodeStorageInfo[] storages = dnd.getStorageInfos();
+    assertEquals("DataNode should have 4 storages", 4, storages.length);
+
+    Iterator<BlockInfo> dnBlockIt = null;
+    // check illegal start block number
+    try {
+      dnBlockIt = BlockManagerTestUtil.getBlockIterator(
+          cluster.getNamesystem(), dId, -1);
+      assertTrue("Should throw IllegalArgumentException", false);
+    } catch(IllegalArgumentException ei) {
+      // as expected
+    }
+    assertNull("Iterator should be null", dnBlockIt);
+
+    // form an array of all DataNode blocks
+    int numBlocks = dnd.numBlocks();
+    BlockInfo[] allBlocks = new BlockInfo[numBlocks];
+    int idx = 0;
+    for(DatanodeStorageInfo s : storages) {
+      Iterator<BlockInfo> storageBlockIt =
+          BlockManagerTestUtil.getBlockIterator(s);
+      while(storageBlockIt.hasNext()) {
+        allBlocks[idx++] = storageBlockIt.next();
+      }
+    }
+
+    // check iterator for every block as a starting point
+    for(int i = 0; i < allBlocks.length; i++) {
+      // create iterator starting from i
+      dnBlockIt = BlockManagerTestUtil.getBlockIterator(ns, dId, i);
+      assertTrue("Block iterator should have next block", dnBlockIt.hasNext());
+      // check iterator lists blocks in the desired order
+      for(int j = i; j < allBlocks.length; j++) {
+        assertEquals("Wrong block order", allBlocks[j], dnBlockIt.next());
+      }
+    }
+
+    // check start block number larger than numBlocks in the DataNode
+    dnBlockIt = BlockManagerTestUtil.getBlockIterator(
+        ns, dId, allBlocks.length + 1);
+    assertFalse("Iterator should not have next block", dnBlockIt.hasNext());
+  }
+
   @Test
   public void testBlockKey() {
     Map<Block, Long> map = new HashMap<Block, Long>();