This article collects typical usage examples of the Java method org.apache.hadoop.hdfs.server.protocol.BlockCommand.getTargets. If you are wondering what BlockCommand.getTargets does, how to call it, or what real code that uses it looks like, the curated examples below may help. You can also read more about the enclosing class, org.apache.hadoop.hdfs.server.protocol.BlockCommand.
Two code examples of BlockCommand.getTargets are shown below; both are unit tests that round-trip a BlockCommand through its protobuf representation and verify that the targets survive the conversion.
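As background for the examples, here is a minimal sketch of how a consumer of a BlockCommand typically walks its contents: getBlocks() and getTargets() return parallel arrays, so targets[i] lists the destination DataNodes for blocks[i]. Only the BlockCommand accessors below are real Hadoop APIs; handleTransfer and transferBlock are hypothetical helpers introduced for illustration.

import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.protocol.BlockCommand;
import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;

public class BlockCommandSketch {
  // Hypothetical handler: getBlocks() and getTargets() are parallel arrays,
  // so targets[i] holds the replica destinations for blocks[i].
  static void handleTransfer(BlockCommand cmd) {
    if (cmd.getAction() != DatanodeProtocol.DNA_TRANSFER) {
      return; // only transfer commands carry replication targets
    }
    Block[] blocks = cmd.getBlocks();
    DatanodeInfo[][] targets = cmd.getTargets();
    for (int i = 0; i < blocks.length; i++) {
      for (DatanodeInfo target : targets[i]) {
        transferBlock(cmd.getBlockPoolId(), blocks[i], target);
      }
    }
  }

  // Hypothetical stand-in for the real transfer logic.
  static void transferBlock(String bpid, Block block, DatanodeInfo target) {
    System.out.println("transfer " + block + " in pool " + bpid
        + " to " + target);
  }
}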
Example 1: testConvertBlockCommand
import org.apache.hadoop.hdfs.server.protocol.BlockCommand; // import the class that declares getTargets
@Test
public void testConvertBlockCommand() {
  Block[] blocks = new Block[] { new Block(21), new Block(22) };
  // One target list per block: one replica target for block 21, two for
  // block 22.
  DatanodeInfo[][] dnInfos = new DatanodeInfo[][] { new DatanodeInfo[1],
      new DatanodeInfo[2] };
  dnInfos[0][0] = DFSTestUtil.getLocalDatanodeInfo();
  dnInfos[1][0] = DFSTestUtil.getLocalDatanodeInfo();
  dnInfos[1][1] = DFSTestUtil.getLocalDatanodeInfo();
  // Per-target storage IDs and storage types, parallel to dnInfos.
  String[][] storageIDs = { { "s00" }, { "s10", "s11" } };
  StorageType[][] storageTypes = { { StorageType.DEFAULT },
      { StorageType.DEFAULT, StorageType.DEFAULT } };
  BlockCommand bc = new BlockCommand(DatanodeProtocol.DNA_TRANSFER, "bp1",
      blocks, dnInfos, storageTypes, storageIDs);
  // Round-trip through the protobuf representation.
  BlockCommandProto bcProto = PBHelper.convert(bc);
  BlockCommand bc2 = PBHelper.convert(bcProto);
  assertEquals(bc.getAction(), bc2.getAction());
  assertEquals(bc.getBlocks().length, bc2.getBlocks().length);
  Block[] blocks2 = bc2.getBlocks();
  for (int i = 0; i < blocks.length; i++) {
    assertEquals(blocks[i], blocks2[i]);
  }
  // getTargets() must return the same per-block target lists after the
  // round trip.
  DatanodeInfo[][] dnInfos2 = bc2.getTargets();
  assertEquals(dnInfos.length, dnInfos2.length);
  for (int i = 0; i < dnInfos.length; i++) {
    DatanodeInfo[] d1 = dnInfos[i];
    DatanodeInfo[] d2 = dnInfos2[i];
    assertEquals(d1.length, d2.length);
    for (int j = 0; j < d1.length; j++) {
      compare(d1[j], d2[j]);
    }
  }
}
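Note that although this variant builds the command with the six-argument constructor, supplying per-target StorageType and storage-ID arrays, the assertions only re-check the action, the blocks, and the targets returned by getTargets(); the storage metadata itself is not verified after the round trip.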
Example 2: testConvertBlockCommand
import org.apache.hadoop.hdfs.server.protocol.BlockCommand; // import the class that declares getTargets
@Test
public void testConvertBlockCommand() {
  Block[] blocks = new Block[] { new Block(21), new Block(22) };
  // One target list per block: one replica target for block 21, two for
  // block 22.
  DatanodeInfo[][] dnInfos = new DatanodeInfo[][] { new DatanodeInfo[1],
      new DatanodeInfo[2] };
  dnInfos[0][0] = DFSTestUtil.getLocalDatanodeInfo();
  dnInfos[1][0] = DFSTestUtil.getLocalDatanodeInfo();
  dnInfos[1][1] = DFSTestUtil.getLocalDatanodeInfo();
  // Four-argument constructor: no storage types or storage IDs.
  BlockCommand bc = new BlockCommand(DatanodeProtocol.DNA_TRANSFER, "bp1",
      blocks, dnInfos);
  // Round-trip through the protobuf representation.
  BlockCommandProto bcProto = PBHelper.convert(bc);
  BlockCommand bc2 = PBHelper.convert(bcProto);
  assertEquals(bc.getAction(), bc2.getAction());
  assertEquals(bc.getBlocks().length, bc2.getBlocks().length);
  Block[] blocks2 = bc2.getBlocks();
  for (int i = 0; i < blocks.length; i++) {
    assertEquals(blocks[i], blocks2[i]);
  }
  // Verify getTargets() survives the round trip unchanged.
  DatanodeInfo[][] dnInfos2 = bc2.getTargets();
  assertEquals(dnInfos.length, dnInfos2.length);
  for (int i = 0; i < dnInfos.length; i++) {
    DatanodeInfo[] d1 = dnInfos[i];
    DatanodeInfo[] d2 = dnInfos2[i];
    assertEquals(d1.length, d2.length);
    for (int j = 0; j < d1.length; j++) {
      compare(d1[j], d2[j]);
    }
  }
}
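Functionally this is the same round trip as Example 1 minus the storage metadata: the command is built with the four-argument constructor, and the verification of getTargets() is unchanged.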