本文整理汇总了Java中org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker类的典型用法代码示例。如果您正苦于以下问题:Java DataNodeDiskChecker类的具体用法?Java DataNodeDiskChecker怎么用?Java DataNodeDiskChecker使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
DataNodeDiskChecker类属于org.apache.hadoop.hdfs.server.datanode.DataNode包,在下文中一共展示了DataNodeDiskChecker类的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testDataDirValidation
import org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker; //导入依赖的package包/类
/**
 * Verifies that {@link DataNode#checkStorageLocations} filters out storage
 * locations whose disk check fails. The mocked checker throws IOException on
 * the first two checkDir calls and succeeds on the third, so only the third
 * location (p3) should survive validation.
 */
@Test (timeout = 30000)
public void testDataDirValidation() throws Throwable {
  DataNodeDiskChecker diskChecker = mock(DataNodeDiskChecker.class);
  // Fail the disk check for p1 and p2; succeed for p3.
  doThrow(new IOException()).doThrow(new IOException()).doNothing()
    .when(diskChecker).checkDir(any(LocalFileSystem.class), any(Path.class));
  LocalFileSystem fs = mock(LocalFileSystem.class);
  AbstractList<StorageLocation> locations = new ArrayList<StorageLocation>();
  locations.add(StorageLocation.parse("file:/p1/"));
  locations.add(StorageLocation.parse("file:/p2/"));
  locations.add(StorageLocation.parse("file:/p3/"));
  List<StorageLocation> checkedLocations =
      DataNode.checkStorageLocations(locations, fs, diskChecker);
  assertEquals("number of valid data dirs", 1, checkedLocations.size());
  String validDir = checkedLocations.iterator().next().getFile().getPath();
  // assertThat(reason, actual, matcher): the computed value is the "actual"
  // argument; the expected constant belongs inside the matcher. The original
  // had them swapped, which produces a misleading failure message.
  assertThat("p3 should be valid", validDir, is(new File("/p3/").getPath()));
}
示例2: testDataDirValidation
import org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker; //导入依赖的package包/类
/**
 * Verifies that {@link DataNode#checkStorageLocations} filters out storage
 * locations whose disk check fails. The mocked checker throws IOException on
 * the first two checkDir calls and succeeds on the third, so only the third
 * location (p3) should survive validation.
 */
@Test(timeout = 30000)
public void testDataDirValidation() throws Throwable {
  DataNodeDiskChecker diskChecker = mock(DataNodeDiskChecker.class);
  // Fail the disk check for p1 and p2; succeed for p3.
  doThrow(new IOException()).doThrow(new IOException()).doNothing()
      .when(diskChecker)
      .checkDir(any(LocalFileSystem.class), any(Path.class));
  LocalFileSystem fs = mock(LocalFileSystem.class);
  AbstractList<StorageLocation> locations = new ArrayList<StorageLocation>();
  locations.add(StorageLocation.parse("file:/p1/"));
  locations.add(StorageLocation.parse("file:/p2/"));
  locations.add(StorageLocation.parse("file:/p3/"));
  List<StorageLocation> checkedLocations =
      DataNode.checkStorageLocations(locations, fs, diskChecker);
  assertEquals("number of valid data dirs", 1, checkedLocations.size());
  String validDir = checkedLocations.iterator().next().getFile().getPath();
  // assertThat(reason, actual, matcher): the computed value is the "actual"
  // argument; the expected constant belongs inside the matcher. The original
  // had them swapped, which produces a misleading failure message.
  assertThat("p3 should be valid", validDir, is(new File("/p3/").getPath()));
}
示例3: testGetDataDirsFromURIs
import org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker; //导入依赖的package包/类
/**
 * Checks that {@link DataNode#getDataDirsFromURIs} drops URIs whose directory
 * check throws and keeps the rest: with the first two checks mocked to fail,
 * only the third URI (p3) should remain in the returned list.
 */
@Test (timeout = 10000)
public void testGetDataDirsFromURIs() throws Throwable {
  LocalFileSystem localFs = mock(LocalFileSystem.class);
  DataNodeDiskChecker checker = mock(DataNodeDiskChecker.class);
  // First two directory checks fail with IOException; the third succeeds.
  doThrow(new IOException())
      .doThrow(new IOException())
      .doNothing()
      .when(checker).checkDir(any(LocalFileSystem.class), any(Path.class));
  Collection<URI> dataDirUris = Arrays.asList(
      new URI("file:/p1/"), new URI("file:/p2/"), new URI("file:/p3/"));
  List<File> validDirs = DataNode.getDataDirsFromURIs(dataDirUris, localFs, checker);
  assertEquals("number of valid data dirs", 1, validDirs.size());
  String onlyDir = validDirs.get(0).getPath();
  assertEquals("p3 should be valid", new File("/p3").getPath(), onlyDir);
}