This article collects typical usage examples of the HBaseTestingUtility.getDataTestDir method from the Java class org.apache.hadoop.hbase.HBaseTestingUtility. If you are unsure what HBaseTestingUtility.getDataTestDir does or how to call it, the curated method examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hbase.HBaseTestingUtility.
The following presents 8 code examples of the HBaseTestingUtility.getDataTestDir method, sorted by popularity by default.
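Before the individual examples, here is a minimal, self-contained sketch of the usual getDataTestDir call pattern, assuming only methods that also appear in the examples below; the class name DataTestDirSketch and the subdirectory name "my-subdir" are illustrative, not taken from the examples. getDataTestDir() returns a Path rooted in the utility's local test data directory, and the getDataTestDir(String) overload resolves a named subdirectory under that root.

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;

public class DataTestDirSketch {
  public static void main(String[] args) throws Exception {
    HBaseTestingUtility htu = new HBaseTestingUtility();
    // Path of the per-test data directory on the local test filesystem.
    Path root = htu.getDataTestDir();
    // Overload that resolves a subdirectory under the test data root
    // ("my-subdir" is an illustrative name).
    Path sub = htu.getDataTestDir("my-subdir");
    FileSystem fs = htu.getTestFileSystem();
    System.out.println("test data root: " + root + ", exists: " + fs.exists(root));
    System.out.println("subdirectory path: " + sub);
    // Remove anything written under the test data directory.
    htu.cleanupTestDir();
  }
}

The examples that follow use the same pattern: obtain the test data directory (optionally with a subdirectory name), then treat it as an ordinary Hadoop Path for regions, version files, or cache files.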
Example 1: testMatchingTail
import org.apache.hadoop.hbase.HBaseTestingUtility; // import the package/class the method depends on
/**
* Test path compare and prefix checking.
* @throws IOException
*/
@Test
public void testMatchingTail() throws IOException {
HBaseTestingUtility htu = new HBaseTestingUtility();
final FileSystem fs = htu.getTestFileSystem();
Path rootdir = htu.getDataTestDir();
assertTrue(rootdir.depth() > 1);
Path partPath = new Path("a", "b");
Path fullPath = new Path(rootdir, partPath);
Path fullyQualifiedPath = fs.makeQualified(fullPath);
assertFalse(FSUtils.isMatchingTail(fullPath, partPath));
assertFalse(FSUtils.isMatchingTail(fullPath, partPath.toString()));
assertTrue(FSUtils.isStartingWithPath(rootdir, fullPath.toString()));
assertTrue(FSUtils.isStartingWithPath(fullyQualifiedPath, fullPath.toString()));
assertFalse(FSUtils.isStartingWithPath(rootdir, partPath.toString()));
assertFalse(FSUtils.isMatchingTail(fullyQualifiedPath, partPath));
assertTrue(FSUtils.isMatchingTail(fullyQualifiedPath, fullPath));
assertTrue(FSUtils.isMatchingTail(fullyQualifiedPath, fullPath.toString()));
assertTrue(FSUtils.isMatchingTail(fullyQualifiedPath, fs.makeQualified(fullPath)));
assertTrue(FSUtils.isStartingWithPath(rootdir, fullyQualifiedPath.toString()));
assertFalse(FSUtils.isMatchingTail(fullPath, new Path("x")));
assertFalse(FSUtils.isMatchingTail(new Path("x"), fullPath));
}
Example 2: testVersion
import org.apache.hadoop.hbase.HBaseTestingUtility; // import the package/class the method depends on
@Test
public void testVersion() throws DeserializationException, IOException {
HBaseTestingUtility htu = new HBaseTestingUtility();
final FileSystem fs = htu.getTestFileSystem();
final Path rootdir = htu.getDataTestDir();
assertNull(FSUtils.getVersion(fs, rootdir));
// Write out old format version file. See if we can read it in and convert.
Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);
FSDataOutputStream s = fs.create(versionFile);
final String version = HConstants.FILE_SYSTEM_VERSION;
s.writeUTF(version);
s.close();
assertTrue(fs.exists(versionFile));
FileStatus [] status = fs.listStatus(versionFile);
assertNotNull(status);
assertTrue(status.length > 0);
String newVersion = FSUtils.getVersion(fs, rootdir);
assertEquals(version.length(), newVersion.length());
assertEquals(version, newVersion);
// File will have been converted. Exercise the pb format
assertEquals(version, FSUtils.getVersion(fs, rootdir));
FSUtils.checkVersion(fs, rootdir, true);
}
Example 3: testReadAndWriteHRegionInfoFile
import org.apache.hadoop.hbase.HBaseTestingUtility; // import the package/class the method depends on
@Test
public void testReadAndWriteHRegionInfoFile() throws IOException, InterruptedException {
HBaseTestingUtility htu = new HBaseTestingUtility();
HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO;
Path basedir = htu.getDataTestDir();
FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(htu.getConfiguration());
// Create a region. That'll write the .regioninfo file.
HRegion r = HRegion.createHRegion(hri, basedir, htu.getConfiguration(),
fsTableDescriptors.get(TableName.META_TABLE_NAME));
// Get modtime on the file.
long modtime = getModTime(r);
HRegion.closeHRegion(r);
Thread.sleep(1001);
r = HRegion.openHRegion(basedir, hri, fsTableDescriptors.get(TableName.META_TABLE_NAME),
null, htu.getConfiguration());
// Ensure the file is not written for a second time.
long modtime2 = getModTime(r);
assertEquals(modtime, modtime2);
// Now load the file.
HRegionInfo deserializedHri = HRegionFileSystem.loadRegionInfoFileContent(
r.getRegionFileSystem().getFileSystem(), r.getRegionFileSystem().getRegionDir());
assertTrue(hri.equals(deserializedHri));
}
Example 4: setRootDirAndCleanIt
import org.apache.hadoop.hbase.HBaseTestingUtility; // import the package/class the method depends on
private String setRootDirAndCleanIt(final HBaseTestingUtility htu,
final String subdir)
throws IOException {
Path testdir = htu.getDataTestDir(subdir);
FileSystem fs = FileSystem.get(htu.getConfiguration());
if (fs.exists(testdir)) assertTrue(fs.delete(testdir, true));
FSUtils.setRootDir(htu.getConfiguration(), testdir);
return FSUtils.getRootDir(htu.getConfiguration()).toString();
}
Example 5: testDeleteAndExists
import org.apache.hadoop.hbase.HBaseTestingUtility; // import the package/class the method depends on
@Test
public void testDeleteAndExists() throws Exception {
HBaseTestingUtility htu = new HBaseTestingUtility();
Configuration conf = htu.getConfiguration();
conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);
FileSystem fs = FileSystem.get(conf);
FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY);
// check that a file can be created with those permissions
String file = UUID.randomUUID().toString();
Path p = new Path(htu.getDataTestDir(), "temptarget" + File.separator + file);
Path p1 = new Path(htu.getDataTestDir(), "temppath" + File.separator + file);
try {
FSDataOutputStream out = FSUtils.create(conf, fs, p, perms, null);
out.close();
assertTrue("The created file should be present", FSUtils.isExists(fs, p));
// delete the file with recursion as false. Only the file will be deleted.
FSUtils.delete(fs, p, false);
// Create another file
FSDataOutputStream out1 = FSUtils.create(conf, fs, p1, perms, null);
out1.close();
// delete the file with recursion as true; the file is removed
FSUtils.delete(fs, p1, true);
assertFalse("The created file should no longer be present", FSUtils.isExists(fs, p1));
// and then cleanup
} finally {
FSUtils.delete(fs, p, true);
FSUtils.delete(fs, p1, true);
}
}
Example 6: testShouldFlushMeta
import org.apache.hadoop.hbase.HBaseTestingUtility; // import the package/class the method depends on
public void testShouldFlushMeta() throws Exception {
// write an edit in the META and ensure the shouldFlush (that the periodic memstore
// flusher invokes) returns true after SYSTEM_CACHE_FLUSH_INTERVAL (even though
// the MEMSTORE_PERIODIC_FLUSH_INTERVAL is set to a higher value)
Configuration conf = new Configuration();
conf.setInt(HRegion.MEMSTORE_PERIODIC_FLUSH_INTERVAL, HRegion.SYSTEM_CACHE_FLUSH_INTERVAL * 10);
HBaseTestingUtility hbaseUtility = HBaseTestingUtility.createLocalHTU(conf);
Path testDir = hbaseUtility.getDataTestDir();
EnvironmentEdgeForMemstoreTest edge = new EnvironmentEdgeForMemstoreTest();
EnvironmentEdgeManager.injectEdge(edge);
edge.setCurrentTimeMillis(1234);
WALFactory wFactory = new WALFactory(conf, null, "1234");
HRegion meta = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO, testDir,
conf, HTableDescriptor.metaTableDescriptor(conf),
wFactory.getMetaWAL(HRegionInfo.FIRST_META_REGIONINFO.
getEncodedNameAsBytes()));
HRegionInfo hri = new HRegionInfo(TableName.valueOf("testShouldFlushMeta"),
Bytes.toBytes("row_0200"), Bytes.toBytes("row_0300"));
HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("testShouldFlushMeta"));
desc.addFamily(new HColumnDescriptor("foo".getBytes()));
HRegion r =
HRegion.createHRegion(hri, testDir, conf, desc,
wFactory.getWAL(hri.getEncodedNameAsBytes()));
HRegion.addRegionToMETA(meta, r);
edge.setCurrentTimeMillis(1234 + 100);
StringBuffer sb = new StringBuffer();
assertFalse(meta.shouldFlush(sb));
edge.setCurrentTimeMillis(edge.currentTime() + HRegion.SYSTEM_CACHE_FLUSH_INTERVAL + 1);
assertTrue(meta.shouldFlush(sb));
}
Example 7: testFileBucketCacheConfig
import org.apache.hadoop.hbase.HBaseTestingUtility; // import the package/class the method depends on
@Test
public void testFileBucketCacheConfig() throws IOException {
HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);
try {
Path p = new Path(htu.getDataTestDir(), "bc.txt");
FileSystem fs = FileSystem.get(this.conf);
fs.create(p).close();
this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);
doBucketCacheConfigTest();
} finally {
htu.cleanupTestDir();
}
}
Example 8: setUp
import org.apache.hadoop.hbase.HBaseTestingUtility; // import the package/class the method depends on
@Before
public void setUp() throws IOException {
TEST_UTIL = new HBaseTestingUtility();
testDir = TEST_UTIL.getDataTestDir("TestStoreFileRefresherChore");
FSUtils.setRootDir(TEST_UTIL.getConfiguration(), testDir);
}