本文整理汇总了Java中org.apache.hadoop.hbase.util.MD5Hash类的典型用法代码示例。如果您正苦于以下问题:Java MD5Hash类的具体用法?Java MD5Hash怎么用?Java MD5Hash使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
MD5Hash类属于org.apache.hadoop.hbase.util包,在下文中一共展示了MD5Hash类的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testCreateHRegionInfoName
import org.apache.hadoop.hbase.util.MD5Hash; //导入依赖的package包/类
@Test
public void testCreateHRegionInfoName() throws Exception {
  final String table = "tablename";
  final TableName tn = TableName.valueOf(table);
  final String start = "startkey";
  final byte[] sk = Bytes.toBytes(start);
  final String id = "id";

  // Legacy region name layout: <table>,<startKey>,<id>
  byte[] regionName = HRegionInfo.createRegionName(tn, sk, id, false);
  assertEquals(table + "," + start + "," + id, Bytes.toString(regionName));

  // New-style layout appends ".<md5 of legacy name>." to the legacy name.
  final String md5 = MD5Hash.getMD5AsHex(regionName);
  assertEquals(HRegionInfo.MD5_HEX_LENGTH, md5.length());
  regionName = HRegionInfo.createRegionName(tn, sk, id, true);
  assertEquals(table + "," + start + "," + id + "." + md5 + ".",
      Bytes.toString(regionName));
}
示例2: createTableFiles
import org.apache.hadoop.hbase.util.MD5Hash; //导入依赖的package包/类
/**
 * Builds a fake table directory layout under {@code rootDir}: 10 regions,
 * each with 3 column families, each family holding 5 empty hfiles. The
 * generated region, family and hfile names are recorded into the supplied
 * sets so the caller can verify them later.
 *
 * @return the table directory that was populated
 */
private Path createTableFiles(final Path rootDir, final String tableName,
    final Set<String> tableRegions, final Set<String> tableFamilies,
    final Set<String> tableHFiles) throws IOException {
  final Path tableDir = new Path(rootDir, tableName);
  for (int region = 0; region < 10; ++region) {
    // Region directory names are the MD5 hex of the region ordinal.
    final String regionName = MD5Hash.getMD5AsHex(Bytes.toBytes(region));
    tableRegions.add(regionName);
    final Path regionDir = new Path(tableDir, regionName);
    for (int family = 0; family < 3; ++family) {
      final String familyName = "f" + family;
      tableFamilies.add(familyName);
      final Path familyDir = new Path(regionDir, familyName);
      fs.mkdirs(familyDir);
      for (int i = 0; i < 5; ++i) {
        // HFile names are random UUIDs with the dashes stripped.
        final String hfileName = UUID.randomUUID().toString().replaceAll("-", "");
        tableHFiles.add(hfileName);
        fs.createNewFile(new Path(familyDir, hfileName));
      }
    }
  }
  return tableDir;
}
示例3: testCreateHRegionInfoName
import org.apache.hadoop.hbase.util.MD5Hash; //导入依赖的package包/类
@Test
public void testCreateHRegionInfoName() throws Exception {
  final String table = "tablename";
  final byte[] tn = Bytes.toBytes(table);
  final String start = "startkey";
  final byte[] sk = Bytes.toBytes(start);
  final String id = "id";

  // Old-format region name is simply <table>,<startKey>,<id>.
  byte[] regionName = HRegionInfo.createRegionName(tn, sk, id, false);
  String asString = Bytes.toString(regionName);
  assertEquals(table + "," + start + "," + id, asString);

  // New-format region name suffixes ".<md5 of the old name>."
  final String md5 = MD5Hash.getMD5AsHex(regionName);
  assertEquals(HRegionInfo.MD5_HEX_LENGTH, md5.length());
  regionName = HRegionInfo.createRegionName(tn, sk, id, true);
  asString = Bytes.toString(regionName);
  assertEquals(table + "," + start + "," + id + "." + md5 + ".", asString);
}
示例4: md5PrefixedKey
import org.apache.hadoop.hbase.util.MD5Hash; //导入依赖的package包/类
/**
 * Converts the given key to its decimal string form and prefixes it with
 * the MD5 hex hash of that string, so that otherwise-sequential keys are
 * spread uniformly across the key space.
 *
 * @param key the numeric key to encode
 * @return {@code "<md5-hex>-<key>"}
 */
public static String md5PrefixedKey(long key) {
  final String keyAsString = Long.toString(key);
  final String prefix = MD5Hash.getMD5AsHex(Bytes.toBytes(keyAsString));
  return prefix + "-" + keyAsString;
}
示例5: setKey
import org.apache.hadoop.hbase.util.MD5Hash; //导入依赖的package包/类
/**
 * Installs the given key on this context after validating that its encoded
 * length matches what the configured cipher expects, and caches the MD5 hex
 * of the encoded key material.
 *
 * @return this context, for chaining
 * @throws RuntimeException if the encoded key length does not match the cipher's
 */
public Context setKey(Key key) {
  Preconditions.checkNotNull(cipher, "Context does not have a cipher");
  final byte[] encoded = key.getEncoded();
  if (encoded.length != cipher.getKeyLength()) {
    throw new RuntimeException("Illegal key length, have=" + encoded.length +
      ", want=" + cipher.getKeyLength());
  }
  this.key = key;
  this.keyHash = MD5Hash.getMD5AsHex(encoded);
  return this;
}
示例6: testGet
import org.apache.hadoop.hbase.util.MD5Hash; //导入依赖的package包/类
@Test
public void testGet() {
  final MobFileName mobFileName = MobFileName.create(startKey, dateStr, uuid);
  // The file name is <md5-of-startKey><date><uuid>; getStartKey() returns the hash part.
  final String startKeyHex = MD5Hash.getMD5AsHex(startKey, 0, startKey.length);
  assertEquals(startKeyHex, mobFileName.getStartKey());
  assertEquals(dateStr, mobFileName.getDate());
  assertEquals(mobFileName.getFileName(), startKeyHex + dateStr + uuid);
}
示例7: loadData
import org.apache.hadoop.hbase.util.MD5Hash; //导入依赖的package包/类
/**
 * Writes {@code rows} rows into the table. Each row's value embeds the
 * current time plus the (descending) row counter, and its key is the MD5
 * hex of that value. WAL writes and auto-flush are disabled; all puts are
 * flushed once at the end.
 */
public void loadData(final HTable table, int rows, byte[]... families) throws IOException {
  final byte[] qualifier = Bytes.toBytes("q");
  table.setAutoFlush(false);
  // Count down so the value embeds rows-1, rows-2, ..., 0 — same as the
  // post-decrement loop this replaces.
  for (int row = rows - 1; row >= 0; --row) {
    final byte[] value = Bytes.add(Bytes.toBytes(System.currentTimeMillis()), Bytes.toBytes(row));
    final byte[] key = Bytes.toBytes(MD5Hash.getMD5AsHex(value));
    final Put put = new Put(key);
    put.setWriteToWAL(false);
    for (final byte[] family : families) {
      put.add(family, qualifier, value);
    }
    table.put(put);
  }
  table.flushCommits();
}