This article collects typical usage examples of the Java class org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor. If you have been wondering what ColumnDescriptor is, what it is for, and how to use it, the curated code examples below should help.
The ColumnDescriptor class belongs to the org.apache.hadoop.hbase.thrift.generated package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
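Before the examples, here is a minimal, self-contained sketch of the class in action: it connects to an HBase Thrift1 gateway, builds a ColumnDescriptor for one column family, and creates a table with it. The host, port, table name, and family name are placeholder assumptions, not values taken from the examples below.

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class ColumnDescriptorDemo {
  public static void main(String[] args) throws Exception {
    // Placeholder host/port for a running HBase Thrift1 gateway.
    TTransport transport = new TSocket("localhost", 9090);
    transport.open();
    TProtocol protocol = new TBinaryProtocol(transport);
    Hbase.Client client = new Hbase.Client(protocol);

    // Describe a single column family "cf:"; fields left unset keep their
    // Thrift defaults (e.g. maxVersions defaults to 3, compression to "NONE").
    ColumnDescriptor cf = new ColumnDescriptor();
    cf.name = ByteBuffer.wrap("cf:".getBytes("UTF-8"));

    List<ColumnDescriptor> families = new ArrayList<ColumnDescriptor>();
    families.add(cf);
    client.createTable(ByteBuffer.wrap("demo_table".getBytes("UTF-8")), families);

    transport.close();
  }
}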
Example 1: colDescFromThrift
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * This utility method creates a new HBase HColumnDescriptor object based on
 * a Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return HColumnDescriptor
 * @throws IllegalArgument if the column name is empty
 */
public static HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
  BloomType bt = BloomType.valueOf(in.bloomFilterType);
  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte[] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
  HColumnDescriptor col = new HColumnDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      // A non-positive Thrift timeToLive means "no TTL", which HBase
      // represents as Integer.MAX_VALUE.
      .setTimeToLive(in.timeToLive > 0 ? in.timeToLive : Integer.MAX_VALUE)
      .setBloomFilterType(bt);
  return col;
}
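For illustration, a hedged usage sketch of this helper; the containing class name ThriftUtilities, the family name, and the printed fields are assumptions made for the sketch, not part of the original example.

// Hypothetical caller: build a Thrift ColumnDescriptor by hand, then convert it.
ColumnDescriptor thriftCol = new ColumnDescriptor();
thriftCol.name = ByteBuffer.wrap(Bytes.toBytes("info:"));
thriftCol.compression = "NONE";     // resolved via Compression.getCompressionAlgorithmByName
thriftCol.bloomFilterType = "ROW";  // parsed via BloomType.valueOf
thriftCol.maxVersions = 1;
thriftCol.timeToLive = -1;          // non-positive, so mapped to Integer.MAX_VALUE

HColumnDescriptor hcol = ThriftUtilities.colDescFromThrift(thriftCol);
System.out.println(hcol.getNameAsString() + " ttl=" + hcol.getTimeToLive()); // info ttl=2147483647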
Example 2: getColumnDescriptors
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * @return a List of ColumnDescriptors for use in creating a table. Has one
 *         default ColumnDescriptor and one ColumnDescriptor with fewer
 *         versions.
 */
private static List<ColumnDescriptor> getColumnDescriptors() {
  ArrayList<ColumnDescriptor> cDescriptors = new ArrayList<ColumnDescriptor>();
  // A default ColumnDescriptor
  ColumnDescriptor cDescA = new ColumnDescriptor();
  cDescA.name = columnAname;
  cDescriptors.add(cDescA);
  // A slightly customized ColumnDescriptor (only 2 versions); the arguments
  // are (name, maxVersions, compression, inMemory, bloomFilterType,
  // bloomFilterVectorSize, bloomFilterNbHashes, blockCacheEnabled, timeToLive).
  ColumnDescriptor cDescB = new ColumnDescriptor(columnBname, 2, "NONE",
      false, "NONE", 0, 0, false, -1);
  cDescriptors.add(cDescB);
  return cDescriptors;
}
Example 3: colDescFromThrift
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * This utility method creates a new HBase HColumnDescriptor object based on
 * a Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return HColumnDescriptor
 * @throws IllegalArgument if the column name is empty
 */
public static HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
  StoreFile.BloomType bt = StoreFile.BloomType.valueOf(in.bloomFilterType);
  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte[] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
  HColumnDescriptor col = new HColumnDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      // Unlike Example 1, this older variant passes timeToLive through
      // without mapping non-positive values to Integer.MAX_VALUE.
      .setTimeToLive(in.timeToLive)
      .setBloomFilterType(bt);
  return col;
}
Example 4: colDescFromThrift
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * This utility method creates a new HBase HColumnDescriptor object based on
 * a Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return HColumnDescriptor
 * @throws IllegalArgument if the column name is empty
 */
public static HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
  BloomType bt = BloomType.valueOf(in.bloomFilterType);
  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte[] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
  HColumnDescriptor col = new HColumnDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      // timeToLive is passed through unclamped in this variant.
      .setTimeToLive(in.timeToLive)
      .setBloomFilterType(bt);
  return col;
}
Example 5: colDescFromThrift
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * This utility method creates a new HBase HColumnDescriptor object based on
 * a Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return HColumnDescriptor
 * @throws IllegalArgument if the column name is empty
 */
public static HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  // Locale.ROOT keeps the lower-casing locale-independent.
  Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
  BloomType bt = BloomType.valueOf(in.bloomFilterType);
  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  // CellUtil.parseColumn replaces the KeyValue.parseColumn call seen in the
  // earlier examples.
  byte[] parsedName = CellUtil.parseColumn(Bytes.getBytes(in.name))[0];
  HColumnDescriptor col = new HColumnDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      .setTimeToLive(in.timeToLive > 0 ? in.timeToLive : Integer.MAX_VALUE)
      .setBloomFilterType(bt);
  return col;
}
Example 6: getColumnDescriptors
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * @return a List of ColumnDescriptors for use in creating a table. Has one
 *         default ColumnDescriptor and one ColumnDescriptor with fewer
 *         versions.
 */
private static List<ColumnDescriptor> getColumnDescriptors() {
  ArrayList<ColumnDescriptor> cDescriptors = new ArrayList<>(2);
  // A default ColumnDescriptor
  ColumnDescriptor cDescA = new ColumnDescriptor();
  cDescA.name = columnAname;
  cDescriptors.add(cDescA);
  // A slightly customized ColumnDescriptor (only 2 versions)
  ColumnDescriptor cDescB = new ColumnDescriptor(columnBname, 2, "NONE",
      false, "NONE", 0, 0, false, -1);
  cDescriptors.add(cDescB);
  return cDescriptors;
}
Example 7: colDescFromThrift
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * This utility method creates a new HBase HColumnDescriptor object based on
 * a Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return HColumnDescriptor
 * @throws IllegalArgument if the column name is empty
 */
public static HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
  StoreFile.BloomType bt = StoreFile.BloomType.valueOf(in.bloomFilterType);
  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte[] parsedName = KeyValue.parseColumn(Bytes.getBytes(in.name))[0];
  // This older variant uses the multi-argument HColumnDescriptor
  // constructor instead of the fluent setters.
  HColumnDescriptor col = new HColumnDescriptor(parsedName,
      in.maxVersions, comp.getName(), in.inMemory, in.blockCacheEnabled,
      in.timeToLive, bt.toString());
  return col;
}
Example 8: colDescFromHbase
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * This utility method creates a new Thrift ColumnDescriptor "struct" based on
 * an HBase HColumnDescriptor object.
 *
 * @param in HBase HColumnDescriptor object
 * @return Thrift ColumnDescriptor
 */
public static ColumnDescriptor colDescFromHbase(HColumnDescriptor in) {
  ColumnDescriptor col = new ColumnDescriptor();
  // The Thrift-side name carries the trailing family delimiter (':').
  col.name = ByteBuffer.wrap(Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY));
  col.maxVersions = in.getMaxVersions();
  col.compression = in.getCompression().toString();
  col.inMemory = in.isInMemory();
  col.blockCacheEnabled = in.isBlockCacheEnabled();
  col.bloomFilterType = in.getBloomFilterType().toString();
  // Note: timeToLive is not copied, which is why the client tests below
  // remark that the server does not provide it.
  return col;
}
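A hedged round-trip sketch of this helper (again assuming it lives in ThriftUtilities; the family name is a placeholder): converting an HColumnDescriptor to Thrift appends the family delimiter, so the family "cf" comes back as "cf:".

// Hypothetical round-trip: HBase family "cf" becomes Thrift name "cf:".
HColumnDescriptor hcol = new HColumnDescriptor("cf");
ColumnDescriptor thriftCol = ThriftUtilities.colDescFromHbase(hcol);
System.out.println(Bytes.toString(Bytes.getBytes(thriftCol.name))); // prints "cf:"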
Example 9: createTestTables
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
public static void createTestTables(Hbase.Iface handler) throws Exception {
  // Create/enable/disable/delete tables, ensure methods act correctly
  assertEquals(handler.getTableNames().size(), 0);
  handler.createTable(tableAname, getColumnDescriptors());
  assertEquals(handler.getTableNames().size(), 1);
  assertEquals(handler.getColumnDescriptors(tableAname).size(), 2);
  assertTrue(handler.isTableEnabled(tableAname));
  handler.createTable(tableBname, new ArrayList<ColumnDescriptor>());
  assertEquals(handler.getTableNames().size(), 2);
}
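The comment above also mentions disabling and deleting tables; a hedged sketch of what the companion teardown might look like with the same Thrift1 handler (the flow is an assumption, not the original test, though disableTable and deleteTable are part of the generated Hbase.Iface):

// Hypothetical teardown for the tables created above: a table must be
// disabled before it can be deleted through the Thrift1 interface.
handler.disableTable(tableAname);
handler.deleteTable(tableAname);
handler.disableTable(tableBname);
handler.deleteTable(tableBname);
assertEquals(handler.getTableNames().size(), 0);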
Example 10: getTable
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
@Test
public void getTable() throws Exception {
  TTransport ttransport = htDataSource.getTTransport();
  TProtocol tprotocol = new TBinaryProtocol(ttransport);
  Hbase.Client client = new Hbase.Client(tprotocol);
  //
  String TABLE_NAME = "UIH_OverallItemInfo";
  //
  long beg = System.currentTimeMillis();
  Map<ByteBuffer, ColumnDescriptor> hcds = client
      .getColumnDescriptors(ByteBufferHelper.toByteBuffer(TABLE_NAME));
  //
  StringBuilder buffer = new StringBuilder();
  buffer.append("[");
  buffer.append(TABLE_NAME);
  buffer.append("] ");
  int size = hcds.size();
  int i = 0;
  for (ColumnDescriptor column : hcds.values()) {
    buffer.append(ByteHelper.toString(column.getName()));
    buffer.append(", " + column.getTimeToLive()); // not provided by the server
    if (i < size - 1) {
      buffer.append(", ");
    }
    i++;
  }
  ttransport.close();
  //
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
  System.out.println(buffer);
}
Example 11: listTables
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
@Test
// 50 runs took 59110 ms.
public void listTables() throws Exception {
  int count = 50;
  //
  long beg = System.currentTimeMillis();
  for (int i = 0; i < count; i++) {
    TTransport ttransport = htDataSource.getTTransport();
    TProtocol tprotocol = new TBinaryProtocol(ttransport);
    Hbase.Client client = new Hbase.Client(tprotocol);
    List<ByteBuffer> list = client.getTableNames();
    for (ByteBuffer name : list) {
      StringBuilder buffer = new StringBuilder();
      buffer.append("[");
      buffer.append(ByteBufferHelper.toString(name));
      buffer.append("] ");
      Map<ByteBuffer, ColumnDescriptor> hcds = client
          .getColumnDescriptors(name);
      int size = hcds.size();
      int j = 0;
      for (ColumnDescriptor column : hcds.values()) {
        buffer.append(ByteHelper.toString(column.getName()));
        if (j < size - 1) {
          buffer.append(", ");
        }
        j++;
      }
      //
      System.out.println(buffer);
    }
    // Close once per connection, after all tables have been read.
    ttransport.close();
  }
  long end = System.currentTimeMillis();
  System.out.println(count + " times: " + (end - beg) + " ms.");
}
Example 12: getTable
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
@Test
public void getTable() throws Exception {
  HtSession thriftSession = htSessionFactory.openSession();
  //
  String TABLE_NAME = "UIH_OverallItemInfo";
  long beg = System.currentTimeMillis();
  Map<ByteBuffer, ColumnDescriptor> hcds = thriftSession
      .getColumnDescriptors(ByteBufferHelper.toByteBuffer(TABLE_NAME));
  //
  StringBuilder buffer = new StringBuilder();
  buffer.append("[");
  buffer.append(TABLE_NAME);
  buffer.append("] ");
  int size = hcds.size();
  int i = 0;
  for (ColumnDescriptor column : hcds.values()) {
    buffer.append(ByteHelper.toString(column.getName()));
    buffer.append(", " + column.getTimeToLive()); // not provided by the server
    if (i < size - 1) {
      buffer.append(", ");
    }
    i++;
  }
  htSessionFactory.closeSession();
  long end = System.currentTimeMillis();
  //
  System.out.println((end - beg) + " ms.");
  System.out.println(buffer);
}
Example 13: listTables
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
@Test
// 50 runs took 52851 ms.
public void listTables() throws Exception {
  int count = 50;
  //
  long beg = System.currentTimeMillis();
  for (int i = 0; i < count; i++) {
    HtSession htSession = htSessionFactory.openSession();
    System.out.println(htSession);
    //
    List<ByteBuffer> list = htSession.getTableNames();
    for (ByteBuffer name : list) {
      StringBuilder buffer = new StringBuilder();
      buffer.append("[");
      buffer.append(ByteBufferHelper.toString(name));
      buffer.append("] ");
      Map<ByteBuffer, ColumnDescriptor> hcds = htSession
          .getColumnDescriptors(name);
      int size = hcds.size();
      int j = 0;
      for (ColumnDescriptor column : hcds.values()) {
        buffer.append(ByteHelper.toString(column.getName()));
        if (j < size - 1) {
          buffer.append(", ");
        }
        j++;
      }
      //
      System.out.println(buffer);
    }
    htSessionFactory.closeSession();
  }
  long end = System.currentTimeMillis();
  System.out.println(count + " times: " + (end - beg) + " ms.");
}
Example 14: getTable
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * Gets the column descriptors of one table.
 *
 * @throws Exception
 */
@Test
public void getTable() throws Exception {
  String TABLE_NAME = "UIH_OverallItemInfo";
  //
  long beg = System.currentTimeMillis();
  Map<ByteBuffer, ColumnDescriptor> hcds = client
      .getColumnDescriptors(ByteBufferHelper.toByteBuffer(TABLE_NAME));
  //
  StringBuilder buffer = new StringBuilder();
  buffer.append("[");
  buffer.append(TABLE_NAME);
  buffer.append("] ");
  int size = hcds.size();
  int i = 0;
  for (ColumnDescriptor column : hcds.values()) {
    buffer.append(ByteHelper.toString(column.getName()));
    buffer.append(", " + column.getTimeToLive()); // not provided by the server
    if (i < size - 1) {
      buffer.append(", ");
    }
    i++;
  }
  //
  long end = System.currentTimeMillis();
  System.out.println((end - beg) + " ms.");
  System.out.println(buffer);
}
Example 15: listTables
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; // import the required package/class
/**
 * Lists all tables and their column descriptors.
 *
 * @throws Exception
 */
@Test
// 50 runs took 59193 ms.
public void listTables() throws Exception {
  int count = 50;
  //
  long beg = System.currentTimeMillis();
  for (int i = 0; i < count; i++) {
    TTransport ttransport = createTTransport();
    TProtocol tprotocol = createTBinaryProtocol(ttransport);
    Hbase.Client client = createClient(tprotocol);
    System.out.println(client);
    //
    List<ByteBuffer> list = client.getTableNames();
    for (ByteBuffer name : list) {
      StringBuilder buffer = new StringBuilder();
      buffer.append("[");
      buffer.append(ByteBufferHelper.toString(name));
      buffer.append("] ");
      Map<ByteBuffer, ColumnDescriptor> hcds = client
          .getColumnDescriptors(name);
      int size = hcds.size();
      int j = 0;
      for (ColumnDescriptor column : hcds.values()) {
        buffer.append(ByteHelper.toString(column.getName()));
        if (j < size - 1) {
          buffer.append(", ");
        }
        j++;
      }
      //
      System.out.println(buffer);
    }
    ttransport.close();
  }
  long end = System.currentTimeMillis();
  System.out.println(count + " times: " + (end - beg) + " ms.");
}