本文整理汇总了Java中org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair类的典型用法代码示例。如果您正苦于以下问题:Java BytesBytesPair类的具体用法?Java BytesBytesPair怎么用?Java BytesBytesPair使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
BytesBytesPair类属于org.apache.hadoop.hbase.protobuf.generated.HBaseProtos包,在下文中一共展示了BytesBytesPair类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: parseFrom
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* @param pbBytes A pb serialized {@link FuzzyRowFilter} instance
* @return An instance of {@link FuzzyRowFilter} made from <code>bytes</code>
* @throws DeserializationException
* @see #toByteArray
*/
/**
 * Deserialize a {@link FuzzyRowFilter} from its protobuf wire form.
 *
 * @param pbBytes A pb serialized {@link FuzzyRowFilter} instance
 * @return An instance of {@link FuzzyRowFilter} made from <code>bytes</code>
 * @throws DeserializationException if the bytes do not parse as a FuzzyRowFilter message
 * @see #toByteArray
 */
public static FuzzyRowFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
  final FilterProtos.FuzzyRowFilter proto;
  try {
    proto = FilterProtos.FuzzyRowFilter.parseFrom(pbBytes);
  } catch (InvalidProtocolBufferException ipbe) {
    // Wrap the protobuf failure so callers only see the deserialization contract.
    throw new DeserializationException(ipbe);
  }
  // Pre-size the list: the message tells us exactly how many fuzzy key pairs it carries.
  ArrayList<Pair<byte[], byte[]>> pairs =
      new ArrayList<Pair<byte[], byte[]>>(proto.getFuzzyKeysDataCount());
  for (BytesBytesPair bbp : proto.getFuzzyKeysDataList()) {
    // first = fuzzy key bytes, second = fuzzy key meta (mask) bytes
    pairs.add(new Pair<byte[], byte[]>(
        bbp.getFirst().toByteArray(), bbp.getSecond().toByteArray()));
  }
  return new FuzzyRowFilter(pairs);
}
示例2: parseFrom
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* @param pbBytes A pb serialized {@link FuzzyRowFilter} instance
* @return An instance of {@link FuzzyRowFilter} made from <code>bytes</code>
* @throws DeserializationException
* @see #toByteArray
*/
/**
 * Rebuild a {@link FuzzyRowFilter} from protobuf-serialized bytes.
 *
 * @param pbBytes A pb serialized {@link FuzzyRowFilter} instance
 * @return An instance of {@link FuzzyRowFilter} made from <code>bytes</code>
 * @throws DeserializationException if <code>pbBytes</code> is not a valid pb message
 * @see #toByteArray
 */
public static FuzzyRowFilter parseFrom(final byte [] pbBytes)
throws DeserializationException {
  FilterProtos.FuzzyRowFilter message;
  try {
    message = FilterProtos.FuzzyRowFilter.parseFrom(pbBytes);
  } catch (InvalidProtocolBufferException cause) {
    // Surface parse errors through the deserialization contract.
    throw new DeserializationException(cause);
  }
  final int total = message.getFuzzyKeysDataCount();
  ArrayList<Pair<byte[], byte[]>> keys = new ArrayList<Pair<byte[], byte[]>>(total);
  int idx = 0;
  while (idx < total) {
    BytesBytesPair entry = message.getFuzzyKeysData(idx);
    // first = fuzzy key bytes, second = the per-byte meta/mask bytes
    keys.add(new Pair<byte[], byte[]>(
        entry.getFirst().toByteArray(), entry.getSecond().toByteArray()));
    ++idx;
  }
  return new FuzzyRowFilter(keys);
}
示例3: write
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* Write out this instance on the passed in <code>out</code> stream.
* We write it as a protobuf.
* @param out
* @throws IOException
* @see #read(DataInputStream)
*/
/**
 * Write out this instance on the passed in <code>out</code> stream.
 * We write it as a protobuf: a PB_MAGIC preamble followed by a delimited
 * FileInfoProto message.
 * @param out stream to serialize into
 * @throws IOException if the underlying stream write fails
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder fileInfo = HFileProtos.FileInfoProto.newBuilder();
  // Each map entry becomes one BytesBytesPair; wrap avoids copying the byte arrays.
  for (Map.Entry<byte[], byte[]> entry : this.map.entrySet()) {
    fileInfo.addMapEntry(HBaseProtos.BytesBytesPair.newBuilder()
        .setFirst(ByteStringer.wrap(entry.getKey()))
        .setSecond(ByteStringer.wrap(entry.getValue()))
        .build());
  }
  // Magic prefix lets readers distinguish pb-encoded file info from legacy formats.
  out.write(ProtobufUtil.PB_MAGIC);
  fileInfo.build().writeDelimitedTo(out);
}
示例4: parsePB
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* Fill our map with content of the pb we read off disk
* @param fip protobuf message to read
*/
/**
 * Fill our map with content of the pb we read off disk.
 * Any previous contents of the map are discarded first.
 * @param fip protobuf message to read
 */
void parsePB(final HFileProtos.FileInfoProto fip) {
  this.map.clear();
  for (BytesBytesPair entry : fip.getMapEntryList()) {
    byte[] key = entry.getFirst().toByteArray();
    byte[] value = entry.getSecond().toByteArray();
    this.map.put(key, value);
  }
}
示例5: toByteArray
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* @return The filter serialized using pb
*/
/**
 * Serialize this filter as a protobuf FuzzyRowFilter message.
 * @return The filter serialized using pb
 */
public byte[] toByteArray() {
  FilterProtos.FuzzyRowFilter.Builder message = FilterProtos.FuzzyRowFilter.newBuilder();
  // Each (key, meta) pair is emitted as one BytesBytesPair entry.
  for (Pair<byte[], byte[]> pair : fuzzyKeysData) {
    message.addFuzzyKeysData(BytesBytesPair.newBuilder()
        .setFirst(ByteStringer.wrap(pair.getFirst()))
        .setSecond(ByteStringer.wrap(pair.getSecond())));
  }
  return message.build().toByteArray();
}
示例6: extractFuzzyRowFilterPairs
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
 * Pull the (key, mask) pairs out of a {@link FuzzyRowFilter} by round-tripping
 * it through its protobuf serialization, since the filter does not expose them.
 * @param filter the fuzzy row filter to inspect
 * @return the filter's fuzzy key data as raw byte-array pairs
 * @throws IOException if the filter's serialized form fails to parse
 */
private static List<Pair<byte[], byte[]>> extractFuzzyRowFilterPairs(FuzzyRowFilter filter)
    throws IOException {
  // TODO: Change FuzzyRowFilter to expose fuzzyKeysData.
  FilterProtos.FuzzyRowFilter proto =
      FilterProtos.FuzzyRowFilter.parseFrom(filter.toByteArray());
  List<Pair<byte[], byte[]>> pairs = new ArrayList<>(proto.getFuzzyKeysDataCount());
  for (BytesBytesPair entry : proto.getFuzzyKeysDataList()) {
    byte[] first = entry.getFirst().toByteArray();
    byte[] second = entry.getSecond().toByteArray();
    pairs.add(new Pair<>(first, second));
  }
  return pairs;
}
示例7: write
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* Write out this instance on the passed in <code>out</code> stream.
* We write it as a protobuf.
* @param out
* @throws IOException
* @see #read(DataInputStream)
*/
/**
 * Write out this instance on the passed in <code>out</code> stream.
 * Serialized as PB_MAGIC followed by a delimited FileInfoProto protobuf.
 * @param out stream to write to
 * @throws IOException on stream failure
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder infoBuilder = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte [], byte[]> kv: this.map.entrySet()) {
    // Wrap (no copy) each key/value into a BytesBytesPair map entry.
    HBaseProtos.BytesBytesPair.Builder pair = HBaseProtos.BytesBytesPair.newBuilder();
    pair.setFirst(ByteStringer.wrap(kv.getKey()));
    pair.setSecond(ByteStringer.wrap(kv.getValue()));
    infoBuilder.addMapEntry(pair.build());
  }
  // Magic preamble identifies the pb encoding to readers.
  out.write(ProtobufUtil.PB_MAGIC);
  infoBuilder.build().writeDelimitedTo(out);
}
示例8: parsePB
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* Fill our map with content of the pb we read off disk
* @param fip protobuf message to read
*/
/**
 * Fill our map with content of the pb we read off disk; replaces any
 * existing contents.
 * @param fip protobuf message to read
 */
void parsePB(final HFileProtos.FileInfoProto fip) {
  this.map.clear();
  for (BytesBytesPair mapEntry : fip.getMapEntryList()) {
    this.map.put(
        mapEntry.getFirst().toByteArray(),
        mapEntry.getSecond().toByteArray());
  }
}
示例9: toByteArray
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* @return The filter serialized using pb
*/
/**
 * Serialize this filter via its protobuf representation.
 * @return The filter serialized using pb
 */
public byte [] toByteArray() {
  FilterProtos.FuzzyRowFilter.Builder out = FilterProtos.FuzzyRowFilter.newBuilder();
  for (Pair<byte[], byte[]> keyAndMask : fuzzyKeysData) {
    // first = fuzzy key, second = meta/mask; wrap avoids copying the arrays.
    BytesBytesPair.Builder pair = BytesBytesPair.newBuilder()
        .setFirst(ByteStringer.wrap(keyAndMask.getFirst()))
        .setSecond(ByteStringer.wrap(keyAndMask.getSecond()));
    out.addFuzzyKeysData(pair);
  }
  return out.build().toByteArray();
}
示例10: write
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* Write out this instance on the passed in <code>out</code> stream.
* We write it as a protobuf.
* @param out
* @throws IOException
* @see #read(DataInputStream)
*/
/**
 * Write out this instance on the passed in <code>out</code> stream.
 * Format is PB_MAGIC followed by a length-delimited FileInfoProto message.
 * @param out destination stream
 * @throws IOException if writing to the stream fails
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder proto = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte [], byte[]> kv: this.map.entrySet()) {
    // Zero-copy wrap of key/value bytes into the pair message.
    proto.addMapEntry(HBaseProtos.BytesBytesPair.newBuilder()
        .setFirst(HBaseZeroCopyByteString.wrap(kv.getKey()))
        .setSecond(HBaseZeroCopyByteString.wrap(kv.getValue()))
        .build());
  }
  out.write(ProtobufUtil.PB_MAGIC);
  proto.build().writeDelimitedTo(out);
}
示例11: toByteArray
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* @return The filter serialized using pb
*/
/**
 * Serialize this filter's fuzzy key data as a pb FuzzyRowFilter message.
 * @return The filter serialized using pb
 */
public byte [] toByteArray() {
  FilterProtos.FuzzyRowFilter.Builder message = FilterProtos.FuzzyRowFilter.newBuilder();
  for (Pair<byte[], byte[]> entry : fuzzyKeysData) {
    // Zero-copy wrap of the key (first) and mask (second) arrays.
    message.addFuzzyKeysData(BytesBytesPair.newBuilder()
        .setFirst(HBaseZeroCopyByteString.wrap(entry.getFirst()))
        .setSecond(HBaseZeroCopyByteString.wrap(entry.getSecond())));
  }
  return message.build().toByteArray();
}
示例12: write
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* Write out this instance on the passed in <code>out</code> stream.
* We write it as a protobuf.
* @param out
* @throws IOException
* @see #read(DataInputStream)
*/
/**
 * Write out this instance on the passed in <code>out</code> stream.
 * Emits the PB_MAGIC marker then the FileInfoProto as a delimited protobuf.
 * @param out stream to serialize into
 * @throws IOException when the stream write fails
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder info = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte [], byte[]> mapping: this.map.entrySet()) {
    // Literal zero-copy wrap: no defensive copy of the underlying arrays.
    info.addMapEntry(HBaseProtos.BytesBytesPair.newBuilder()
        .setFirst(ZeroCopyLiteralByteString.wrap(mapping.getKey()))
        .setSecond(ZeroCopyLiteralByteString.wrap(mapping.getValue()))
        .build());
  }
  out.write(ProtobufUtil.PB_MAGIC);
  info.build().writeDelimitedTo(out);
}
示例13: toByteArray
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* @return The filter serialized using pb
*/
/**
 * Encode this filter's fuzzy key pairs into pb wire format.
 * @return The filter serialized using pb
 */
public byte [] toByteArray() {
  FilterProtos.FuzzyRowFilter.Builder serialized = FilterProtos.FuzzyRowFilter.newBuilder();
  for (Pair<byte[], byte[]> keyData : fuzzyKeysData) {
    BytesBytesPair.Builder pairBuilder = BytesBytesPair.newBuilder();
    // Zero-copy wrap of key (first) and mask (second) bytes.
    pairBuilder.setFirst(ZeroCopyLiteralByteString.wrap(keyData.getFirst()));
    pairBuilder.setSecond(ZeroCopyLiteralByteString.wrap(keyData.getSecond()));
    serialized.addFuzzyKeysData(pairBuilder);
  }
  return serialized.build().toByteArray();
}
示例14: convert
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* @return Convert the current {@link HTableDescriptor} into a pb TableSchema instance.
*/
/**
 * Convert the current {@link HTableDescriptor} into a pb TableSchema instance:
 * table name, key/value attributes, and the column family schemas.
 * @return the pb TableSchema for this descriptor
 */
public TableSchema convert() {
  TableSchema.Builder schema = TableSchema.newBuilder();
  schema.setName(ByteString.copyFrom(getName()));
  // Every descriptor value becomes an attribute pair; copyFrom snapshots the bytes.
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> attr : this.values.entrySet()) {
    schema.addAttributes(BytesBytesPair.newBuilder()
        .setFirst(ByteString.copyFrom(attr.getKey().get()))
        .setSecond(ByteString.copyFrom(attr.getValue().get()))
        .build());
  }
  for (HColumnDescriptor family : getColumnFamilies()) {
    schema.addColumnFamilies(family.convert());
  }
  return schema.build();
}
示例15: convert
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; //导入依赖的package包/类
/**
* @param cfs
* @return An {@link HColumnDescriptor} made from the passed in <code>cfs</code>
*/
/**
 * Build an {@link HColumnDescriptor} from a pb ColumnFamilySchema.
 * @param cfs column family schema message read off the wire
 * @return An {@link HColumnDescriptor} made from the passed in <code>cfs</code>
 */
public static HColumnDescriptor convert(final ColumnFamilySchema cfs) {
  // Use the empty constructor so we preserve the initial values set on construction for things
  // like maxVersion. Otherwise, we pick up wrong values on deserialization which makes for
  // unrelated-looking test failures that are hard to trace back to here.
  HColumnDescriptor descriptor = new HColumnDescriptor();
  descriptor.name = cfs.getName().toByteArray();
  for (BytesBytesPair attribute : cfs.getAttributesList()) {
    byte[] attrKey = attribute.getFirst().toByteArray();
    byte[] attrValue = attribute.getSecond().toByteArray();
    descriptor.setValue(attrKey, attrValue);
  }
  return descriptor;
}