This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector. If you are wondering what BinaryObjectInspector is for and how to use it, the curated examples below should help.
The BinaryObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector.primitive package. 14 code examples of the class are shown below, sorted by popularity by default.
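Before diving into the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the two core accessors a BinaryObjectInspector provides; the standard inspector instance comes from PrimitiveObjectInspectorFactory:

import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BytesWritable;

public class BinaryOIDemo {
    public static void main(String[] args) {
        // The standard inspector for binary values backed by BytesWritable.
        BinaryObjectInspector oi = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
        Object value = new BytesWritable(new byte[] { 1, 2, 3 });
        // Writable view: returns the underlying BytesWritable.
        BytesWritable writable = oi.getPrimitiveWritableObject(value);
        // Java view: returns the value as a byte[].
        byte[] bytes = oi.getPrimitiveJavaObject(value);
        System.out.println(writable.getLength() + " bytes, first = " + bytes[0]);
    }
}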
Example 1: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
/**
 *
 * Initialize the Hive UDF and create the object inspectors. It requires exactly one argument, and the ObjectInspector of arguments[0] must be of type org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector
 *
 * @param arguments array of length 1 containing one WritableBinaryObjectInspector
 *
 * @return ObjectInspector that is able to parse the result of the evaluate method of the UDF (a standard list object inspector over strings)
 *
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentException in case the first argument is not a WritableBinaryObjectInspector
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in case the number of arguments is != 1
 *
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null || arguments.length != 1) {
        throw new UDFArgumentLengthException("namecoinExtractField only takes one argument: Binary ");
    }
    if (!(arguments[0] instanceof BinaryObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Binary containing a Namecoin script");
    }
    // keep the inspector so evaluate() can read the raw script bytes
    this.wboi = (BinaryObjectInspector) arguments[0];
    // the UDF returns the extracted fields as a list of strings
    return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
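The matching evaluate method is not reproduced on this page; the following is only a hypothetical skeleton consistent with the initialize above (the wboi field is the one stored there; the actual script parsing is elided):

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments == null || arguments.length != 1 || arguments[0].get() == null) {
        return null;
    }
    // Read the raw script bytes through the inspector stored in initialize().
    BytesWritable script = wboi.getPrimitiveWritableObject(arguments[0].get());
    List<Text> result = new ArrayList<>();
    // ... parse the Namecoin script in `script` and add the extracted fields ...
    return result; // matches the standard list object inspector over writable strings
}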
Example 2: get
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
public static IHivePrimitiveConverter get( final ObjectInspector objectInspector ){
    switch( objectInspector.getCategory() ){
        case PRIMITIVE:
            PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector)objectInspector;
            switch( primitiveInspector.getPrimitiveCategory() ){
                case BINARY:
                    return new HiveBytesPrimitiveConverter( (BinaryObjectInspector)objectInspector );
                case BOOLEAN:
                    return new HiveBooleanPrimitiveConverter( (BooleanObjectInspector)objectInspector );
                case BYTE:
                    return new HiveBytePrimitiveConverter( (ByteObjectInspector)objectInspector );
                case DOUBLE:
                    return new HiveDoublePrimitiveConverter( (DoubleObjectInspector)objectInspector );
                case FLOAT:
                    return new HiveFloatPrimitiveConverter( (FloatObjectInspector)objectInspector );
                case INT:
                    return new HiveIntegerPrimitiveConverter( (IntObjectInspector)objectInspector );
                case LONG:
                    return new HiveLongPrimitiveConverter( (LongObjectInspector)objectInspector );
                case SHORT:
                    return new HiveShortPrimitiveConverter( (ShortObjectInspector)objectInspector );
                case STRING:
                    return new HiveStringPrimitiveConverter( (StringObjectInspector)objectInspector );
                case DATE:
                case TIMESTAMP:
                case VOID:
                case UNKNOWN:
                default:
                    return new HiveDefaultPrimitiveConverter();
            }
        default:
            return new HiveDefaultPrimitiveConverter();
    }
}
Example 3: getByteBuffer
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
public ByteBuffer getByteBuffer(Object data, ObjectInspector objectInspector) {
    if (objectInspector instanceof BinaryObjectInspector) {
        BytesWritable bw = ((BinaryObjectInspector) objectInspector).getPrimitiveWritableObject(data);
        // Copy only the valid bytes: the backing array of a BytesWritable may be
        // larger than getLength().
        byte[] result = new byte[bw.getLength()];
        System.arraycopy(bw.getBytes(), 0, result, 0, bw.getLength());
        return ByteBuffer.wrap(result);
    } else {
        throw new RuntimeException("Unknown object inspector type: " + objectInspector.getCategory()
                + " Type name: " + objectInspector.getTypeName());
    }
}
Example 4: asBinaryOI
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
@Nonnull
public static BinaryObjectInspector asBinaryOI(@Nonnull final ObjectInspector argOI)
        throws UDFArgumentException {
    if (!BINARY_TYPE_NAME.equals(argOI.getTypeName())) {
        throw new UDFArgumentException("Argument type must be Binary: " + argOI.getTypeName());
    }
    return (BinaryObjectInspector) argOI;
}
Example 5: init
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
@Override
public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
    super.init(m, parameters);
    ObjectInspector ret;
    if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
        // Original input: either an already-built HLL struct to merge, or raw values to hash.
        if (parameters[0] instanceof StructObjectInspector) {
            hllOI = (StructObjectInspector) parameters[0];
            hll_mode = HLLMode.MERGING;
        } else {
            primitiveOIs = Lists.newArrayList();
            for (ObjectInspector param : parameters) {
                primitiveOIs.add((PrimitiveObjectInspector) param);
            }
            baos = new ByteArrayOutputStream();
            dos = new DataOutputStream(baos);
            hashFunc = Hashing.murmur3_128(SEED);
            hll_mode = HLLMode.HASHING;
        }
    } else {
        // PARTIAL2 / FINAL: the input is the serialized intermediate sketch.
        intermediateOI = (BinaryObjectInspector) parameters[0];
    }
    if (m == Mode.PARTIAL1 || m == Mode.PARTIAL2) {
        // Partial results travel between tasks as a binary blob.
        ret = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    } else {
        // Final result: a struct of (cardinality, signature).
        List<String> field_names = Lists.newArrayList(CARDINALITY, SIGNATURE);
        List<ObjectInspector> field_ois = Lists.newArrayList();
        field_ois.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
        field_ois.add(PrimitiveObjectInspectorFactory.writableBinaryObjectInspector);
        ret = ObjectInspectorFactory.getStandardStructObjectInspector(field_names, field_ois);
    }
    return ret;
}
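For context on the mode checks above: in Hive's GenericUDAFEvaluator lifecycle, PARTIAL1 maps raw input to a partial aggregation, PARTIAL2 merges partials into partials, FINAL merges partials into the final result, and COMPLETE maps raw input directly to the final result. That is why the BinaryObjectInspector is only installed in the else branch: there the input is the serialized intermediate sketch emitted by a PARTIAL phase.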
Example 6: iterate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
@Override
public void iterate(AggregationBuffer agg, Object[] args)
        throws HiveException {
    if (args[0] == null) {
        return;
    }
    final BytesWritable bw = ((BinaryObjectInspector) inputOI).getPrimitiveWritableObject(args[0]);
    HyperLogLog hll = ((HyperLogLogBuffer) agg).hll;
    merge(hll, bw);
}
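The merge helper called above is not shown on this page. Here is a heavily hedged sketch of what it plausibly does, assuming the stream-lib (com.clearspring.analytics) HyperLogLog API; the actual project may deserialize differently:

// Hypothetical helper, assuming stream-lib's HyperLogLog; not from the source project.
private static void merge(HyperLogLog hll, BytesWritable bw) throws HiveException {
    try {
        // Copy only the valid bytes of the serialized sketch.
        byte[] bytes = new byte[bw.getLength()];
        System.arraycopy(bw.getBytes(), 0, bytes, 0, bw.getLength());
        // Deserialize the incoming sketch and fold it into the accumulator.
        hll.addAll(HyperLogLog.Builder.build(bytes));
    } catch (Exception e) {
        throw new HiveException(e);
    }
}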
Example 7: write
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
@Override
void write(Object obj) throws IOException {
    long rawDataSize = 0;
    if (obj != null) {
        BytesWritable val =
                ((BinaryObjectInspector) inspector).getPrimitiveWritableObject(obj);
        stream.write(val.getBytes(), 0, val.getLength());
        length.write(val.getLength());
        // Raw data size is the length of the BytesWritable, i.e. the number of bytes
        rawDataSize = val.getLength();
    }
    super.write(obj, rawDataSize);
}
Example 8: TestCopyBinary
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
/**
 * Tests that, after copying a lazy binary object, calling materialize on the original and on the
 * copy doesn't advance the tree reader twice.
 * @throws Exception
 */
@Test
public void TestCopyBinary() throws Exception {
    ReaderWriterProfiler.setProfilerOptions(null);
    OrcLazyBinary lazyBinary = new OrcLazyBinary(new LazyBinaryTreeReader(0, 0) {
        int nextCalls = 0;

        @Override
        public Object next(Object previous) throws IOException {
            if (nextCalls == 0) {
                nextCalls++;
                return new BytesWritable("a".getBytes());
            }
            // A second call means the reader was advanced twice.
            throw new IOException("next should only be called once");
        }

        @Override
        protected boolean seekToRow(long currentRow) throws IOException {
            return true;
        }
    });

    BinaryObjectInspector binaryOI = (BinaryObjectInspector)
            OrcLazyObjectInspectorUtils.createLazyObjectInspector(TypeInfoFactory.binaryTypeInfo);
    OrcLazyBinary lazyBinary2 = (OrcLazyBinary) binaryOI.copyObject(lazyBinary);
    Assert.assertEquals("a", new String(((BytesWritable) lazyBinary.materialize()).getBytes()));
    Assert.assertEquals("a", new String(((BytesWritable) lazyBinary2.materialize()).getBytes()));
}
Example 9: setSafeValue
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
    final byte[] value = ((BinaryObjectInspector) oi).getPrimitiveJavaObject(hiveFieldValue);
    ((NullableVarBinaryVector) outputVV).getMutator().setSafe(outputIndex, value, 0, value.length);
}
Example 10: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0] == null || arguments[0].get() == null) {
        return null;
    }
    Object input = arguments[0].get();
    switch (inputType) {
        case BOOLEAN:
            return ((BooleanObjectInspector) argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
        case BYTE:
            return new Byte(((ByteObjectInspector) argumentOI).get(input));
        case SHORT:
            return new Short(((ShortObjectInspector) argumentOI).get(input));
        case INT:
            return new Integer(((IntObjectInspector) argumentOI).get(input));
        case LONG:
            return new Long(((LongObjectInspector) argumentOI).get(input));
        case FLOAT:
            return new Float(((FloatObjectInspector) argumentOI).get(input));
        case DOUBLE:
            return new Double(((DoubleObjectInspector) argumentOI).get(input));
        case STRING:
            return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector) argumentOI);
        case BINARY:
            return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
        case VARCHAR:
            return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
        case DATE:
            return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
        case TIMESTAMP:
            return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
        case DECIMAL:
            // the return type is a HiveVarchar
            HiveDecimal decimalValue =
                    PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
            return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
    }
    throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
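One caveat in the BINARY branch above: BytesWritable.getBytes() returns the backing buffer, which may be longer than the valid data indicated by getLength(), so stale padding bytes can leak into the result. A hedged alternative (BytesWritable.copyBytes(), available in current Hadoop releases, returns a trimmed copy):

case BINARY:
    // copyBytes() trims the buffer to getLength(), unlike getBytes().
    return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).copyBytes();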
Example 11: HiveBytesPrimitiveConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
public HiveBytesPrimitiveConverter( final BinaryObjectInspector inspector ){
    this.inspector = inspector;
}
Example 12: OrcBytesPrimitiveConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
public OrcBytesPrimitiveConverter( final BinaryObjectInspector inspector ){
    this.inspector = inspector;
}
Example 13: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0] == null || arguments[0].get() == null) {
        return null;
    }
    Object input = arguments[0].get();
    switch (inputType) {
        case BOOLEAN:
            return ((BooleanObjectInspector) argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
        case BYTE:
            return new Byte(((ByteObjectInspector) argumentOI).get(input));
        case SHORT:
            return new Short(((ShortObjectInspector) argumentOI).get(input));
        case INT:
            return new Integer(((IntObjectInspector) argumentOI).get(input));
        case LONG:
            return new Long(((LongObjectInspector) argumentOI).get(input));
        case FLOAT:
            return new Float(((FloatObjectInspector) argumentOI).get(input));
        case DOUBLE:
            return new Double(((DoubleObjectInspector) argumentOI).get(input));
        case STRING:
            return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector) argumentOI);
        case BINARY:
            return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
        case VARCHAR:
            if (outputType == PrimitiveCategory.CHAR) {
                HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
                return new HiveChar(hiveVarchar.getValue(), HiveChar.MAX_CHAR_LENGTH);
            } else {
                return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
            }
        case CHAR:
            return PrimitiveObjectInspectorUtils.getHiveChar(input, (HiveCharObjectInspector) argumentOI);
        case DATE:
            return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
        case TIMESTAMP:
            return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
        case DECIMAL:
            // the return type is a HiveVarchar
            HiveDecimal decimalValue =
                    PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
            return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
    }
    throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
Example 14: serializePrimitive
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector; // import the required package/class
private static void serializePrimitive(BlockBuilder builder, Object object, PrimitiveObjectInspector inspector)
{
    requireNonNull(builder, "parent builder is null");

    if (object == null) {
        builder.appendNull();
        return;
    }

    switch (inspector.getPrimitiveCategory()) {
        case BOOLEAN:
            BooleanType.BOOLEAN.writeBoolean(builder, ((BooleanObjectInspector) inspector).get(object));
            return;
        case BYTE:
            BigintType.BIGINT.writeLong(builder, ((ByteObjectInspector) inspector).get(object));
            return;
        case SHORT:
            BigintType.BIGINT.writeLong(builder, ((ShortObjectInspector) inspector).get(object));
            return;
        case INT:
            BigintType.BIGINT.writeLong(builder, ((IntObjectInspector) inspector).get(object));
            return;
        case LONG:
            BigintType.BIGINT.writeLong(builder, ((LongObjectInspector) inspector).get(object));
            return;
        case FLOAT:
            DoubleType.DOUBLE.writeDouble(builder, ((FloatObjectInspector) inspector).get(object));
            return;
        case DOUBLE:
            DoubleType.DOUBLE.writeDouble(builder, ((DoubleObjectInspector) inspector).get(object));
            return;
        case STRING:
            VarcharType.VARCHAR.writeSlice(builder, Slices.utf8Slice(((StringObjectInspector) inspector).getPrimitiveJavaObject(object)));
            return;
        case DATE:
            DateType.DATE.writeLong(builder, formatDateAsLong(object, (DateObjectInspector) inspector));
            return;
        case TIMESTAMP:
            TimestampType.TIMESTAMP.writeLong(builder, formatTimestampAsLong(object, (TimestampObjectInspector) inspector));
            return;
        case BINARY:
            VARBINARY.writeSlice(builder, Slices.wrappedBuffer(((BinaryObjectInspector) inspector).getPrimitiveJavaObject(object)));
            return;
    }
    throw new RuntimeException("Unknown primitive type: " + inspector.getPrimitiveCategory());
}