This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector. If you are wondering what ShortObjectInspector is for or how it is used in practice, the curated code samples below should help.
The ShortObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector.primitive package. Nine code examples of the class are shown below, ordered by popularity.
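Before the examples, here is a minimal sketch of the class's basic contract: ShortObjectInspector is the ObjectInspector for Hive SMALLINT values and exposes the underlying short via get(Object). The snippet assumes the stock inspectors provided by PrimitiveObjectInspectorFactory (javaShortObjectInspector and writableShortObjectInspector); it is an illustrative sketch, not one of the nine examples.

import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;

public class ShortObjectInspectorSketch {
  public static void main(String[] args) {
    // Inspector for rows whose SMALLINT fields are plain java.lang.Short objects.
    ShortObjectInspector javaOI = PrimitiveObjectInspectorFactory.javaShortObjectInspector;
    short fromJava = javaOI.get(Short.valueOf((short) 42));

    // Inspector for rows whose SMALLINT fields are Hive ShortWritable objects.
    ShortObjectInspector writableOI = PrimitiveObjectInspectorFactory.writableShortObjectInspector;
    short fromWritable = writableOI.get(new ShortWritable((short) 7));

    System.out.println(fromJava + " " + fromWritable); // prints "42 7"
  }
}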
Example 1: get
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
public static IHivePrimitiveConverter get( final ObjectInspector objectInspector ){
  switch( objectInspector.getCategory() ){
    case PRIMITIVE:
      PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector)objectInspector;
      switch( primitiveInspector.getPrimitiveCategory() ){
        case BINARY:
          return new HiveBytesPrimitiveConverter( (BinaryObjectInspector)objectInspector );
        case BOOLEAN:
          return new HiveBooleanPrimitiveConverter( (BooleanObjectInspector)objectInspector );
        case BYTE:
          return new HiveBytePrimitiveConverter( (ByteObjectInspector)objectInspector );
        case DOUBLE:
          return new HiveDoublePrimitiveConverter( (DoubleObjectInspector)objectInspector );
        case FLOAT:
          return new HiveFloatPrimitiveConverter( (FloatObjectInspector)objectInspector );
        case INT:
          return new HiveIntegerPrimitiveConverter( (IntObjectInspector)objectInspector );
        case LONG:
          return new HiveLongPrimitiveConverter( (LongObjectInspector)objectInspector );
        case SHORT:
          return new HiveShortPrimitiveConverter( (ShortObjectInspector)objectInspector );
        case STRING:
          return new HiveStringPrimitiveConverter( (StringObjectInspector)objectInspector );
        case DATE:
        case TIMESTAMP:
        case VOID:
        case UNKNOWN:
        default:
          return new HiveDefaultPrimitiveConverter();
      }
    default :
      return new HiveDefaultPrimitiveConverter();
  }
}
Example 2: createPrimitive
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
private Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
    throws SerDeException {
  if (obj == null) {
    return null;
  }
  switch (inspector.getPrimitiveCategory()) {
    case VOID:
      return null;
    case BOOLEAN:
      return new BooleanWritable(((BooleanObjectInspector) inspector).get(obj) ? Boolean.TRUE : Boolean.FALSE);
    case BYTE:
      return new ByteWritable((byte) ((ByteObjectInspector) inspector).get(obj));
    case DOUBLE:
      return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
    case FLOAT:
      return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
    case INT:
      return new IntWritable(((IntObjectInspector) inspector).get(obj));
    case LONG:
      return new LongWritable(((LongObjectInspector) inspector).get(obj));
    case SHORT:
      return new ShortWritable((short) ((ShortObjectInspector) inspector).get(obj));
    case STRING:
      return new BinaryWritable(Binary.fromString(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj)));
    default:
      throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
  }
}
Example 3: write
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
@Override
void write(Object obj) throws IOException {
  if (obj != null) {
    switch (inspector.getCategory()) {
      case PRIMITIVE:
        switch (((PrimitiveObjectInspector) inspector).getPrimitiveCategory()) {
          case SHORT:
            buffer[bufferIndex++] = new Long(((ShortObjectInspector) inspector).get(obj));
            setRawDataSize(RawDatasizeConst.SHORT_SIZE);
            break;
          case INT:
            buffer[bufferIndex++] = new Long(((IntObjectInspector) inspector).get(obj));
            setRawDataSize(RawDatasizeConst.INT_SIZE);
            break;
          case LONG:
            buffer[bufferIndex++] = new Long(((LongObjectInspector) inspector).get(obj));
            setRawDataSize(RawDatasizeConst.LONG_SIZE);
            break;
          default:
            throw new IllegalArgumentException("Bad Category: Dictionary Encoding not available for " +
                ((PrimitiveObjectInspector) inspector).getPrimitiveCategory());
        }
        break;
      default:
        throw new IllegalArgumentException("Bad Category: DictionaryEncoding not available for " + inspector.getCategory());
    }
    // Increment the total memory for the buffered long
    memoryEstimate.incrementTotalMemory(RawDatasizeConst.LONG_SIZE);
    bufferedBytes += RawDatasizeConst.LONG_SIZE;
  } else {
    buffer[bufferIndex++] = null;
    setRawDataSize(RawDatasizeConst.NULL_SIZE);
  }
  if (bufferIndex == buffer.length) {
    flush();
  }
}
Example 4: setSafeValue
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
  final int value = (short) ((ShortObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue);
  ((NullableIntVector) outputVV).getMutator().setSafe(outputIndex, value);
}
Example 5: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null || arguments[0].get() == null) {
    return null;
  }
  Object input = arguments[0].get();
  switch(inputType) {
    case BOOLEAN:
      return ((BooleanObjectInspector)argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
    case BYTE:
      return new Byte(((ByteObjectInspector)argumentOI).get(input));
    case SHORT:
      return new Short(((ShortObjectInspector)argumentOI).get(input));
    case INT:
      return new Integer(((IntObjectInspector)argumentOI).get(input));
    case LONG:
      return new Long(((LongObjectInspector)argumentOI).get(input));
    case FLOAT:
      return new Float(((FloatObjectInspector)argumentOI).get(input));
    case DOUBLE:
      return new Double(((DoubleObjectInspector)argumentOI).get(input));
    case STRING:
      return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector)argumentOI);
    case BINARY:
      return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
    case VARCHAR:
      return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector)argumentOI);
    case DATE:
      return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
    case TIMESTAMP:
      return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
    case DECIMAL:
      // return type is a HiveVarchar
      HiveDecimal decimalValue =
          PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
      return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
  }
  throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
Example 6: HiveShortPrimitiveConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
public HiveShortPrimitiveConverter( final ShortObjectInspector inspector ){
  this.inspector = inspector;
}
Example 7: OrcShortPrimitiveConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
public OrcShortPrimitiveConverter( final ShortObjectInspector inspector ){
  this.inspector = inspector;
}
Example 8: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null || arguments[0].get() == null) {
    return null;
  }
  Object input = arguments[0].get();
  switch(inputType) {
    case BOOLEAN:
      return ((BooleanObjectInspector)argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
    case BYTE:
      return new Byte(((ByteObjectInspector)argumentOI).get(input));
    case SHORT:
      return new Short(((ShortObjectInspector)argumentOI).get(input));
    case INT:
      return new Integer(((IntObjectInspector)argumentOI).get(input));
    case LONG:
      return new Long(((LongObjectInspector)argumentOI).get(input));
    case FLOAT:
      return new Float(((FloatObjectInspector)argumentOI).get(input));
    case DOUBLE:
      return new Double(((DoubleObjectInspector)argumentOI).get(input));
    case STRING:
      return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector)argumentOI);
    case BINARY:
      return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
    case VARCHAR:
      if (outputType == PrimitiveCategory.CHAR) {
        HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
        return new HiveChar(hiveVarchar.getValue(), HiveChar.MAX_CHAR_LENGTH);
      } else {
        return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector)argumentOI);
      }
    case CHAR:
      return PrimitiveObjectInspectorUtils.getHiveChar(input, (HiveCharObjectInspector) argumentOI);
    case DATE:
      return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
    case TIMESTAMP:
      return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
    case DECIMAL:
      // return type is a HiveVarchar
      HiveDecimal decimalValue =
          PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
      return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
  }
  throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
Example 9: serializePrimitive
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector; // import the required package/class
private static void serializePrimitive(BlockBuilder builder, Object object, PrimitiveObjectInspector inspector)
{
  requireNonNull(builder, "parent builder is null");
  if (object == null) {
    builder.appendNull();
    return;
  }
  switch (inspector.getPrimitiveCategory()) {
    case BOOLEAN:
      BooleanType.BOOLEAN.writeBoolean(builder, ((BooleanObjectInspector) inspector).get(object));
      return;
    case BYTE:
      BigintType.BIGINT.writeLong(builder, ((ByteObjectInspector) inspector).get(object));
      return;
    case SHORT:
      BigintType.BIGINT.writeLong(builder, ((ShortObjectInspector) inspector).get(object));
      return;
    case INT:
      BigintType.BIGINT.writeLong(builder, ((IntObjectInspector) inspector).get(object));
      return;
    case LONG:
      BigintType.BIGINT.writeLong(builder, ((LongObjectInspector) inspector).get(object));
      return;
    case FLOAT:
      DoubleType.DOUBLE.writeDouble(builder, ((FloatObjectInspector) inspector).get(object));
      return;
    case DOUBLE:
      DoubleType.DOUBLE.writeDouble(builder, ((DoubleObjectInspector) inspector).get(object));
      return;
    case STRING:
      VarcharType.VARCHAR.writeSlice(builder, Slices.utf8Slice(((StringObjectInspector) inspector).getPrimitiveJavaObject(object)));
      return;
    case DATE:
      DateType.DATE.writeLong(builder, formatDateAsLong(object, (DateObjectInspector) inspector));
      return;
    case TIMESTAMP:
      TimestampType.TIMESTAMP.writeLong(builder, formatTimestampAsLong(object, (TimestampObjectInspector) inspector));
      return;
    case BINARY:
      VARBINARY.writeSlice(builder, Slices.wrappedBuffer(((BinaryObjectInspector) inspector).getPrimitiveJavaObject(object)));
      return;
  }
  throw new RuntimeException("Unknown primitive type: " + inspector.getPrimitiveCategory());
}