This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector. If you are unsure what PrimitiveObjectInspector is for or how to use it, the curated examples below may help.
PrimitiveObjectInspector belongs to the org.apache.hadoop.hive.serde2.objectinspector package. 15 code examples of the class are shown below, sorted by popularity by default.
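As background for the examples: a PrimitiveObjectInspector describes how Hive reads a primitive value out of the objects a SerDe or UDF receives, without committing to a concrete Java or Writable representation. The following standalone sketch (not taken from any of the examples below) shows the basic inspector operations that recur throughout this page:

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

public class PrimitiveOIDemo {
  public static void main(String[] args) {
    // Java-object inspector for int values, obtained from the standard factory
    PrimitiveObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    Object raw = Integer.valueOf(42);
    // getPrimitiveCategory() identifies which primitive type the inspector describes
    System.out.println(intOI.getPrimitiveCategory());                        // INT
    // extract the value through the inspector instead of casting the raw object directly
    System.out.println(intOI.getPrimitiveJavaObject(raw));                   // 42
    // the utility class converts between primitive categories via an inspector
    System.out.println(PrimitiveObjectInspectorUtils.getDouble(raw, intOI)); // 42.0
  }
}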
Example 1: getRangeExecuter
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public static IExpressionNode getRangeExecuter( boolean invert ,
    final PrimitiveObjectInspector minPrimitiveObjectInspector ,
    final PrimitiveObjectInspector maxPrimitiveObjectInspector ,
    final IExtractNode targetColumn ){
  switch( minPrimitiveObjectInspector.getPrimitiveCategory() ){
    case STRING:
      String minStr = ( (WritableConstantStringObjectInspector)minPrimitiveObjectInspector ).getWritableConstantValue().toString();
      String maxStr = ( (WritableConstantStringObjectInspector)maxPrimitiveObjectInspector ).getWritableConstantValue().toString();
      IFilter filter = new RangeStringCompareFilter( minStr , true , maxStr , true , invert );
      return new ExecuterNode( targetColumn , filter );
    case BYTE:
      return getNumberRangeExecuter( invert ,
          new ByteObj( ( (WritableConstantByteObjectInspector)minPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          new ByteObj( ( (WritableConstantByteObjectInspector)maxPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          targetColumn );
    case SHORT:
      return getNumberRangeExecuter( invert ,
          new ShortObj( ( (WritableConstantShortObjectInspector)minPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          new ShortObj( ( (WritableConstantShortObjectInspector)maxPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          targetColumn );
    case INT:
      return getNumberRangeExecuter( invert ,
          new IntegerObj( ( (WritableConstantIntObjectInspector)minPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          new IntegerObj( ( (WritableConstantIntObjectInspector)maxPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          targetColumn );
    case LONG:
      return getNumberRangeExecuter( invert ,
          new LongObj( ( (WritableConstantLongObjectInspector)minPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          new LongObj( ( (WritableConstantLongObjectInspector)maxPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          targetColumn );
    case FLOAT:
      return getNumberRangeExecuter( invert ,
          new FloatObj( ( (WritableConstantFloatObjectInspector)minPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          new FloatObj( ( (WritableConstantFloatObjectInspector)maxPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          targetColumn );
    case DOUBLE:
      return getNumberRangeExecuter( invert ,
          new DoubleObj( ( (WritableConstantDoubleObjectInspector)minPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          new DoubleObj( ( (WritableConstantDoubleObjectInspector)maxPrimitiveObjectInspector ).getWritableConstantValue().get() ) ,
          targetColumn );
    default:
      return null;
  }
}
Example 2: MDSMapObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public MDSMapObjectInspector( final MapTypeInfo typeInfo ){
  TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
  if( keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE && ( (PrimitiveTypeInfo)keyTypeInfo ).getPrimitiveCategory() == PrimitiveCategory.STRING ){
    keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }
  else{
    throw new RuntimeException( "Map key type is string only." );
  }
  valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo.getMapValueTypeInfo() );
  if( valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE ){
    getField = new PrimitiveGetField( (PrimitiveObjectInspector)valueObjectInspector );
  }
  else if( valueObjectInspector.getCategory() == ObjectInspector.Category.UNION ){
    getField = new UnionGetField( (UnionTypeInfo)( typeInfo.getMapValueTypeInfo() ) );
  }
  else{
    getField = new NestedGetField();
  }
}
Example 3: getDrillType
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public static MinorType getDrillType(ObjectInspector oi) {
  switch(oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector)oi;
      if (TYPE_HIVE2DRILL.containsKey(poi.getPrimitiveCategory())) {
        return TYPE_HIVE2DRILL.get(poi.getPrimitiveCategory());
      }
      throw new UnsupportedOperationException();
    }
    case MAP:
    case LIST:
    case STRUCT:
    default:
      throw new UnsupportedOperationException();
  }
}
Example 4: initialize
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException(String.format("%s needs 1 argument, got %d", udfName, arguments.length));
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE ||
      ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() != inputType) {
    String actual = arguments[0].getCategory() + (arguments[0].getCategory() == Category.PRIMITIVE ?
        "[" + ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() + "]" : "");
    throw new UDFArgumentException(
        String.format("%s only takes primitive type %s, got %s", udfName, inputType, actual));
  }
  argumentOI = arguments[0];
  return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(outputType);
}
Example 5: checkArgGroups
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public static void checkArgGroups(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, PrimitiveGrouping... grps)
    throws UDFArgumentTypeException {
  PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
  for (PrimitiveGrouping grp : grps) {
    if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType) == grp) {
      inputTypes[i] = inputType;
      return;
    }
  }
  // build error message
  StringBuilder sb = new StringBuilder();
  sb.append("_FUNC_ only takes ");
  sb.append(grps[0]);
  for (int j = 1; j < grps.length; j++) {
    sb.append(", ");
    sb.append(grps[j]);
  }
  sb.append(" types as ");
  sb.append(getArgOrder(i));
  sb.append(" argument, got ");
  sb.append(inputType);
  throw new UDFArgumentTypeException(i, sb.toString());
}
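This helper is normally called from a GenericUDF's initialize to validate one argument at a time against a set of PrimitiveGroupings from PrimitiveObjectInspectorUtils. A sketch of such a call site, written as a method that would sit in the same class as checkArgGroups above; the two-argument layout and method name are assumptions for illustration:

// hypothetical validation for a function taking (numeric, string) arguments;
// assumes this method lives in the same class as checkArgGroups above
static void checkMyArgs(ObjectInspector[] arguments) throws UDFArgumentTypeException {
  PrimitiveCategory[] inputTypes = new PrimitiveCategory[arguments.length];
  // argument 0 must belong to the numeric grouping (TINYINT .. DECIMAL)
  checkArgGroups(arguments, 0, inputTypes, PrimitiveGrouping.NUMERIC_GROUP);
  // argument 1 may be any string-group type (STRING, CHAR, VARCHAR)
  checkArgGroups(arguments, 1, inputTypes, PrimitiveGrouping.STRING_GROUP);
  // on success, inputTypes[i] holds the concrete PrimitiveCategory of each argument
}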
Example 6: obtainIntConverter
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public static void obtainIntConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  switch (inputType) {
    case BYTE:
    case SHORT:
    case INT:
    case VOID:
      break;
    default:
      throw new UDFArgumentTypeException(i, "_FUNC_ only takes INT/SHORT/BYTE types as "
          + getArgOrder(i) + " argument, got " + inputType);
  }
  Converter converter = ObjectInspectorConverters.getConverter(arguments[i],
      PrimitiveObjectInspectorFactory.writableIntObjectInspector);
  converters[i] = converter;
  inputTypes[i] = inputType;
}
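At evaluate time, the Converter stored by obtainIntConverter turns the argument's native representation into an IntWritable. A hypothetical evaluate-side fragment showing that read path (the converters array and argument index are assumptions carried over from the example):

// inside evaluate(DeferredObject[] arguments), assuming converters[] was filled by obtainIntConverter
Object raw = arguments[0].get();
if (raw == null) {
  return null;                                      // propagate SQL NULL
}
IntWritable asInt = (IntWritable) converters[0].convert(raw);
int value = asInt.get();                            // plain Java int, ready to use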
Example 7: obtainLongConverter
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public static void obtainLongConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  switch (inputType) {
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
      break;
    default:
      throw new UDFArgumentTypeException(i,
          "_FUNC_ only takes LONG/INT/SHORT/BYTE types as " + getArgOrder(i)
              + " argument, got " + inputType);
  }
  // convert to a writable long so LONG arguments are not truncated to int
  Converter converter = ObjectInspectorConverters.getConverter(arguments[i],
      PrimitiveObjectInspectorFactory.writableLongObjectInspector);
  converters[i] = converter;
  inputTypes[i] = inputType;
}
Example 8: obtainDateConverter
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public static void obtainDateConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  ObjectInspector outOi;
  switch (inputType) {
    case STRING:
    case VARCHAR:
    case CHAR:
      outOi = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
      break;
    case TIMESTAMP:
    case DATE:
    case VOID:
      outOi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
      break;
    default:
      throw new UDFArgumentTypeException(i,
          "_FUNC_ only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i)
              + " argument, got " + inputType);
  }
  converters[i] = ObjectInspectorConverters.getConverter(inOi, outOi);
  inputTypes[i] = inputType;
}
Example 9: obtainTimestampConverter
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public static void obtainTimestampConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  ObjectInspector outOi;
  switch (inputType) {
    case STRING:
    case VARCHAR:
    case CHAR:
    case TIMESTAMP:
    case DATE:
      break;
    default:
      throw new UDFArgumentTypeException(i,
          "_FUNC_ only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i)
              + " argument, got " + inputType);
  }
  outOi = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  converters[i] = ObjectInspectorConverters.getConverter(inOi, outOi);
  inputTypes[i] = inputType;
}
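The converter produced here always yields a writable timestamp, regardless of whether the argument arrived as a string, DATE, or TIMESTAMP. A hypothetical evaluate-side fragment; it assumes Hive 1.x/2.x, where the writable timestamp type is org.apache.hadoop.hive.serde2.io.TimestampWritable (Hive 3 renamed it to TimestampWritableV2):

// inside evaluate(DeferredObject[] arguments), after obtainTimestampConverter filled converters[0]
Object raw = arguments[0].get();
if (raw == null) {
  return null;
}
TimestampWritable tsw = (TimestampWritable) converters[0].convert(raw);
if (tsw == null) {
  return null;                              // e.g. a string argument that could not be parsed
}
java.sql.Timestamp ts = tsw.getTimestamp();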
Example 10: getMinorType
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
public static MinorType getMinorType(ObjectInspector oi) {
  switch(oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector)oi;
      if (TYPE_HIVE2MINOR.containsKey(poi.getPrimitiveCategory())) {
        return TYPE_HIVE2MINOR.get(poi.getPrimitiveCategory());
      }
      throw new UnsupportedOperationException();
    }
    case MAP:
    case LIST:
    case STRUCT:
    default:
      throw new UnsupportedOperationException();
  }
}
Example 11: isNumericObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
private static boolean isNumericObjectInspector(ObjectInspector oi, boolean constant) {
  Category c = oi.getCategory();
  if (c != Category.PRIMITIVE) {
    return false;
  }
  PrimitiveCategory pc = ((PrimitiveObjectInspector) oi).getPrimitiveCategory();
  if (pc != PrimitiveCategory.INT && pc != PrimitiveCategory.LONG) {
    return false;
  }
  if (constant && !(oi instanceof ConstantObjectInspector)) {
    return false;
  }
  return true;
}
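When the constant flag is set, the caller usually goes on to read the literal value out of the ConstantObjectInspector. A hypothetical follow-up fragment showing that step (the argOIs array, index, and error message are assumptions):

// e.g. validating a literal "top-k" argument inside initialize()
if (!isNumericObjectInspector(argOIs[1], true)) {
  throw new UDFArgumentException("the second argument must be a constant INT or LONG");
}
Object constValue = ((ConstantObjectInspector) argOIs[1]).getWritableConstantValue();
int k = ((IntWritable) constValue).get();   // assumes an INT literal; a LONG literal would arrive as LongWritable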
Example 12: getPrimitiveType
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
private static Type getPrimitiveType(final ObjectInspector fieldInspector) {
  final PrimitiveCategory primitiveCategory = ((PrimitiveObjectInspector) fieldInspector)
      .getPrimitiveCategory();
  if (HiveTypeMapping.getHIVE_TO_CANONICAL().containsKey(primitiveCategory.name())) {
    return HiveTypeMapping.getHIVE_TO_CANONICAL().get(primitiveCategory.name());
  }
  switch (primitiveCategory) {
    case DECIMAL:
      final DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) ((PrimitiveObjectInspector) fieldInspector)
          .getTypeInfo();
      return DecimalType.createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.getScale());
    case CHAR:
      final int cLength = ((CharTypeInfo) ((PrimitiveObjectInspector) fieldInspector)
          .getTypeInfo()).getLength();
      return CharType.createCharType(cLength);
    case VARCHAR:
      final int vLength = ((VarcharTypeInfo) ((PrimitiveObjectInspector) fieldInspector)
          .getTypeInfo()).getLength();
      return VarcharType.createVarcharType(vLength);
    default:
      return null;
  }
}
Example 13: deserializeDate
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
private Object deserializeDate(Object value, PrimitiveTypeInfo type)
    throws SerDeException {
  long ts;
  // Dates can be either ISO8601 Strings or numeric timestamps. Any other data type or format cannot be
  // deserialized.
  if (value instanceof String) {
    try {
      ts = JsonHelper.parseTimestamp((String) value).getTime();
    } catch (Exception e) {
      throw new SerDeException("Invalid time string: " + value);
    }
  } else if (value instanceof Number) {
    ts = ((Number) value).longValue();
  } else if (value instanceof java.util.Date) {
    ts = ((java.util.Date) value).getTime();
  } else {
    throw new SerDeException("Invalid time value: " + value);
  }
  if (type.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE) {
    return new Date(ts);
  } else {
    return new Timestamp(ts);
  }
}
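The final branch keys off PrimitiveObjectInspector.PrimitiveCategory, which is how a SerDe decides whether a column declared as DATE or TIMESTAMP should receive java.sql.Date or java.sql.Timestamp. A standalone sketch of that dispatch using the stock TypeInfoFactory constants (JsonHelper above is project-specific and is not reproduced here):

import java.sql.Date;
import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DateDispatchDemo {
  static Object toHiveTemporal(long epochMillis, PrimitiveTypeInfo columnType) {
    // DATE columns get java.sql.Date; everything else in this sketch is treated as TIMESTAMP
    if (columnType.getPrimitiveCategory() == PrimitiveCategory.DATE) {
      return new Date(epochMillis);
    }
    return new Timestamp(epochMillis);
  }

  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    System.out.println(toHiveTemporal(now, TypeInfoFactory.dateTypeInfo));      // java.sql.Date
    System.out.println(toHiveTemporal(now, TypeInfoFactory.timestampTypeInfo)); // java.sql.Timestamp
  }
}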
Example 14: getFeatureOutputOI
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
@Nonnull
protected final ObjectInspector getFeatureOutputOI(@Nonnull final FeatureType featureType)
    throws UDFArgumentException {
  final PrimitiveObjectInspector outputOI;
  if (dense_model) {
    // TODO validation
    outputOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector; // see DenseModel (long/string is also parsed as int)
  } else {
    switch (featureType) {
      case STRING:
        outputOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        break;
      case INT:
        outputOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
        break;
      case LONG:
        outputOI = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
        break;
      default:
        throw new IllegalStateException("Unexpected feature type: " + featureType);
    }
  }
  return outputOI;
}
Example 15: kNNentries
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; // import the required package/class
@Nonnull
private static Int2ObjectMap<Int2FloatMap> kNNentries(@Nonnull final Object kNNiObj,
    @Nonnull final MapObjectInspector knnItemsOI,
    @Nonnull final PrimitiveObjectInspector knnItemsKeyOI,
    @Nonnull final MapObjectInspector knnItemsValueOI,
    @Nonnull final PrimitiveObjectInspector knnItemsValueKeyOI,
    @Nonnull final PrimitiveObjectInspector knnItemsValueValueOI,
    @Nullable Int2ObjectMap<Int2FloatMap> knnItems, @Nonnull final MutableInt nnzKNNi) {
  if (knnItems == null) {
    knnItems = new Int2ObjectOpenHashMap<>(1024);
  } else {
    knnItems.clear();
  }
  int numElementOfKNNItems = 0;
  for (Map.Entry<?, ?> entry : knnItemsOI.getMap(kNNiObj).entrySet()) {
    int user = PrimitiveObjectInspectorUtils.getInt(entry.getKey(), knnItemsKeyOI);
    Int2FloatMap ru = int2floatMap(knnItemsValueOI.getMap(entry.getValue()),
        knnItemsValueKeyOI, knnItemsValueValueOI);
    knnItems.put(user, ru);
    numElementOfKNNItems += ru.size();
  }
  nnzKNNi.setValue(numElementOfKNNItems);
  return knnItems;
}
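The five inspectors passed into kNNentries describe a map<int, map<int, float>> argument. A hypothetical fragment showing how they would typically be pulled apart in a UDF/UDAF initialize (the argOIs array and argument index are assumptions):

// assuming argOIs[1] is the knn_items argument of type map<int, map<int, float>>
MapObjectInspector knnItemsOI = (MapObjectInspector) argOIs[1];
PrimitiveObjectInspector knnItemsKeyOI =
    (PrimitiveObjectInspector) knnItemsOI.getMapKeyObjectInspector();
MapObjectInspector knnItemsValueOI =
    (MapObjectInspector) knnItemsOI.getMapValueObjectInspector();
PrimitiveObjectInspector knnItemsValueKeyOI =
    (PrimitiveObjectInspector) knnItemsValueOI.getMapKeyObjectInspector();
PrimitiveObjectInspector knnItemsValueValueOI =
    (PrimitiveObjectInspector) knnItemsValueOI.getMapValueObjectInspector();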