This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector. If you are unsure what the ObjectInspector class does, or how and where to use it, the curated examples below may help.
The ObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector package. Fifteen code examples of the class are shown below, sorted by popularity by default.
Example 1: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

private StructObjectInspector initialize(final ObjectInspector jsonArg, final ObjectInspector jqArg,
    final List<ObjectInspector> nameAndTypeArgs) throws UDFArgumentException {
  this.in = Arguments.asString(jsonArg, "JSON");
  try {
    // The jq program must be a constant string; compile it once at initialization time.
    this.jq = JsonQuery.compile(Arguments.asConstantNonNullString(jqArg, "JQ"));
  } catch (final JsonQueryException e) {
    throw new UDFArgumentException("JQ is invalid: " + e.getMessage());
  }
  this.marshaller = ResultObjectMarshallers.create(
      Arguments.asConstantNonNullStrings(nameAndTypeArgs, "TYPE or NAME:TYPE"));
  this.scope = new Scope();
  this.mapper = new ObjectMapper(new JsonFactory().enable(Feature.ALLOW_UNQUOTED_CONTROL_CHARS));
  return marshaller.objectInspector();
}
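Besides the ObjectInspector import shown above, this snippet relies on Jackson and the jackson-jq library. The following imports are an assumption inferred from the identifiers used, not part of the original example:

// Assumed imports (hypothetical; inferred from JsonQuery, Scope, ObjectMapper, etc.):
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser.Feature;
import com.fasterxml.jackson.databind.ObjectMapper;
import net.thisptr.jackson.jq.JsonQuery;
import net.thisptr.jackson.jq.Scope;
import net.thisptr.jackson.jq.exception.JsonQueryException;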
Example 2: getFromTypeInfo
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

public static OrcSerde getFromTypeInfo(final Configuration config, final TypeInfo typeInfo) throws IOException {
  ObjectInspector objectInspector = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
  if (!(typeInfo instanceof StructTypeInfo)) {
    throw new IOException("Input type info is not a StructTypeInfo: " + typeInfo.toString());
  }
  // Build comma-separated column-name and column-type lists from the struct fields.
  String columnsName = "";
  String columnsType = "";
  List<TypeInfo> typeInfoList = ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos();
  List<StructField> structField =
      (List<StructField>) (((StructObjectInspector) objectInspector).getAllStructFieldRefs());
  for (int i = 0; i < structField.size(); i++) {
    if (!columnsName.isEmpty()) {
      columnsName = columnsName.concat(",");
      columnsType = columnsType.concat(",");
    }
    columnsName = columnsName.concat(structField.get(i).getFieldName());
    columnsType = columnsType.concat(typeInfoList.get(i).toString());
  }
  OrcSerde serde = new OrcSerde();
  Properties table = new Properties();
  table.setProperty(serdeConstants.LIST_COLUMNS, columnsName);
  table.setProperty(serdeConstants.LIST_COLUMN_TYPES, columnsType);
  serde.initialize(config, table);
  return serde;
}
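A minimal usage sketch, assuming this helper is reachable from the caller; the struct type string is illustrative:

// Build a StructTypeInfo from a Hive type string and obtain a configured OrcSerde.
TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("struct<id:int,name:string>");
OrcSerde serde = getFromTypeInfo(new Configuration(), typeInfo);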
Example 3: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException(
        String.format("%s needs 1 argument, got %d", udfName, arguments.length));
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE
      || ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() != inputType) {
    String actual = arguments[0].getCategory() + (arguments[0].getCategory() == Category.PRIMITIVE
        ? "[" + ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() + "]" : "");
    throw new UDFArgumentException(
        String.format("%s only takes primitive type %s, got %s", udfName, inputType, actual));
  }
  argumentOI = arguments[0];
  return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(outputType);
}
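This method references several instance fields that the example does not show. A plausible set of declarations, assumed purely for illustration:

// Assumed supporting fields (hypothetical; names taken from the snippet above):
private final String udfName = "my_udf";                                // reported in error messages
private final PrimitiveCategory inputType = PrimitiveCategory.STRING;   // expected argument type
private final PrimitiveCategory outputType = PrimitiveCategory.STRING;  // produced result type
private transient ObjectInspector argumentOI;                           // saved for use in evaluate()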
Example 4: MDSMapObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

public MDSMapObjectInspector(final MapTypeInfo typeInfo) {
  TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
  if (keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE
      && ((PrimitiveTypeInfo) keyTypeInfo).getPrimitiveCategory() == PrimitiveCategory.STRING) {
    keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  } else {
    throw new RuntimeException("Map key type must be string.");
  }
  // Factory method name kept exactly as it appears in the source library.
  valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo(typeInfo.getMapValueTypeInfo());
  if (valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE) {
    getField = new PrimitiveGetField((PrimitiveObjectInspector) valueObjectInspector);
  } else if (valueObjectInspector.getCategory() == ObjectInspector.Category.UNION) {
    getField = new UnionGetField((UnionTypeInfo) (typeInfo.getMapValueTypeInfo()));
  } else {
    getField = new NestedGetField();
  }
}
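A usage sketch, assuming the class is on the classpath; the map type (string keys, int values) is illustrative:

// Build a map<string,int> type and create the inspector for it.
MapTypeInfo mapType = (MapTypeInfo) TypeInfoFactory.getMapTypeInfo(
    TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo);
MDSMapObjectInspector inspector = new MDSMapObjectInspector(mapType);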
Example 5: testMultiColumn
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

@Test
public void testMultiColumn() throws HiveException {
  final JsonQueryUDTF sut = new JsonQueryUDTF();
  final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
      toConstantOI("name:string"),
      toConstantOI("offset:int"),
  });
  assertEquals("struct<name:string,offset:int>", oi.getTypeName());
  final List<Object> results = evaluate(sut, toObject(TEST_JSON));
  assertEquals(3, results.size());
  final HivePath namePath = new HivePath(oi, ".name");
  final HivePath offsetPath = new HivePath(oi, ".offset");
  assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
  assertEquals(540, offsetPath.extract(results.get(0)).asInt());
  assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
  assertEquals(480, offsetPath.extract(results.get(1)).asInt());
  assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
  assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
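The test uses a toConstantOI helper that is not shown (evaluate and toObject are similar unshown test helpers). A plausible implementation, assuming it wraps a string in a constant writable inspector so initialize() can read the value at plan time:

// Hypothetical test helper; not part of the original example.
private static ObjectInspector toConstantOI(final String text) {
  return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, new Text(text));
}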
Example 6: isMonarchTypeSupported
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

public static boolean isMonarchTypeSupported(final ObjectInspector oi) {
  if (ObjectInspector.Category.PRIMITIVE.equals(oi.getCategory())) {
    // Handle parameterized primitive type names like decimal(20,20) or varchar(100)
    // by stripping the argument list before the map lookup.
    String typeStr = oi.getTypeName();
    final int argPos = typeStr.indexOf('(');
    if (argPos > 0) {
      typeStr = typeStr.substring(0, argPos);
    }
    return TYPE_HIVE_TO_MONARCH_MAP.containsKey(typeStr);
  } else if (oi instanceof ListObjectInspector) {
    ListObjectInspector loi = (ListObjectInspector) oi;
    return isMonarchTypeSupported(loi.getListElementObjectInspector());
  } else if (oi instanceof MapObjectInspector) {
    MapObjectInspector moi = (MapObjectInspector) oi;
    return isMonarchTypeSupported(moi.getMapKeyObjectInspector())
        && isMonarchTypeSupported(moi.getMapValueObjectInspector());
  } else if (oi instanceof StructObjectInspector) {
    return ((StructObjectInspector) oi).getAllStructFieldRefs().stream()
        .map(StructField::getFieldObjectInspector)
        .allMatch(MonarchPredicateHandler::isMonarchTypeSupported);
  } else if (oi instanceof UnionObjectInspector) {
    return ((UnionObjectInspector) oi).getObjectInspectors().stream()
        .allMatch(MonarchPredicateHandler::isMonarchTypeSupported);
  }
  return false;
}
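A usage sketch (assuming MonarchPredicateHandler is the enclosing class): a list<string> inspector is supported exactly when the string primitive appears in TYPE_HIVE_TO_MONARCH_MAP, since the check recurses into the element inspector:

ObjectInspector listOfStrings = ObjectInspectorFactory.getStandardListObjectInspector(
    PrimitiveObjectInspectorFactory.javaStringObjectInspector);
boolean supported = MonarchPredicateHandler.isMonarchTypeSupported(listOfStrings);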
Example 7: matchAndCreateUDFHolder
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

private HiveFuncHolder matchAndCreateUDFHolder(String udfName,
    Class<? extends UDF> udfClazz,
    CompleteType[] argTypes,
    ObjectInspector[] argOIs) {
  try {
    GenericUDF udfInstance = new GenericUDFBridge(udfName, false /* is operator */, udfClazz.getName());
    ObjectInspector returnOI = udfInstance.initialize(argOIs);
    return new HiveFuncHolder(
        udfName,
        udfClazz,
        argTypes,
        returnOI,
        CompleteType.fromMinorType(ObjectInspectorHelper.getMinorType(returnOI)),
        nonDeterministicUDFs.contains(udfClazz));
  } catch (Exception e) {
    // Initialization failures are deliberately swallowed; returning null signals "no match".
  }
  return null;
}
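A hypothetical call site; UDFUpper is Hive's built-in upper() implementation, and argTypes/argOIs are assumed to be prepared by the caller:

HiveFuncHolder holder = matchAndCreateUDFHolder("upper", UDFUpper.class, argTypes, argOIs);
if (holder == null) {
  // no holder could be built for these argument types; try another candidate
}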
Example 8: getDrillType
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

public static MinorType getDrillType(ObjectInspector oi) {
  switch (oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
      if (TYPE_HIVE2DRILL.containsKey(poi.getPrimitiveCategory())) {
        return TYPE_HIVE2DRILL.get(poi.getPrimitiveCategory());
      }
      throw new UnsupportedOperationException();
    }
    case MAP:
    case LIST:
    case STRUCT:
    default:
      // Complex types are not mapped to Drill types.
      throw new UnsupportedOperationException();
  }
}
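The TYPE_HIVE2DRILL lookup table is not shown in the example; a plausible shape, with illustrative entries only:

// Hypothetical declaration of the Hive-to-Drill primitive type table.
private static final Map<PrimitiveCategory, MinorType> TYPE_HIVE2DRILL =
    new EnumMap<>(PrimitiveCategory.class);
static {
  TYPE_HIVE2DRILL.put(PrimitiveCategory.INT, MinorType.INT);
  TYPE_HIVE2DRILL.put(PrimitiveCategory.LONG, MinorType.BIGINT);
  TYPE_HIVE2DRILL.put(PrimitiveCategory.STRING, MinorType.VARCHAR);
  // ... one entry per supported primitive category
}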
Example 9: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkArgsSize(arguments, 2, 2);
  checkArgPrimitive(arguments, 0);
  checkArgPrimitive(arguments, 1);
  checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP, VOID_GROUP);
  checkArgGroups(arguments, 1, inputTypes, STRING_GROUP, VOID_GROUP);
  obtainDateConverter(arguments, 0, inputTypes, converters);
  obtainStringConverter(arguments, 1, inputTypes, converters);
  if (arguments[1] instanceof ConstantObjectInspector) {
    // Resolve a constant day-of-week argument once, at initialization time.
    String dayOfWeek = getConstantStringValue(arguments, 1);
    isDayOfWeekConst = true;
    dayOfWeekIntConst = getIntDayOfWeek(dayOfWeek);
  }
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  return outputOI;
}
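The inputTypes and converters arrays and the *Const fields are declared elsewhere in the UDF. An assumed layout, following the pattern of Hive's date UDFs:

// Assumed supporting fields (hypothetical):
private final transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2];
private final transient Converter[] converters = new Converter[2];
private transient boolean isDayOfWeekConst;
private transient int dayOfWeekIntConst;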
Example 10: obtainLongConverter
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

public static void obtainLongConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  switch (inputType) {
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
      break;
    default:
      throw new UDFArgumentTypeException(i,
          "_FUNC_ only takes LONG/INT/SHORT/BYTE types as " + getArgOrder(i)
              + " argument, got " + inputType);
  }
  // A long converter must target the writable long inspector so LONG values are not truncated.
  Converter converter = ObjectInspectorConverters.getConverter(arguments[i],
      PrimitiveObjectInspectorFactory.writableLongObjectInspector);
  converters[i] = converter;
  inputTypes[i] = inputType;
}
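A usage sketch for a UDF whose first argument is an integral value; argObject is a hypothetical stand-in for the raw argument object:

PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
Converter[] converters = new Converter[1];
obtainLongConverter(arguments, 0, inputTypes, converters);
LongWritable value = (LongWritable) converters[0].convert(argObject); // argObject: raw UDF argument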
Example 11: checkArgumentTypes
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

private static void checkArgumentTypes(TypeInfo[] parameters) throws UDFArgumentTypeException {
  if (parameters.length != 2) {
    throw new UDFArgumentTypeException(parameters.length - 1,
        "Exactly two arguments are expected.");
  }
  if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
        + parameters[0].getTypeName() + " is passed.");
  }
  if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
        + parameters[1].getTypeName() + " is passed.");
  }
  if (!acceptedPrimitiveCategory(((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory())) {
    throw new UDFArgumentTypeException(0, "Only numeric type arguments are accepted but "
        + parameters[0].getTypeName() + " is passed.");
  }
  if (!acceptedPrimitiveCategory(((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory())) {
    throw new UDFArgumentTypeException(1, "Only numeric type arguments are accepted but "
        + parameters[1].getTypeName() + " is passed.");
  }
}
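The acceptedPrimitiveCategory helper is referenced but not shown. Given the "Only numeric type arguments" error message, a plausible version:

// Hypothetical helper: accept only numeric primitive categories.
private static boolean acceptedPrimitiveCategory(PrimitiveCategory category) {
  switch (category) {
    case BYTE: case SHORT: case INT: case LONG:
    case FLOAT: case DOUBLE: case DECIMAL:
      return true;
    default:
      return false;
  }
}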
Example 12: testMissingFieldsInConversions
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

@Test
public void testMissingFieldsInConversions() throws HiveException {
  final JsonQueryUDTF sut = new JsonQueryUDTF();
  final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      toConstantOI("{foo: 10}"),
      toConstantOI("foo:int"),
      toConstantOI("bar:int"),
  });
  final List<Object> results = evaluate(sut, toObject(null));
  assertEquals(1, results.size());
  assertEquals(10, new HivePath(oi, ".foo").extract(results.get(0)).asInt());
  // Fields not produced by the jq program come back as null.
  assertTrue(new HivePath(oi, ".bar").extract(results.get(0)).isNull());
}
Example 13: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkArgsSize(arguments, 2, 2);
  checkArgPrimitive(arguments, 0);
  checkArgPrimitive(arguments, 1);
  checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP, VOID_GROUP);
  checkArgGroups(arguments, 1, inputTypes, NUMERIC_GROUP, VOID_GROUP);
  obtainDateConverter(arguments, 0, inputTypes, converters);
  obtainIntConverter(arguments, 1, inputTypes, converters);
  if (arguments[1] instanceof ConstantObjectInspector) {
    // Cache a constant month count at initialization time.
    numMonthsConst = getConstantIntValue(arguments, 1);
    isNumMonthsConst = true;
  }
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  return outputOI;
}
Example 14: getMinorType
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

public static MinorType getMinorType(ObjectInspector oi) {
  switch (oi.getCategory()) {
    case PRIMITIVE: {
      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
      if (TYPE_HIVE2MINOR.containsKey(poi.getPrimitiveCategory())) {
        return TYPE_HIVE2MINOR.get(poi.getPrimitiveCategory());
      }
      throw new UnsupportedOperationException();
    }
    case MAP:
    case LIST:
    case STRUCT:
    default:
      // Complex types are not mapped to minor types.
      throw new UnsupportedOperationException();
  }
}
Example 15: obtainTimestampConverter
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; // import the dependent package/class

public static void obtainTimestampConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  ObjectInspector outOi;
  switch (inputType) {
    case STRING:
    case VARCHAR:
    case CHAR:
    case TIMESTAMP:
    case DATE:
      break;
    default:
      throw new UDFArgumentTypeException(i,
          "_FUNC_ only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i)
              + " argument, got " + inputType);
  }
  outOi = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  converters[i] = ObjectInspectorConverters.getConverter(inOi, outOi);
  inputTypes[i] = inputType;
}
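A usage sketch: the converter produced here yields TimestampWritable values (argObject is a hypothetical stand-in for the raw argument object):

obtainTimestampConverter(arguments, 0, inputTypes, converters);
TimestampWritable ts = (TimestampWritable) converters[0].convert(argObject);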