本文整理汇总了Java中org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory类的典型用法代码示例。如果您正苦于以下问题:Java PrimitiveObjectInspectorFactory类的具体用法?Java PrimitiveObjectInspectorFactory怎么用?Java PrimitiveObjectInspectorFactory使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
PrimitiveObjectInspectorFactory类属于org.apache.hadoop.hive.serde2.objectinspector.primitive包,在下文中一共展示了PrimitiveObjectInspectorFactory类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: MDSMapObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
public MDSMapObjectInspector( final MapTypeInfo typeInfo ){
TypeInfo keyTypeInfo = typeInfo.getMapKeyTypeInfo();
if( keyTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE && ( (PrimitiveTypeInfo)keyTypeInfo ).getPrimitiveCategory() == PrimitiveCategory.STRING ){
keyObjectInspector = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}
else{
throw new RuntimeException( "Map key type is string only." );
}
valueObjectInspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( typeInfo.getMapValueTypeInfo() );
if( valueObjectInspector.getCategory() == ObjectInspector.Category.PRIMITIVE ){
getField = new PrimitiveGetField( (PrimitiveObjectInspector)valueObjectInspector );
}
else if( valueObjectInspector.getCategory() == ObjectInspector.Category.UNION ){
getField = new UnionGetField( (UnionTypeInfo)( typeInfo.getMapValueTypeInfo() ) );
}
else{
getField = new NestedGetField();
}
}
示例2: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
public StructObjectInspector initialize(ObjectInspector[] arg0) throws UDFArgumentException {
    // Validate the arguments. These throws were previously commented out, which made
    // both checks dead code: a wrong argument count or a non-primitive argument
    // silently slipped through and failed later with a confusing error. Restored.
    if (arg0.length != 1) {
        throw new UDFArgumentLengthException("ExplodeMap takes only one argument");
    }
    if (arg0[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentException("ExplodeMap takes string as a parameter");
    }
    // Output schema: two string columns named col1 and col2.
    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("col1");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    fieldNames.add("col2");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
示例3: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Exactly one argument is accepted.
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException(String.format("%s needs 1 argument, got %d", udfName, arguments.length));
    }
    // The argument must be a primitive of the UDF's expected input category.
    final boolean isPrimitive = arguments[0].getCategory() == Category.PRIMITIVE;
    final boolean typeMatches = isPrimitive
        && ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() == inputType;
    if (!typeMatches) {
        // Describe what we actually got, including the primitive subtype when available.
        final String actual = arguments[0].getCategory() + (isPrimitive
            ? "[" + ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() + "]" : "");
        throw new UDFArgumentException(
            String.format("%s only takes primitive type %s, got %s", udfName, inputType, actual));
    }
    argumentOI = arguments[0];
    return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(outputType);
}
示例4: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Two primitive arguments: a date-like value and a day-of-week name.
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP, VOID_GROUP);
    checkArgGroups(arguments, 1, inputTypes, STRING_GROUP, VOID_GROUP);
    obtainDateConverter(arguments, 0, inputTypes, converters);
    obtainStringConverter(arguments, 1, inputTypes, converters);
    // When the day-of-week argument is a constant, resolve it once here
    // instead of re-parsing it for every row.
    if (arguments[1] instanceof ConstantObjectInspector) {
        final String dayOfWeekName = getConstantStringValue(arguments, 1);
        isDayOfWeekConst = true;
        dayOfWeekIntConst = getIntDayOfWeek(dayOfWeekName);
    }
    // Result is emitted as a writable string.
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}
示例5: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Expects exactly two primitive, string-group arguments.
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, VOID_GROUP);
    checkArgGroups(arguments, 1, inputTypes, STRING_GROUP, VOID_GROUP);
    obtainStringConverter(arguments, 0, inputTypes, converters);
    obtainStringConverter(arguments, 1, inputTypes, converters);
    // Result is emitted as a writable int.
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}
示例6: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Two primitive arguments: a date-like value and a numeric month count.
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP, VOID_GROUP);
    checkArgGroups(arguments, 1, inputTypes, NUMERIC_GROUP, VOID_GROUP);
    obtainDateConverter(arguments, 0, inputTypes, converters);
    obtainIntConverter(arguments, 1, inputTypes, converters);
    // A constant month count is captured once here rather than per row.
    if (arguments[1] instanceof ConstantObjectInspector) {
        isNumMonthsConst = true;
        numMonthsConst = getConstantIntValue(arguments, 1);
    }
    // Result is emitted as a writable string.
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}
示例7: obtainIntConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
public static void obtainIntConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
    // Records argument i's primitive category and installs a converter to writable int.
    // Rejects anything outside the int-sized integral categories (VOID = SQL NULL is allowed).
    final PrimitiveObjectInspector argOi = (PrimitiveObjectInspector) arguments[i];
    final PrimitiveCategory category = argOi.getPrimitiveCategory();
    switch (category) {
        case BYTE:
        case SHORT:
        case INT:
        case VOID:
            break;
        default:
            throw new UDFArgumentTypeException(i, "_FUNC_ only takes INT/SHORT/BYTE types as "
                + getArgOrder(i) + " argument, got " + category);
    }
    converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    inputTypes[i] = category;
}
示例8: obtainLongConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
public static void obtainLongConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
    // Validates that argument i is an integral primitive (BYTE/SHORT/INT/LONG) and
    // installs a converter for it, recording the original category in inputTypes[i].
    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
    switch (inputType) {
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
            break;
        default:
            throw new UDFArgumentTypeException(i,
                "_FUNC_ only takes LONG/INT/SHORT/BYTE types as " + getArgOrder(i)
                + " argument, got " + inputType);
    }
    // BUG FIX: convert to a writable *long*, not int. The previous
    // writableIntObjectInspector target silently truncated LONG arguments to
    // 32 bits even though LONG is explicitly accepted above. This matches
    // Hive's own GenericUDF.obtainLongConverter.
    Converter converter = ObjectInspectorConverters.getConverter(arguments[i],
        PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    converters[i] = converter;
    inputTypes[i] = inputType;
}
示例9: obtainDateConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
public static void obtainDateConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
    // Installs a converter for argument i: string-group categories convert to a
    // writable string, date-group (and VOID = SQL NULL) to a writable date.
    // Any other category is rejected.
    final PrimitiveObjectInspector argOi = (PrimitiveObjectInspector) arguments[i];
    final PrimitiveCategory category = argOi.getPrimitiveCategory();
    final ObjectInspector targetOi;
    switch (category) {
        case STRING:
        case VARCHAR:
        case CHAR:
            targetOi = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
            break;
        case TIMESTAMP:
        case DATE:
        case VOID:
            targetOi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
            break;
        default:
            throw new UDFArgumentTypeException(i,
                "_FUNC_ only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i)
                + " argument, got " + category);
    }
    converters[i] = ObjectInspectorConverters.getConverter(argOi, targetOi);
    inputTypes[i] = category;
}
示例10: obtainTimestampConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
public static void obtainTimestampConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
    // Accepts string-group or date-group primitives for argument i; every accepted
    // category converts to a writable timestamp. Other categories are rejected.
    final PrimitiveObjectInspector argOi = (PrimitiveObjectInspector) arguments[i];
    final PrimitiveCategory category = argOi.getPrimitiveCategory();
    switch (category) {
        case STRING:
        case VARCHAR:
        case CHAR:
        case TIMESTAMP:
        case DATE:
            break;
        default:
            throw new UDFArgumentTypeException(i,
                "_FUNC_ only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i)
                + " argument, got " + category);
    }
    converters[i] = ObjectInspectorConverters.getConverter(argOi,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    inputTypes[i] = category;
}
示例11: testSingleColumn2
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Test
public void testSingleColumn2() throws HiveException {
    // A single struct-typed output column is wrapped as col1 in the row schema.
    final JsonQueryUDTF udtf = new JsonQueryUDTF();
    final StructObjectInspector rowOi = udtf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
        toConstantOI("struct<name:string,offset:int>"),
    });
    assertEquals("struct<col1:struct<name:string,offset:int>>", rowOi.getTypeName());

    final List<Object> rows = evaluate(udtf, toObject(TEST_JSON));
    assertEquals(3, rows.size());

    // One row per timezone, in document order.
    final HivePath name = new HivePath(rowOi, ".col1.name");
    final HivePath offset = new HivePath(rowOi, ".col1.offset");
    final String[] expectedNames = { "Asia/Tokyo", "Asia/Taipei", "Asia/Kamchatka" };
    final int[] expectedOffsets = { 540, 480, 720 };
    for (int i = 0; i < 3; i++) {
        assertEquals(expectedNames[i], name.extract(rows.get(i)).asString());
        assertEquals(expectedOffsets[i], offset.extract(rows.get(i)).asInt());
    }
}
示例12: testMultiColumn
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Test
public void testMultiColumn() throws HiveException {
    // Multiple "name:type" output specs become top-level struct fields.
    final JsonQueryUDTF udtf = new JsonQueryUDTF();
    final StructObjectInspector rowOi = udtf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
        toConstantOI("name:string"),
        toConstantOI("offset:int"),
    });
    assertEquals("struct<name:string,offset:int>", rowOi.getTypeName());

    final List<Object> rows = evaluate(udtf, toObject(TEST_JSON));
    assertEquals(3, rows.size());

    // One row per timezone, in document order.
    final HivePath name = new HivePath(rowOi, ".name");
    final HivePath offset = new HivePath(rowOi, ".offset");
    final String[] expectedNames = { "Asia/Tokyo", "Asia/Taipei", "Asia/Kamchatka" };
    final int[] expectedOffsets = { 540, 480, 720 };
    for (int i = 0; i < 3; i++) {
        assertEquals(expectedNames[i], name.extract(rows.get(i)).asString());
        assertEquals(expectedOffsets[i], offset.extract(rows.get(i)).asInt());
    }
}
示例13: testAbortOnError
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Test
public void testAbortOnError() throws HiveException {
    // With an error-raising jq program, malformed JSON input must surface
    // as a HiveException carrying the parser's message.
    final JsonQueryUDTF udtf = new JsonQueryUDTF();
    @SuppressWarnings("unused")
    final StructObjectInspector rowOi = udtf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI("if $error then error($error.message) else . end"),
        toConstantOI("string"),
    });
    try {
        evaluate(udtf, toObject("\"corrupt \"string"));
        fail("should fail");
    } catch (final HiveException e) {
        assertTrue(e.getMessage().contains("Unrecognized token 'string'"));
    }
}
示例14: testMoreOnStringOutputConversions
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Test
public void testMoreOnStringOutputConversions() throws HiveException {
    // Non-string JSON values requested as string columns are serialized back to
    // JSON text; null stays null and plain strings pass through unquoted.
    final JsonQueryUDTF udtf = new JsonQueryUDTF();
    final StructObjectInspector rowOi = udtf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI("{foo: {a: 1}, bar: null, baz: \"baz\"}"),
        toConstantOI("foo:string"),
        toConstantOI("bar:string"),
        toConstantOI("baz:string"),
    });

    final List<Object> rows = evaluate(udtf, toObject("null"));
    assertEquals(1, rows.size());
    final Object row = rows.get(0);
    assertEquals("{\"a\":1}", new HivePath(rowOi, ".foo").extract(row).asString());
    assertTrue(new HivePath(rowOi, ".bar").extract(row).isNull());
    assertEquals("baz", new HivePath(rowOi, ".baz").extract(row).asString());
}
示例15: testNullInputs
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; //导入依赖的package包/类
@Test
public void testNullInputs() throws HiveException {
    // JSON "null" and a null input row alike must yield a null col1 value.
    final JsonQueryUDTF udtf = new JsonQueryUDTF();
    final StructObjectInspector rowOi = udtf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI("."),
        toConstantOI("string"),
    });

    final List<Object> rows = evaluate(udtf, toObject("null"), null, toObject(null));
    assertEquals(3, rows.size());
    final HivePath col1 = new HivePath(rowOi, ".col1");
    for (final Object row : rows) {
        assertTrue(col1.extract(row).isNull());
    }
}