This article collects typical Java usage examples of the org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector class. If you are wondering what StandardStructObjectInspector is for, or how it is used in practice, the curated class code examples below should help.
The StandardStructObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector package. Nine code examples of the class are shown below, sorted by popularity by default.
Example 1: testBasic
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
@Test
public void testBasic() throws HiveException {
    String userAgent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36";
    ParseUserAgent parseUserAgent = new ParseUserAgent();
    StandardStructObjectInspector resultInspector = (StandardStructObjectInspector) parseUserAgent
        .initialize(new ObjectInspector[]{
            PrimitiveObjectInspectorFactory.javaStringObjectInspector
        });
    Object row = parseUserAgent.evaluate(new DeferredObject[]{new DeferredJavaObject(userAgent)});
    checkField(resultInspector, row, "DeviceName", "Linux Desktop");
    checkField(resultInspector, row, "AgentNameVersionMajor", "Chrome 58");
}
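Beyond asserting individual fields (see checkField in Example 7 below), the returned StandardStructObjectInspector can also enumerate every field of the UDF output. A minimal sketch, assuming the resultInspector and row from the test above:

List<? extends StructField> fields = resultInspector.getAllStructFieldRefs();
List<Object> values = resultInspector.getStructFieldsDataAsList(row);
for (int i = 0; i < fields.size(); i++) {
    // print each parsed user-agent field by name
    System.out.println(fields.get(i).getFieldName() + " = " + values.get(i));
}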
Example 2: toMetacatType
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
@Override
public Type toMetacatType(final String type) {
    // Hack to fix presto "varchar" type coming in with no length, which is required by Hive.
    final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(
        "varchar".equals(type.toLowerCase()) ? serdeConstants.STRING_TYPE_NAME : type);
    ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
    // The standard struct object inspector forces field names to lower case. In Metacat we need to preserve
    // the original case of the struct fields, so we wrap it with our own inspector to keep the field names
    // in their original case.
    if (typeInfo.getCategory().equals(ObjectInspector.Category.STRUCT)) {
        final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
        final StandardStructObjectInspector objectInspector = (StandardStructObjectInspector) oi;
        oi = new HiveTypeConverter.SameCaseStandardStructObjectInspector(
            structTypeInfo.getAllStructFieldNames(), objectInspector);
    }
    return getCanonicalType(oi);
}
Example 3: structObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
static StandardStructObjectInspector structObjectInspector(Properties tableProperties) {
    // extract column info - don't use Hive constants, as they were renamed in 0.9, breaking compatibility
    // the column names are saved because the inspector given to #serialize doesn't preserve them (maybe because it's an external table)
    // use the class since StructType requires it ...
    List<String> columnNames = StringUtils.tokenize(tableProperties.getProperty(HiveConstants.COLUMNS), ",");
    List<TypeInfo> colTypes = TypeInfoUtils.getTypeInfosFromTypeString(tableProperties.getProperty(HiveConstants.COLUMNS_TYPES));
    // create a standard writable Object Inspector - used later on by serialization/deserialization
    List<ObjectInspector> inspectors = new ArrayList<ObjectInspector>();
    for (TypeInfo typeInfo : colTypes) {
        inspectors.add(TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo));
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
}
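The inspector built this way can be used not only to describe rows but also to create and read them. A minimal, self-contained sketch under assumed column names ("name", "age") and Java (non-writable) primitive inspectors rather than the writable ones used above:

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructInspectorSketch {
    public static void main(String[] args) {
        // build a struct inspector for two hypothetical columns: "name" (string) and "age" (int)
        List<String> fieldNames = Arrays.asList("name", "age");
        List<ObjectInspector> fieldInspectors = Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        StandardStructObjectInspector structInspector =
            ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldInspectors);

        // create an empty struct and populate its fields through the inspector
        Object row = structInspector.create();
        structInspector.setStructFieldData(row, structInspector.getStructFieldRef("name"), "alice");
        structInspector.setStructFieldData(row, structInspector.getStructFieldRef("age"), 30);

        // read the fields back by reference
        for (StructField field : structInspector.getAllStructFieldRefs()) {
            System.out.println(field.getFieldName() + " = " + structInspector.getStructFieldData(row, field));
        }
    }
}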
Example 4: init
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
/**
 * Receives the passed-in argument object inspectors and returns the desired
 * return type's object inspector to inform Hive of the return type of the UDAF.
 *
 * @param mode
 *   Mode (i.e. PARTIAL1, COMPLETE...) for determining the input and output
 *   object inspector types.
 * @param parameters
 *   List of object inspectors for the input arguments.
 * @return The object inspector for the UDAF return type (i.e. the type
 *   returned by terminate(...)).
 */
@Override
public ObjectInspector init(final Mode mode, final ObjectInspector[] parameters) throws HiveException {
    super.init(mode, parameters);
    if ((mode == Mode.PARTIAL1) || (mode == Mode.COMPLETE)) {
        inputInspector_ = (PrimitiveObjectInspector) parameters[0];
        if (parameters.length > 1) {
            lgKInspector_ = (PrimitiveObjectInspector) parameters[1];
        }
        if (parameters.length > 2) {
            hllTypeInspector_ = (PrimitiveObjectInspector) parameters[2];
        }
    } else {
        // mode == PARTIAL2 || FINAL
        intermediateInspector_ = (StandardStructObjectInspector) parameters[0];
    }
    if ((mode == Mode.PARTIAL1) || (mode == Mode.PARTIAL2)) {
        // intermediate results need to include the lgK and the target HLL type
        return ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList(LG_K_FIELD, HLL_TYPE_FIELD, SKETCH_FIELD),
            Arrays.asList(
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.INT),
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.STRING),
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY)
            )
        );
    }
    // final results include just the sketch
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY);
}
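On the consuming side (PARTIAL2/FINAL), the intermediateInspector_ assigned above is what decodes the intermediate struct again. A hedged sketch of what a merge() reading that struct could look like, reusing the same field constants; UnionState and its update call are hypothetical stand-ins, not the actual DataSketches implementation:

@Override
public void merge(final AggregationBuffer buf, final Object data) throws HiveException {
    if (data == null) { return; }
    // pull the three fields back out of the intermediate struct via the inspector
    final int lgK = ((IntWritable) intermediateInspector_.getStructFieldData(
        data, intermediateInspector_.getStructFieldRef(LG_K_FIELD))).get();
    final String hllType = intermediateInspector_.getStructFieldData(
        data, intermediateInspector_.getStructFieldRef(HLL_TYPE_FIELD)).toString();
    final BytesWritable serializedSketch = (BytesWritable) intermediateInspector_.getStructFieldData(
        data, intermediateInspector_.getStructFieldRef(SKETCH_FIELD));
    // ((UnionState) buf).update(lgK, hllType, serializedSketch.getBytes()); // hypothetical state update
}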
Example 5: init
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
@Override
public ObjectInspector init(final Mode mode, final ObjectInspector[] parameters) throws HiveException {
    super.init(mode, parameters);
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
        inputObjectInspector = (PrimitiveObjectInspector) parameters[0];
        if (parameters.length > 1) {
            seedObjectInspector = (PrimitiveObjectInspector) parameters[1];
        }
    } else {
        intermediateObjectInspector = (StandardStructObjectInspector) parameters[0];
    }
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
        // intermediate results need to include the seed
        return ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList(SEED_FIELD, SKETCH_FIELD),
            Arrays.asList(
                PrimitiveObjectInspectorFactory
                    .getPrimitiveWritableObjectInspector(PrimitiveCategory.LONG),
                PrimitiveObjectInspectorFactory
                    .getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY)
            )
        );
    }
    // final results include just the sketch
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY);
}
Example 6: init
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
/**
 * Receives the passed-in argument object inspectors and returns the desired
 * return type's object inspector to inform Hive of the return type of the UDAF.
 *
 * @param mode
 *   Mode (i.e. PARTIAL1, COMPLETE...) for determining the input/output
 *   object inspector types.
 * @param parameters
 *   List of object inspectors for the input arguments.
 * @return The object inspector for the UDAF return type (i.e. the type
 *   returned by terminate(...)).
 */
@Override
public ObjectInspector init(final Mode mode, final ObjectInspector[] parameters) throws HiveException {
    super.init(mode, parameters);
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
        inputObjectInspector = (PrimitiveObjectInspector) parameters[0];
        if (parameters.length > 1) {
            nominalEntriesObjectInspector = (PrimitiveObjectInspector) parameters[1];
        }
        if (parameters.length > 2) {
            seedObjectInspector = (PrimitiveObjectInspector) parameters[2];
        }
    } else {
        // mode == PARTIAL2 || FINAL
        intermediateObjectInspector = (StandardStructObjectInspector) parameters[0];
    }
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
        // intermediate results need to include the nominal number of entries and the seed
        return ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList(NOMINAL_ENTRIES_FIELD, SEED_FIELD, SKETCH_FIELD),
            Arrays.asList(
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.INT),
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.LONG),
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY)
            )
        );
    }
    // final results include just the sketch
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.BINARY);
}
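On the producer side, terminatePartial() has to return something the intermediate struct inspector declared above can decode; with a standard struct inspector a plain Object[] in field order works. A hedged sketch under an assumed UnionState buffer class with hypothetical accessors:

@Override
public Object terminatePartial(final AggregationBuffer buf) throws HiveException {
    final UnionState state = (UnionState) buf; // hypothetical aggregation state class
    if (state.isEmpty()) { return null; }      // hypothetical emptiness check
    // field order must match NOMINAL_ENTRIES_FIELD, SEED_FIELD, SKETCH_FIELD above
    return new Object[] {
        new IntWritable(state.getNominalEntries()),          // hypothetical accessor
        new LongWritable(state.getSeed()),                    // hypothetical accessor
        new BytesWritable(state.getResult().toByteArray())    // hypothetical accessor
    };
}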
Example 7: checkField
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
private void checkField(StandardStructObjectInspector resultInspector, Object row, String fieldName, String expectedValue) {
    assertEquals(expectedValue,
        resultInspector.getStructFieldData(row, resultInspector.getStructFieldRef(fieldName)).toString());
}
Example 8: SameCaseStandardStructObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
public SameCaseStandardStructObjectInspector(final List<String> realFieldNames,
        final StandardStructObjectInspector structObjectInspector) {
    this.realFieldNames = realFieldNames;
    this.structObjectInspector = structObjectInspector;
}
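This wrapper exists because, as noted in Example 2, the standard struct object inspector lower-cases field names. A minimal sketch showing the behaviour being worked around (the field name is illustrative):

StandardStructObjectInspector oi = ObjectInspectorFactory.getStandardStructObjectInspector(
    Arrays.asList("UserId"),
    Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaStringObjectInspector));
// prints "userid" - the original "UserId" casing is lost, hence the wrapper keeping realFieldNames
System.out.println(oi.getAllStructFieldRefs().get(0).getFieldName());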
Example 9: createHiveStructInspector
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; // import the required package/class
private static StandardStructObjectInspector createHiveStructInspector(ObjectInspector objectInspector)
{
    return getStandardStructObjectInspector(
        ImmutableList.of("a", "b", "c"),
        ImmutableList.of(objectInspector, objectInspector, objectInspector));
}