This article collects typical usage examples of the Java class StructObjectInspector from org.apache.hadoop.hive.serde2.objectinspector. If you are wondering what StructObjectInspector is for, how to use it, or want to see it in real code, the curated examples below should help.
The StructObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector package. Fifteen code examples of the class are shown below, sorted by popularity.
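Before diving into the examples, here is a minimal, self-contained sketch of what a StructObjectInspector does: it describes the field layout of a struct value and lets you read fields without depending on the value's concrete Java representation. This snippet is illustrative only and is not taken from any of the examples below.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructObjectInspectorDemo {
    public static void main(String[] args) {
        // Describe a struct<name:string,age:int> using standard Java object inspectors.
        List<String> fieldNames = Arrays.asList("name", "age");
        List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        StructObjectInspector soi =
            ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);

        // With the standard inspectors, a struct value is simply a List (or Object[]) of field values.
        Object row = Arrays.<Object>asList("alice", 30);

        // Fields are accessed through StructField handles rather than raw positions.
        StructField nameField = soi.getStructFieldRef("name");
        System.out.println(soi.getStructFieldData(row, nameField)); // alice
        System.out.println(soi.getTypeName()); // struct<name:string,age:int>
    }
}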
Example 1: initialize
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
private StructObjectInspector initialize(final ObjectInspector jsonArg, final ObjectInspector jqArg, final List<ObjectInspector> nameAndTypeArgs) throws UDFArgumentException {
    this.in = Arguments.asString(jsonArg, "JSON");
    try {
        this.jq = JsonQuery.compile(Arguments.asConstantNonNullString(jqArg, "JQ"));
    } catch (final JsonQueryException e) {
        throw new UDFArgumentException("JQ is invalid: " + e.getMessage());
    }
    this.marshaller = ResultObjectMarshallers.create(Arguments.asConstantNonNullStrings(nameAndTypeArgs, "TYPE or NAME:TYPE"));
    this.scope = new Scope();
    this.mapper = new ObjectMapper(new JsonFactory().enable(Feature.ALLOW_UNQUOTED_CONTROL_CHARS));
    return marshaller.objectInspector();
}
Example 2: getFromTypeInfo
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
public static OrcSerde getFromTypeInfo(final Configuration config, final TypeInfo typeInfo) throws IOException {
    if (!(typeInfo instanceof StructTypeInfo)) {
        throw new IOException("Input type info is not StructTypeInfo: " + typeInfo.toString());
    }
    ObjectInspector objectInspector = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
    List<TypeInfo> typeInfoList = ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos();
    List<? extends StructField> structFields = ((StructObjectInspector) objectInspector).getAllStructFieldRefs();
    StringBuilder columnsName = new StringBuilder();
    StringBuilder columnsType = new StringBuilder();
    for (int i = 0; i < structFields.size(); i++) {
        if (columnsName.length() != 0) {
            columnsName.append(',');
            columnsType.append(',');
        }
        columnsName.append(structFields.get(i).getFieldName());
        columnsType.append(typeInfoList.get(i).toString());
    }
    OrcSerde serde = new OrcSerde();
    Properties table = new Properties();
    table.setProperty(serdeConstants.LIST_COLUMNS, columnsName.toString());
    table.setProperty(serdeConstants.LIST_COLUMN_TYPES, columnsType.toString());
    serde.initialize(config, table);
    return serde;
}
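A quick way to exercise this helper is to parse a Hive type string into a TypeInfo first. A usage sketch, assuming the method above lives in a class named OrcSerdeFactory (the enclosing class name is not shown in the excerpt):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

// Parse a struct type string and build an OrcSerde for it.
Configuration config = new Configuration();
TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("struct<id:bigint,name:string>");
OrcSerde serde = OrcSerdeFactory.getFromTypeInfo(config, typeInfo); // class name assumed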
Example 3: initialize
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
public StructObjectInspector initialize(ObjectInspector[] arg0) throws UDFArgumentException {
    if (arg0.length != 1) {
        throw new UDFArgumentLengthException("ExplodeMap takes only one argument");
    }
    if (arg0[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentException("ExplodeMap takes string as a parameter");
    }
    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("col1");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    fieldNames.add("col2");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
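The initialize above only declares the two output columns; the matching process method is not shown. A minimal sketch of what it could look like, with the key=value splitting logic assumed rather than taken from the original ExplodeMap code:

// Assumed companion to the initialize above: forwards one (col1, col2) row per
// key=value entry. The ";"/"=" input format is an illustrative assumption.
@Override
public void process(Object[] args) throws HiveException {
    String input = args[0].toString();
    for (String entry : input.split(";")) {
        String[] kv = entry.split("=", 2);
        if (kv.length == 2) {
            forward(new Object[] { kv[0], kv[1] }); // matches the declared struct<col1:string,col2:string>
        }
    }
}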
Example 4: isMonarchTypeSupported
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
public static boolean isMonarchTypeSupported(final ObjectInspector oi) {
    if (ObjectInspector.Category.PRIMITIVE.equals(oi.getCategory())) {
        // handle primitive type definitions like decimal(20,20) or varchar(100)
        String typeStr = oi.getTypeName();
        final int argPos = typeStr.indexOf('(');
        if (argPos > 0) {
            typeStr = typeStr.substring(0, argPos);
        }
        return TYPE_HIVE_TO_MONARCH_MAP.containsKey(typeStr);
    } else if (oi instanceof ListObjectInspector) {
        ListObjectInspector loi = (ListObjectInspector) oi;
        return isMonarchTypeSupported(loi.getListElementObjectInspector());
    } else if (oi instanceof MapObjectInspector) {
        MapObjectInspector moi = (MapObjectInspector) oi;
        return isMonarchTypeSupported(moi.getMapKeyObjectInspector())
            && isMonarchTypeSupported(moi.getMapValueObjectInspector());
    } else if (oi instanceof StructObjectInspector) {
        return ((StructObjectInspector) oi).getAllStructFieldRefs().stream()
            .map(StructField::getFieldObjectInspector)
            .allMatch(MonarchPredicateHandler::isMonarchTypeSupported);
    } else if (oi instanceof UnionObjectInspector) {
        return ((UnionObjectInspector) oi).getObjectInspectors().stream()
            .allMatch(MonarchPredicateHandler::isMonarchTypeSupported);
    }
    return false;
}
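To try the check against a nested type, an inspector can be built from a Hive type string. A short sketch, assuming it is called from outside MonarchPredicateHandler:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

// The recursion above descends into the map's key and value inspectors.
ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
    TypeInfoUtils.getTypeInfoFromTypeString("map<string,array<decimal(20,10)>>"));
boolean supported = MonarchPredicateHandler.isMonarchTypeSupported(oi);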
Example 5: addRow
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Override
public void addRow(Object row) throws IOException {
    int rowId = internalBatch.size++;
    if (fields != null) {
        StructObjectInspector soi = (StructObjectInspector) inspector;
        for (int i = 0; i < fields.length; ++i) {
            setColumn(rowId, internalBatch.cols[i], fields[i].getFieldObjectInspector(),
                soi.getStructFieldData(row, fields[i]));
        }
    } else {
        setColumn(rowId, internalBatch.cols[0], inspector, row);
    }
    if (internalBatch.size == internalBatch.getMaxSize()) {
        flushInternalBatch();
    }
}
Example 6: testSingleColumn2
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Test
public void testSingleColumn2() throws HiveException {
    final JsonQueryUDTF sut = new JsonQueryUDTF();
    final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
        toConstantOI("struct<name:string,offset:int>"),
    });
    assertEquals("struct<col1:struct<name:string,offset:int>>", oi.getTypeName());
    final List<Object> results = evaluate(sut, toObject(TEST_JSON));
    assertEquals(3, results.size());
    final HivePath namePath = new HivePath(oi, ".col1.name");
    final HivePath offsetPath = new HivePath(oi, ".col1.offset");
    assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
    assertEquals(540, offsetPath.extract(results.get(0)).asInt());
    assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
    assertEquals(480, offsetPath.extract(results.get(1)).asInt());
    assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
    assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
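This test and the following ones use helpers such as toConstantOI and toObject that are not part of the excerpt. Plausible minimal implementations, assuming jq programs and type strings are passed to the UDTF as constant string inspectors and test inputs as writable Text values (these are assumptions, not the project's actual code):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

// Wrap a string literal as a constant string ObjectInspector for UDTF arguments.
private static ObjectInspector toConstantOI(final String text) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        TypeInfoFactory.stringTypeInfo, new Text(text));
}

// Wrap a test input row as the writable value the string inspector expects.
private static Object toObject(final String text) {
    return text == null ? null : new Text(text);
}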
Example 7: testMultiColumn
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Test
public void testMultiColumn() throws HiveException {
    final JsonQueryUDTF sut = new JsonQueryUDTF();
    final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
        toConstantOI("name:string"),
        toConstantOI("offset:int"),
    });
    assertEquals("struct<name:string,offset:int>", oi.getTypeName());
    final List<Object> results = evaluate(sut, toObject(TEST_JSON));
    assertEquals(3, results.size());
    final HivePath namePath = new HivePath(oi, ".name");
    final HivePath offsetPath = new HivePath(oi, ".offset");
    assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
    assertEquals(540, offsetPath.extract(results.get(0)).asInt());
    assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
    assertEquals(480, offsetPath.extract(results.get(1)).asInt());
    assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
    assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
Example 8: testAbortOnError
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Test
public void testAbortOnError() throws HiveException {
    final JsonQueryUDTF sut = new JsonQueryUDTF();
    @SuppressWarnings("unused")
    final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI("if $error then error($error.message) else . end"),
        toConstantOI("string"),
    });
    try {
        evaluate(sut, toObject("\"corrupt \"string"));
        fail("should fail");
    } catch (final HiveException e) {
        assertTrue(e.getMessage().contains("Unrecognized token 'string'"));
    }
}
Example 9: testMoreOnStringOutputConversions
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Test
public void testMoreOnStringOutputConversions() throws HiveException {
    final JsonQueryUDTF sut = new JsonQueryUDTF();
    final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI("{foo: {a: 1}, bar: null, baz: \"baz\"}"),
        toConstantOI("foo:string"),
        toConstantOI("bar:string"),
        toConstantOI("baz:string"),
    });
    final List<Object> results = evaluate(sut, toObject("null"));
    assertEquals(1, results.size());
    final Object obj = results.get(0);
    assertEquals("{\"a\":1}", new HivePath(oi, ".foo").extract(obj).asString());
    assertTrue(new HivePath(oi, ".bar").extract(obj).isNull());
    assertEquals("baz", new HivePath(oi, ".baz").extract(obj).asString());
}
Example 10: testNullInputs
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Test
public void testNullInputs() throws HiveException {
    final JsonQueryUDTF sut = new JsonQueryUDTF();
    final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI("."),
        toConstantOI("string"),
    });
    final List<Object> results = evaluate(sut, toObject("null"), null, toObject(null));
    assertEquals(3, results.size());
    assertTrue(new HivePath(oi, ".col1").extract(results.get(0)).isNull());
    assertTrue(new HivePath(oi, ".col1").extract(results.get(1)).isNull());
    assertTrue(new HivePath(oi, ".col1").extract(results.get(2)).isNull());
}
Example 11: testMissingFieldsInConversions
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Test
public void testMissingFieldsInConversions() throws HiveException {
    final JsonQueryUDTF sut = new JsonQueryUDTF();
    final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        toConstantOI("{foo: 10}"),
        toConstantOI("foo:int"),
        toConstantOI("bar:int"),
    });
    final List<Object> results = evaluate(sut, toObject(null));
    assertEquals(1, results.size());
    assertEquals(10, new HivePath(oi, ".foo").extract(results.get(0)).asInt());
    assertTrue(new HivePath(oi, ".bar").extract(results.get(0)).isNull());
}
Example 12: initialize
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
/**
 * Initializes the HiveUDF and creates the object inspectors. Requires exactly one
 * argument, whose ObjectInspector must be a StructObjectInspector.
 *
 * @param arguments array of length 1 containing one StructObjectInspector
 *
 * @return ObjectInspector able to parse the result of the UDF's evaluate method (BinaryWritable)
 *
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentException in case the first argument is not a StructObjectInspector
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in case the number of arguments is != 1
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null || arguments.length != 1) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction>");
    }
    if (!(arguments[0] instanceof StructObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Struct containing a BitcoinTransaction");
    }
    this.soi = (StructObjectInspector) arguments[0];
    // these are only used for BitcoinTransaction structs exported to other formats, such as ORC
    this.wboi = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    this.wioi = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    this.wloi = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    // the UDF returns the hash value of a BitcoinTransaction as a byte array
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
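The inspectors stored here are used later when evaluate walks the struct. A simplified sketch of that flow; the field name "version" and the hashing step are assumptions for illustration, not the project's actual code:

// Assumed sketch of evaluate: read a field via the saved StructObjectInspector,
// then return the transaction hash as a BytesWritable (org.apache.hadoop.io.BytesWritable).
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object struct = arguments[0].get();
    StructField versionField = soi.getStructFieldRef("version"); // field name assumed
    int version = wioi.get(soi.getStructFieldData(struct, versionField));
    // ... read the remaining fields, rebuild the BitcoinTransaction, hash it ...
    byte[] hash = new byte[32]; // placeholder for the computed transaction hash
    return new BytesWritable(hash);
}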
Example 13: readListOfOutputsFromTable
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
/**
 * Reads a list of Bitcoin transaction outputs from a table in Hive in any format (e.g. ORC, Parquet).
 *
 * @param loi ObjectInspector for processing the Object containing a list
 * @param listOfOutputsObject object containing the list of outputs of a Bitcoin transaction
 *
 * @return a list of BitcoinTransactionOutputs
 */
private List<BitcoinTransactionOutput> readListOfOutputsFromTable(ListObjectInspector loi, Object listOfOutputsObject) {
    int listLength = loi.getListLength(listOfOutputsObject);
    List<BitcoinTransactionOutput> result = new ArrayList<>(listLength);
    StructObjectInspector listOfOutputsElementObjectInspector = (StructObjectInspector) loi.getListElementObjectInspector();
    for (int i = 0; i < listLength; i++) {
        Object currentListOfOutputsObject = loi.getListElement(listOfOutputsObject, i);
        StructField valueSF = listOfOutputsElementObjectInspector.getStructFieldRef("value");
        StructField txoutscriptlengthSF = listOfOutputsElementObjectInspector.getStructFieldRef("txoutscriptlength");
        StructField txoutscriptSF = listOfOutputsElementObjectInspector.getStructFieldRef("txoutscript");
        if ((valueSF == null) || (txoutscriptlengthSF == null) || (txoutscriptSF == null)) {
            LOG.warn("Invalid BitcoinTransactionOutput detected at position " + i);
            return new ArrayList<>();
        }
        long currentValue = wloi.get(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, valueSF));
        byte[] currentTxOutScriptLength = wboi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, txoutscriptlengthSF));
        byte[] currentTxOutScript = wboi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, txoutscriptSF));
        result.add(new BitcoinTransactionOutput(currentValue, currentTxOutScriptLength, currentTxOutScript));
    }
    return result;
}
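The ListObjectInspector passed in would typically be derived from the enclosing transaction struct. A short sketch of the call site, with the field name "listofoutputs" and the transactionObject variable assumed:

// Assumed call site: resolve the outputs field on the transaction struct first.
StructField outputsField = soi.getStructFieldRef("listofoutputs"); // field name assumed
ListObjectInspector loi = (ListObjectInspector) outputsField.getFieldObjectInspector();
Object listOfOutputsObject = soi.getStructFieldData(transactionObject, outputsField);
List<BitcoinTransactionOutput> outputs = readListOfOutputsFromTable(loi, listOfOutputsObject);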
Example 14: initialize
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Override
public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    try {
        // Extract a collector for LogisticRegressionDataGeneratorUDTF
        Field collector = GenericUDTF.class.getDeclaredField("collector");
        collector.setAccessible(true);
        udtf.setCollector((Collector) collector.get(this));
        // To avoid HadoopUtils#getTaskId()
        Class<?> clazz = udtf.getClass();
        Field rnd1 = clazz.getDeclaredField("rnd1");
        Field rnd2 = clazz.getDeclaredField("rnd2");
        Field r_seed = clazz.getDeclaredField("r_seed");
        r_seed.setAccessible(true);
        final long seed = r_seed.getLong(udtf) + (int) Thread.currentThread().getId();
        rnd1.setAccessible(true);
        rnd2.setAccessible(true);
        rnd1.set(udtf, new Random(seed));
        rnd2.set(udtf, new Random(seed + 1));
    } catch (Exception e) {
        e.printStackTrace();
    }
    return udtf.initialize(argOIs);
}
Example 15: initialize
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the required package/class
@Override
public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length < 2) {
        throw new UDFArgumentException(
            "_FUNC_ takes 2 arguments: List<Int|BigInt|Text> features, float target [, constant string options]");
    }
    this.featureListOI = HiveUtils.asListOI(argOIs[0]);
    this.featureType = getFeatureType(featureListOI);
    this.targetOI = HiveUtils.asDoubleCompatibleOI(argOIs[1]);
    processOptions(argOIs);
    this.model = createModel();
    try {
        this.optimizer = createOptimizer(optimizerOptions);
    } catch (Throwable e) {
        throw new UDFArgumentException(e);
    }
    this.count = 0L;
    this.sampled = 0;
    return getReturnOI(getFeatureOutputOI(featureType));
}