This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector.getStructFieldRef. If you are wondering what StructObjectInspector.getStructFieldRef does, how to use it, or where to find examples of it, the curated code samples below should help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector.
A total of 13 code examples of StructObjectInspector.getStructFieldRef are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
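Before the examples, a quick orientation: getStructFieldRef(String fieldName) resolves a field name to a StructField handle, which is then passed to getStructFieldData(Object data, StructField fieldRef) to pull that field out of a concrete row or struct object. The short sketch below illustrates the pattern; the field name "price", the rowInspector/rowObject variables, and the double-typed field are placeholder assumptions, not taken from the examples that follow.

// Minimal sketch of the lookup-then-read pattern (placeholder names).
StructObjectInspector soi = (StructObjectInspector) rowInspector;
StructField priceField = soi.getStructFieldRef("price");           // field handle (implementations may return null or throw if the name is absent)
ObjectInspector priceOI = priceField.getFieldObjectInspector();    // inspector describing the field's type
Object rawPrice = soi.getStructFieldData(rowObject, priceField);   // extract the field's data from one struct instance
double price = ((DoubleObjectInspector) priceOI).get(rawPrice);    // primitive access through the field's own inspector

Resolve the StructField once and reuse it across rows where possible; several examples below instead call getStructFieldRef inside their row loop, which works but repeats the lookup on every iteration.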
Example 1: readListOfOutputsFromTable
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
/**
* Read list of Bitcoin transaction outputs from a table in Hive in any format (e.g. ORC, Parquet)
*
* @param loi ObjectInspector for processing the Object containing a list
* @param listOfOutputsObject object containing the list of outputs to a Bitcoin Transaction
*
* @return a list of BitcoinTransactionOutputs
*
*/
private List<BitcoinTransactionOutput> readListOfOutputsFromTable(ListObjectInspector loi, Object listOfOutputsObject) {
    int listLength = loi.getListLength(listOfOutputsObject);
    List<BitcoinTransactionOutput> result = new ArrayList<>(listLength);
    StructObjectInspector listOfOutputsElementObjectInspector = (StructObjectInspector) loi.getListElementObjectInspector();
    for (int i = 0; i < listLength; i++) {
        Object currentListOfOutputsObject = loi.getListElement(listOfOutputsObject, i);
        StructField valueSF = listOfOutputsElementObjectInspector.getStructFieldRef("value");
        StructField txoutscriptlengthSF = listOfOutputsElementObjectInspector.getStructFieldRef("txoutscriptlength");
        StructField txoutscriptSF = listOfOutputsElementObjectInspector.getStructFieldRef("txoutscript");
        if ((valueSF == null) || (txoutscriptlengthSF == null) || (txoutscriptSF == null)) {
            LOG.warn("Invalid BitcoinTransactionOutput detected at position " + i);
            return new ArrayList<>();
        }
        long currentValue = wloi.get(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, valueSF));
        byte[] currentTxOutScriptLength = wboi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, txoutscriptlengthSF));
        byte[] currentTxOutScript = wboi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, txoutscriptSF));
        BitcoinTransactionOutput currentBitcoinTransactionOutput = new BitcoinTransactionOutput(currentValue, currentTxOutScriptLength, currentTxOutScript);
        result.add(currentBitcoinTransactionOutput);
    }
    return result;
}
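Note: wloi and wboi are not declared in this snippet; from their use they are presumably class-level long and binary primitive ObjectInspectors (e.g. a WritableLongObjectInspector and a WritableBinaryObjectInspector) initialized elsewhere in the UDF. The same applies to the next two examples.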
Example 2: readListOfInputsFromTable
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
/**
* Read list of Bitcoin transaction inputs from a table in Hive in any format (e.g. ORC, Parquet)
*
* @param loi ObjectInspector for processing the Object containing a list
* @param listOfInputsObject object containing the list of inputs to a Bitcoin Transaction
*
* @return a list of BitcoinTransactionInputs
*
*/
private List<BitcoinTransactionInput> readListOfInputsFromTable(ListObjectInspector loi, Object listOfInputsObject) {
    int listLength = loi.getListLength(listOfInputsObject);
    List<BitcoinTransactionInput> result = new ArrayList<>(listLength);
    StructObjectInspector listOfInputsElementObjectInspector = (StructObjectInspector) loi.getListElementObjectInspector();
    for (int i = 0; i < listLength; i++) {
        Object currentlistofinputsObject = loi.getListElement(listOfInputsObject, i);
        StructField prevtransactionhashSF = listOfInputsElementObjectInspector.getStructFieldRef("prevtransactionhash");
        StructField previoustxoutindexSF = listOfInputsElementObjectInspector.getStructFieldRef("previoustxoutindex");
        StructField txinscriptlengthSF = listOfInputsElementObjectInspector.getStructFieldRef("txinscriptlength");
        StructField txinscriptSF = listOfInputsElementObjectInspector.getStructFieldRef("txinscript");
        StructField seqnoSF = listOfInputsElementObjectInspector.getStructFieldRef("seqno");
        boolean prevFieldsNull = (prevtransactionhashSF == null) || (previoustxoutindexSF == null);
        boolean inFieldsNull = (txinscriptlengthSF == null) || (txinscriptSF == null);
        boolean otherAttribNull = seqnoSF == null;
        if (prevFieldsNull || inFieldsNull || otherAttribNull) {
            LOG.warn("Invalid BitcoinTransactionInput detected at position " + i);
            return new ArrayList<>();
        }
        byte[] currentPrevTransactionHash = wboi.getPrimitiveJavaObject(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject, prevtransactionhashSF));
        long currentPreviousTxOutIndex = wloi.get(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject, previoustxoutindexSF));
        byte[] currentTxInScriptLength = wboi.getPrimitiveJavaObject(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject, txinscriptlengthSF));
        byte[] currentTxInScript = wboi.getPrimitiveJavaObject(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject, txinscriptSF));
        long currentSeqNo = wloi.get(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject, seqnoSF));
        BitcoinTransactionInput currentBitcoinTransactionInput = new BitcoinTransactionInput(currentPrevTransactionHash, currentPreviousTxOutIndex, currentTxInScriptLength, currentTxInScript, currentSeqNo);
        result.add(currentBitcoinTransactionInput);
    }
    return result;
}
Example 3: readListOfBitcoinScriptWitnessFromTable
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
/**
* Read list of Bitcoin ScriptWitness items from a table in Hive in any format (e.g. ORC, Parquet)
*
* @param loi ObjectInspector for processing the Object containing a list
* @param listOfScriptWitnessItemObject object containing the list of scriptwitnessitems of a Bitcoin Transaction
*
* @return a list of BitcoinScriptWitnessItem
*
*/
private List<BitcoinScriptWitnessItem> readListOfBitcoinScriptWitnessFromTable(ListObjectInspector loi, Object listOfScriptWitnessItemObject) {
    int listLength = loi.getListLength(listOfScriptWitnessItemObject);
    List<BitcoinScriptWitnessItem> result = new ArrayList<>(listLength);
    StructObjectInspector listOfScriptwitnessItemElementObjectInspector = (StructObjectInspector) loi.getListElementObjectInspector();
    for (int i = 0; i < listLength; i++) {
        Object currentlistofscriptwitnessitemObject = loi.getListElement(listOfScriptWitnessItemObject, i);
        StructField stackitemcounterSF = listOfScriptwitnessItemElementObjectInspector.getStructFieldRef("stackitemcounter");
        StructField scriptwitnesslistSF = listOfScriptwitnessItemElementObjectInspector.getStructFieldRef("scriptwitnesslist");
        boolean scriptwitnessitemNull = (stackitemcounterSF == null) || (scriptwitnesslistSF == null);
        if (scriptwitnessitemNull) {
            LOG.warn("Invalid BitcoinScriptWitnessItem detected at position " + i);
            return new ArrayList<>();
        }
        byte[] stackItemCounter = wboi.getPrimitiveJavaObject(listOfScriptwitnessItemElementObjectInspector.getStructFieldData(currentlistofscriptwitnessitemObject, stackitemcounterSF));
        // read the nested list field with the element inspector of the outer list
        // (the snippet as published used an undefined row-level inspector `soi` here)
        Object listofscriptwitnessObject = listOfScriptwitnessItemElementObjectInspector.getStructFieldData(currentlistofscriptwitnessitemObject, scriptwitnesslistSF);
        ListObjectInspector loiScriptWitness = (ListObjectInspector) scriptwitnesslistSF.getFieldObjectInspector();
        StructObjectInspector listOfScriptwitnessElementObjectInspector = (StructObjectInspector) loiScriptWitness.getListElementObjectInspector();
        int listWitnessLength = loiScriptWitness.getListLength(listofscriptwitnessObject);
        List<BitcoinScriptWitness> currentScriptWitnessList = new ArrayList<>(listWitnessLength);
        for (int j = 0; j < listWitnessLength; j++) {
            // the inner list has its own inspector; the outer `loi` must not be used here
            Object currentlistofscriptwitnessObject = loiScriptWitness.getListElement(listofscriptwitnessObject, j);
            StructField witnessscriptlengthSF = listOfScriptwitnessElementObjectInspector.getStructFieldRef("witnessscriptlength");
            StructField witnessscriptSF = listOfScriptwitnessElementObjectInspector.getStructFieldRef("witnessscript");
            boolean scriptwitnessNull = (witnessscriptlengthSF == null) || (witnessscriptSF == null);
            if (scriptwitnessNull) {
                LOG.warn("Invalid BitcoinScriptWitness detected at position " + j + " for BitcoinScriptWitnessItem " + i);
                return new ArrayList<>();
            }
            byte[] scriptWitnessLength = wboi.getPrimitiveJavaObject(listOfScriptwitnessElementObjectInspector.getStructFieldData(currentlistofscriptwitnessObject, witnessscriptlengthSF));
            byte[] scriptWitness = wboi.getPrimitiveJavaObject(listOfScriptwitnessElementObjectInspector.getStructFieldData(currentlistofscriptwitnessObject, witnessscriptSF));
            currentScriptWitnessList.add(new BitcoinScriptWitness(scriptWitnessLength, scriptWitness));
        }
        BitcoinScriptWitnessItem currentBitcoinScriptWitnessItem = new BitcoinScriptWitnessItem(stackItemCounter, currentScriptWitnessList);
        result.add(currentBitcoinScriptWitnessItem);
    }
    return result;
}
Example 4: init
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters) throws HiveException {
    assert (parameters.length == 3);
    super.init(mode, parameters);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// from original data
        this.wOI = HiveUtils.asDoubleCompatibleOI(parameters[0]);
        this.vOI = HiveUtils.asListOI(parameters[1]);
        this.vElemOI = HiveUtils.asDoubleCompatibleOI(vOI.getListElementObjectInspector());
        this.xOI = HiveUtils.asDoubleCompatibleOI(parameters[2]);
    } else {// from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) parameters[0];
        this.internalMergeOI = soi;
        this.retField = soi.getStructFieldRef("ret");
        this.sumVjXjField = soi.getStructFieldRef("sumVjXj");
        this.sumV2X2Field = soi.getStructFieldRef("sumV2X2");
        this.retOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        this.sumVjXjOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
        this.sumV2X2OI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    }
    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// terminatePartial
        outputOI = internalMergeOI();
    } else {
        outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    }
    return outputOI;
}
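The partial-aggregation branch above, like several later examples, relies on a helper internalMergeOI() that builds the struct ObjectInspector returned by terminatePartial. That helper is not reproduced on this page; the following is a plausible sketch inferred from the field names and inspectors used in the merge branch ("ret", "sumVjXj", "sumV2X2"), not the verbatim implementation.

// Hypothetical reconstruction of internalMergeOI(), matching the
// getStructFieldRef calls in the merge branch above.
private static StructObjectInspector internalMergeOI() {
    List<String> fieldNames = Arrays.asList("ret", "sumVjXj", "sumV2X2");
    List<ObjectInspector> fieldOIs = new ArrayList<>();
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);    // "ret"
    fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));            // "sumVjXj"
    fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));            // "sumV2X2"
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}

Whatever struct this helper declares must match, field for field, what terminatePartial actually emits; otherwise the getStructFieldRef calls in the merge branch will fail to resolve the names.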
Example 5: init
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters) throws HiveException {
    assert (parameters.length == 1);
    super.init(mode, parameters);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// from original data
        this.inputListOI = (ListObjectInspector) parameters[0];
        this.inputListElemOI = HiveUtils.asDoubleCompatibleOI(inputListOI.getListElementObjectInspector());
    } else {// from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) parameters[0];
        this.internalMergeOI = soi;
        this.sizeField = soi.getStructFieldRef("size");
        this.sumField = soi.getStructFieldRef("sum");
        this.countField = soi.getStructFieldRef("count");
        this.sizeOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        this.sumOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
        this.countOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    }
    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// terminatePartial
        outputOI = internalMergeOI();
    } else {// terminate
        outputOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
    }
    return outputOI;
}
Example 6: init
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] argOIs) throws HiveException {
    super.init(mode, argOIs);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// from original data
        this.inputKeyOI = HiveUtils.asPrimitiveObjectInspector(argOIs[0]);
        this.inputValueOI = argOIs[1];
        this.sizeOI = HiveUtils.asIntegerOI(argOIs[2]);
    } else {// from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) argOIs[0];
        this.internalMergeOI = soi;
        this.partialMapField = soi.getStructFieldRef("partialMap");
        // re-extract input key/value OIs
        MapObjectInspector partialMapOI = (MapObjectInspector) partialMapField.getFieldObjectInspector();
        this.inputKeyOI = HiveUtils.asPrimitiveObjectInspector(partialMapOI.getMapKeyObjectInspector());
        this.inputValueOI = partialMapOI.getMapValueObjectInspector();
        this.partialMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
            ObjectInspectorUtils.getStandardObjectInspector(inputKeyOI),
            ObjectInspectorUtils.getStandardObjectInspector(inputValueOI));
        this.sizeField = soi.getStructFieldRef("size");
        this.sizeOI = (PrimitiveObjectInspector) sizeField.getFieldObjectInspector();
    }
    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// terminatePartial
        outputOI = internalMergeOI(inputKeyOI, inputValueOI);
    } else {// terminate
        outputOI = ObjectInspectorFactory.getStandardMapObjectInspector(
            ObjectInspectorUtils.getStandardObjectInspector(inputKeyOI),
            ObjectInspectorUtils.getStandardObjectInspector(inputValueOI));
    }
    return outputOI;
}
Example 7: loadValues
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
private static void loadValues(Object2ObjectMap<Object, Object> map, File file,
        PrimitiveObjectInspector keyOI, PrimitiveObjectInspector valueOI)
        throws IOException, SerDeException {
    if (!file.exists()) {
        return;
    }
    if (!file.getName().endsWith(".crc")) {
        if (file.isDirectory()) {
            for (File f : file.listFiles()) {
                loadValues(map, f, keyOI, valueOI);
            }
        } else {
            LazySimpleSerDe serde = HiveUtils.getKeyValueLineSerde(keyOI, valueOI);
            StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
            StructField keyRef = lineOI.getStructFieldRef("key");
            StructField valueRef = lineOI.getStructFieldRef("value");
            PrimitiveObjectInspector keyRefOI = (PrimitiveObjectInspector) keyRef.getFieldObjectInspector();
            PrimitiveObjectInspector valueRefOI = (PrimitiveObjectInspector) valueRef.getFieldObjectInspector();
            BufferedReader reader = null;
            try {
                reader = HadoopUtils.getBufferedReader(file);
                String line;
                while ((line = reader.readLine()) != null) {
                    Text lineText = new Text(line);
                    Object lineObj = serde.deserialize(lineText);
                    List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
                    Object f0 = fields.get(0);
                    Object f1 = fields.get(1);
                    Object k = keyRefOI.getPrimitiveJavaObject(f0);
                    Object v = valueRefOI.getPrimitiveWritableObject(valueRefOI.copyObject(f1));
                    map.put(k, v);
                }
            } finally {
                IOUtils.closeQuietly(reader);
            }
        }
    }
}
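A caller of this helper supplies the two primitive ObjectInspectors that describe how the key/value lines were serialized. A minimal hypothetical invocation follows; the map implementation, file path, and string/double field types are assumptions for illustration, not taken from this page.

// Hypothetical usage; Object2ObjectOpenHashMap is fastutil's hash-based Object2ObjectMap.
Object2ObjectMap<Object, Object> map = new Object2ObjectOpenHashMap<>();
PrimitiveObjectInspector keyOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
PrimitiveObjectInspector valueOI = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
loadValues(map, new File("/tmp/kv_dump"), keyOI, valueOI);   // recurses into directories, skips .crc files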
Example 8: extractField
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
@Override
protected Object extractField(Object target) {
    List<String> flNames = fieldNames;
    for (int i = 0; i < flNames.size(); i++) {
        String fl = flNames.get(i);
        if (target instanceof HiveType) {
            HiveType type = (HiveType) target;
            ObjectInspector inspector = type.getObjectInspector();
            if (inspector instanceof StructObjectInspector) {
                StructObjectInspector soi = (StructObjectInspector) inspector;
                StructField field = soi.getStructFieldRef(fl);
                ObjectInspector foi = field.getFieldObjectInspector();
                Assert.isTrue(foi.getCategory() == ObjectInspector.Category.PRIMITIVE,
                        String.format("Field [%s] needs to be a primitive; found [%s]", fl, foi.getTypeName()));
                // expecting a Writable - simply do a toString
                target = soi.getStructFieldData(type.getObject(), field);
            } else {
                return FieldExtractor.NOT_FOUND;
            }
        } else {
            return FieldExtractor.NOT_FOUND;
        }
    }
    if (target == null || target instanceof NullWritable) {
        return StringUtils.EMPTY;
    }
    return target.toString();
}
Example 9: loadPredictionModel
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
private long loadPredictionModel(Map<Object, PredictionModel> label2model, File file,
        PrimitiveObjectInspector labelOI, PrimitiveObjectInspector featureOI,
        WritableFloatObjectInspector weightOI) throws IOException, SerDeException {
    long count = 0L;
    if (!file.exists()) {
        return count;
    }
    if (!file.getName().endsWith(".crc")) {
        if (file.isDirectory()) {
            for (File f : file.listFiles()) {
                count += loadPredictionModel(label2model, f, labelOI, featureOI, weightOI);
            }
        } else {
            LazySimpleSerDe serde = HiveUtils.getLineSerde(labelOI, featureOI, weightOI);
            StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
            StructField c1ref = lineOI.getStructFieldRef("c1");
            StructField c2ref = lineOI.getStructFieldRef("c2");
            StructField c3ref = lineOI.getStructFieldRef("c3");
            PrimitiveObjectInspector c1refOI = (PrimitiveObjectInspector) c1ref.getFieldObjectInspector();
            PrimitiveObjectInspector c2refOI = (PrimitiveObjectInspector) c2ref.getFieldObjectInspector();
            FloatObjectInspector c3refOI = (FloatObjectInspector) c3ref.getFieldObjectInspector();
            BufferedReader reader = null;
            try {
                reader = HadoopUtils.getBufferedReader(file);
                String line;
                while ((line = reader.readLine()) != null) {
                    count++;
                    Text lineText = new Text(line);
                    Object lineObj = serde.deserialize(lineText);
                    List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
                    Object f0 = fields.get(0);
                    Object f1 = fields.get(1);
                    Object f2 = fields.get(2);
                    if (f0 == null || f1 == null || f2 == null) {
                        continue; // avoid the case that key or value is null
                    }
                    Object label = c1refOI.getPrimitiveWritableObject(c1refOI.copyObject(f0));
                    PredictionModel model = label2model.get(label);
                    if (model == null) {
                        model = createModel();
                        label2model.put(label, model);
                    }
                    Object k = c2refOI.getPrimitiveWritableObject(c2refOI.copyObject(f1));
                    float v = c3refOI.get(f2);
                    model.set(k, new WeightValue(v, false));
                }
            } finally {
                IOUtils.closeQuietly(reader);
            }
        }
    }
    return count;
}
Example 10: init
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] OIs) throws HiveException {
    super.init(mode, OIs);
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
        weightOI = HiveUtils.asDoubleCompatibleOI(OIs[0]);
        // set const values
        nBins = HiveUtils.getConstInt(OIs[1]);
        if (OIs.length == 3) {
            autoShrink = HiveUtils.getConstBoolean(OIs[2]);
        }
        // check value of `num_of_bins`
        if (nBins < 2) {
            throw new UDFArgumentException(
                "Only greater than or equal to 2 is accepted but " + nBins
                        + " was passed as `num_of_bins`.");
        }
        quantiles = getQuantiles();
    } else {
        structOI = (StructObjectInspector) OIs[0];
        autoShrinkField = structOI.getStructFieldRef("autoShrink");
        histogramField = structOI.getStructFieldRef("histogram");
        quantilesField = structOI.getStructFieldRef("quantiles");
        autoShrinkOI = (WritableBooleanObjectInspector) autoShrinkField.getFieldObjectInspector();
        histogramOI = (StandardListObjectInspector) histogramField.getFieldObjectInspector();
        quantilesOI = (StandardListObjectInspector) quantilesField.getFieldObjectInspector();
        histogramElOI = (WritableDoubleObjectInspector) histogramOI.getListElementObjectInspector();
        quantileOI = (WritableDoubleObjectInspector) quantilesOI.getListElementObjectInspector();
    }
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
        final ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
        fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
        fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
        return ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("autoShrink", "histogram", "quantiles"), fieldOIs);
    } else {
        return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    }
}
Example 11: init
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] argOIs) throws HiveException {
    super.init(mode, argOIs);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// from original data
        // this flag will be used in `processOptions` and `iterate` (= when Mode.PARTIAL1 or Mode.COMPLETE)
        this.sortByKey = (argOIs.length == 2 && !HiveUtils.isConstString(argOIs[1]))
                || (argOIs.length == 3 && HiveUtils.isConstString(argOIs[2]));
        if (sortByKey) {
            this.valueOI = argOIs[0];
            this.keyOI = HiveUtils.asPrimitiveObjectInspector(argOIs[1]);
        } else {
            // sort values by value itself
            this.valueOI = HiveUtils.asPrimitiveObjectInspector(argOIs[0]);
            this.keyOI = HiveUtils.asPrimitiveObjectInspector(argOIs[0]);
        }
        processOptions(argOIs);
    } else {// from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) argOIs[0];
        this.internalMergeOI = soi;
        // re-extract input value OI
        this.valueListField = soi.getStructFieldRef("valueList");
        StandardListObjectInspector valueListOI = (StandardListObjectInspector) valueListField.getFieldObjectInspector();
        this.valueOI = valueListOI.getListElementObjectInspector();
        this.valueListOI = ObjectInspectorFactory.getStandardListObjectInspector(valueOI);
        // re-extract input key OI
        this.keyListField = soi.getStructFieldRef("keyList");
        StandardListObjectInspector keyListOI = (StandardListObjectInspector) keyListField.getFieldObjectInspector();
        this.keyOI = HiveUtils.asPrimitiveObjectInspector(keyListOI.getListElementObjectInspector());
        this.keyListOI = ObjectInspectorFactory.getStandardListObjectInspector(keyOI);
        this.sizeField = soi.getStructFieldRef("size");
        this.reverseOrderField = soi.getStructFieldRef("reverseOrder");
    }
    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// terminatePartial
        outputOI = internalMergeOI(valueOI, keyOI);
    } else {// terminate
        outputOI = ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorUtils.getStandardObjectInspector(valueOI));
    }
    return outputOI;
}
Example 12: init
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters) throws HiveException {
    assert (parameters.length == 1 || parameters.length == 4 || parameters.length == 5);
    super.init(mode, parameters);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// from original data
        processOptions(parameters);
        this.wordOI = HiveUtils.asStringOI(parameters[0]);
        this.valueOI = HiveUtils.asDoubleCompatibleOI(parameters[1]);
        this.labelOI = HiveUtils.asIntegerOI(parameters[2]);
        this.probOI = HiveUtils.asDoubleCompatibleOI(parameters[3]);
    } else {// from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) parameters[0];
        this.internalMergeOI = soi;
        this.wcListField = soi.getStructFieldRef("wcList");
        this.probMapField = soi.getStructFieldRef("probMap");
        this.topicsOptionField = soi.getStructFieldRef("topics");
        this.alphaOptionField = soi.getStructFieldRef("alpha");
        this.deltaOptionField = soi.getStructFieldRef("delta");
        this.wcListElemOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        this.wcListOI = ObjectInspectorFactory.getStandardListObjectInspector(wcListElemOI);
        this.probMapKeyOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        this.probMapValueElemOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        this.probMapValueOI = ObjectInspectorFactory.getStandardListObjectInspector(probMapValueElemOI);
        this.probMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(probMapKeyOI, probMapValueOI);
    }
    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// terminatePartial
        outputOI = internalMergeOI();
    } else {
        final ArrayList<String> fieldNames = new ArrayList<String>();
        final ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
        fieldNames.add("label");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
        fieldNames.add("probability");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
        outputOI = ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs));
    }
    return outputOI;
}
Example 13: init
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters) throws HiveException {
    assert (parameters.length == 1 || parameters.length == 4 || parameters.length == 5);
    super.init(mode, parameters);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// from original data
        processOptions(parameters);
        this.wordOI = HiveUtils.asStringOI(parameters[0]);
        this.valueOI = HiveUtils.asDoubleCompatibleOI(parameters[1]);
        this.labelOI = HiveUtils.asIntegerOI(parameters[2]);
        this.lambdaOI = HiveUtils.asDoubleCompatibleOI(parameters[3]);
    } else {// from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) parameters[0];
        this.internalMergeOI = soi;
        this.wcListField = soi.getStructFieldRef("wcList");
        this.lambdaMapField = soi.getStructFieldRef("lambdaMap");
        this.topicsOptionField = soi.getStructFieldRef("topics");
        this.alphaOptionField = soi.getStructFieldRef("alpha");
        this.deltaOptionField = soi.getStructFieldRef("delta");
        this.wcListElemOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        this.wcListOI = ObjectInspectorFactory.getStandardListObjectInspector(wcListElemOI);
        this.lambdaMapKeyOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        this.lambdaMapValueElemOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        this.lambdaMapValueOI = ObjectInspectorFactory.getStandardListObjectInspector(lambdaMapValueElemOI);
        this.lambdaMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(lambdaMapKeyOI, lambdaMapValueOI);
    }
    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// terminatePartial
        outputOI = internalMergeOI();
    } else {
        final ArrayList<String> fieldNames = new ArrayList<String>();
        final ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
        fieldNames.add("label");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
        fieldNames.add("probability");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
        outputOI = ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs));
    }
    return outputOI;
}