本文整理汇总了Java中org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils类的典型用法代码示例。如果您正苦于以下问题:Java ObjectInspectorUtils类的具体用法?Java ObjectInspectorUtils怎么用?Java ObjectInspectorUtils使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
ObjectInspectorUtils类属于org.apache.hadoop.hive.serde2.objectinspector包,在下文中一共展示了ObjectInspectorUtils类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: initReduceSide
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Initializes the reduce-side object inspectors from the incoming struct OI.
 * Captures one input OI per struct field and a standardized counterpart for output.
 *
 * @param inputStructOI struct OI describing the reduce-side input row
 * @return a standard (Java-object) struct OI mirroring {@code inputStructOI}
 */
private ObjectInspector initReduceSide(StructObjectInspector inputStructOI)
        throws HiveException {
    this.inputStructOI = inputStructOI;
    final List<? extends StructField> fieldRefs = inputStructOI.getAllStructFieldRefs();
    final int numFields = fieldRefs.size();
    this.inputOIs = new ObjectInspector[numFields];
    this.outputOIs = new ObjectInspector[numFields];
    int idx = 0;
    for (StructField fieldRef : fieldRefs) {
        final ObjectInspector fieldOI = fieldRef.getFieldObjectInspector();
        inputOIs[idx] = fieldOI;
        // Standardize each field OI so outputs use plain Java object representations.
        outputOIs[idx] = ObjectInspectorUtils.getStandardObjectInspector(fieldOI);
        idx++;
    }
    return ObjectInspectorUtils.getStandardObjectInspector(inputStructOI);
}
示例2: iterate
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Accumulates one input row into the funnel aggregation buffer.
 * parameters[0] = action, parameters[1] = timestamp, parameters[2..] = funnel steps.
 */
@Override
public void iterate(AggregationBuffer aggregate, Object[] parameters) throws HiveException {
    final FunnelAggregateBuffer funnelAggregate = (FunnelAggregateBuffer) aggregate;
    // Record the funnel step definitions once, on the first row seen.
    if (funnelAggregate.funnelSteps.isEmpty()) {
        addFunnelSteps(funnelAggregate, Arrays.copyOfRange(parameters, 2, parameters.length));
    }
    final Object action = parameters[0];
    final Object timestamp = parameters[1];
    if (action == null || timestamp == null) {
        return; // rows missing either column contribute nothing
    }
    // Deep-copy both values out of Hive's reused row objects.
    final Object actionValue =
            ObjectInspectorUtils.copyToStandardObject(action, actionObjectInspector);
    final Object timestampValue =
            ObjectInspectorUtils.copyToStandardObject(timestamp, timestampObjectInspector);
    // Keep only actions that belong to one of the configured funnel steps.
    if (actionValue != null && timestampValue != null
            && funnelAggregate.funnelSet.contains(actionValue)) {
        funnelAggregate.actions.add(actionValue);
        funnelAggregate.timestamps.add(timestampValue);
    }
}
示例3: testTailKWithKey
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Verifies tail-k behavior: with the constant option "-k -2" the evaluator
 * keeps the two rows with the smallest double keys, in ascending key order.
 */
@Test
public void testTailKWithKey() throws Exception {
    final ObjectInspector[] inputOIs = new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
            // constant option string: negative k selects the tail
            ObjectInspectorUtils.getConstantObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-k -2")};
    final String[] values = {"banana", "apple", "candy"};
    final double[] keys = {0.7, 0.5, 0.8};
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int i = 0; i < values.length; i++) {
        evaluator.iterate(agg, new Object[] {values[i], keys[i]});
    }
    final List<Object> res = evaluator.terminate(agg);
    // Smallest keys are 0.5 ("apple") and 0.7 ("banana").
    Assert.assertEquals(2, res.size());
    Assert.assertEquals("apple", res.get(0));
    Assert.assertEquals("banana", res.get(1));
}
示例4: getConstStringArray
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Extracts a constant LIST argument as a String array.
 *
 * @param oi inspector of the argument; must be a constant list
 * @return the list elements stringified via {@code toString()} (null elements
 *         stay null), or {@code null} when the constant value itself is null
 * @throws UDFArgumentException if the argument is not a constant or not a list
 */
@Nullable
public static String[] getConstStringArray(@Nonnull final ObjectInspector oi)
        throws UDFArgumentException {
    // The argument must be foldable to a constant at compile time.
    if (!ObjectInspectorUtils.isConstantObjectInspector(oi)) {
        throw new UDFArgumentException("argument must be a constant value: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    final ConstantObjectInspector constOI = (ConstantObjectInspector) oi;
    // ...and that constant must be an array (Hive LIST category).
    if (constOI.getCategory() != Category.LIST) {
        throw new UDFArgumentException("argument must be an array: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    final List<?> values = (List<?>) constOI.getWritableConstantValue();
    if (values == null) {
        return null;
    }
    final String[] result = new String[values.size()];
    int i = 0;
    for (Object value : values) {
        result[i++] = (value == null) ? null : value.toString();
    }
    return result;
}
示例5: parseFeatures
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Converts a raw Hive feature list into an array of {@link FeatureValue}s.
 * Null elements are skipped (the corresponding slot stays null).
 *
 * @param features raw list elements as provided by Hive
 * @return the parsed feature vector, or {@code null} for an empty input list
 */
@Nullable
FeatureValue[] parseFeatures(@Nonnull final List<?> features) {
    final int numFeatures = features.size();
    if (numFeatures == 0) {
        return null; // empty feature list maps to null, not an empty array
    }
    final ObjectInspector elemOI = featureListOI.getListElementObjectInspector();
    final FeatureValue[] featureVector = new FeatureValue[numFeatures];
    for (int i = 0; i < numFeatures; i++) {
        final Object feature = features.get(i);
        if (feature == null) {
            continue;
        }
        // Either parse "name:value" style features, or treat the copied element
        // as a feature key with an implicit weight of 1.0.
        featureVector[i] = parseFeature
                ? FeatureValue.parse(feature)
                : new FeatureValue(
                    ObjectInspectorUtils.copyToStandardObject(feature, elemOI), 1.f);
    }
    return featureVector;
}
示例6: getReturnOI
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Builds the output struct OI for the trainer: (label, feature, weight[, covar]).
 *
 * @param labelRawOI      raw label OI; standardized for the output
 * @param featureOutputOI OI used as-is for the feature column
 * @return a standard struct OI with float weight (and covariance when enabled)
 */
@Nonnull
protected StructObjectInspector getReturnOI(@Nonnull ObjectInspector labelRawOI,
        @Nonnull ObjectInspector featureOutputOI) {
    final ArrayList<String> fieldNames = new ArrayList<String>(4);
    final ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(4);
    fieldNames.add("label");
    fieldOIs.add(ObjectInspectorUtils.getStandardObjectInspector(labelRawOI));
    fieldNames.add("feature");
    fieldOIs.add(featureOutputOI);
    fieldNames.add("weight");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
    // Variance-tracking trainers emit an extra covariance column.
    if (useCovariance()) {
        fieldNames.add("covar");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
示例7: parseFeatures
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Parses the raw Hive feature list into {@link FeatureValue} objects.
 * Slots for null input elements remain null in the returned array.
 *
 * @param features raw list elements from Hive
 * @return the feature vector, or {@code null} when the list is empty
 */
@Nullable
protected final FeatureValue[] parseFeatures(@Nonnull final List<?> features) {
    final int size = features.size();
    if (size == 0) {
        return null; // by convention an empty list yields null
    }
    final ObjectInspector elementInspector = featureListOI.getListElementObjectInspector();
    final FeatureValue[] result = new FeatureValue[size];
    for (int i = 0; i < size; i++) {
        final Object rawFeature = features.get(i);
        if (rawFeature == null) {
            continue;
        }
        if (parseFeature) {
            // "name:value" style textual features
            result[i] = FeatureValue.parse(rawFeature);
        } else {
            // Copy the element out of Hive's reused objects; weight defaults to 1.0.
            final Object key = ObjectInspectorUtils.copyToStandardObject(rawFeature, elementInspector);
            result[i] = new FeatureValue(key, 1.f);
        }
    }
    return result;
}
示例8: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Produces a constant comma-separated type signature for the arguments:
 * 'Q' (quantitative) for numeric OIs, 'C' (categorical) otherwise.
 * E.g. three args (int, string, double) yield the constant "Q,C,Q".
 */
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    final StringBuilder sig = new StringBuilder(128);
    for (int i = 0; i < argOIs.length; i++) {
        if (i > 0) {
            sig.append(','); // separator before every element after the first
        }
        sig.append(HiveUtils.isNumberOI(argOIs[i]) ? 'Q' : 'C');
    }
    // The signature is fully known at query-compile time, so expose it as a
    // constant string OI.
    return ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, sig.toString());
}
示例9: internalMergeOutputOI
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Builds the partial-aggregation output OI: a struct with one list column per
 * input OI (fields named "f0", "f1", ...), each list holding writable-standardized
 * elements.
 *
 * @param inputOIs primitive OIs of the original inputs; must not be null
 * @return struct OI of writable-element lists
 */
@Nonnull
private static StructObjectInspector internalMergeOutputOI(
        @CheckForNull PrimitiveObjectInspector[] inputOIs) throws UDFArgumentException {
    Preconditions.checkNotNull(inputOIs);
    final int numColumns = inputOIs.length;
    final List<String> fieldNames = new ArrayList<String>(numColumns);
    final List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(numColumns);
    for (int i = 0; i < numColumns; i++) {
        fieldNames.add("f" + i);
        // WRITABLE copy option keeps partials in Hadoop Writable form.
        final ObjectInspector elementOI = ObjectInspectorUtils.getStandardObjectInspector(
            inputOIs[i], ObjectInspectorCopyOption.WRITABLE);
        fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(elementOI));
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
示例10: terminalOutputOI
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Builds the terminal (final) output OI: a struct with one map column per input
 * OI, fields named "f1", "f2", ... (1-based). Each map goes from a
 * writable-standardized key to a Java int count.
 *
 * @param inputOIs primitive OIs of the original inputs; must be non-null, non-empty
 * @return struct OI of key-to-int maps
 */
@Nonnull
private static StructObjectInspector terminalOutputOI(
        @CheckForNull PrimitiveObjectInspector[] inputOIs) {
    Preconditions.checkNotNull(inputOIs);
    Preconditions.checkArgument(inputOIs.length >= 1, inputOIs.length);
    final int numColumns = inputOIs.length;
    final List<String> fieldNames = new ArrayList<>(numColumns);
    final List<ObjectInspector> fieldOIs = new ArrayList<>(numColumns);
    for (int i = 0; i < numColumns; i++) {
        fieldNames.add("f" + (i + 1)); // 1-based field naming
        final ObjectInspector keyOI = ObjectInspectorUtils.getStandardObjectInspector(
            inputOIs[i], ObjectInspectorCopyOption.WRITABLE);
        fieldOIs.add(ObjectInspectorFactory.getStandardMapObjectInspector(
            keyOI, PrimitiveObjectInspectorFactory.javaIntObjectInspector));
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
示例11: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Concatenates the elements of all argument arrays into {@code ret}.
 * Null array arguments are skipped. Elements are deep-copied to standard Java
 * objects so they do not alias Hive's reused internal buffers.
 *
 * @param arguments deferred array arguments
 * @return {@code ret}, cleared and refilled with the concatenated elements
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    ret.clear();
    for (int i = 0; i < arguments.length; i++) {
        final Object arrayObject = arguments[i].get();
        if (arrayObject == null) {
            continue; // null arrays contribute no elements
        }
        final ListObjectInspector arrayOI = (ListObjectInspector) argumentOIs[i];
        // Hoisted out of the inner loop: the element OI is invariant across
        // the elements of a single array (was fetched once per element before).
        final ObjectInspector elemOI = arrayOI.getListElementObjectInspector();
        final int arrayLength = arrayOI.getListLength(arrayObject);
        for (int j = 0; j < arrayLength; j++) {
            Object rawObj = arrayOI.getListElement(arrayObject, j);
            ret.add(ObjectInspectorUtils.copyToStandardObject(rawObj, elemOI));
        }
    }
    return ret;
}
示例12: init
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Sets up the evaluator's object inspectors for the given UDAF mode and
 * returns the OI describing this stage's output (always a standard list).
 */
@Override
public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
    super.init(m, parameters);
    if (m == Mode.PARTIAL1) {
        // Map side: raw input values are collected into a standard list.
        inputOI = parameters[0];
        return ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorUtils.getStandardObjectInspector(inputOI));
    }
    if (parameters[0] instanceof StandardListObjectInspector) {
        // Merging partial lists: element OI comes from the partial list OI.
        internalMergeOI = (StandardListObjectInspector) parameters[0];
        inputOI = internalMergeOI.getListElementObjectInspector();
        loi = (StandardListObjectInspector) ObjectInspectorUtils.getStandardObjectInspector(internalMergeOI);
        return loi;
    }
    // Non-list input in a non-PARTIAL1 mode — presumably COMPLETE over raw
    // data (TODO confirm against the UDAF's call sites).
    inputOI = ObjectInspectorUtils.getStandardObjectInspector(parameters[0]);
    return (StandardListObjectInspector) ObjectInspectorFactory.getStandardListObjectInspector(inputOI);
}
示例13: internalMergeOI
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Builds the partial-aggregation struct OI: a "partialMap" column holding the
 * standardized key/value map plus a writable-int "size" column.
 *
 * @param keyOI   primitive OI of the map keys
 * @param valueOI OI of the map values
 * @return struct OI of (partialMap, size)
 */
@Nonnull
private static StructObjectInspector internalMergeOI(
        @Nonnull PrimitiveObjectInspector keyOI, @Nonnull ObjectInspector valueOI) {
    final List<String> fieldNames = new ArrayList<String>();
    final List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("partialMap");
    final ObjectInspector stdKeyOI = ObjectInspectorUtils.getStandardObjectInspector(keyOI);
    final ObjectInspector stdValueOI = ObjectInspectorUtils.getStandardObjectInspector(valueOI);
    fieldOIs.add(ObjectInspectorFactory.getStandardMapObjectInspector(stdKeyOI, stdValueOI));
    fieldNames.add("size");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
示例14: iterate
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Accumulates one (key, value, size) row into the map aggregation buffer.
 * Rows with a null key are ignored; the buffer is lazily initialized from the
 * size argument on first use.
 */
@Override
public void iterate(@SuppressWarnings("deprecation") AggregationBuffer agg,
        Object[] parameters) throws HiveException {
    assert (parameters.length == 3);
    if (parameters[0] == null) {
        return; // null keys contribute nothing
    }
    // Deep-copy key and value out of Hive's reused row objects.
    final Object key = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputKeyOI);
    final Object value = ObjectInspectorUtils.copyToStandardObject(parameters[1], inputValueOI);
    // size could be negative for tail-k; the buffer only needs its magnitude
    final int size = Math.abs(HiveUtils.getInt(parameters[2], sizeOI));
    final MapAggregationBuffer myagg = (MapAggregationBuffer) agg;
    if (myagg.container == null) {
        initBuffer(myagg, size);
    }
    myagg.container.put(key, value);
}
示例15: init
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; //导入依赖的package包/类
/**
 * Initializes the evaluator's key/value inspectors for the given mode and
 * returns the output OI: a standard map from standardized key to value.
 */
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] argOIs) throws HiveException {
    super.init(mode, argOIs);
    final boolean fromOriginalData = (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE);
    if (fromOriginalData) {
        // Raw input rows: argOIs = (key, value, ...)
        inputKeyOI = HiveUtils.asPrimitiveObjectInspector(argOIs[0]);
        inputValueOI = argOIs[1];
    } else {
        // Partial aggregation: a single map produced by an earlier stage.
        internalMergeOI = (StandardMapObjectInspector) argOIs[0];
        inputKeyOI = HiveUtils.asPrimitiveObjectInspector(internalMergeOI.getMapKeyObjectInspector());
        inputValueOI = internalMergeOI.getMapValueObjectInspector();
    }
    // Both partial and final results are emitted as a standard map OI.
    return ObjectInspectorFactory.getStandardMapObjectInspector(
        ObjectInspectorUtils.getStandardObjectInspector(inputKeyOI),
        ObjectInspectorUtils.getStandardObjectInspector(inputValueOI));
}