This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.getStandardObjectInspector. If you are wondering what ObjectInspectorUtils.getStandardObjectInspector does, how to call it, or want concrete examples of it in use, the curated code samples below may help. You can also explore further usage examples of its enclosing class, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.
The following presents 12 code examples of the ObjectInspectorUtils.getStandardObjectInspector method, sorted by popularity by default.
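Before the examples, here is a minimal, self-contained sketch of the most common pattern: calling getStandardObjectInspector inside a GenericUDF's initialize() and copying values to the matching standard representation in evaluate(). The class EchoUDF and its behavior are hypothetical illustrations for this article, not taken from the examples below.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;

// Hypothetical UDF that echoes its single argument unchanged.
public final class EchoUDF extends GenericUDF {

    private ObjectInspector inputOI;

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 1) {
            throw new UDFArgumentLengthException("echo() takes exactly one argument");
        }
        this.inputOI = arguments[0];
        // Return a standard ObjectInspector so downstream operators see the
        // canonical (writable-backed) representation of the argument's type.
        return ObjectInspectorUtils.getStandardObjectInspector(inputOI,
            ObjectInspectorCopyOption.WRITABLE);
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        // Copy the possibly lazy input object into the standard representation
        // matching the ObjectInspector returned by initialize().
        return ObjectInspectorUtils.copyToStandardObject(arguments[0].get(), inputOI,
            ObjectInspectorCopyOption.WRITABLE);
    }

    @Override
    public String getDisplayString(String[] children) {
        return "echo(" + children[0] + ")";
    }
}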
Example 1: initReduceSide
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
private ObjectInspector initReduceSide(StructObjectInspector inputStructOI)
        throws HiveException {
    List<? extends StructField> fields = inputStructOI.getAllStructFieldRefs();
    int length = fields.size();
    this.inputStructOI = inputStructOI;
    this.inputOIs = new ObjectInspector[length];
    this.outputOIs = new ObjectInspector[length];
    for (int i = 0; i < length; i++) {
        StructField field = fields.get(i);
        ObjectInspector oi = field.getFieldObjectInspector();
        inputOIs[i] = oi;
        // Materialize a standard (non-lazy) inspector for each struct field
        outputOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(oi);
    }
    return ObjectInspectorUtils.getStandardObjectInspector(inputStructOI);
}
Example 2: getReturnOI
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Nonnull
protected StructObjectInspector getReturnOI(@Nonnull ObjectInspector labelRawOI,
        @Nonnull ObjectInspector featureOutputOI) {
    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("label");
    // Standardize the label inspector for the output struct
    ObjectInspector labelOI = ObjectInspectorUtils.getStandardObjectInspector(labelRawOI);
    fieldOIs.add(labelOI);
    fieldNames.add("feature");
    fieldOIs.add(featureOutputOI);
    fieldNames.add("weight");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
    if (useCovariance()) {
        fieldNames.add("covar");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
Example 3: internalMergeOutputOI
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Nonnull
private static StructObjectInspector internalMergeOutputOI(
        @CheckForNull PrimitiveObjectInspector[] inputOIs) throws UDFArgumentException {
    Preconditions.checkNotNull(inputOIs);
    final int numOIs = inputOIs.length;
    final List<String> fieldNames = new ArrayList<String>(numOIs);
    final List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(numOIs);
    for (int i = 0; i < numOIs; i++) {
        fieldNames.add("f" + String.valueOf(i));
        // Use the WRITABLE copy option so list elements use writable objects
        ObjectInspector elemOI = ObjectInspectorUtils.getStandardObjectInspector(
            inputOIs[i], ObjectInspectorCopyOption.WRITABLE);
        ListObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(elemOI);
        fieldOIs.add(listOI);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
Example 4: terminalOutputOI
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Nonnull
private static StructObjectInspector terminalOutputOI(
        @CheckForNull PrimitiveObjectInspector[] inputOIs) {
    Preconditions.checkNotNull(inputOIs);
    Preconditions.checkArgument(inputOIs.length >= 1, inputOIs.length);
    final List<String> fieldNames = new ArrayList<>(inputOIs.length);
    final List<ObjectInspector> fieldOIs = new ArrayList<>(inputOIs.length);
    for (int i = 0; i < inputOIs.length; i++) {
        fieldNames.add("f" + String.valueOf(i + 1));
        // Map keys use the standard writable inspector; values are plain Java ints
        ObjectInspector keyOI = ObjectInspectorUtils.getStandardObjectInspector(
            inputOIs[i], ObjectInspectorCopyOption.WRITABLE);
        MapObjectInspector mapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
            keyOI, PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        fieldOIs.add(mapOI);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
Example 5: init
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Override
public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
    super.init(m, parameters);
    if (m == Mode.PARTIAL1) {
        // Map side: the partial result is a list of standardized input values
        inputOI = parameters[0];
        return ObjectInspectorFactory.getStandardListObjectInspector(
            ObjectInspectorUtils.getStandardObjectInspector(inputOI));
    } else {
        if (!(parameters[0] instanceof StandardListObjectInspector)) {
            // COMPLETE mode: the original input is seen, not a partial list
            inputOI = ObjectInspectorUtils.getStandardObjectInspector(parameters[0]);
            return (StandardListObjectInspector) ObjectInspectorFactory.getStandardListObjectInspector(inputOI);
        } else {
            // PARTIAL2/FINAL mode: merge partial lists produced on the map side
            internalMergeOI = (StandardListObjectInspector) parameters[0];
            inputOI = internalMergeOI.getListElementObjectInspector();
            loi = (StandardListObjectInspector) ObjectInspectorUtils.getStandardObjectInspector(internalMergeOI);
            return loi;
        }
    }
}
Example 6: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException(
            "map_tail_n only takes 2 arguments: map<object, object>, int");
    }
    if (!(arguments[0] instanceof MapObjectInspector)) {
        throw new UDFArgumentException("The first argument must be a map");
    }
    this.mapObjectInspector = (MapObjectInspector) arguments[0];
    if (!(arguments[1] instanceof IntObjectInspector)) {
        throw new UDFArgumentException("The second argument must be an int");
    }
    this.intObjectInspector = (IntObjectInspector) arguments[1];
    // Keys are standardized; values keep their original inspector
    ObjectInspector keyOI = ObjectInspectorUtils.getStandardObjectInspector(
        mapObjectInspector.getMapKeyObjectInspector());
    ObjectInspector valueOI = mapObjectInspector.getMapValueObjectInspector();
    return ObjectInspectorFactory.getStandardMapObjectInspector(keyOI, valueOI);
}
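The returned map inspector pairs a standardized key inspector with the original value inspector. A hypothetical helper such as the one below (not part of the original UDF; the method name copyWithStandardKeys and the use of LinkedHashMap are assumptions for illustration) shows why the keys are standardized: keys read through a possibly lazy MapObjectInspector are copied into the standard representation before being reused as keys of a new java.util.Map.

// Hypothetical helper, assuming the mapObjectInspector field from the example above
// and imports of java.util.LinkedHashMap, java.util.Map and ObjectInspectorUtils.
private Map<Object, Object> copyWithStandardKeys(Object mapData) {
    final Map<Object, Object> result = new LinkedHashMap<Object, Object>();
    Map<?, ?> input = mapObjectInspector.getMap(mapData);
    if (input == null) {
        return result;
    }
    for (Map.Entry<?, ?> entry : input.entrySet()) {
        // Copy each key into the standard representation declared by the key OI
        // returned from initialize(); values are passed through unchanged.
        Object stdKey = ObjectInspectorUtils.copyToStandardObject(entry.getKey(),
            mapObjectInspector.getMapKeyObjectInspector());
        result.put(stdKey, entry.getValue());
    }
    return result;
}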
Example 7: initMapSide
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
private ObjectInspector initMapSide(ObjectInspector[] parameters) throws HiveException {
    int length = parameters.length;
    this.inputOIs = parameters;
    this.outputOIs = new ObjectInspector[length];
    List<String> fieldNames = new ArrayList<String>(length);
    // Arrays.asList returns a view backed by outputOIs, so the entries filled
    // in the loop below are visible through fieldOIs
    List<ObjectInspector> fieldOIs = Arrays.asList(outputOIs);
    for (int i = 0; i < length; i++) {
        fieldNames.add("col" + i);
        outputOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(parameters[i]);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
Example 8: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Override
public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    final int numArgs = argOIs.length;
    if (numArgs < 2) {
        throw new UDFArgumentException(
            "_FUNC_(const int xtimes, [, const string options], *) takes at least two arguments");
    }
    // xtimes
    int xtimes = HiveUtils.getAsConstInt(argOIs[0]);
    if (xtimes < 1) {
        throw new UDFArgumentException("Illegal xtimes value: " + xtimes);
    }
    this.argOIs = argOIs;
    processOptions(argOIs);
    this.amplifier = (seed == -1L) ? new RandomizedAmplifier<Object[]>(numBuffers, xtimes)
            : new RandomizedAmplifier<Object[]>(numBuffers, xtimes, seed);
    amplifier.setDropoutListener(this);
    final List<String> fieldNames = new ArrayList<String>();
    final List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    // Skip the leading constant arguments (xtimes and, if present, the options string)
    final int argStartIndex = hasOption ? 2 : 1;
    for (int i = argStartIndex; i < numArgs; i++) {
        fieldNames.add("c" + (i - 1));
        ObjectInspector rawOI = argOIs[i];
        ObjectInspector retOI = ObjectInspectorUtils.getStandardObjectInspector(rawOI,
            ObjectInspectorCopyOption.DEFAULT);
        fieldOIs.add(retOI);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
Example 9: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Override
public ObjectInspector initialize(@Nonnull ObjectInspector[] argOIs)
        throws UDFArgumentException {
    final int argLength = argOIs.length;
    if (argLength < 2) {
        throw new UDFArgumentLengthException("Expecting at least two arrays as arguments: "
                + argLength);
    }
    ListObjectInspector[] argListOIs = new ListObjectInspector[argLength];
    ListObjectInspector arg0ListOI = HiveUtils.asListOI(argOIs[0]);
    ObjectInspector arg0ElemOI = arg0ListOI.getListElementObjectInspector();
    argListOIs[0] = arg0ListOI;
    for (int i = 1; i < argLength; i++) {
        ListObjectInspector listOI = HiveUtils.asListOI(argOIs[i]);
        // All arrays must share the same element type as the first argument
        if (!ObjectInspectorUtils.compareTypes(listOI.getListElementObjectInspector(),
                arg0ElemOI)) {
            throw new UDFArgumentException("Array types does not match: "
                    + arg0ElemOI.getTypeName() + " != "
                    + listOI.getListElementObjectInspector().getTypeName());
        }
        argListOIs[i] = listOI;
    }
    this.argListOIs = argListOIs;
    this.result = new ArrayList<Object>();
    return ObjectInspectorUtils.getStandardObjectInspector(arg0ListOI);
}
Example 10: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1) {
        throw new UDFArgumentLengthException(
            "_FUNC_(array1, array2) needs at least 1 argument.");
    }
    final int nargs = arguments.length;
    for (int i = 0; i < nargs; i++) {
        switch (arguments[i].getCategory()) {
            case LIST:
                if (((ListObjectInspector) (arguments[i])).getListElementObjectInspector()
                        .getCategory()
                        .equals(Category.PRIMITIVE)) {
                    break;
                }
                // fall through: a list of non-primitive elements is rejected
            default:
                throw new UDFArgumentTypeException(0, "Argument " + i
                        + " of function CONCAT_ARRAY must be " + LIST_TYPE_NAME + "<"
                        + Category.PRIMITIVE + ">, but " + arguments[0].getTypeName()
                        + " was found.");
        }
    }
    ListObjectInspector[] listOIs = new ListObjectInspector[nargs];
    for (int i = 0; i < nargs; i++) {
        listOIs[i] = (ListObjectInspector) arguments[i];
    }
    this.argumentOIs = listOIs;
    // The return type is a list whose element inspector is the standardized
    // element inspector of the first array argument
    ObjectInspector firstElemOI = listOIs[0].getListElementObjectInspector();
    ObjectInspector returnElemOI = ObjectInspectorUtils.getStandardObjectInspector(firstElemOI);
    return ObjectInspectorFactory.getStandardListObjectInspector(returnElemOI);
}
Example 11: getFeatureOutputOI
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Nonnull
protected ObjectInspector getFeatureOutputOI(@Nonnull PrimitiveObjectInspector featureInputOI)
        throws UDFArgumentException {
    if (dense_model) {
        // TODO validation
        return PrimitiveObjectInspectorFactory.javaIntObjectInspector; // see DenseModel
    }
    return ObjectInspectorUtils.getStandardObjectInspector(featureInputOI);
}
Example 12: initialize
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; // import the package/class that this method depends on
@Override
public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    final int numArgs = argOIs.length;
    if (numArgs < 4) {
        throw new UDFArgumentException(
            "each_top_k(int K, Object group, double cmpKey, *) takes at least 4 arguments: "
                    + numArgs);
    }
    this.argOIs = argOIs;
    this._constantK = ObjectInspectorUtils.isConstantObjectInspector(argOIs[0]);
    if (_constantK) {
        final int k = HiveUtils.getAsConstInt(argOIs[0]);
        if (k == 0) {
            throw new UDFArgumentException("k should not be 0");
        }
        this._queue = getQueue(k);
    } else {
        this.kOI = HiveUtils.asIntCompatibleOI(argOIs[0]);
        this._prevK = 0;
    }
    // Standardize the group column's inspector so group values can be copied
    // and compared across rows
    this.prevGroupOI = ObjectInspectorUtils.getStandardObjectInspector(argOIs[1],
        ObjectInspectorCopyOption.DEFAULT);
    this.cmpKeyOI = HiveUtils.asDoubleCompatibleOI(argOIs[2]);
    this._tuple = null;
    this._previousGroup = null;
    final ArrayList<String> fieldNames = new ArrayList<String>(numArgs);
    final ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(numArgs);
    fieldNames.add("rank");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    fieldNames.add("key");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    // The remaining arguments are forwarded as output columns c1..cN
    for (int i = 3; i < numArgs; i++) {
        fieldNames.add("c" + (i - 2));
        ObjectInspector rawOI = argOIs[i];
        ObjectInspector retOI = ObjectInspectorUtils.getStandardObjectInspector(rawOI,
            ObjectInspectorCopyOption.DEFAULT);
        fieldOIs.add(retOI);
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}