This article collects typical usage examples of PrimitiveObjectInspectorFactory.writableDoubleObjectInspector, a field of the Java class org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory. If you are wondering what writableDoubleObjectInspector is for or how to use it, the curated code examples below may help; you can also explore the enclosing class for related usage.
The following 15 code examples show PrimitiveObjectInspectorFactory.writableDoubleObjectInspector in use, ordered by popularity.
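Before the examples, here is a minimal sketch of the pattern they all share: writableDoubleObjectInspector is a public static singleton WritableDoubleObjectInspector that unwraps a primitive double from Hive's serde2 DoubleWritable and, being a settable inspector, can also create such writables. The demo class and main method below are hypothetical, added only for illustration; get and create are assumed from the serde2 inspector interfaces.

import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector;

// Hypothetical demo class, not taken from the examples below.
public class WritableDoubleOIDemo {
    public static void main(String[] args) {
        // The factory exposes the inspector as a public static final singleton.
        WritableDoubleObjectInspector oi =
                PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;

        // get(Object) unwraps the primitive double from a DoubleWritable.
        double value = oi.get(new DoubleWritable(2.5d)); // 2.5

        // As a settable inspector, it can also create a writable of its type.
        Object created = oi.create(1.0d); // a new DoubleWritable holding 1.0

        System.out.println(value + " / " + created);
    }
}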
Example 1: init
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters) throws HiveException {
    assert (parameters.length == 5);
    super.init(mode, parameters);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) { // from original data
        this.wiOI = HiveUtils.asDoubleCompatibleOI(parameters[0]);
        this.vijOI = HiveUtils.asListOI(parameters[1]);
        this.vijElemOI = HiveUtils.asFloatingPointOI(vijOI.getListElementObjectInspector());
        this.vjiOI = HiveUtils.asListOI(parameters[2]);
        this.vjiElemOI = HiveUtils.asFloatingPointOI(vjiOI.getListElementObjectInspector());
        this.xiOI = HiveUtils.asDoubleCompatibleOI(parameters[3]);
        this.xjOI = HiveUtils.asDoubleCompatibleOI(parameters[4]);
    } else { // from partial aggregation
        this.mergeInputOI = HiveUtils.asDoubleOI(parameters[0]);
    }
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
}
Example 2: init
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
    super.init(m, parameters);
    // initialize input
    if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) { // from original data
        this.xhOI = HiveUtils.asNumberOI(parameters[0]);
        this.xkOI = HiveUtils.asNumberOI(parameters[1]);
        this.w0OI = HiveUtils.asNumberOI(parameters[2]);
        this.w1OI = HiveUtils.asNumberOI(parameters[3]);
        this.w2OI = HiveUtils.asNumberOI(parameters[4]);
        this.w3OI = HiveUtils.asNumberOI(parameters[5]);
    }
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
}
Example 3: merge
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
void merge(int size, @Nonnull Object posterioriObj,
        @Nonnull StandardListObjectInspector posterioriOI) throws HiveException {
    if (size != _k) {
        if (_k == -1) {
            this._k = size;
            this._posteriori = new double[size];
        } else {
            throw new HiveException("Mismatch in the number of elements: _k=" + _k
                    + ", size=" + size);
        }
    }
    final double[] posteriori = _posteriori;
    final DoubleObjectInspector doubleOI =
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    for (int i = 0, len = _k; i < len; i++) {
        Object o2 = posterioriOI.getListElement(posterioriObj, i);
        posteriori[i] += doubleOI.get(o2);
    }
}
Example 4: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 4 && argOIs.length != 5) {
        throw new UDFArgumentException("_FUNC_ takes 4 or 5 arguments");
    }
    this.modelTypeOI = HiveUtils.asIntegerOI(argOIs[1]);
    this.stringOI = HiveUtils.asStringOI(argOIs[2]);
    ListObjectInspector listOI = HiveUtils.asListOI(argOIs[3]);
    this.featureListOI = listOI;
    ObjectInspector elemOI = listOI.getListElementObjectInspector();
    this.featureElemOI = HiveUtils.asDoubleCompatibleOI(elemOI);
    boolean classification = false;
    if (argOIs.length == 5) {
        classification = HiveUtils.getConstBoolean(argOIs[4]);
    }
    this.classification = classification;
    if (classification) {
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    } else {
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    }
}
Example 5: merge
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
void merge(final int o_size, @Nonnull final Object o_sum, @Nonnull final Object o_count,
        @Nonnull final StandardListObjectInspector sumOI,
        @Nonnull final StandardListObjectInspector countOI) throws HiveException {
    final WritableDoubleObjectInspector sumElemOI =
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    final WritableLongObjectInspector countElemOI =
            PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    if (o_size != _size) {
        if (_size == -1) {
            init(o_size);
        } else {
            throw new HiveException("Mismatch in the number of elements");
        }
    }
    final double[] sum = _sum;
    final long[] count = _count;
    for (int i = 0, len = _size; i < len; i++) {
        Object sum_e = sumOI.getListElement(o_sum, i);
        sum[i] += sumElemOI.get(sum_e);
        Object count_e = countOI.getListElement(o_count, i);
        count[i] += countElemOI.get(count_e);
    }
}
Example 6: getObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
    if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    } else {
        throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
    }
}
Example 7: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 2, 3);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    if (arguments.length == 3) {
        if (arguments[2] instanceof ConstantObjectInspector) {
            isRoundOffNeeded = getConstantBooleanValue(arguments, 2);
        }
    }
    // the function should support both short date and full timestamp format;
    // the time part of the timestamp should not be skipped
    checkArgGroups(arguments, 0, tsInputTypes, STRING_GROUP, DATE_GROUP);
    checkArgGroups(arguments, 1, tsInputTypes, STRING_GROUP, DATE_GROUP);
    checkArgGroups(arguments, 0, dtInputTypes, STRING_GROUP, DATE_GROUP);
    checkArgGroups(arguments, 1, dtInputTypes, STRING_GROUP, DATE_GROUP);
    obtainTimestampConverter(arguments, 0, tsInputTypes, tsConverters);
    obtainTimestampConverter(arguments, 1, tsInputTypes, tsConverters);
    obtainDateConverter(arguments, 0, dtInputTypes, dtConverters);
    obtainDateConverter(arguments, 1, dtInputTypes, dtConverters);
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    return outputOI;
}
Example 8: newObjectInspectorFromHiveType
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
private static ObjectInspector newObjectInspectorFromHiveType(final ASTNode type) {
    // matching by token names, because token IDs (which are static final) change drastically between versions
    switch (type.getToken().getText()) {
        case "TOK_STRING":
            return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
        case "TOK_INT":
            return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        case "TOK_DOUBLE":
            return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        case "TOK_FLOAT":
            return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
        case "TOK_BIGINT":
            return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        case "TOK_BOOLEAN":
            return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
        case "TOK_STRUCT": {
            final ASTNode tabColList = (ASTNode) type.getChild(0);
            final List<String> names = new ArrayList<>();
            final List<ObjectInspector> ois = new ArrayList<>();
            for (final Node tabCol : tabColList.getChildren()) {
                final ASTNode a = (ASTNode) tabCol;
                names.add(a.getChild(0).toString());
                ois.add(newObjectInspectorFromHiveType((ASTNode) a.getChild(1)));
            }
            return ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
        }
        case "TOK_MAP": {
            final ObjectInspector keyType = newObjectInspectorFromHiveType((ASTNode) type.getChild(0));
            final ObjectInspector valueType = newObjectInspectorFromHiveType((ASTNode) type.getChild(1));
            return ObjectInspectorFactory.getStandardMapObjectInspector(keyType, valueType);
        }
        case "TOK_LIST": {
            final ObjectInspector itemType = newObjectInspectorFromHiveType((ASTNode) type.getChild(0));
            return ObjectInspectorFactory.getStandardListObjectInspector(itemType);
        }
        default:
            throw new IllegalArgumentException("unsupported type: " + type.toStringTree());
    }
}
Example 9: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 2) {
        throw new UDFArgumentException("_FUNC_ takes exactly 2 arguments: " + argOIs.length);
    }
    this.yOI = HiveUtils.asIntegerOI(argOIs[0]);
    this.zoomOI = HiveUtils.asIntegerOI(argOIs[1]);
    this.result = new DoubleWritable();
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
}
Example 10: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 2) {
        throw new UDFArgumentException("_FUNC_ takes exactly 2 arguments: " + argOIs.length);
    }
    this.xOI = HiveUtils.asIntegerOI(argOIs[0]);
    this.zoomOI = HiveUtils.asIntegerOI(argOIs[1]);
    this.result = new DoubleWritable();
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
}
Example 11: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 4 && argOIs.length != 5) {
        throw new UDFArgumentException("_FUNC_ takes 4 or 5 arguments: " + argOIs.length);
    }
    this.lat1OI = HiveUtils.asDoubleCompatibleOI(argOIs[0]);
    this.lon1OI = HiveUtils.asDoubleCompatibleOI(argOIs[1]);
    this.lat2OI = HiveUtils.asDoubleCompatibleOI(argOIs[2]);
    this.lon2OI = HiveUtils.asDoubleCompatibleOI(argOIs[3]);
    this.inMiles = (argOIs.length == 5) && HiveUtils.getConstBoolean(argOIs[4]);
    this.result = new DoubleWritable();
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
}
Example 12: init
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters) throws HiveException {
    assert (parameters.length == 3);
    super.init(mode, parameters);
    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) { // from original data
        this.wOI = HiveUtils.asDoubleCompatibleOI(parameters[0]);
        this.vOI = HiveUtils.asListOI(parameters[1]);
        this.vElemOI = HiveUtils.asDoubleCompatibleOI(vOI.getListElementObjectInspector());
        this.xOI = HiveUtils.asDoubleCompatibleOI(parameters[2]);
    } else { // from partial aggregation
        StructObjectInspector soi = (StructObjectInspector) parameters[0];
        this.internalMergeOI = soi;
        this.retField = soi.getStructFieldRef("ret");
        this.sumVjXjField = soi.getStructFieldRef("sumVjXj");
        this.sumV2X2Field = soi.getStructFieldRef("sumV2X2");
        this.retOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        this.sumVjXjOI = ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
        this.sumV2X2OI = ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    }
    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) { // terminatePartial
        outputOI = internalMergeOI();
    } else {
        outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    }
    return outputOI;
}
Example 13: merge
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
void merge(final double o_ret, @Nullable final Object o_sumVjXj,
        @Nullable final Object o_sumV2X2,
        @Nonnull final StandardListObjectInspector sumVjXjOI,
        @Nonnull final StandardListObjectInspector sumV2X2OI) throws HiveException {
    this.ret += o_ret;
    if (o_sumVjXj == null) {
        return;
    }
    if (o_sumV2X2 == null) { // sanity check
        throw new HiveException("o_sumV2X2 should not be null");
    }
    final int factors = sumVjXjOI.getListLength(o_sumVjXj);
    if (sumVjXj == null) {
        this.sumVjXj = new double[factors];
        this.sumV2X2 = new double[factors];
    } else if (sumVjXj.length != factors) { // sanity check
        throw new HiveException("Mismatch in the number of factors");
    }
    final WritableDoubleObjectInspector doubleOI =
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    for (int f = 0; f < factors; f++) {
        Object o1 = sumVjXjOI.getListElement(o_sumVjXj, f);
        Object o2 = sumV2X2OI.getListElement(o_sumV2X2, f);
        double d1 = doubleOI.get(o1);
        double d2 = doubleOI.get(o2);
        sumVjXj[f] += d1;
        sumV2X2[f] += d2;
    }
}
Example 14: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 3 && argOIs.length != 4) {
        throw new UDFArgumentException("_FUNC_ takes 3 or 4 arguments");
    }
    this.modelOI = HiveUtils.asStringOI(argOIs[1]);
    ListObjectInspector listOI = HiveUtils.asListOI(argOIs[2]);
    this.featureListOI = listOI;
    ObjectInspector elemOI = listOI.getListElementObjectInspector();
    if (HiveUtils.isNumberOI(elemOI)) {
        this.featureElemOI = HiveUtils.asDoubleCompatibleOI(elemOI);
        this.denseInput = true;
    } else if (HiveUtils.isStringOI(elemOI)) {
        this.featureElemOI = HiveUtils.asStringOI(elemOI);
        this.denseInput = false;
    } else {
        throw new UDFArgumentException(
            "_FUNC_ takes array<double> or array<string> for the second argument: "
                    + listOI.getTypeName());
    }
    boolean classification = false;
    if (argOIs.length == 4) {
        classification = HiveUtils.getConstBoolean(argOIs[3]);
    }
    this.classification = classification;
    if (classification) {
        List<String> fieldNames = new ArrayList<String>(2);
        List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(2);
        fieldNames.add("value");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
        fieldNames.add("posteriori");
        fieldOIs.add(ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    } else {
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    }
}
Example 15: testTextDoubleWritable
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; // import the package/class the method depends on
@Test
public void testTextDoubleWritable() throws Exception {
    ObjectInspector featureOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    ObjectInspector weightOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    udf.initialize(new ObjectInspector[] {featureOI, weightOI});
    Text ret = udf.evaluate(new GenericUDF.DeferredObject[] {
            new DeferredJavaObject(new Text("f1")),
            new DeferredJavaObject(new DoubleWritable(2.5d))});
    Assert.assertEquals("f1:2.5", ret.toString());
}