本文整理汇总了Java中org.apache.hadoop.hive.serde2.io.DoubleWritable.get方法的典型用法代码示例。如果您正苦于以下问题:Java DoubleWritable.get方法的具体用法?Java DoubleWritable.get怎么用?Java DoubleWritable.get使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.serde2.io.DoubleWritable
的用法示例。
在下文中一共展示了DoubleWritable.get方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: iterate
import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Folds one weight into the running partial aggregate.
 *
 * Null inputs are skipped; the partial buffer is created lazily on the first
 * non-null value. Positive weights go into the positive sum/count, negative
 * weights into the negative sum/count; exact zeros are counted in neither.
 *
 * @param o the weight to accumulate, or null to ignore
 * @return always true (Hive UDAF iterate convention)
 */
public boolean iterate(@Nullable DoubleWritable o) {
    if (o == null) {
        return true; // ignore nulls
    }
    if (this.partial == null) {
        // lazily allocate the aggregation buffer on first use
        PartialResult buf = new PartialResult();
        buf.init();
        this.partial = buf;
    }
    final double weight = o.get();
    if (weight > 0) {
        this.partial.positiveSum += weight;
        this.partial.positiveCnt++;
    } else if (weight < 0) {
        this.partial.negativeSum += weight;
        this.partial.negativeCnt++;
    }
    return true;
}
示例2: asJavaDouble
import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Coerces an arbitrary Hive object to a primitive {@code double}.
 *
 * Handles, in order: boxed {@link Double}, {@link LazyDouble} (unwrapped via
 * its writable), {@link DoubleWritable}, and finally any other object by
 * parsing its {@code toString()} representation.
 *
 * @param o the value to convert; must not be null
 * @return the double value of {@code o}
 * @throws IllegalArgumentException if {@code o} is null
 * @throws NumberFormatException if the fallback string form is not parsable
 */
public static double asJavaDouble(@Nullable final Object o) {
    if (o == null) {
        // was a bare IllegalArgumentException; give callers a diagnosable message
        throw new IllegalArgumentException("argument must not be null");
    }
    if (o instanceof Double) {
        return ((Double) o).doubleValue();
    }
    if (o instanceof LazyDouble) {
        DoubleWritable d = ((LazyDouble) o).getWritableObject();
        return d.get();
    }
    if (o instanceof DoubleWritable) {
        return ((DoubleWritable) o).get();
    }
    // last resort: parse the textual form (may throw NumberFormatException)
    String s = o.toString();
    return Double.parseDouble(s);
}
示例3: evalPredict
import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Compiles the regression tree into a stack-machine opcode script and
 * evaluates {@link TreePredictUDFv1} on the feature vector {@code x}.
 *
 * Fix: the UDF is now closed in a finally block so it is released even when
 * {@code initialize}/{@code evaluate} throws (the original leaked it on error).
 *
 * @param tree the trained regression tree to evaluate
 * @param x the dense feature vector to predict on
 * @return the predicted value
 * @throws HiveException if UDF initialization or evaluation fails
 * @throws IOException if closing the UDF fails
 */
private static double evalPredict(RegressionTree tree, double[] x) throws HiveException,
        IOException {
    String opScript = tree.predictOpCodegen(StackMachine.SEP);
    debugPrint(opScript);
    TreePredictUDFv1 udf = new TreePredictUDFv1();
    try {
        udf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector),
                ObjectInspectorUtils.getConstantObjectInspector(
                    PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, false)});
        DeferredObject[] arguments = new DeferredObject[] {new DeferredJavaObject("model_id#1"),
                new DeferredJavaObject(ModelType.opscode.getId()),
                new DeferredJavaObject(opScript), new DeferredJavaObject(ArrayUtils.toList(x)),
                new DeferredJavaObject(false)};
        DoubleWritable result = (DoubleWritable) udf.evaluate(arguments);
        return result.get();
    } finally {
        udf.close(); // release UDF resources even on failure
    }
}
示例4: evalPredict
import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Serializes the regression tree to a Base91-encoded model text and evaluates
 * {@link TreePredictUDF} on the feature vector {@code x}.
 *
 * Fix: the UDF is now closed in a finally block so it is released even when
 * {@code initialize}/{@code evaluate} throws (the original leaked it on error).
 *
 * @param tree the trained regression tree to evaluate
 * @param x the dense feature vector to predict on
 * @return the predicted value
 * @throws HiveException if UDF initialization or evaluation fails
 * @throws IOException if serialization or closing the UDF fails
 */
private static double evalPredict(RegressionTree tree, double[] x) throws HiveException,
        IOException {
    byte[] b = tree.serialize(true);
    byte[] encoded = Base91.encode(b);
    Text model = new Text(encoded);
    TreePredictUDF udf = new TreePredictUDF();
    try {
        udf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                PrimitiveObjectInspectorFactory.writableStringObjectInspector,
                ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector),
                ObjectInspectorUtils.getConstantObjectInspector(
                    PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, false)});
        DeferredObject[] arguments = new DeferredObject[] {new DeferredJavaObject("model_id#1"),
                new DeferredJavaObject(model), new DeferredJavaObject(ArrayUtils.toList(x)),
                new DeferredJavaObject(false)};
        DoubleWritable result = (DoubleWritable) udf.evaluate(arguments);
        return result.get();
    } finally {
        udf.close(); // release UDF resources even on failure
    }
}
示例5: evaluate
import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Matrix-factorization prediction: mu + dot(Pu, Qi).
 *
 * Returns the bias {@code mu} (0 when null) if either factor vector is null
 * or empty; null elements within the vectors are skipped pairwise.
 *
 * @param Pu user-side latent factors, or null
 * @param Qi item-side latent factors, or null
 * @param mu global bias term, or null for 0
 * @return the predicted rating
 * @throws HiveException if the two factor vectors differ in length
 */
@Nonnull
public DoubleWritable evaluate(@Nullable List<FloatWritable> Pu,
        @Nullable List<FloatWritable> Qi, @Nullable DoubleWritable mu) throws HiveException {
    final double bias = (mu == null) ? 0.d : mu.get();
    if (Pu == null || Qi == null) {
        return new DoubleWritable(bias);
    }
    final int numFactors = Pu.size();
    final int QiSize = Qi.size();
    // workaround for TD: an empty factor vector degrades to a bias-only prediction
    if (numFactors == 0 || QiSize == 0) {
        return new DoubleWritable(bias);
    }
    if (QiSize != numFactors) {
        throw new HiveException("|Pu| " + numFactors + " was not equal to |Qi| " + QiSize);
    }
    double sum = bias;
    for (int k = 0; k < numFactors; k++) {
        final FloatWritable p = Pu.get(k);
        final FloatWritable q = Qi.get(k);
        if (p == null || q == null) {
            continue; // skip factor pairs with a missing side
        }
        sum += p.get() * q.get();
    }
    return new DoubleWritable(sum);
}
示例6: merge
import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入方法依赖的package包/类
/**
 * Merges a serialized partial aggregate into the aggregation buffer.
 *
 * @param agg the accumulating buffer (an {@code AggrBuffer})
 * @param partial a {@code DoubleWritable} partial value, or null to skip
 * @throws HiveException on aggregation failure
 */
@Override
public void merge(@SuppressWarnings("deprecation") AggregationBuffer agg, Object partial)
        throws HiveException {
    if (partial != null) {
        final double value = ((DoubleWritable) partial).get();
        ((AggrBuffer) agg).merge(value);
    }
}