当前位置: 首页>>代码示例>>Java>>正文


Java DoubleWritable类代码示例

本文整理汇总了Java中org.apache.hadoop.hive.serde2.io.DoubleWritable的典型用法代码示例。如果您正苦于以下问题:Java DoubleWritable类的具体用法?Java DoubleWritable怎么用?Java DoubleWritable使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


DoubleWritable类属于org.apache.hadoop.hive.serde2.io包,在下文中一共展示了DoubleWritable类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: evaluate

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Override
public DoubleWritable evaluate(DeferredObject[] arguments) throws HiveException {
    final Object tileY = arguments[0].get();
    final Object zoomArg = arguments[1].get();

    // A null tile index propagates as a null result; a null zoom is a caller error.
    if (tileY == null) {
        return null;
    }
    if (zoomArg == null) {
        throw new UDFArgumentException("zoom level should not be null");
    }

    final int y = PrimitiveObjectInspectorUtils.getInt(tileY, yOI);
    final int zoom = PrimitiveObjectInspectorUtils.getInt(zoomArg, zoomOI);
    Preconditions.checkArgument(zoom >= 0, "Invalid zoom level", UDFArgumentException.class);

    // Convert the tile Y index at the given zoom to latitude; out-of-range tile
    // indexes surface as UDFArgumentException rather than a raw runtime error.
    final double latitude;
    try {
        latitude = GeoSpatialUtils.tiley2lat(y, zoom);
    } catch (IllegalArgumentException ex) {
        throw new UDFArgumentException(ex);
    }

    result.set(latitude);
    return result;
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:27,代码来源:TileY2LatUDF.java

示例2: evaluate

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Override
public DoubleWritable evaluate(DeferredObject[] arguments) throws HiveException {
    final Object tileX = arguments[0].get();
    final Object zoomArg = arguments[1].get();

    // A null tile index propagates as a null result; a null zoom is a caller error.
    if (tileX == null) {
        return null;
    }
    if (zoomArg == null) {
        throw new UDFArgumentException("zoom level should not be null");
    }

    final int x = PrimitiveObjectInspectorUtils.getInt(tileX, xOI);
    final int zoom = PrimitiveObjectInspectorUtils.getInt(zoomArg, zoomOI);
    Preconditions.checkArgument(zoom >= 0, "Invalid zoom level", UDFArgumentException.class);

    // Convert the tile X index at the given zoom to longitude; out-of-range tile
    // indexes surface as UDFArgumentException rather than a raw runtime error.
    final double longitude;
    try {
        longitude = GeoSpatialUtils.tilex2lon(x, zoom);
    } catch (IllegalArgumentException ex) {
        throw new UDFArgumentException(ex);
    }

    result.set(longitude);
    return result;
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:27,代码来源:TileX2LonUDF.java

示例3: iterate

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
public boolean iterate(@Nullable DoubleWritable o) {
    // Null weights are skipped; iteration always continues.
    if (o == null) {
        return true;
    }
    // Lazily allocate the partial aggregate on the first non-null weight.
    if (partial == null) {
        this.partial = new PartialResult();
        partial.init();
    }
    final double weight = o.get();
    // Accumulate positive and negative weights into separate sum/count buckets;
    // exact zeros are deliberately counted in neither.
    if (weight > 0) {
        partial.positiveSum += weight;
        partial.positiveCnt++;
    } else if (weight < 0) {
        partial.negativeSum += weight;
        partial.negativeCnt++;
    }
    return true;
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:19,代码来源:WeightVotedAvgUDAF.java

示例4: terminatePartial

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Override
public Object terminatePartial(AggregationBuffer agg) throws HiveException {
    final ClassificationAUCAggregationBuffer myAggr = (ClassificationAUCAggregationBuffer) agg;

    // Pack the scalar AUC state (score, area, confusion counters) followed by the
    // per-partition partial maps into the 11-slot layout the merge phase expects.
    return new Object[] {
            new DoubleWritable(myAggr.indexScore),
            new DoubleWritable(myAggr.area),
            new LongWritable(myAggr.fp),
            new LongWritable(myAggr.tp),
            new LongWritable(myAggr.fpPrev),
            new LongWritable(myAggr.tpPrev),
            myAggr.areaPartialMap,
            myAggr.fpPartialMap,
            myAggr.tpPartialMap,
            myAggr.fpPrevPartialMap,
            myAggr.tpPrevPartialMap};
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:20,代码来源:AUCUDAF.java

示例5: asJavaDouble

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
/**
 * Coerces a Hive runtime value to a primitive double.
 * <p>
 * Handles boxed {@code Double}, {@code LazyDouble}, and {@code DoubleWritable}
 * directly; any other type falls back to parsing its {@code toString()} form.
 *
 * @param o the value to convert; must not be null
 * @return the double value of {@code o}
 * @throws IllegalArgumentException if {@code o} is null
 * @throws NumberFormatException if the string-parse fallback receives a non-numeric value
 */
public static double asJavaDouble(@Nullable final Object o) {
    if (o == null) {
        // Previously thrown without a message, which made failures hard to diagnose.
        throw new IllegalArgumentException("argument must not be null");
    }
    if (o instanceof Double) {
        return ((Double) o).doubleValue();
    }
    if (o instanceof LazyDouble) {
        DoubleWritable d = ((LazyDouble) o).getWritableObject();
        return d.get();
    }
    if (o instanceof DoubleWritable) {
        return ((DoubleWritable) o).get();
    }
    // Last resort: rely on the object's string representation.
    String s = o.toString();
    return Double.parseDouble(s);
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:18,代码来源:HiveUtils.java

示例6: newDoubleList

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
/**
 * Builds a mutable list of {@code size} DoubleWritable cells, each initialized to
 * {@code defaultValue}.
 * <p>
 * Deliberately constructs a plain ArrayList instead of {@code Arrays.asList} over a
 * {@code DoubleWritable[]}: Kryo cannot serialize the array-backed list
 * (https://issues.apache.org/jira/browse/HIVE-12551).
 */
@Nonnull
public static List<DoubleWritable> newDoubleList(final int size, final double defaultValue) {
    final List<DoubleWritable> filled = new ArrayList<DoubleWritable>(size);
    int remaining = size;
    while (remaining-- > 0) {
        filled.add(new DoubleWritable(defaultValue));
    }
    return filled;
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:18,代码来源:WritableUtils.java

示例7: toWritableList

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
/**
 * Wraps each element of {@code src} in a DoubleWritable, preserving order.
 * <p>
 * Deliberately constructs a plain ArrayList instead of {@code Arrays.asList} over a
 * {@code DoubleWritable[]}: Kryo cannot serialize the array-backed list
 * (https://issues.apache.org/jira/browse/HIVE-12551).
 */
@Nonnull
public static List<DoubleWritable> toWritableList(@Nonnull final double[] src) {
    final List<DoubleWritable> wrapped = new ArrayList<DoubleWritable>(src.length);
    for (final double v : src) {
        wrapped.add(new DoubleWritable(v));
    }
    return wrapped;
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:18,代码来源:WritableUtils.java

示例8: evaluate

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
/**
 * Predicts a regression value for {@code features} using the tree model identified by
 * {@code modelId}, whose serialized form is carried in {@code script}.
 *
 * The decoded tree is cached in {@code rNode} keyed by the last-seen model id, so the
 * (relatively expensive) Base91 decode + deserialize only runs when the model id
 * changes. NOTE(review): this single-slot cache presumably assumes rows arrive grouped
 * by model id — confirm against the UDF's callers.
 *
 * @param modelId identifier of the model to use
 * @param script Base91-encoded serialized regression tree
 * @param features feature vector to score
 * @return the predicted value (a reused {@code result} writable, not a fresh object)
 * @throws HiveException declared for the UDF contract
 */
@Nonnull
public DoubleWritable evaluate(@Nonnull final String modelId, @Nonnull final Text script,
        @Nonnull final Vector features) throws HiveException {
    if (!modelId.equals(prevModelId)) {
        this.prevModelId = modelId;
        int length = script.getLength();
        byte[] b = script.getBytes();
        // getBytes() may return a backing array longer than the payload; decode only
        // the first `length` bytes.
        b = Base91.decode(b, 0, length);
        this.rNode = RegressionTree.deserialize(b, b.length, true);
    }
    // Guards against a decode/deserialize path that left no usable tree.
    Preconditions.checkNotNull(rNode);

    double value = rNode.predict(features);
    result.set(value);
    return result;
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:17,代码来源:TreePredictUDF.java

示例9: terminate

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
    final RfAggregationBufferV2 buf = (RfAggregationBufferV2) agg;
    // _k == -1 marks a buffer that never saw any input.
    if (buf._k == -1) {
        return null;
    }

    final double[] posteriori = buf._posteriori;
    // Pick the winning label first, then normalize in place so `proba` is read
    // from the normalized distribution.
    final int label = smile.math.Math.whichMax(posteriori);
    smile.math.Math.unitize1(posteriori);
    final double proba = posteriori[label];

    // (label, probability, full normalized posterior) triple.
    return new Object[] {
            new IntWritable(label),
            new DoubleWritable(proba),
            WritableUtils.toWritableList(posteriori)};
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:19,代码来源:RandomForestEnsembleUDAF.java

示例10: terminatePartial

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Override
public Object terminatePartial(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    final SignalNoiseRatioAggregationBuffer myAgg = (SignalNoiseRatioAggregationBuffer) agg;

    // Convert each per-class double[] of means/variances into a serializable
    // list-of-lists of DoubleWritable.
    final List<List<DoubleWritable>> meanLists = new ArrayList<List<DoubleWritable>>();
    for (final double[] classMeans : myAgg.means) {
        meanLists.add(WritableUtils.toWritableList(classMeans));
    }
    final List<List<DoubleWritable>> varianceLists = new ArrayList<List<DoubleWritable>>();
    for (final double[] classVariances : myAgg.variances) {
        varianceLists.add(WritableUtils.toWritableList(classVariances));
    }

    // Slot layout: [counts, means, variances].
    return new Object[] {WritableUtils.toWritableList(myAgg.counts), meanLists, varianceLists};
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:20,代码来源:SignalNoiseRatioUDAF.java

示例11: serialize

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
/**
 * Serializes this histogram into a flat list of DoubleWritable values so that Hive
 * can ship it to the merge() phase. The first element is the number of bins; each
 * subsequent pair of elements encodes one bin's (x, y) coordinates.
 *
 * @return a list of Hadoop DoubleWritable objects representing the current histogram
 * @see #merge
 */
public ArrayList<DoubleWritable> serialize() {
    final ArrayList<DoubleWritable> serialized = new ArrayList<DoubleWritable>();

    serialized.add(new DoubleWritable(nbins));
    if (bins != null) {
        int i = 0;
        while (i < nusedbins) {
            serialized.add(new DoubleWritable(bins.get(i).x));
            serialized.add(new DoubleWritable(bins.get(i).y));
            i++;
        }
    }

    return serialized;
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:24,代码来源:NumericHistogram.java

示例12: terminatePartial

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Override
public Object terminatePartial(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    final PLSAPredictAggregationBuffer myAggr = (PLSAPredictAggregationBuffer) agg;
    // Nothing was accumulated — emit no partial at all.
    if (myAggr.wcList.size() == 0) {
        return null;
    }

    // Slot layout: [wcList, probMap, topics, alpha, delta].
    return new Object[] {
            myAggr.wcList,
            myAggr.probMap,
            new IntWritable(myAggr.topics),
            new FloatWritable(myAggr.alpha),
            new DoubleWritable(myAggr.delta)};
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:18,代码来源:PLSAPredictUDAF.java

示例13: terminatePartial

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Override
public Object terminatePartial(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    final OnlineLDAPredictAggregationBuffer myAggr = (OnlineLDAPredictAggregationBuffer) agg;
    // Nothing was accumulated — emit no partial at all.
    if (myAggr.wcList.size() == 0) {
        return null;
    }

    // Slot layout: [wcList, lambdaMap, topics, alpha, delta].
    return new Object[] {
            myAggr.wcList,
            myAggr.lambdaMap,
            new IntWritable(myAggr.topics),
            new FloatWritable(myAggr.alpha),
            new DoubleWritable(myAggr.delta)};
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:18,代码来源:LDAPredictUDAF.java

示例14: testKilometers1

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Test
public void testKilometers1() throws HiveException, IOException {
    final HaversineDistanceUDF udf = new HaversineDistanceUDF();
    // Four plain-double arguments: lat1, lon1, lat2, lon2.
    final ObjectInspector doubleOI =
            PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    udf.initialize(new ObjectInspector[] {doubleOI, doubleOI, doubleOI, doubleOI});

    // Tokyo
    final double tokyoLat = 35.6833d, tokyoLon = 139.7667d;
    // Osaka
    final double osakaLat = 34.6603d, osakaLon = 135.5232d;

    final DoubleWritable distance = udf.evaluate(new DeferredObject[] {
            new DeferredJavaObject(tokyoLat), new DeferredJavaObject(tokyoLon),
            new DeferredJavaObject(osakaLat), new DeferredJavaObject(osakaLon)});
    // Great-circle distance Tokyo–Osaka is roughly 402 km.
    Assert.assertEquals(402.092d, distance.get(), 0.001d);

    udf.close();
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:22,代码来源:HaversineDistanceUDFTest.java

示例15: testKilometers2

import org.apache.hadoop.hive.serde2.io.DoubleWritable; //导入依赖的package包/类
@Test
public void testKilometers2() throws HiveException, IOException {
    final HaversineDistanceUDF udf = new HaversineDistanceUDF();
    // Four plain-double arguments plus a constant `false` flag as the fifth argument.
    final ObjectInspector doubleOI =
            PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    final ObjectInspector constFalseOI = ObjectInspectorUtils.getConstantObjectInspector(
            PrimitiveObjectInspectorFactory.javaBooleanObjectInspector, false);
    udf.initialize(
            new ObjectInspector[] {doubleOI, doubleOI, doubleOI, doubleOI, constFalseOI});

    // Tokyo
    final double tokyoLat = 35.6833d, tokyoLon = 139.7667d;
    // Osaka
    final double osakaLat = 34.6603d, osakaLon = 135.5232d;

    final DoubleWritable distance = udf.evaluate(new DeferredObject[] {
            new DeferredJavaObject(tokyoLat), new DeferredJavaObject(tokyoLon),
            new DeferredJavaObject(osakaLat), new DeferredJavaObject(osakaLon)});
    // Great-circle distance Tokyo–Osaka is roughly 402 km.
    Assert.assertEquals(402.092d, distance.get(), 0.001d);

    udf.close();
}
 
开发者ID:apache,项目名称:incubator-hivemall,代码行数:24,代码来源:HaversineDistanceUDFTest.java


注:本文中的org.apache.hadoop.hive.serde2.io.DoubleWritable类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。