This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.getDouble. If you are wondering what PrimitiveObjectInspectorUtils.getDouble does, how to call it, or what real-world usages look like, the curated code samples below should help. You can also explore further usage examples of its enclosing class, org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.
The following 15 code examples of PrimitiveObjectInspectorUtils.getDouble are shown below, sorted by popularity by default.
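Before the examples, a minimal sketch of the common call pattern may be useful: getDouble(Object, PrimitiveObjectInspector) converts a raw column value into a primitive double using the ObjectInspector captured during initialize(). The class name SquareUDF and its lax argument checking below are illustrative assumptions, not code taken from the examples that follow.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

// A minimal sketch (not from the examples below): a GenericUDF that reads a
// numeric-compatible argument as a primitive double via getDouble() and squares it.
public final class SquareUDF extends GenericUDF {

    private PrimitiveObjectInspector inputOI;
    private final DoubleWritable result = new DoubleWritable();

    @Override
    public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
        if (argOIs.length != 1) {
            throw new UDFArgumentLengthException("square() takes exactly one argument");
        }
        // getDouble() accepts any primitive OI whose value can be converted to double
        // (tinyint/smallint/int/bigint/float/double/decimal/string/...).
        // The unchecked cast keeps the sketch short; real UDFs validate the category first.
        this.inputOI = (PrimitiveObjectInspector) argOIs[0];
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object arg0 = arguments[0].get();
        if (arg0 == null) {
            return null;
        }
        // The pattern used throughout the examples below: pass the raw object
        // together with the ObjectInspector captured in initialize().
        double v = PrimitiveObjectInspectorUtils.getDouble(arg0, inputOI);
        result.set(v * v);
        return result;
    }

    @Override
    public String getDisplayString(String[] children) {
        return "square(" + children[0] + ")";
    }
}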
Example 1: process
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public void process(@Nonnull Object[] args) throws HiveException {
    if (args[0] == null) {
        return;
    }
    // TODO: Need to support dense inputs
    final List<?> features = (List<?>) featureListOI.getList(args[0]);
    final String[] fv = new String[features.size()];
    for (int i = 0; i < features.size(); i++) {
        fv[i] = (String) featureElemOI.getPrimitiveJavaObject(features.get(i));
    }
    double target = PrimitiveObjectInspectorUtils.getDouble(args[1], this.targetOI);
    checkTargetValue(target);
    final LabeledPoint point = XGBoostUtils.parseFeatures(target, fv);
    if (point != null) {
        this.featuresList.add(point);
    }
}
Example 2: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public IntWritable evaluate(DeferredObject[] arguments) throws HiveException {
    Object arg0 = arguments[0].get();
    Object arg1 = arguments[1].get();
    if (arg0 == null) {
        return null;
    }
    if (arg1 == null) {
        throw new UDFArgumentException("zoom level should not be null");
    }
    double lat = PrimitiveObjectInspectorUtils.getDouble(arg0, latOI);
    int zoom = PrimitiveObjectInspectorUtils.getInt(arg1, zoomOI);
    Preconditions.checkArgument(zoom >= 0, "Invalid zoom level", UDFArgumentException.class);
    final int y;
    try {
        y = GeoSpatialUtils.lat2tiley(lat, zoom);
    } catch (IllegalArgumentException ex) {
        throw new UDFArgumentException(ex);
    }
    result.set(y);
    return result;
}
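The evaluate() above relies on fields (latOI, zoomOI, result) prepared during initialize(). The following is a hedged sketch of how such an initialize() could look; the validation shown is an assumption for illustration, not the library's actual implementation.

@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 2) {
        throw new UDFArgumentException("Expected two arguments: lat and zoom");
    }
    this.latOI = (PrimitiveObjectInspector) argOIs[0];  // any double-compatible primitive OI
    this.zoomOI = (PrimitiveObjectInspector) argOIs[1]; // any int-compatible primitive OI
    this.result = new IntWritable();                    // reused across rows
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}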
Example 3: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public Text evaluate(DeferredObject[] arguments) throws HiveException {
    Object arg0 = arguments[0].get();
    Object arg1 = arguments[1].get();
    Object arg2 = arguments[2].get();
    if (arg0 == null || arg1 == null) {
        return null;
    }
    if (arg2 == null) {
        throw new UDFArgumentException("zoom level is null");
    }
    double lat = PrimitiveObjectInspectorUtils.getDouble(arg0, latOI);
    double lon = PrimitiveObjectInspectorUtils.getDouble(arg1, lonOI);
    int zoom = PrimitiveObjectInspectorUtils.getInt(arg2, zoomOI);
    result.set(toMapURL(lat, lon, zoom, type));
    return result;
}
Example 4: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public IntWritable evaluate(DeferredObject[] arguments) throws HiveException {
    Object arg0 = arguments[0].get();
    Object arg1 = arguments[1].get();
    if (arg0 == null) {
        return null;
    }
    if (arg1 == null) {
        throw new UDFArgumentException("zoom level should not be null");
    }
    double lon = PrimitiveObjectInspectorUtils.getDouble(arg0, lonOI);
    int zoom = PrimitiveObjectInspectorUtils.getInt(arg1, zoomOI);
    Preconditions.checkArgument(zoom >= 0, "Invalid zoom level", UDFArgumentException.class);
    final int x;
    try {
        x = GeoSpatialUtils.lon2tilex(lon, zoom);
    } catch (IllegalArgumentException ex) {
        throw new UDFArgumentException(ex);
    }
    result.set(x);
    return result;
}
Example 5: asDoubleArray
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Nullable
public static double[] asDoubleArray(@Nullable final Object argObj,
        @Nonnull final ListObjectInspector listOI,
        @Nonnull final PrimitiveObjectInspector elemOI, final boolean avoidNull)
        throws UDFArgumentException {
    if (argObj == null) {
        return null;
    }
    final int length = listOI.getListLength(argObj);
    final double[] ary = new double[length];
    for (int i = 0; i < length; i++) {
        Object o = listOI.getListElement(argObj, i);
        if (o == null) {
            if (avoidNull) {
                continue;
            }
            throw new UDFArgumentException("Found null at index " + i);
        }
        ary[i] = PrimitiveObjectInspectorUtils.getDouble(o, elemOI);
    }
    return ary;
}
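A hypothetical call site for asDoubleArray (the field names listOI and elemOI are assumptions): convert a Hive array argument into a primitive double[], leaving 0.0 in place of null elements rather than failing.

// inside some evaluate(DeferredObject[] arguments), assuming listOI/elemOI were set in initialize()
double[] features = asDoubleArray(arguments[0].get(), listOI, elemOI, /* avoidNull */ true);
if (features == null) {
    return null; // the whole list argument was NULL
}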
Example 6: toDoubleArray
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
public static void toDoubleArray(@Nullable final Object argObj,
        @Nonnull final ListObjectInspector listOI,
        @Nonnull final PrimitiveObjectInspector elemOI, @Nonnull final double[] out,
        final boolean avoidNull) throws UDFArgumentException {
    if (argObj == null) {
        return;
    }
    final int length = listOI.getListLength(argObj);
    if (out.length != length) {
        throw new UDFArgumentException("Dimension mismatched. Expected: " + out.length
                + ", Actual: " + length);
    }
    for (int i = 0; i < length; i++) {
        Object o = listOI.getListElement(argObj, i);
        if (o == null) {
            if (avoidNull) {
                continue;
            }
            throw new UDFArgumentException("Found null at index " + i);
        }
        out[i] = PrimitiveObjectInspectorUtils.getDouble(o, elemOI);
    }
}
Example 7: iterate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public void iterate(@SuppressWarnings("deprecation") AggregationBuffer agg,
        Object[] parameters) throws HiveException {
    if (parameters[0] == null) {
        return;
    }
    FMPredictAggregationBuffer buf = (FMPredictAggregationBuffer) agg;
    double w = PrimitiveObjectInspectorUtils.getDouble(parameters[0], wOI);
    if (parameters[1] == null || /* for TD */ vOI.getListLength(parameters[1]) == 0) { // Vif was null
        buf.iterate(w);
    } else {
        if (parameters[2] == null) {
            throw new UDFArgumentException("The third argument Xj must not be null");
        }
        double x = PrimitiveObjectInspectorUtils.getDouble(parameters[2], xOI);
        buf.iterate(w, x, parameters[1], vOI, vElemOI);
    }
}
Example 8: update
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public void update(@Nonnull final Object arg, @Nonnull final double[] outScores)
        throws HiveException {
    double x = PrimitiveObjectInspectorUtils.getDouble(arg, oi);

    // [Stage#1] Outlier Detection
    xRing.add(x).toArray(xSeries, false /* LIFO */);
    int k1 = xRing.size() - 1;
    double x_hat = sdar1.update(xSeries, k1);
    double scoreX = (k1 == 0.d) ? 0.d : loss(sdar1, x, x_hat, lossFunc1);
    // smoothing
    double y = ChangeFinderUDF.smoothing(outlierScores.add(scoreX));

    // [Stage#2] Change-point Detection
    yRing.add(y).toArray(ySeries, false /* LIFO */);
    int k2 = yRing.size() - 1;
    double y_hat = sdar2.update(ySeries, k2);
    // <LogLoss>
    double lossY = (k2 == 0.d) ? 0.d : loss(sdar2, y, y_hat, lossFunc2);
    double scoreY = ChangeFinderUDF.smoothing(changepointScores.add(lossY));

    outScores[0] = scoreX;
    outScores[1] = scoreY;
}
Example 9: iterate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
    RfAggregationBufferV2 buf = (RfAggregationBufferV2) agg;

    Preconditions.checkNotNull(parameters[0]);
    int yhat = PrimitiveObjectInspectorUtils.getInt(parameters[0], yhatOI);
    Preconditions.checkNotNull(parameters[1]);
    double[] posteriori = HiveUtils.asDoubleArray(parameters[1], posterioriOI,
        posterioriElemOI);

    double weight = 1.0d;
    if (parameters.length == 3) {
        Preconditions.checkNotNull(parameters[2]);
        weight = PrimitiveObjectInspectorUtils.getDouble(parameters[2], weightOI);
    }

    buf.iterate(yhat, weight, posteriori);
}
Example 10: doIterate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
void doIterate(@Nonnull final Object tuple, @Nonnull ListObjectInspector listOI,
        @Nonnull PrimitiveObjectInspector elemOI) throws HiveException {
    final int size = listOI.getListLength(tuple);
    if (_size == -1) {
        init(size);
    }
    if (size != _size) { // a corner case
        throw new HiveException("Mismatch in the number of elements at tuple: "
                + tuple.toString());
    }

    final double[] sum = _sum;
    final long[] count = _count;
    for (int i = 0, len = size; i < len; i++) {
        Object o = listOI.getListElement(tuple, i);
        if (o != null) {
            double v = PrimitiveObjectInspectorUtils.getDouble(o, elemOI);
            sum[i] += v;
            count[i] += 1L;
        }
    }
}
Example 11: iterate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@SuppressWarnings("deprecation")
@Override
// Iterate step: only the current aggregation buffer (agg) and the input parameters are available here.
public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
    if (parameters == null) {
        return;
    }
    try {
        double flag = PrimitiveObjectInspectorUtils.getDouble(parameters[1], inputOI2);
        if (flag > 1.0) { // condition on the flag parameter
            merge(agg, parameters[0]); // hand the iterated value to the combiner-side merge
        }
    } catch (Exception e) {
        e.printStackTrace();
        log.warn("warn -> " + e.getMessage());
    }
}
Example 12: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public Object evaluate(DeferredObject[] args) throws HiveException {
    double binSize = PrimitiveObjectInspectorUtils.getDouble(args[0].get(), oiBinSize);
    if (!binSizeIsConstant || bins == null) {
        bins = new BinUtils(binSize);
    }
    OGCPoint point = geomHelper.getPoint(args);
    if (point == null) {
        return null;
    }
    return bins.getId(point.X(), point.Y());
}
Example 13: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public LongWritable evaluate(DeferredObject[] arguments) throws HiveException {
    Object arg0 = arguments[0].get();
    Object arg1 = arguments[1].get();
    Object arg2 = arguments[2].get();
    if (arg0 == null || arg1 == null) {
        return null;
    }
    if (arg2 == null) {
        throw new UDFArgumentException("zoom level is null");
    }
    double lat = PrimitiveObjectInspectorUtils.getDouble(arg0, latOI);
    double lon = PrimitiveObjectInspectorUtils.getDouble(arg1, lonOI);
    int zoom = PrimitiveObjectInspectorUtils.getInt(arg2, zoomOI);
    Preconditions.checkArgument(zoom >= 0, "Invalid zoom level", UDFArgumentException.class);
    final long tile;
    try {
        tile = GeoSpatialUtils.tile(lat, lon, zoom);
    } catch (IllegalArgumentException ex) {
        throw new UDFArgumentException(ex);
    }
    result.set(tile);
    return result;
}
Example 14: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Override
public DoubleWritable evaluate(DeferredObject[] arguments) throws HiveException {
    Object arg0 = arguments[0].get();
    Object arg1 = arguments[1].get();
    Object arg2 = arguments[2].get();
    Object arg3 = arguments[3].get();
    if (arg0 == null || arg1 == null || arg2 == null || arg3 == null) {
        return null;
    }

    double lat1 = PrimitiveObjectInspectorUtils.getDouble(arg0, lat1OI);
    double lon1 = PrimitiveObjectInspectorUtils.getDouble(arg1, lon1OI);
    double lat2 = PrimitiveObjectInspectorUtils.getDouble(arg2, lat2OI);
    double lon2 = PrimitiveObjectInspectorUtils.getDouble(arg3, lon2OI);

    final double distance;
    try {
        distance = GeoSpatialUtils.haversineDistance(lat1, lon1, lat2, lon2);
    } catch (IllegalArgumentException ex) {
        throw new UDFArgumentException(ex);
    }

    if (inMiles) {
        double miles = distance / 1.609344d; // convert kilometers to miles (1 mile = 1.609344 km)
        result.set(miles);
    } else {
        result.set(distance);
    }
    return result;
}
Example 15: getConstDoubleArray
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; // import the package/class this method depends on
@Nullable
public static double[] getConstDoubleArray(@Nonnull final ObjectInspector oi)
        throws UDFArgumentException {
    if (!ObjectInspectorUtils.isConstantObjectInspector(oi)) {
        throw new UDFArgumentException("argument must be a constant value: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    ConstantObjectInspector constOI = (ConstantObjectInspector) oi;
    if (constOI.getCategory() != Category.LIST) {
        throw new UDFArgumentException("argument must be an array: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    StandardConstantListObjectInspector listOI = (StandardConstantListObjectInspector) constOI;
    PrimitiveObjectInspector elemOI =
            HiveUtils.asDoubleCompatibleOI(listOI.getListElementObjectInspector());

    final List<?> lst = listOI.getWritableConstantValue();
    if (lst == null) {
        return null;
    }
    final int size = lst.size();
    final double[] ary = new double[size];
    for (int i = 0; i < size; i++) {
        Object o = lst.get(i);
        if (o == null) {
            ary[i] = Double.NaN;
        } else {
            ary[i] = PrimitiveObjectInspectorUtils.getDouble(o, elemOI);
        }
    }
    return ary;
}
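A hypothetical call site for getConstDoubleArray (the field name weights, the argument position, and the return type shown are assumptions for illustration): a constant list argument such as the literal array(0.1, 0.2, 0.7) can be materialized once, during initialize(), as a primitive double[].

@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    // null elements of the constant list become Double.NaN, as implemented above;
    // the whole result may be null if the constant list itself is null.
    this.weights = getConstDoubleArray(argOIs[1]);
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector; // illustrative return type
}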