本文整理汇总了Java中org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector类的典型用法代码示例。如果您正苦于以下问题:Java DoubleObjectInspector类的具体用法?Java DoubleObjectInspector怎么用?Java DoubleObjectInspector使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
DoubleObjectInspector类属于org.apache.hadoop.hive.serde2.objectinspector.primitive包,在下文中一共展示了DoubleObjectInspector类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: merge
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Folds a partial posteriori vector (one combiner output) into this buffer.
 * On first use the buffer is sized lazily from {@code size}; afterwards any
 * size mismatch is an error.
 *
 * @param size          number of elements in the incoming list
 * @param posterioriObj the list object holding partial sums
 * @param posterioriOI  inspector for {@code posterioriObj}
 * @throws HiveException if {@code size} disagrees with the established _k
 */
void merge(int size, @Nonnull Object posterioriObj,
        @Nonnull StandardListObjectInspector posterioriOI) throws HiveException {
    if (size != _k) {
        if (_k != -1) {
            throw new HiveException("Mismatch in the number of elements: _k=" + _k
                    + ", size=" + size);
        }
        // First merge: adopt the incoming size and allocate the accumulator.
        this._k = size;
        this._posteriori = new double[size];
    }
    final DoubleObjectInspector elemOI =
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    final double[] acc = _posteriori;
    for (int idx = 0; idx < _k; idx++) {
        final Object elem = posterioriOI.getListElement(posterioriObj, idx);
        acc[idx] += elemOI.get(elem);
    }
}
示例2: createPrimitive
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Converts a Hive primitive value into its corresponding Hadoop Writable.
 *
 * @param obj       the raw value to convert; may be null
 * @param inspector primitive inspector describing {@code obj}
 * @return the matching Writable, or null when {@code obj} is null
 * @throws SerDeException if the primitive category is not supported
 */
private static Writable createPrimitive(Object obj, PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }
    final PrimitiveObjectInspector.PrimitiveCategory category =
            inspector.getPrimitiveCategory();
    switch (category) {
        case DOUBLE:
            return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
        case FLOAT:
            return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
        case INT:
            return new IntWritable(((IntObjectInspector) inspector).get(obj));
        case LONG:
            return new LongWritable(((LongObjectInspector) inspector).get(obj));
        case STRING:
            return new Text(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj));
        case DATE:
            // DATE and TIMESTAMP inspectors already hand back Writable wrappers.
            return ((DateObjectInspector) inspector).getPrimitiveWritableObject(obj);
        case TIMESTAMP:
            return ((TimestampObjectInspector) inspector).getPrimitiveWritableObject(obj);
        default:
            throw new SerDeException("Can't serialize primitive : " + category);
    }
}
示例3: get
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Resolves the IHivePrimitiveConverter implementation for the given inspector.
 * Non-primitive categories and primitive categories without a dedicated
 * converter (DATE, TIMESTAMP, VOID, UNKNOWN) fall back to
 * HiveDefaultPrimitiveConverter.
 *
 * @param objectInspector inspector to dispatch on
 * @return a converter matched to the inspector's primitive category
 */
public static IHivePrimitiveConverter get(final ObjectInspector objectInspector) {
    if (objectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE) {
        // Structs, lists, maps, unions: no primitive converter applies.
        return new HiveDefaultPrimitiveConverter();
    }
    final PrimitiveObjectInspector primitiveInspector =
            (PrimitiveObjectInspector) objectInspector;
    switch (primitiveInspector.getPrimitiveCategory()) {
        case BINARY:
            return new HiveBytesPrimitiveConverter((BinaryObjectInspector) objectInspector);
        case BOOLEAN:
            return new HiveBooleanPrimitiveConverter((BooleanObjectInspector) objectInspector);
        case BYTE:
            return new HiveBytePrimitiveConverter((ByteObjectInspector) objectInspector);
        case DOUBLE:
            return new HiveDoublePrimitiveConverter((DoubleObjectInspector) objectInspector);
        case FLOAT:
            return new HiveFloatPrimitiveConverter((FloatObjectInspector) objectInspector);
        case INT:
            return new HiveIntegerPrimitiveConverter((IntObjectInspector) objectInspector);
        case LONG:
            return new HiveLongPrimitiveConverter((LongObjectInspector) objectInspector);
        case SHORT:
            return new HiveShortPrimitiveConverter((ShortObjectInspector) objectInspector);
        case STRING:
            return new HiveStringPrimitiveConverter((StringObjectInspector) objectInspector);
        case DATE:
        case TIMESTAMP:
        case VOID:
        case UNKNOWN:
        default:
            return new HiveDefaultPrimitiveConverter();
    }
}
示例4: getNumber
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Renders a numeric Hive value as its decimal string form.
 * Only DOUBLE-like and LONG-like inspectors are supported.
 *
 * @param data            the value to render
 * @param objectInspector inspector describing {@code data}
 * @return the number formatted via Double.toString / Long.toString
 * @throws RuntimeException for any other inspector type
 */
public String getNumber(Object data, ObjectInspector objectInspector) {
    if (objectInspector instanceof DoubleObjectInspector) {
        final double value = ((DoubleObjectInspector) objectInspector).get(data);
        return Double.toString(value);
    }
    if (objectInspector instanceof LongObjectInspector) {
        final long value = ((LongObjectInspector) objectInspector).get(data);
        return Long.toString(value);
    }
    throw new RuntimeException("Unknown object inspector type: " + objectInspector.getCategory()
        + " Type name: " + objectInspector.getTypeName());
}
示例5: asDoubleOI
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Narrows an ObjectInspector to a DoubleObjectInspector, validating by
 * type name first.
 *
 * @param argOI the argument inspector to check
 * @return {@code argOI} cast to DoubleObjectInspector
 * @throws UDFArgumentException when the argument is not of DOUBLE type
 */
@Nonnull
public static DoubleObjectInspector asDoubleOI(@Nonnull final ObjectInspector argOI)
        throws UDFArgumentException {
    final String typeName = argOI.getTypeName();
    if (DOUBLE_TYPE_NAME.equals(typeName)) {
        return (DoubleObjectInspector) argOI;
    }
    throw new UDFArgumentException("Argument type must be DOUBLE: " + argOI.getTypeName());
}
示例6: createPrimitive
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Serializes a Hive primitive value into the corresponding Writable.
 * VOID values and null inputs serialize as null.
 *
 * @param obj       the value to serialize; may be null
 * @param inspector primitive inspector describing {@code obj}
 * @return a Writable wrapping the value, or null
 * @throws SerDeException for unsupported primitive categories
 */
private Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }
    switch (inspector.getPrimitiveCategory()) {
        case VOID:
            return null;
        case BOOLEAN:
            // Fixed: the original `get(obj) ? Boolean.TRUE : Boolean.FALSE` boxed
            // the boolean only to have the BooleanWritable ctor unbox it again.
            return new BooleanWritable(((BooleanObjectInspector) inspector).get(obj));
        case BYTE:
            return new ByteWritable((byte) ((ByteObjectInspector) inspector).get(obj));
        case DOUBLE:
            return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
        case FLOAT:
            return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
        case INT:
            return new IntWritable(((IntObjectInspector) inspector).get(obj));
        case LONG:
            return new LongWritable(((LongObjectInspector) inspector).get(obj));
        case SHORT:
            return new ShortWritable((short) ((ShortObjectInspector) inspector).get(obj));
        case STRING:
            return new BinaryWritable(Binary.fromString(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj)));
        default:
            throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
    }
}
示例7: write
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Writes one double value (or a null) to the ORC column stream.
 * Raw-size accounting happens unconditionally; statistics and stream
 * output only for non-null values.
 */
@Override
void write(Object obj) throws IOException {
    // Base class handles null bookkeeping and raw data size accounting.
    super.write(obj, RawDatasizeConst.DOUBLE_SIZE);
    if (obj != null) {
        // Extract the primitive, fold it into column statistics,
        // then serialize it to the output stream.
        double val = ((DoubleObjectInspector) inspector).get(obj);
        indexStatistics.updateDouble(val);
        SerializationUtils.writeDouble(stream, val);
    }
}
示例8: TestCopyDouble
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Tests that after copying a lazy double object, calling materialize on the original and the
 * copy doesn't advance the tree reader twice
 * @throws Exception
 */
@Test
public void TestCopyDouble() throws Exception {
    ReaderWriterProfiler.setProfilerOptions(null);
    OrcLazyDouble lazyDouble = new OrcLazyDouble(new LazyDoubleTreeReader(0, 0) {
        int nextCalls = 0;

        @Override
        public Object next(Object previous) throws IOException {
            // Fixed: the counter was never incremented, so the "called once"
            // guard below was dead code and a double read went undetected.
            if (nextCalls++ == 0) {
                return new DoubleWritable(1.0);
            }
            throw new IOException("next should only be called once");
        }

        @Override
        protected boolean seekToRow(long currentRow) throws IOException {
            return true;
        }
    });
    DoubleObjectInspector doubleOI = (DoubleObjectInspector)
        OrcLazyObjectInspectorUtils.createLazyObjectInspector(TypeInfoFactory.doubleTypeInfo);
    OrcLazyDouble lazyDouble2 = (OrcLazyDouble) doubleOI.copyObject(lazyDouble);
    // Use the delta overload: assertEquals(double, double) is deprecated in JUnit.
    Assert.assertEquals(1.0, ((DoubleWritable) lazyDouble.materialize()).get(), 0.0);
    Assert.assertEquals(1.0, ((DoubleWritable) lazyDouble2.materialize()).get(), 0.0);
}
示例9: setSafeValue
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Copies one Hive DOUBLE field into the output vector at the given index.
 *
 * @param oi             inspector for the Hive value (must be a DoubleObjectInspector)
 * @param hiveFieldValue the non-null field value to copy
 * @param outputVV       target vector (a NullableFloat8Vector)
 * @param outputIndex    row position to write
 */
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
    // Read the primitive directly; the original boxed via getPrimitiveJavaObject()
    // and then cast-unboxed the Double, which is needless allocation per row.
    final double value = ((DoubleObjectInspector) oi).get(hiveFieldValue);
    ((NullableFloat8Vector) outputVV).getMutator().setSafe(outputIndex, value);
}
示例10: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Converts the single deferred argument to a plain Java value according to
 * {@code inputType}. Returns null when the argument or its value is null.
 *
 * <p>Uses {@code valueOf} instead of the boxed-primitive constructors
 * ({@code new Integer(int)} etc.), which are deprecated since Java 9.</p>
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0] == null || arguments[0].get() == null) {
        return null;
    }
    Object input = arguments[0].get();
    switch (inputType) {
        case BOOLEAN:
            return ((BooleanObjectInspector) argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
        case BYTE:
            return Byte.valueOf(((ByteObjectInspector) argumentOI).get(input));
        case SHORT:
            return Short.valueOf(((ShortObjectInspector) argumentOI).get(input));
        case INT:
            return Integer.valueOf(((IntObjectInspector) argumentOI).get(input));
        case LONG:
            return Long.valueOf(((LongObjectInspector) argumentOI).get(input));
        case FLOAT:
            return Float.valueOf(((FloatObjectInspector) argumentOI).get(input));
        case DOUBLE:
            return Double.valueOf(((DoubleObjectInspector) argumentOI).get(input));
        case STRING:
            return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector) argumentOI);
        case BINARY:
            return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
        case VARCHAR:
            return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
        case DATE:
            return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
        case TIMESTAMP:
            return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
        case DECIMAL:
            // return type is a HiveVarchar
            HiveDecimal decimalValue =
                PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
            return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
    }
    throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
示例11: HiveDoublePrimitiveConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Creates a converter bound to the given double inspector.
 *
 * @param inspector inspector used to extract primitive double values later
 */
public HiveDoublePrimitiveConverter( final DoubleObjectInspector inspector ){
    this.inspector = inspector;
}
示例12: OrcDoublePrimitiveConverter
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Creates an ORC double converter bound to the given inspector.
 *
 * @param inspector inspector used to extract primitive double values later
 */
public OrcDoublePrimitiveConverter( final DoubleObjectInspector inspector ){
    this.inspector = inspector;
}
示例13: asDouble
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Reads the wrapped value as a primitive double.
 * NOTE(review): assumes the stored inspector {@code oi} is a
 * DoubleObjectInspector for {@code obj}; a mismatch would throw
 * ClassCastException.
 */
public double asDouble() {
    return ((DoubleObjectInspector) oi).get(obj);
}
示例14: evaluate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * Converts the single deferred argument to a plain Java value according to
 * {@code inputType}, with VARCHAR optionally widened to CHAR when
 * {@code outputType} requests it. Returns null for a null argument/value.
 *
 * <p>Uses {@code valueOf} instead of the boxed-primitive constructors
 * ({@code new Integer(int)} etc.), which are deprecated since Java 9.</p>
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0] == null || arguments[0].get() == null) {
        return null;
    }
    Object input = arguments[0].get();
    switch (inputType) {
        case BOOLEAN:
            return ((BooleanObjectInspector) argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
        case BYTE:
            return Byte.valueOf(((ByteObjectInspector) argumentOI).get(input));
        case SHORT:
            return Short.valueOf(((ShortObjectInspector) argumentOI).get(input));
        case INT:
            return Integer.valueOf(((IntObjectInspector) argumentOI).get(input));
        case LONG:
            return Long.valueOf(((LongObjectInspector) argumentOI).get(input));
        case FLOAT:
            return Float.valueOf(((FloatObjectInspector) argumentOI).get(input));
        case DOUBLE:
            return Double.valueOf(((DoubleObjectInspector) argumentOI).get(input));
        case STRING:
            return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector) argumentOI);
        case BINARY:
            return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
        case VARCHAR:
            if (outputType == PrimitiveCategory.CHAR) {
                // Widen VARCHAR input to a CHAR return when requested.
                HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
                return new HiveChar(hiveVarchar.getValue(), HiveChar.MAX_CHAR_LENGTH);
            } else {
                return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
            }
        case CHAR:
            return PrimitiveObjectInspectorUtils.getHiveChar(input, (HiveCharObjectInspector) argumentOI);
        case DATE:
            return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
        case TIMESTAMP:
            return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
        case DECIMAL:
            // return type is a HiveVarchar
            HiveDecimal decimalValue =
                PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
            return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
    }
    throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
示例15: testClassification
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector; //导入依赖的package包/类
/**
 * End-to-end classification smoke test: trains a factorization machine on a
 * synthetic, class-separable dataset (negative rows use the lower half of the
 * feature columns, positive rows the upper half) and asserts that training
 * accuracy ends above 0.95.
 */
@Test
public void testClassification() throws HiveException {
    final int ROW = 10, COL = 40;
    FactorizationMachineUDTF udtf = new FactorizationMachineUDTF();
    ListObjectInspector xOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    DoubleObjectInspector yOI = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    // Constant parameter string configures the UDTF: adaptive regularization,
    // 20 factors, classification mode, fixed seed, int features, COL dims.
    ObjectInspector paramOI = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        "-adareg -factors 20 -classification -seed 31 -iters 10 -int_feature -p " + COL);
    udtf.initialize(new ObjectInspector[] {xOI, yOI, paramOI});
    FactorizationMachineModel model = udtf.initModel(udtf._params);
    Assert.assertTrue("Actual class: " + model.getClass().getName(),
        model instanceof FMArrayModel);
    float accuracy = 0.f;
    final Random rnd = new Random(201L); // fixed seed keeps the data deterministic
    for (int numberOfIteration = 0; numberOfIteration < 10000; numberOfIteration++) {
        ArrayList<IntFeature[]> fArrayList = new ArrayList<IntFeature[]>();
        ArrayList<Double> ans = new ArrayList<Double>();
        for (int i = 0; i < ROW; i++) {
            ArrayList<IntFeature> feature = new ArrayList<IntFeature>();
            for (int j = 1; j <= COL; j++) {
                if (i < (0.5f * ROW)) {
                    // Negative-class rows: feature 1 always on, plus sparse
                    // random features from the lower half of the columns.
                    if (j == 1) {
                        feature.add(new IntFeature(j, 1.d));
                    } else if (j < 0.5 * COL) {
                        if (rnd.nextFloat() < 0.2f) {
                            feature.add(new IntFeature(j, rnd.nextDouble()));
                        }
                    }
                } else {
                    // Positive-class rows: sparse random features drawn only
                    // from the upper half of the columns.
                    if (j > 0.5f * COL) {
                        if (rnd.nextFloat() < 0.2f) {
                            feature.add(new IntFeature(j, rnd.nextDouble()));
                        }
                    }
                }
            }
            IntFeature[] x = new IntFeature[feature.size()];
            feature.toArray(x);
            fArrayList.add(x);
            final double y;
            if (i < ROW * 0.5f) {
                y = -1.0d; // first half of rows labelled negative
            } else {
                y = 1.0d;  // second half labelled positive
            }
            ans.add(y);
            udtf.process(new Object[] {toStringArray(x), y});
        }
        // Evaluate training accuracy on the batch just fed to the model;
        // labels are mapped {-1,1} -> {0,1} to compare with the 0.5 threshold.
        int bingo = 0;
        int total = fArrayList.size();
        for (int i = 0; i < total; i++) {
            double tmpAns = ans.get(i);
            if (tmpAns < 0) {
                tmpAns = 0;
            } else {
                tmpAns = 1;
            }
            double p = model.predict(fArrayList.get(i));
            int predicted = p > 0.5 ? 1 : 0;
            if (predicted == tmpAns) {
                bingo++;
            }
        }
        accuracy = bingo / (float) total;
        println("Accuracy = " + accuracy);
    }
    // Extra training passes over the buffered examples before the final check.
    udtf.runTrainingIteration(10);
    Assert.assertTrue("Expected accuracy greather than 0.95f: " + accuracy, accuracy > 0.95f);
}