

Java FloatObjectInspector Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector. If you are wondering what FloatObjectInspector is for, how to use it, or what real code that uses it looks like, the curated class code examples below should help.


The FloatObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector.primitive package. Thirteen code examples of the class are shown below, ordered by popularity by default.
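Before the examples, here is a minimal, self-contained sketch of the pattern they all share: obtain a FloatObjectInspector and call get(...) to extract the primitive float from a wrapped value. It uses the standard PrimitiveObjectInspectorFactory from Hive's serde2 package; the class name FloatInspectorSketch and the main method are illustrative and not taken from any of the projects listed below.

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.FloatWritable;

public class FloatInspectorSketch {
    public static void main(String[] args) {
        // Obtain the standard inspector for float values backed by FloatWritable.
        FloatObjectInspector floatOI =
                PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
        // get(...) unwraps the underlying primitive float from the Writable object.
        float value = floatOI.get(new FloatWritable(3.14f));
        System.out.println(value); // prints 3.14
    }
}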

Example 1: createPrimitive

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
private static Writable createPrimitive(Object obj, PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }
    switch (inspector.getPrimitiveCategory()) {
        case DOUBLE:
            return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
        case FLOAT:
            return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
        case INT:
            return new IntWritable(((IntObjectInspector) inspector).get(obj));
        case LONG:
            return new LongWritable(((LongObjectInspector) inspector).get(obj));
        case STRING:
            return new Text(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj));
        case DATE:
            return ((DateObjectInspector) inspector).getPrimitiveWritableObject(obj);
        case TIMESTAMP:
            return ((TimestampObjectInspector) inspector).getPrimitiveWritableObject(obj);
        default:
            throw new SerDeException("Can't serialize primitive : " + inspector.getPrimitiveCategory());
    }
}
 
Developer: shunfei, Project: indexr, Lines of code: 25, Source file: IndexRSerde.java

Example 2: get

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
public static IHivePrimitiveConverter get( final ObjectInspector objectInspector ){

    switch( objectInspector.getCategory() ){
      case PRIMITIVE:
        PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector)objectInspector;
        switch( primitiveInspector.getPrimitiveCategory() ){
          case BINARY:
            return new HiveBytesPrimitiveConverter( (BinaryObjectInspector)objectInspector );
          case BOOLEAN:
            return new HiveBooleanPrimitiveConverter( (BooleanObjectInspector)objectInspector );
          case BYTE:
            return new HiveBytePrimitiveConverter( (ByteObjectInspector)objectInspector );
          case DOUBLE:
            return new HiveDoublePrimitiveConverter( (DoubleObjectInspector)objectInspector );
          case FLOAT:
            return new HiveFloatPrimitiveConverter( (FloatObjectInspector)objectInspector );
          case INT:
            return new HiveIntegerPrimitiveConverter( (IntObjectInspector)objectInspector );
          case LONG:
            return new HiveLongPrimitiveConverter( (LongObjectInspector)objectInspector );
          case SHORT:
            return new HiveShortPrimitiveConverter( (ShortObjectInspector)objectInspector );
          case STRING:
            return new HiveStringPrimitiveConverter( (StringObjectInspector)objectInspector );
          case DATE:
          case TIMESTAMP:
          case VOID:
          case UNKNOWN:
          default:
            return new HiveDefaultPrimitiveConverter();
        }
      default :
        return new HiveDefaultPrimitiveConverter();
    }
  }
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 36, Source file: HivePrimitiveConverterFactory.java

Example 3: createPrimitive

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
private Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
    throws SerDeException {
  if (obj == null) {
    return null;
  }
  switch (inspector.getPrimitiveCategory()) {
  case VOID:
    return null;
  case BOOLEAN:
    return new BooleanWritable(((BooleanObjectInspector) inspector).get(obj) ? Boolean.TRUE : Boolean.FALSE);
  case BYTE:
    return new ByteWritable((byte) ((ByteObjectInspector) inspector).get(obj));
  case DOUBLE:
    return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
  case FLOAT:
    return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
  case INT:
    return new IntWritable(((IntObjectInspector) inspector).get(obj));
  case LONG:
    return new LongWritable(((LongObjectInspector) inspector).get(obj));
  case SHORT:
    return new ShortWritable((short) ((ShortObjectInspector) inspector).get(obj));
  case STRING:
    return new BinaryWritable(Binary.fromString(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj)));
  default:
    throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
  }
}
 
Developer: apache, Project: parquet-mr, Lines of code: 29, Source file: ParquetHiveSerDe.java

Example 4: write

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
@Override
void write(Object obj) throws IOException {
  super.write(obj, RawDatasizeConst.FLOAT_SIZE);
  if (obj != null) {
    float val = ((FloatObjectInspector) inspector).get(obj);
    indexStatistics.updateDouble(val);
    SerializationUtils.writeFloat(stream, val);
  }
}
 
Developer: facebookarchive, Project: hive-dwrf, Lines of code: 10, Source file: WriterImpl.java

Example 5: TestCopyFloat

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
/**
 * Tests that after copying a lazy float object, calling materialize on the original and the
 * copy doesn't advance the tree reader twice
 * @throws Exception
 */
@Test
public void TestCopyFloat() throws Exception {
  ReaderWriterProfiler.setProfilerOptions(null);
  OrcLazyFloat lazyFloat = new OrcLazyFloat(new LazyFloatTreeReader(0, 0) {
    int nextCalls = 0;

    @Override
    public Object next(Object previous) throws IOException {
      if (nextCalls == 0) {
        nextCalls++; // advance the counter so a second call fails the test
        return new FloatWritable(1.0f);
      }

      throw new IOException("next should only be called once");
    }

    @Override
    protected boolean seekToRow(long currentRow) throws IOException {
      return true;
    }
  });

  FloatObjectInspector floatOI = (FloatObjectInspector)
      OrcLazyObjectInspectorUtils.createLazyObjectInspector(TypeInfoFactory.floatTypeInfo);

  OrcLazyFloat lazyFloat2 = (OrcLazyFloat) floatOI.copyObject(lazyFloat);

  Assert.assertEquals(1.0f, ((FloatWritable) lazyFloat.materialize()).get());
  Assert.assertEquals(1.0f, ((FloatWritable) lazyFloat2.materialize()).get());
}
 
Developer: facebookarchive, Project: hive-dwrf, Lines of code: 35, Source file: TestObjectInspector.java

Example 6: setSafeValue

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
  final float value = (float) ((FloatObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue);
  ((NullableFloat4Vector) outputVV).getMutator().setSafe(outputIndex, value);
}
 
Developer: skhalifa, Project: QDrill, Lines of code: 6, Source file: HiveFieldConverter.java

Example 7: evaluate

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null || arguments[0].get() == null) {
    return null;
  }

  Object input = arguments[0].get();
  switch(inputType) {
    case BOOLEAN:
      return ((BooleanObjectInspector)argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
    case BYTE:
      return new Byte(((ByteObjectInspector)argumentOI).get(input));
    case SHORT:
      return new Short(((ShortObjectInspector)argumentOI).get(input));
    case INT:
      return new Integer(((IntObjectInspector)argumentOI).get(input));
    case LONG:
      return new Long(((LongObjectInspector)argumentOI).get(input));
    case FLOAT:
      return new Float(((FloatObjectInspector)argumentOI).get(input));
    case DOUBLE:
      return new Double(((DoubleObjectInspector)argumentOI).get(input));
    case STRING:
      return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector)argumentOI);
    case BINARY:
      return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
    case VARCHAR:
      return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector)argumentOI);
    case DATE:
      return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
    case TIMESTAMP:
      return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
    case DECIMAL:
      // return type is a HiveVarchar
      HiveDecimal decimalValue =
          PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
      return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
  }

  throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
 
Developer: skhalifa, Project: QDrill, Lines of code: 42, Source file: HiveTestUDFImpls.java

Example 8: HiveFloatPrimitiveConverter

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
public HiveFloatPrimitiveConverter( final FloatObjectInspector inspector ){
  this.inspector = inspector;
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 4, Source file: HiveFloatPrimitiveConverter.java

Example 9: OrcFloatPrimitiveConverter

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
public OrcFloatPrimitiveConverter( final FloatObjectInspector inspector ){
  this.inspector = inspector;
}
 
Developer: yahoojapan, Project: dataplatform-schema-lib, Lines of code: 4, Source file: OrcFloatPrimitiveConverter.java

Example 10: asFloat

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
public float asFloat() {
	return ((FloatObjectInspector) oi).get(obj);
}
 
Developer: CyberAgent, Project: hive-jq-udtf, Lines of code: 4, Source file: HivePath.java

Example 11: evaluate

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null || arguments[0].get() == null) {
    return null;
  }

  Object input = arguments[0].get();
  switch(inputType) {
    case BOOLEAN:
      return ((BooleanObjectInspector)argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
    case BYTE:
      return new Byte(((ByteObjectInspector)argumentOI).get(input));
    case SHORT:
      return new Short(((ShortObjectInspector)argumentOI).get(input));
    case INT:
      return new Integer(((IntObjectInspector)argumentOI).get(input));
    case LONG:
      return new Long(((LongObjectInspector)argumentOI).get(input));
    case FLOAT:
      return new Float(((FloatObjectInspector)argumentOI).get(input));
    case DOUBLE:
      return new Double(((DoubleObjectInspector)argumentOI).get(input));
    case STRING:
      return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector)argumentOI);
    case BINARY:
      return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
    case VARCHAR:
      if (outputType == PrimitiveCategory.CHAR) {
        HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector) argumentOI);
        return new HiveChar(hiveVarchar.getValue(), HiveChar.MAX_CHAR_LENGTH);
      } else {
        return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector)argumentOI);
      }
    case CHAR:
      return PrimitiveObjectInspectorUtils.getHiveChar(input, (HiveCharObjectInspector) argumentOI);
    case DATE:
      return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
    case TIMESTAMP:
      return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
    case DECIMAL:
      // return type is a HiveVarchar
      HiveDecimal decimalValue =
          PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
      return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
  }

  throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
 
Developer: dremio, Project: dremio-oss, Lines of code: 49, Source file: HiveTestUDFImpls.java

Example 12: loadPredictionModel

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
private long loadPredictionModel(Map<Object, PredictionModel> label2model, File file,
        PrimitiveObjectInspector labelOI, PrimitiveObjectInspector featureOI,
        WritableFloatObjectInspector weightOI) throws IOException, SerDeException {
    long count = 0L;
    if (!file.exists()) {
        return count;
    }
    if (!file.getName().endsWith(".crc")) {
        if (file.isDirectory()) {
            for (File f : file.listFiles()) {
                count += loadPredictionModel(label2model, f, labelOI, featureOI, weightOI);
            }
        } else {
            LazySimpleSerDe serde = HiveUtils.getLineSerde(labelOI, featureOI, weightOI);
            StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
            StructField c1ref = lineOI.getStructFieldRef("c1");
            StructField c2ref = lineOI.getStructFieldRef("c2");
            StructField c3ref = lineOI.getStructFieldRef("c3");
            PrimitiveObjectInspector c1refOI = (PrimitiveObjectInspector) c1ref.getFieldObjectInspector();
            PrimitiveObjectInspector c2refOI = (PrimitiveObjectInspector) c2ref.getFieldObjectInspector();
            FloatObjectInspector c3refOI = (FloatObjectInspector) c3ref.getFieldObjectInspector();

            BufferedReader reader = null;
            try {
                reader = HadoopUtils.getBufferedReader(file);
                String line;
                while ((line = reader.readLine()) != null) {
                    count++;
                    Text lineText = new Text(line);
                    Object lineObj = serde.deserialize(lineText);
                    List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
                    Object f0 = fields.get(0);
                    Object f1 = fields.get(1);
                    Object f2 = fields.get(2);
                    if (f0 == null || f1 == null || f2 == null) {
                        continue; // avoid the case that key or value is null
                    }
                    Object label = c1refOI.getPrimitiveWritableObject(c1refOI.copyObject(f0));
                    PredictionModel model = label2model.get(label);
                    if (model == null) {
                        model = createModel();
                        label2model.put(label, model);
                    }
                    Object k = c2refOI.getPrimitiveWritableObject(c2refOI.copyObject(f1));
                    float v = c3refOI.get(f2);
                    model.set(k, new WeightValue(v, false));
                }
            } finally {
                IOUtils.closeQuietly(reader);
            }
        }
    }
    return count;
}
 
Developer: apache, Project: incubator-hivemall, Lines of code: 55, Source file: MulticlassOnlineClassifierUDTF.java

Example 13: serializePrimitive

import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; // import the required package/class
private static void serializePrimitive(BlockBuilder builder, Object object, PrimitiveObjectInspector inspector)
{
    requireNonNull(builder, "parent builder is null");

    if (object == null) {
        builder.appendNull();
        return;
    }

    switch (inspector.getPrimitiveCategory()) {
        case BOOLEAN:
            BooleanType.BOOLEAN.writeBoolean(builder, ((BooleanObjectInspector) inspector).get(object));
            return;
        case BYTE:
            BigintType.BIGINT.writeLong(builder, ((ByteObjectInspector) inspector).get(object));
            return;
        case SHORT:
            BigintType.BIGINT.writeLong(builder, ((ShortObjectInspector) inspector).get(object));
            return;
        case INT:
            BigintType.BIGINT.writeLong(builder, ((IntObjectInspector) inspector).get(object));
            return;
        case LONG:
            BigintType.BIGINT.writeLong(builder, ((LongObjectInspector) inspector).get(object));
            return;
        case FLOAT:
            DoubleType.DOUBLE.writeDouble(builder, ((FloatObjectInspector) inspector).get(object));
            return;
        case DOUBLE:
            DoubleType.DOUBLE.writeDouble(builder, ((DoubleObjectInspector) inspector).get(object));
            return;
        case STRING:
            VarcharType.VARCHAR.writeSlice(builder, Slices.utf8Slice(((StringObjectInspector) inspector).getPrimitiveJavaObject(object)));
            return;
        case DATE:
            DateType.DATE.writeLong(builder, formatDateAsLong(object, (DateObjectInspector) inspector));
            return;
        case TIMESTAMP:
            TimestampType.TIMESTAMP.writeLong(builder, formatTimestampAsLong(object, (TimestampObjectInspector) inspector));
            return;
        case BINARY:
            VARBINARY.writeSlice(builder, Slices.wrappedBuffer(((BinaryObjectInspector) inspector).getPrimitiveJavaObject(object)));
            return;
    }
    throw new RuntimeException("Unknown primitive type: " + inspector.getPrimitiveCategory());
}
 
Developer: y-lan, Project: presto, Lines of code: 47, Source file: SerDeUtils.java


Note: The org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their authors, who retain the copyright of the source code; before distributing or reusing them, please refer to the License of the corresponding project. Do not reproduce this article without permission.