

Java IntObjectInspector Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector. If you are wondering what IntObjectInspector is for, how to use it, or what real-world usage looks like, the curated examples below should help.


The IntObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector.primitive package. Fifteen code examples of the class are shown below, sorted by popularity by default.
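As a quick orientation before the examples, here is a minimal, self-contained sketch of the usual IntObjectInspector pattern in a Hive GenericUDF: validate and cast the argument inspector in initialize(), then unwrap the primitive int with get() in evaluate(). The increment_int UDF below is hypothetical and not taken from any of the projects cited; only the Hive classes it uses are real.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

// Hypothetical UDF for illustration only; class name and behavior are assumptions.
public class IncrementIntUDF extends GenericUDF {

    private IntObjectInspector intOI;

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 1 || !(arguments[0] instanceof IntObjectInspector)) {
            throw new UDFArgumentException("increment_int takes exactly one INT argument");
        }
        this.intOI = (IntObjectInspector) arguments[0];
        // The result is also an int.
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object arg = arguments[0].get();
        if (arg == null) {
            return null;
        }
        // IntObjectInspector.get() unwraps the (possibly lazy or Writable) value to a primitive int.
        return new IntWritable(intOI.get(arg) + 1);
    }

    @Override
    public String getDisplayString(String[] children) {
        return "increment_int(" + children[0] + ")";
    }
}

Registered as a temporary function, it could be called as SELECT increment_int(3), which would return 4.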

Example 1: getFeatureType

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
@Nonnull
private static FeatureType getFeatureType(@Nonnull ListObjectInspector featureListOI)
        throws UDFArgumentException {
    final ObjectInspector featureOI = featureListOI.getListElementObjectInspector();
    if (featureOI instanceof StringObjectInspector) {
        return FeatureType.STRING;
    } else if (featureOI instanceof IntObjectInspector) {
        return FeatureType.INT;
    } else if (featureOI instanceof LongObjectInspector) {
        return FeatureType.LONG;
    } else {
        throw new UDFArgumentException("Feature object inspector must be one of "
                + "[StringObjectInspector, IntObjectInspector, LongObjectInspector]: "
                + featureOI.toString());
    }
}
 
Author: apache | Project: incubator-hivemall | Lines: 17 | Source: GeneralLearnerBaseUDTF.java

Example 2: initialize

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException(
            "map_tail_n only takes 2 arguments: map<object, object>, int");
    }
    if (!(arguments[0] instanceof MapObjectInspector)) {
        throw new UDFArgumentException("The first argument must be a map");
    }
    this.mapObjectInspector = (MapObjectInspector) arguments[0];
    if (!(arguments[1] instanceof IntObjectInspector)) {
        throw new UDFArgumentException("The second argument must be an int");
    }
    this.intObjectInspector = (IntObjectInspector) arguments[1];

    ObjectInspector keyOI = ObjectInspectorUtils.getStandardObjectInspector(mapObjectInspector.getMapKeyObjectInspector());
    ObjectInspector valueOI = mapObjectInspector.getMapValueObjectInspector();

    return ObjectInspectorFactory.getStandardMapObjectInspector(keyOI, valueOI);
}
 
Author: apache | Project: incubator-hivemall | Lines: 21 | Source: MapTailNUDF.java

Example 3: createPrimitive

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
private static Writable createPrimitive(Object obj, PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }
    switch (inspector.getPrimitiveCategory()) {
        case DOUBLE:
            return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
        case FLOAT:
            return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
        case INT:
            return new IntWritable(((IntObjectInspector) inspector).get(obj));
        case LONG:
            return new LongWritable(((LongObjectInspector) inspector).get(obj));
        case STRING:
            return new Text(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj));
        case DATE:
            return ((DateObjectInspector) inspector).getPrimitiveWritableObject(obj);
        case TIMESTAMP:
            return ((TimestampObjectInspector) inspector).getPrimitiveWritableObject(obj);
        default:
            throw new SerDeException("Can't serialize primitive : " + inspector.getPrimitiveCategory());
    }
}
 
Author: shunfei | Project: indexr | Lines: 25 | Source: IndexRSerde.java

Example 4: getHiveBucket

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
public static int getHiveBucket(List<Entry<ObjectInspector, Object>> columnBindings, int bucketCount)
        throws HiveException
{
    GenericUDFHash udf = new GenericUDFHash();
    ObjectInspector[] objectInspectors = new ObjectInspector[columnBindings.size()];
    GenericUDF.DeferredObject[] deferredObjects = new GenericUDF.DeferredObject[columnBindings.size()];

    int i = 0;
    for (Entry<ObjectInspector, Object> entry : columnBindings) {
        objectInspectors[i] = entry.getKey();
        deferredObjects[i] = new GenericUDF.DeferredJavaObject(entry.getValue());
        i++;
    }

    ObjectInspector udfInspector = udf.initialize(objectInspectors);
    IntObjectInspector inspector = checkType(udfInspector, IntObjectInspector.class, "udfInspector");

    Object result = udf.evaluate(deferredObjects);
    HiveKey hiveKey = new HiveKey();
    hiveKey.setHashCode(inspector.get(result));

    return new DefaultHivePartitioner<>().getBucket(hiveKey, null, bucketCount);
}
 
Author: y-lan | Project: presto | Lines: 24 | Source: TestHiveBucketing.java
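Examples 4 and 9 call a checkType helper that is not reproduced on this page. A minimal sketch of what such a checked cast could look like follows; this is a hypothetical stand-in, not the actual Presto utility, whose signature and error handling may differ.

// Hypothetical helper: cast an object to the requested type with a descriptive error,
// e.g. checkType(udfInspector, IntObjectInspector.class, "udfInspector").
public static <T> T checkType(Object value, Class<T> target, String name) {
    if (value == null) {
        throw new NullPointerException(name + " is null");
    }
    if (!target.isInstance(value)) {
        throw new IllegalArgumentException(name + " must be of type "
            + target.getSimpleName() + ", found " + value.getClass().getSimpleName());
    }
    return target.cast(value);
}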

Example 5: get

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
public static IHivePrimitiveConverter get( final ObjectInspector objectInspector ){

    switch( objectInspector.getCategory() ){
      case PRIMITIVE:
        PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector)objectInspector;
        switch( primitiveInspector.getPrimitiveCategory() ){
          case BINARY:
            return new HiveBytesPrimitiveConverter( (BinaryObjectInspector)objectInspector );
          case BOOLEAN:
            return new HiveBooleanPrimitiveConverter( (BooleanObjectInspector)objectInspector );
          case BYTE:
            return new HiveBytePrimitiveConverter( (ByteObjectInspector)objectInspector );
          case DOUBLE:
            return new HiveDoublePrimitiveConverter( (DoubleObjectInspector)objectInspector );
          case FLOAT:
            return new HiveFloatPrimitiveConverter( (FloatObjectInspector)objectInspector );
          case INT:
            return new HiveIntegerPrimitiveConverter( (IntObjectInspector)objectInspector );
          case LONG:
            return new HiveLongPrimitiveConverter( (LongObjectInspector)objectInspector );
          case SHORT:
            return new HiveShortPrimitiveConverter( (ShortObjectInspector)objectInspector );
          case STRING:
            return new HiveStringPrimitiveConverter( (StringObjectInspector)objectInspector );
          case DATE:
          case TIMESTAMP:
          case VOID:
          case UNKNOWN:
          default:
            return new HiveDefaultPrimitiveConverter();
        }
      default :
        return new HiveDefaultPrimitiveConverter();
    }
  }
 
Author: yahoojapan | Project: dataplatform-schema-lib | Lines: 36 | Source: HivePrimitiveConverterFactory.java

Example 6: getArg

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
private long getArg(int i, Object[] args) {
  if (inputOIs[i] instanceof LongObjectInspector) {
    return (Long) ((LongObjectInspector) inputOIs[i])
        .getPrimitiveJavaObject(args[i]);
  } else {
    return (Integer) ((IntObjectInspector) inputOIs[i])
        .getPrimitiveJavaObject(args[i]);
  }
}
 
Author: t3rmin4t0r | Project: hive-faker | Lines: 10 | Source: GenerateSeriesUDTF.java

Example 7: asIntLabel

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
private static int asIntLabel(@Nonnull final Object o,
        @Nonnull final IntObjectInspector intOI) throws UDFArgumentException {
    final int value = intOI.get(o);
    switch (value) {
        case 1:
            return 1;
        case 0:
        case -1:
            return 0;
        default:
            throw new UDFArgumentException("Int label must be 1, 0 or -1: " + value);
    }
}
 
Author: apache | Project: incubator-hivemall | Lines: 14 | Source: FMeasureUDAF.java

Example 8: asIntOI

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
@Nonnull
public static IntObjectInspector asIntOI(@Nonnull final ObjectInspector argOI)
        throws UDFArgumentException {
    if (!INT_TYPE_NAME.equals(argOI.getTypeName())) {
        throw new UDFArgumentException("Argument type must be INT: " + argOI.getTypeName());
    }
    return (IntObjectInspector) argOI;
}
 
Author: apache | Project: incubator-hivemall | Lines: 9 | Source: HiveUtils.java

Example 9: getHiveBucket

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
public static Optional<HiveBucket> getHiveBucket(List<Entry<ObjectInspector, Object>> columnBindings, int bucketCount)
{
    try {
        @SuppressWarnings("resource")
        GenericUDFHash udf = new GenericUDFHash();
        ObjectInspector[] objectInspectors = new ObjectInspector[columnBindings.size()];
        DeferredObject[] deferredObjects = new DeferredObject[columnBindings.size()];

        int i = 0;
        for (Entry<ObjectInspector, Object> entry : columnBindings) {
            objectInspectors[i] = getJavaObjectInspector(entry.getKey());
            deferredObjects[i] = getJavaDeferredObject(entry.getValue(), entry.getKey());
            i++;
        }

        ObjectInspector udfInspector = udf.initialize(objectInspectors);
        IntObjectInspector inspector = checkType(udfInspector, IntObjectInspector.class, "udfInspector");

        Object result = udf.evaluate(deferredObjects);
        HiveKey hiveKey = new HiveKey();
        hiveKey.setHashCode(inspector.get(result));

        int bucketNumber = new DefaultHivePartitioner<>().getBucket(hiveKey, null, bucketCount);

        return Optional.of(new HiveBucket(bucketNumber, bucketCount));
    }
    catch (HiveException e) {
        log.debug(e, "Error evaluating bucket number");
        return Optional.empty();
    }
}
 
Author: y-lan | Project: presto | Lines: 32 | Source: HiveBucketing.java

Example 10: createPrimitive

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
private Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
    throws SerDeException {
  if (obj == null) {
    return null;
  }
  switch (inspector.getPrimitiveCategory()) {
  case VOID:
    return null;
  case BOOLEAN:
    return new BooleanWritable(((BooleanObjectInspector) inspector).get(obj) ? Boolean.TRUE : Boolean.FALSE);
  case BYTE:
    return new ByteWritable((byte) ((ByteObjectInspector) inspector).get(obj));
  case DOUBLE:
    return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
  case FLOAT:
    return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
  case INT:
    return new IntWritable(((IntObjectInspector) inspector).get(obj));
  case LONG:
    return new LongWritable(((LongObjectInspector) inspector).get(obj));
  case SHORT:
    return new ShortWritable((short) ((ShortObjectInspector) inspector).get(obj));
  case STRING:
    return new BinaryWritable(Binary.fromString(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj)));
  default:
    throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
  }
}
 
Author: apache | Project: parquet-mr | Lines: 29 | Source: ParquetHiveSerDe.java

Example 11: write

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
@Override
void write(Object obj) throws IOException {
  if (obj != null) {
    switch (inspector.getCategory()) {
      case PRIMITIVE:
        switch (((PrimitiveObjectInspector) inspector).getPrimitiveCategory()) {
          case SHORT:
            buffer[bufferIndex++] = new Long(((ShortObjectInspector) inspector).get(obj));
            setRawDataSize(RawDatasizeConst.SHORT_SIZE);
            break;
          case INT:
            buffer[bufferIndex++] = new Long(((IntObjectInspector) inspector).get(obj));
            setRawDataSize(RawDatasizeConst.INT_SIZE);
            break;
          case LONG:
            buffer[bufferIndex++] = new Long(((LongObjectInspector) inspector).get(obj));
            setRawDataSize(RawDatasizeConst.LONG_SIZE);
            break;
          default:
            throw new IllegalArgumentException("Bad Category: Dictionary Encoding not available for " +
                ((PrimitiveObjectInspector) inspector).getPrimitiveCategory());
        }
        break;
      default:
        throw new IllegalArgumentException("Bad Category: DictionaryEncoding not available for " + inspector.getCategory());
    }
    // Increment the total memory for the buffered long
    memoryEstimate.incrementTotalMemory(RawDatasizeConst.LONG_SIZE);
    bufferedBytes += RawDatasizeConst.LONG_SIZE;
  } else {
    buffer[bufferIndex++] = null;
    setRawDataSize(RawDatasizeConst.NULL_SIZE);
  }
  if (bufferIndex == buffer.length) {
    flush();
  }
}
 
Author: facebookarchive | Project: hive-dwrf | Lines: 38 | Source: WriterImpl.java

Example 12: setSafeValue

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
  final int value = (int) ((IntObjectInspector)oi).getPrimitiveJavaObject(hiveFieldValue);
  ((NullableIntVector) outputVV).getMutator().setSafe(outputIndex, value);
}
 
Author: skhalifa | Project: QDrill | Lines: 6 | Source: HiveFieldConverter.java

Example 13: evaluate

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (arguments[0] == null || arguments[0].get() == null) {
    return null;
  }

  Object input = arguments[0].get();
  switch(inputType) {
    case BOOLEAN:
      return ((BooleanObjectInspector)argumentOI).get(input) ? Boolean.TRUE : Boolean.FALSE;
    case BYTE:
      return new Byte(((ByteObjectInspector)argumentOI).get(input));
    case SHORT:
      return new Short(((ShortObjectInspector)argumentOI).get(input));
    case INT:
      return new Integer(((IntObjectInspector)argumentOI).get(input));
    case LONG:
      return new Long(((LongObjectInspector)argumentOI).get(input));
    case FLOAT:
      return new Float(((FloatObjectInspector)argumentOI).get(input));
    case DOUBLE:
      return new Double(((DoubleObjectInspector)argumentOI).get(input));
    case STRING:
      return PrimitiveObjectInspectorUtils.getString(input, (StringObjectInspector)argumentOI);
    case BINARY:
      return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
    case VARCHAR:
      return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector)argumentOI);
    case DATE:
      return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
    case TIMESTAMP:
      return PrimitiveObjectInspectorUtils.getTimestamp(input, (TimestampObjectInspector) argumentOI);
    case DECIMAL:
      // return type is a HiveVarchar
      HiveDecimal decimalValue =
          PrimitiveObjectInspectorUtils.getHiveDecimal(input, (HiveDecimalObjectInspector) argumentOI);
      return new HiveVarchar(decimalValue.toString(), HiveVarchar.MAX_VARCHAR_LENGTH);
  }

  throw new UnsupportedOperationException(String.format("Unexpected input type '%s' in Test UDF", inputType));
}
 
Author: skhalifa | Project: QDrill | Lines: 42 | Source: HiveTestUDFImpls.java

Example 14: HiveIntegerPrimitiveConverter

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
public HiveIntegerPrimitiveConverter( final IntObjectInspector inspector ){
  this.inspector = inspector;
}
 
Author: yahoojapan | Project: dataplatform-schema-lib | Lines: 4 | Source: HiveIntegerPrimitiveConverter.java

Example 15: OrcIntegerPrimitiveConverter

import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; // import the required package/class
public OrcIntegerPrimitiveConverter( final IntObjectInspector inspector ){
  this.inspector = inspector;
}
 
Author: yahoojapan | Project: dataplatform-schema-lib | Lines: 4 | Source: OrcIntegerPrimitiveConverter.java


Note: the org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their authors; copyright remains with the original authors, and any further use or distribution should follow the license of the corresponding project.