

Java HiveDecimal Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.common.type.HiveDecimal. If you are wondering what the HiveDecimal class is for, how to use it, or where to find usage examples, the curated code examples below may help.


The HiveDecimal class belongs to the org.apache.hadoop.hive.common.type package. A total of 15 HiveDecimal code examples are shown below, sorted by popularity by default.
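
Before the examples, here is a minimal stand-alone sketch, not taken from any of the projects below, showing the two operations used throughout these examples: creating a value with HiveDecimal.create and converting it back with bigDecimalValue. It assumes only that the hive-common library is on the classpath; the HiveDecimalQuickStart class name is illustrative.

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class HiveDecimalQuickStart {
  public static void main(String[] args) {
    // Create a HiveDecimal from a BigDecimal or from a String literal.
    HiveDecimal fromBigDecimal = HiveDecimal.create(new BigDecimal("1234.56"));
    HiveDecimal fromString = HiveDecimal.create("0.1");

    // Convert back to java.math.BigDecimal for use with standard Java APIs.
    BigDecimal asBigDecimal = fromBigDecimal.bigDecimalValue();

    System.out.println(fromBigDecimal + " " + fromString + " " + asBigDecimal.toPlainString());
  }
}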

Example 1: convertDecimalTypes

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
private Object convertDecimalTypes(Object val, String javaColType) {
  HiveDecimal hd = (HiveDecimal) val;
  BigDecimal bd = hd.bigDecimalValue();

  if (javaColType.equals(BIG_DECIMAL_TYPE)) {
    return bd;
  } else if (javaColType.equals(STRING_TYPE)) {
    String bdStr = null;
    if (bigDecimalFormatString) {
      bdStr = bd.toPlainString();
    } else {
      bdStr = bd.toString();
    }
    return bdStr;
  }
  return null;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 18, Source: SqoopHCatExportHelper.java

Example 2: testNumberTypes

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
public void testNumberTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "numeric(18,2)", Types.NUMERIC, HCatFieldSchema.Type.STRING, 0, 0,
      "1000", new BigDecimal("1000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.STRING, 0, 0,
      "2000", new BigDecimal("2000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
        "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.DECIMAL, 18, 2,
        HiveDecimal.create(new BigDecimal("2000")),
        new BigDecimal("2000"), KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 20, Source: HCatalogImportTest.java

Example 3: testNumberTypes

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
public void testNumberTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "numeric(18,2)", Types.NUMERIC, HCatFieldSchema.Type.STRING, 0, 0,
      "1000", new BigDecimal("1000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.STRING, 0, 0,
      "2000", new BigDecimal("2000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.DECIMAL, 18, 2,
      HiveDecimal.create(new BigDecimal("2000")),
      new BigDecimal("2000"), KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 19, Source: HCatalogExportTest.java

Example 4: process

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
@Override
public void process(WayContainer container) {
    DecimalColumnVector lat = (DecimalColumnVector) batch.cols[3];
    DecimalColumnVector lon = (DecimalColumnVector) batch.cols[4];
    ListColumnVector nds = (ListColumnVector) batch.cols[5];

    checkLimit();
    addCommonProperties(container);

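    // Ways carry no coordinates of their own, so the lat/lon decimal columns are explicitly nulled for this row.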
    lat.isNull[row] = true;
    lon.isNull[row] = true;
    lat.set(row, (HiveDecimal) null);
    lon.set(row, (HiveDecimal) null);

    Way way = container.getEntity();

    nds.lengths[row] = way.getWayNodes().size();
    nds.childCount += nds.lengths[row];
    nds.child.ensureSize(nds.childCount, nds.offsets[row] != 0);

    for (int j = 0; j < way.getWayNodes().size(); j++) {
        StructColumnVector ndsStruct = (StructColumnVector) nds.child;

        ((LongColumnVector) ndsStruct.fields[0]).vector[(int) nds.offsets[row] + j] = way.getWayNodes().get(j).getNodeId();
    }
}
 
Developer: mojodna, Project: osm2orc, Lines: 27, Source: OrcWriter.java

Example 5: getJavaObjectFromPrimitiveData

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
private static Object getJavaObjectFromPrimitiveData(Object data, ObjectInspector objInsp) {
    assert(objInsp.getCategory() == Category.PRIMITIVE);
    if (data == null) {
        return null;
    }
    if (data instanceof BytesWritable && objInsp instanceof WritableHiveDecimalObjectInspector) {
        // BytesWritable cannot be directly cast to HiveDecimalWritable
        WritableHiveDecimalObjectInspector oi = (WritableHiveDecimalObjectInspector) objInsp;
        data = oi.create(((BytesWritable) data).getBytes(), oi.scale());
    }
    Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
    if (obj instanceof HiveDecimal) {
        obj = ((HiveDecimal) obj).bigDecimalValue();
    } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
        obj = obj.toString();
    } else if (obj instanceof byte[]) {
        obj = Hex.encodeHexString((byte[]) obj);
    }
    return obj;
}
 
Developer: EXASOL, Project: hadoop-etl-udfs, Lines: 21, Source: HdfsSerDeImportService.java

Example 6: getJavaObjectFromFieldData

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
private static Object getJavaObjectFromFieldData(Object data, ObjectInspector objInsp) {
    if (data == null) {
        return null;
    }
    if (objInsp.getCategory() == Category.PRIMITIVE) {
        Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
        if (obj instanceof HiveDecimal) {
            obj = ((HiveDecimal) obj).bigDecimalValue();
        } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
            obj = obj.toString();
        } else if (obj instanceof byte[]) {
            obj = Hex.encodeHexString((byte[]) obj);
        }
        return obj;
    } else if (objInsp.getCategory() == Category.LIST) {
        return getJsonArrayFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
    } else {
        return getJsonObjectFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
    }
}
 
Developer: EXASOL, Project: hadoop-etl-udfs, Lines: 21, Source: HdfsSerDeImportService.java

Example 7: writeDecimal

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
@Test
public void writeDecimal() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add(HiveDecimal.create(new BigDecimal("1.23")));
  values.add(new BigDecimal("2.34"));
  values.add("3.45");
  values.add(null);

  write(TypeInfoFactory.getDecimalTypeInfo(2, 1), values);
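  // With decimal(2,1) the values written above are limited to one decimal place, so they are
  // rounded when read back, as the assertions below verify (e.g. 3.45 becomes 3.5).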

  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveDecimal) reader.next().get(0)).bigDecimalValue(), is(new BigDecimal("1.2")));

    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveDecimal) reader.next().get(0)).bigDecimalValue(), is(new BigDecimal("2.3")));

    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveDecimal) reader.next().get(0)).bigDecimalValue(), is(new BigDecimal("3.5")));

    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));

    assertThat(reader.hasNext(), is(false));
  }
}
 
Developer: HotelsDotCom, Project: corc, Lines: 27, Source: OrcFileTest.java

Example 8: readDecimalPredicatePushdown

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
@Test
public void readDecimalPredicatePushdown() throws IOException {
  TypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(2, 1);

  try (OrcWriter writer = getOrcWriter(typeInfo)) {
    writer.addRow(HiveDecimal.create("0.0"));
    writer.addRow(HiveDecimal.create("0.1"));
  }

  StructTypeInfo structTypeInfo = new StructTypeInfoBuilder().add("a", typeInfo).build();

  SearchArgument searchArgument = SearchArgumentFactory
      .newBuilder()
      .startAnd()
      .equals("a", new BigDecimal("0.1"))
      .end()
      .build();

  OrcFile orcFile = OrcFile.source().columns(structTypeInfo).schemaFromFile().searchArgument(searchArgument).build();
  Tap<?, ?, ?> tap = new Hfs(orcFile, path);

  List<Tuple> list = Plunger.readDataFromTap(tap).asTupleList();

  assertThat(list.size(), is(1));
  assertThat(list.get(0).getObject(0), is((Object) new BigDecimal("0.1")));
}
 
Developer: HotelsDotCom, Project: corc, Lines: 27, Source: OrcFileTest.java

Example 9: getPrimitiveJavaObject

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
@Override
public HiveDecimal getPrimitiveJavaObject(Object o){
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableDecimal38SparseHolder h = (NullableDecimal38SparseHolder) o;
<#else>
  final Decimal38SparseHolder h = (Decimal38SparseHolder) o;
</#if>
  return HiveDecimal.create(DecimalUtility.getBigDecimalFromSparse(h.buffer, h.start, h.nDecimalDigits, h.scale));
}
 
Developer: skhalifa, Project: QDrill, Lines: 13, Source: ObjectInspectors.java

Example 10: convertStringTypes

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      str = FieldFormatter.hiveStringReplaceDelims(str,
        hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      HiveChar hc = new HiveChar(val.toString(), cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 29, Source: SqoopHCatImportHelper.java

Example 11: getPrimitiveJavaObject

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
@Override
public HiveDecimal getPrimitiveJavaObject(Object o){
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableDecimalHolder h = (NullableDecimalHolder) o;
<#else>
  final DecimalHolder h = (DecimalHolder) o;
</#if>
  return HiveDecimal.create(DecimalUtility.getBigDecimalFromArrowBuf(h.buffer, h.start, h.scale));
}
 
Developer: dremio, Project: dremio-oss, Lines: 13, Source: ObjectInspectors.java

Example 12: toComparable

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
static Comparable<?> toComparable(PrimitiveCategory category, Object literal) {
  String stringLiteral;
  switch (category) {
  case STRING:
    return new Text((String) literal);
  case BOOLEAN:
    return new BooleanWritable((Boolean) literal);
  case BYTE:
    return new ByteWritable(((Long) literal).byteValue());
  case SHORT:
    return new ShortWritable(((Long) literal).shortValue());
  case INT:
    return new IntWritable(((Long) literal).intValue());
  case LONG:
    return new LongWritable((Long) literal);
  case FLOAT:
    return new FloatWritable(((Double) literal).floatValue());
  case DOUBLE:
    return new DoubleWritable((Double) literal);
  case TIMESTAMP:
    return new TimestampWritable((Timestamp) literal);
  case DATE:
    return (DateWritable) literal;
  case CHAR:
    stringLiteral = (String) literal;
    return new HiveCharWritable(new HiveChar(stringLiteral, stringLiteral.length()));
  case VARCHAR:
    stringLiteral = (String) literal;
    return new HiveVarcharWritable(new HiveVarchar(stringLiteral, stringLiteral.length()));
  case DECIMAL:
    return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
  default:
    throw new IllegalArgumentException("Unsupported category: " + category);
  }
}
 
Developer: HotelsDotCom, Project: corc, Lines: 36, Source: EvaluatorFactory.java

Example 13: toWritableObjectInternal

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
@Override
protected Object toWritableObjectInternal(Object value) throws UnexpectedTypeException {
  if (value instanceof String) {
    value = HiveDecimal.create((String) value);
  } else if (value instanceof BigDecimal) {
    value = HiveDecimal.create((BigDecimal) value);
  }
  return new HiveDecimalWritable((HiveDecimal) value);
}
 
Developer: HotelsDotCom, Project: corc, Lines: 10, Source: CascadingConverterFactory.java

Example 14: writeOrcFile

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
private void writeOrcFile() throws IOException {
  Path path = new Path(temporaryFolder.getRoot().getCanonicalPath(), "part-00000");
  List<Object> struct = new ArrayList<>(structTypeInfo.getAllStructFieldNames().size());
  try (OrcWriter writer = new OrcWriter(new Configuration(), path, structTypeInfo)) {
    for (int i = 0; i < 1000000; i++) {
      Number n = i;

      struct.clear();
      struct.add(n.toString());
      struct.add(i % 2 == 0);
      struct.add(n.byteValue());
      struct.add(n.shortValue());
      struct.add(i);
      struct.add(n.longValue());
      struct.add(n.floatValue());
      struct.add(n.doubleValue());
      struct.add(new Timestamp(i));
      struct.add(new Date(i));
      struct.add(n.toString().getBytes());
      struct.add(HiveDecimal.create(n.toString()));
      struct.add(Arrays.asList(i));
      struct.add(createMap(i));
      struct.add(Arrays.asList(i));
      struct.add(new StandardUnion((byte) 0, n.toString()));

      writer.addRow(struct);
    }
  }
}
 
Developer: HotelsDotCom, Project: corc, Lines: 30, Source: OrcFileSourcePerformanceTest.java

Example 15: readDecimal

import org.apache.hadoop.hive.common.type.HiveDecimal; // import the dependent package/class
@Test
public void readDecimal() throws IOException {
  TypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(2, 1);

  try (OrcWriter writer = getOrcWriter(typeInfo)) {
    writer.addRow(HiveDecimal.create(new BigDecimal("1.2")));
    writer.addRow((Object) null);
  }

  List<Tuple> list = read(typeInfo);
  assertThat(list.size(), is(2));
  assertThat(list.get(0).getObject(0), is((Object) new BigDecimal("1.2")));
  assertThat(list.get(1).getObject(0), is(nullValue()));
}
 
Developer: HotelsDotCom, Project: corc, Lines: 15, Source: OrcFileTest.java


Note: The org.apache.hadoop.hive.common.type.HiveDecimal class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors, and distribution and use should follow each project's license. Do not reproduce this article without permission.