当前位置: 首页>>代码示例>>Java>>正文


Java HiveDecimal.create方法代码示例

本文整理汇总了Java中org.apache.hadoop.hive.common.type.HiveDecimal.create方法的典型用法代码示例。如果您正苦于以下问题:Java HiveDecimal.create方法的具体用法?Java HiveDecimal.create怎么用?Java HiveDecimal.create使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hive.common.type.HiveDecimal的用法示例。


在下文中一共展示了HiveDecimal.create方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: convertStringTypes

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Converts a string-valued input into the Java object expected by the target
 * HCatalog column type (STRING, VARCHAR, CHAR, or DECIMAL).
 *
 * @param val the source value; its {@code toString()} form is used
 * @param hfs the target HCatalog field schema
 * @return the converted value, or {@code null} for unsupported types
 */
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      // Replace Hive delimiter characters embedded in the data so they do
      // not corrupt the row layout.
      str = FieldFormatter.hiveStringReplaceDelims(str,
        hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      return new HiveVarchar(str, vti.getLength());
    } else {
      // CHAR. Bug fix: use the delimiter-replaced string, not the raw
      // val.toString(), so CHAR behaves consistently with STRING/VARCHAR.
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      return new HiveChar(str, cti.getLength());
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    return HiveDecimal.create(bd);
  }
  // Unsupported type: caller treats the column as NULL.
  return null;
}
 
开发者ID:aliyun,项目名称:aliyun-maxcompute-data-collectors,代码行数:29,代码来源:SqoopHCatImportHelper.java

示例2: toComparable

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Wraps a plain Java literal in the Hadoop/Hive writable {@link Comparable}
 * corresponding to the given primitive category.
 *
 * @param category the Hive primitive category of the column
 * @param literal  the boxed literal (integrals arrive as Long, floats as Double)
 * @return a writable wrapper suitable for comparison
 * @throws IllegalArgumentException if the category has no supported mapping
 */
static Comparable<?> toComparable(PrimitiveCategory category, Object literal) {
  switch (category) {
  case STRING:
    return new Text((String) literal);
  case BOOLEAN:
    return new BooleanWritable((Boolean) literal);
  case BYTE:
    // Integral literals are boxed as Long; narrow to the target width.
    return new ByteWritable(((Long) literal).byteValue());
  case SHORT:
    return new ShortWritable(((Long) literal).shortValue());
  case INT:
    return new IntWritable(((Long) literal).intValue());
  case LONG:
    return new LongWritable((Long) literal);
  case FLOAT:
    // Floating literals are boxed as Double; narrow for FLOAT columns.
    return new FloatWritable(((Double) literal).floatValue());
  case DOUBLE:
    return new DoubleWritable((Double) literal);
  case TIMESTAMP:
    return new TimestampWritable((Timestamp) literal);
  case DATE:
    // Already a writable; only the static type needs narrowing.
    return (DateWritable) literal;
  case CHAR: {
    String chars = (String) literal;
    return new HiveCharWritable(new HiveChar(chars, chars.length()));
  }
  case VARCHAR: {
    String chars = (String) literal;
    return new HiveVarcharWritable(new HiveVarchar(chars, chars.length()));
  }
  case DECIMAL:
    return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
  default:
    throw new IllegalArgumentException("Unsupported category: " + category);
  }
}
 
开发者ID:HotelsDotCom,项目名称:corc,代码行数:36,代码来源:EvaluatorFactory.java

示例3: toWritableObjectInternal

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Converts a String, BigDecimal, or HiveDecimal value into a
 * {@link HiveDecimalWritable}.
 */
@Override
protected Object toWritableObjectInternal(Object value) throws UnexpectedTypeException {
  final HiveDecimal decimal;
  if (value instanceof String) {
    decimal = HiveDecimal.create((String) value);
  } else if (value instanceof BigDecimal) {
    decimal = HiveDecimal.create((BigDecimal) value);
  } else {
    // Assumed to already be a HiveDecimal; the cast surfaces any mismatch.
    decimal = (HiveDecimal) value;
  }
  return new HiveDecimalWritable(decimal);
}
 
开发者ID:HotelsDotCom,项目名称:corc,代码行数:10,代码来源:CascadingConverterFactory.java

示例4: decimalWritable

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/** Both String and BigDecimal inputs should convert to equal HiveDecimalWritables. */
@Test
public void decimalWritable() throws UnexpectedTypeException {
  Converter converter = getConverter(TypeInfoFactory.getDecimalTypeInfo(2, 1));
  HiveDecimalWritable expected = new HiveDecimalWritable(HiveDecimal.create("2.1"));
  assertThat(converter.toWritableObject("2.1"), is((Object) expected));
  assertThat(converter.toWritableObject(new BigDecimal("2.1")), is((Object) expected));
}
 
开发者ID:HotelsDotCom,项目名称:corc,代码行数:8,代码来源:CascadingConverterFactoryTest.java

示例5: getPrimitiveWritableObject

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Wraps a BigDecimal or BigInteger in a {@link HiveDecimalWritable}.
 *
 * @param o the value (may be {@code null})
 * @return the writable wrapper, or {@code null} if {@code o} is {@code null}
 */
@Override
public HiveDecimalWritable getPrimitiveWritableObject(Object o) {
    // Bug fix: check null first. `instanceof` is false for null, so the
    // original per-branch `o == null` checks were dead code, and a null
    // value always fell into the BigInteger branch.
    if (o == null) {
        return null;
    }
    if (o instanceof BigDecimal) {
        return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) o));
    }
    // Otherwise expected to be a BigInteger.
    return new HiveDecimalWritable(HiveDecimal.create((BigInteger) o));
}
 
开发者ID:sigmoidanalytics,项目名称:spork,代码行数:9,代码来源:OrcUtils.java

示例6: getPrimitiveJavaObject

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Converts a BigDecimal or BigInteger into a {@link HiveDecimal}.
 *
 * @param o the value (may be {@code null})
 * @return the HiveDecimal, or {@code null} if {@code o} is {@code null}
 */
@Override
public HiveDecimal getPrimitiveJavaObject(Object o) {
    // Bug fix: hoist the null check. `instanceof` is false for null, so the
    // original per-branch `o == null` checks were dead code.
    if (o == null) {
        return null;
    }
    if (o instanceof BigDecimal) {
        return HiveDecimal.create((BigDecimal) o);
    }
    // Otherwise expected to be a BigInteger.
    return HiveDecimal.create((BigInteger) o);
}
 
开发者ID:sigmoidanalytics,项目名称:spork,代码行数:9,代码来源:OrcUtils.java

示例7: convert

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Converts any value whose string form parses as a decimal number into a
 * {@link HiveDecimal}.
 *
 * @throws ConversionException if the text is not a valid decimal
 */
@Override
public Object convert(@SuppressWarnings("rawtypes") Class type, Object value) {
  String text = value.toString();
  try {
    return HiveDecimal.create(new BigDecimal(text));
  } catch (NumberFormatException e) {
    throw new ConversionException(e);
  }
}
 
开发者ID:klarna,项目名称:HiveRunner,代码行数:9,代码来源:Converters.java

示例8: getObjectOfCorrespondingPrimitiveType

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Parses the string {@code s} into the Java object corresponding to the given
 * primitive map-key type.
 *
 * @throws IOException for BINARY (unsupported) or an unmappable type
 */
private Object getObjectOfCorrespondingPrimitiveType(String s, PrimitiveTypeInfo mapKeyType)
  throws IOException {
  switch (Type.getPrimitiveHType(mapKeyType)) {
  case INT:
    return Integer.parseInt(s);
  case TINYINT:
    return Byte.parseByte(s);
  case SMALLINT:
    return Short.parseShort(s);
  case BIGINT:
    return Long.parseLong(s);
  case BOOLEAN:
    // Same semantics as s.equalsIgnoreCase("true").
    return Boolean.parseBoolean(s);
  case FLOAT:
    return Float.parseFloat(s);
  case DOUBLE:
    return Double.parseDouble(s);
  case STRING:
    return s;
  case BINARY:
    throw new IOException("JsonSerDe does not support BINARY type");
  case DATE:
    return Date.valueOf(s);
  case TIMESTAMP:
    return Timestamp.valueOf(s);
  case DECIMAL:
    return HiveDecimal.create(s);
  case VARCHAR:
    return new HiveVarchar(s, ((BaseCharTypeInfo) mapKeyType).getLength());
  case CHAR:
    return new HiveChar(s, ((BaseCharTypeInfo) mapKeyType).getLength());
  }
  throw new IOException("Could not convert from string to map type " + mapKeyType.getTypeName());
}
 
开发者ID:prestodb,项目名称:presto-hive-apache,代码行数:35,代码来源:JsonSerDe.java

示例9: trunc

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Truncates a HiveDecimal to {@code scale} digits by delegating to the
 * BigDecimal overload of {@code trunc}.
 */
protected HiveDecimal trunc(HiveDecimal input, int scale) {
  return HiveDecimal.create(trunc(input.bigDecimalValue(), scale));
}
 
开发者ID:myui,项目名称:hive-udf-backports,代码行数:5,代码来源:GenericUDFTrunc.java

示例10: getPrimitiveJavaObject

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Converts a BigDecimal into a {@link HiveDecimal}.
 *
 * @param o the value (may be {@code null})
 * @return the HiveDecimal, or {@code null} if {@code o} is {@code null}
 */
@Override
public HiveDecimal getPrimitiveJavaObject(Object o) {
	// ObjectInspector convention: null in, null out. The original threw an
	// NPE inside HiveDecimal.create for null input.
	if (o == null) {
		return null;
	}
	return HiveDecimal.create((BigDecimal) o);
}
 
开发者ID:mini666,项目名称:hive-phoenix-handler,代码行数:5,代码来源:PhoenixDecimalObjectInspector.java

示例11: castLiteral

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/**
 * Casts a predicate literal to the canonical representation expected for the
 * column's {@code literalType}: integrals widen to Long, floats widen to
 * Double, decimals wrap in HiveDecimalWritable, and the remaining types pass
 * through after validation.
 *
 * @param literal the user-supplied literal
 * @return the canonical literal value
 * @throws IllegalArgumentException if the literal's class does not match the column type
 */
Object castLiteral(Serializable literal) {
	switch (literalType) {
		case LONG:
			// Accept any boxed integral type and widen to Long.
			// (Long.valueOf replaces the deprecated new Long(...) constructors.)
			if (literal instanceof Byte) {
				return Long.valueOf((Byte) literal);
			} else if (literal instanceof Short) {
				return Long.valueOf((Short) literal);
			} else if (literal instanceof Integer) {
				return Long.valueOf((Integer) literal);
			} else if (literal instanceof Long) {
				return literal;
			} else {
				throw new IllegalArgumentException("A predicate on a LONG column requires an integer " +
					"literal, i.e., Byte, Short, Integer, or Long.");
			}
		case FLOAT:
			// Accept any boxed floating type and widen to Double.
			if (literal instanceof Float) {
				return Double.valueOf((Float) literal);
			} else if (literal instanceof Double) {
				return literal;
			} else if (literal instanceof BigDecimal) {
				return ((BigDecimal) literal).doubleValue();
			} else {
				throw new IllegalArgumentException("A predicate on a FLOAT column requires a floating " +
					"literal, i.e., Float or Double.");
			}
		case STRING:
			if (literal instanceof String) {
				return literal;
			} else {
				// Bug fix: the original message was copy-pasted from the FLOAT
				// branch ("requires a floating literal, i.e., Float or Double").
				throw new IllegalArgumentException("A predicate on a STRING column requires a String literal.");
			}
		case BOOLEAN:
			if (literal instanceof Boolean) {
				return literal;
			} else {
				throw new IllegalArgumentException("A predicate on a BOOLEAN column requires a Boolean literal.");
			}
		case DATE:
			if (literal instanceof Date) {
				return literal;
			} else {
				throw new IllegalArgumentException("A predicate on a DATE column requires a java.sql.Date literal.");
			}
		case TIMESTAMP:
			if (literal instanceof Timestamp) {
				return literal;
			} else {
				throw new IllegalArgumentException("A predicate on a TIMESTAMP column requires a java.sql.Timestamp literal.");
			}
		case DECIMAL:
			if (literal instanceof BigDecimal) {
				return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
			} else {
				throw new IllegalArgumentException("A predicate on a DECIMAL column requires a BigDecimal literal.");
			}
		default:
			throw new IllegalArgumentException("Unknown literal type " + literalType);
	}
}
 
开发者ID:axbaretto,项目名称:flink,代码行数:63,代码来源:OrcRowInputFormat.java

示例12: decimalJava

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/** A HiveDecimalWritable should round-trip back to the same HiveDecimal. */
@Test
public void decimalJava() throws UnexpectedTypeException {
  Converter converter = getConverter(TypeInfoFactory.getDecimalTypeInfo(2, 1));
  HiveDecimal expected = HiveDecimal.create("2.1");
  Object actual = converter.toJavaObject(new HiveDecimalWritable(expected));
  assertThat(actual, is((Object) expected));
}
 
开发者ID:HotelsDotCom,项目名称:corc,代码行数:7,代码来源:DefaultConverterFactoryTest.java

示例13: decimalWritable

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/** A HiveDecimal input should be wrapped into an equal HiveDecimalWritable. */
@Test
public void decimalWritable() throws UnexpectedTypeException {
  Converter converter = getConverter(TypeInfoFactory.getDecimalTypeInfo(2, 1));
  HiveDecimal decimal = HiveDecimal.create("2.1");
  Object actual = converter.toWritableObject(decimal);
  assertThat(actual, is((Object) new HiveDecimalWritable(decimal)));
}
 
开发者ID:HotelsDotCom,项目名称:corc,代码行数:7,代码来源:DefaultConverterFactoryTest.java

示例14: decimalJava

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
/** The Cascading converter should unwrap a HiveDecimalWritable to a BigDecimal. */
@Test
public void decimalJava() throws UnexpectedTypeException {
  Converter converter = getConverter(TypeInfoFactory.getDecimalTypeInfo(2, 1));
  HiveDecimalWritable writable = new HiveDecimalWritable(HiveDecimal.create("2.1"));
  assertThat(converter.toJavaObject(writable), is((Object) new BigDecimal("2.1")));
}
 
开发者ID:HotelsDotCom,项目名称:corc,代码行数:7,代码来源:CascadingConverterFactoryTest.java

示例15: deserialize

import org.apache.hadoop.hive.common.type.HiveDecimal; //导入方法依赖的package包/类
@Override
	// Deserializes one COBOL field from the raw record bytes into the Java
	// object matching the field's declared Hive primitive type.
	public Object deserialize(byte[] rowBytes) throws CobolSerdeException {
		// Transcode the raw byte slice for this field (presumably
		// EBCDIC -> ASCII; confirm against transcodeField).
		byte[] temp = transcodeField(super.getBytes(rowBytes));
		// NOTE(review): uses the platform default charset — confirm intended.
		String s1 = new String(temp);

		if (this.compType > 0) {
			// COMP-3 (packed decimal) and COMP-4 (binary) fields are decoded
			// from the raw bytes, replacing the transcoded text entirely.
			if (this.compType == 3) {
				s1 = unpackData(super.getBytes(rowBytes), this.decimalLocation);
			}else if(this.compType == 4){
				s1 = getBinary(super.getBytes(rowBytes), this.decimalLocation);
			}
		} else if (this.decimalLocation > 0) {
			// Display-format number with an implied decimal point: splice a
			// "." in, decimalLocation digits from the right end.
			s1 = s1.substring(0, this.length * this.divideFactor
					- this.decimalLocation)
					+ "."
					+ s1.substring(this.length * this.divideFactor
							- this.decimalLocation);
		}
		try {
			switch (((PrimitiveTypeInfo) this.typeInfo).getPrimitiveCategory()) {
			case LONG:
				return Long.parseLong(s1.trim());
			case SHORT:
				return Short.parseShort(s1.trim());
			case INT:
				return Integer.parseInt(s1.trim());
			case BYTE:
				return Byte.parseByte(s1.trim());
			case FLOAT:
				return Float.parseFloat(s1.trim());
			case DOUBLE:
				return Double.parseDouble(s1.trim());
			case DECIMAL:
				// DECIMAL goes through a Hive object inspector so the value is
				// adjusted to the column's declared precision/scale.
				BigDecimal bd = new BigDecimal(s1);
				HiveDecimal dec = HiveDecimal.create(bd);
				JavaHiveDecimalObjectInspector oi = (JavaHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory
						.getPrimitiveJavaObjectInspector((DecimalTypeInfo) this.typeInfo);
				return oi.set(null, dec);
			}
		} catch (Exception e) {
			// Deliberate best-effort: any unparsable value becomes a NULL column.
			return null; // if cannot be converted make it null
		}
		// Primitive categories not listed above also map to NULL.
		return null;

	}
 
开发者ID:rbheemana,项目名称:Cobol-to-Hive,代码行数:48,代码来源:CobolNumberField.java


注:本文中的org.apache.hadoop.hive.common.type.HiveDecimal.create方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。