This article collects typical usage examples of the Java class org.apache.hadoop.hive.common.type.HiveChar. If you are wondering what HiveChar is for, or how to use it in your own code, the curated examples below may help.
The HiveChar class belongs to the org.apache.hadoop.hive.common.type package. Fifteen code examples of the class are shown below, ordered by popularity.
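Before the project examples, here is a minimal, self-contained sketch (our own illustration, not taken from any of the projects quoted below) of what the (value, maxLength) constructor does: it enforces the declared char(n) length by truncating longer input, which is the behaviour Example 7 relies on, while a negative length, as used in Example 15, appears to disable the length check.

import org.apache.hadoop.hive.common.type.HiveChar;

public class HiveCharLengthDemo {
  public static void main(String[] args) {
    // Input longer than the declared length is truncated by the constructor.
    HiveChar bounded = new HiveChar("string to test", 4);
    System.out.println(bounded.getValue()); // prints "stri"

    // A negative length (see Example 15) leaves the value untouched.
    HiveChar unbounded = new HiveChar("string to test", -1);
    System.out.println(unbounded.getValue()); // prints "string to test"
  }
}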
Example 1: getTransformedWritable
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  HiveChar value = columnType.getPrimitiveJavaObject(object.get());
  if (value != null) {
    String transformedValue = transformer.transform(value.getValue());
    if (transformedValue != null) {
      writable.set(transformedValue);
      return writable;
    }
  }
  return null;
}
Example 2: convertClobType
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

private Object convertClobType(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  ClobRef cr = (ClobRef) val;
  String s = cr.isExternal() ? cr.toString() : cr.getData();
  if (hfsType == HCatFieldSchema.Type.STRING) {
    return s;
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(s, vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hc = new HiveChar(s, cti.getLength());
    return hc;
  }
  return null;
}
Example 3: testStringTypes
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

public void testStringTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.CHAR, 14, 0,
      new HiveChar("string to test", 14), "string to test",
      KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.VARCHAR, 14, 0,
      new HiveVarchar("string to test", 14), "string to test",
      KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example 4: testStringTypes
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

public void testStringTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.CHAR, 14, 0,
      new HiveChar("string to test", 14), "string to test",
      KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.VARCHAR, 14, 0,
      new HiveVarchar("string to test", 14), "string to test",
      KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example 5: getJavaObjectFromPrimitiveData
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

private static Object getJavaObjectFromPrimitiveData(Object data, ObjectInspector objInsp) {
  assert(objInsp.getCategory() == Category.PRIMITIVE);
  if (data == null) {
    return null;
  }
  if (data instanceof BytesWritable && objInsp instanceof WritableHiveDecimalObjectInspector) {
    // BytesWritable cannot be directly cast to HiveDecimalWritable
    WritableHiveDecimalObjectInspector oi = (WritableHiveDecimalObjectInspector) objInsp;
    data = oi.create(((BytesWritable) data).getBytes(), oi.scale());
  }
  Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
  if (obj instanceof HiveDecimal) {
    obj = ((HiveDecimal) obj).bigDecimalValue();
  } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
    obj = obj.toString();
  } else if (obj instanceof byte[]) {
    obj = Hex.encodeHexString((byte[]) obj);
  }
  return obj;
}
Example 6: getJavaObjectFromFieldData
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

private static Object getJavaObjectFromFieldData(Object data, ObjectInspector objInsp) {
  if (data == null) {
    return null;
  }
  if (objInsp.getCategory() == Category.PRIMITIVE) {
    Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
    if (obj instanceof HiveDecimal) {
      obj = ((HiveDecimal) obj).bigDecimalValue();
    } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
      obj = obj.toString();
    } else if (obj instanceof byte[]) {
      obj = Hex.encodeHexString((byte[]) obj);
    }
    return obj;
  } else if (objInsp.getCategory() == Category.LIST) {
    return getJsonArrayFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
  } else {
    return getJsonObjectFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
  }
}
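Examples 5 and 6 normalize HiveChar and HiveVarchar values to plain Java strings by calling toString(). The standalone sketch below (our own illustration, not part of the project above) shows that step in isolation; note that for a char column the string form may still carry trailing padding up to the declared length, which is an assumption on our part rather than something the examples assert.

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveVarchar;

public class HiveCharToStringDemo {
  public static void main(String[] args) {
    Object charValue = new HiveChar("abc", 5);
    Object varcharValue = new HiveVarchar("abc", 5);
    // Both fixed- and variable-length Hive strings collapse to java.lang.String here,
    // exactly as in the instanceof branches of Examples 5 and 6.
    String s1 = charValue.toString();
    String s2 = varcharValue.toString();
    System.out.println("char:    [" + s1 + "]");
    System.out.println("varchar: [" + s2 + "]");
  }
}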
Example 7: writeChar
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

@Test
public void writeChar() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add("hello");
  values.add(new HiveChar("world", 1));
  values.add(null);
  write(TypeInfoFactory.getCharTypeInfo(1), values);
  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveChar) reader.next().get(0)).getValue(), is("h"));
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveChar) reader.next().get(0)).getValue(), is("w"));
    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));
    assertThat(reader.hasNext(), is(false));
  }
}
Example 8: writeMapCharString
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

@Test
public void writeMapCharString() throws IOException {
  Map<Object, Object> map = new HashMap<>();
  map.put("hello", "world");
  map.put("hi", "world");
  List<Object> values = new ArrayList<>();
  values.add(map);
  values.add(null);
  write(TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.getCharTypeInfo(1), TypeInfoFactory.stringTypeInfo), values);
  Map<Object, Object> expected = new HashMap<>();
  expected.put(new HiveChar("h", 1), "world");
  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is((Object) expected));
    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));
    assertThat(reader.hasNext(), is(false));
  }
}
Example 9: readMapCharString
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

@Test
public void readMapCharString() throws IOException {
  TypeInfo typeInfo = TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.getCharTypeInfo(1),
      TypeInfoFactory.stringTypeInfo);
  Map<Object, Object> map = new HashMap<>();
  map.put(new HiveChar("h", 1), "world");
  try (OrcWriter writer = getOrcWriter(typeInfo)) {
    writer.addRow(map);
    writer.addRow((Object) null);
  }
  Map<Object, Object> expected = new HashMap<>();
  expected.put("h", "world");
  List<Tuple> list = read(typeInfo);
  assertThat(list.size(), is(2));
  assertThat(list.get(0).getObject(0), is((Object) expected));
  assertThat(list.get(1).getObject(0), is(nullValue()));
}
Example 10: readCharPredicatePushdown
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

@Test
public void readCharPredicatePushdown() throws IOException {
  TypeInfo typeInfo = TypeInfoFactory.getCharTypeInfo(3);
  try (OrcWriter writer = getOrcWriter(typeInfo)) {
    writer.addRow(new HiveChar("foo", 3));
    writer.addRow(new HiveChar("bar", 3));
  }
  StructTypeInfo structTypeInfo = new StructTypeInfoBuilder().add("a", typeInfo).build();
  SearchArgument searchArgument = SearchArgumentFactory
      .newBuilder()
      .startAnd()
      .equals("a", new HiveChar("foo", 5))
      .end()
      .build();
  OrcFile orcFile = OrcFile.source().columns(structTypeInfo).schemaFromFile().searchArgument(searchArgument).build();
  Tap<?, ?, ?> tap = new Hfs(orcFile, path);
  List<Tuple> list = Plunger.readDataFromTap(tap).asTupleList();
  assertThat(list.size(), is(1));
  assertThat(list.get(0).getObject(0), is((Object) "foo"));
}
Example 11: convertStringTypes
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      str = FieldFormatter.hiveStringReplaceDelims(str,
          hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      HiveChar hc = new HiveChar(val.toString(), cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}
Example 12: convertBooleanTypes
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

private Object convertBooleanTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  Boolean b = (Boolean) val;
  if (hfsType == HCatFieldSchema.Type.BOOLEAN) {
    return b;
  } else if (hfsType == HCatFieldSchema.Type.TINYINT) {
    return (byte) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.SMALLINT) {
    return (short) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.INT) {
    return (int) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.BIGINT) {
    return (long) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.FLOAT) {
    return (float) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.DOUBLE) {
    return (double) (b ? 1 : 0);
  } else if (hfsType == HCatFieldSchema.Type.STRING) {
    return val.toString();
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(val.toString(), vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hChar = new HiveChar(val.toString(), cti.getLength());
    return hChar;
  }
  return null;
}
Example 13: extractValue
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

@Override
public String extractValue(final Object data, final ObjectInspector objectInspector)
    throws HiveException {
  final Object value = inputObjectInspector.getPrimitiveJavaObject(data);
  if (value instanceof String) {
    return (String) value;
  } else if (value instanceof HiveChar) {
    return ((HiveChar) value).getValue();
  } else if (value instanceof HiveVarchar) {
    return ((HiveVarchar) value).getValue();
  } else {
    throw new UDFArgumentTypeException(0, "unsupported type " + value.getClass().getName());
  }
}
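The dispatch in Example 13 exists because the same logical string column can reach a UDF as a plain String, a HiveChar, or a HiveVarchar depending on its declared Hive type. A standalone version of that dispatch, with a hypothetical helper name used purely for illustration, might look like this:

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveVarchar;

public class StringShapes {
  // Normalize the three string-like shapes to java.lang.String, mirroring Example 13.
  static String asString(Object value) {
    if (value instanceof String) {
      return (String) value;
    } else if (value instanceof HiveChar) {
      return ((HiveChar) value).getValue();
    } else if (value instanceof HiveVarchar) {
      return ((HiveVarchar) value).getValue();
    }
    throw new IllegalArgumentException("unsupported type " + value.getClass().getName());
  }

  public static void main(String[] args) {
    System.out.println(asString("plain string"));
    System.out.println(asString(new HiveChar("char value", 20)));
    System.out.println(asString(new HiveVarchar("varchar value", 20)));
  }
}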
Example 14: toComparable
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

static Comparable<?> toComparable(PrimitiveCategory category, Object literal) {
  String stringLiteral;
  switch (category) {
    case STRING:
      return new Text((String) literal);
    case BOOLEAN:
      return new BooleanWritable((Boolean) literal);
    case BYTE:
      return new ByteWritable(((Long) literal).byteValue());
    case SHORT:
      return new ShortWritable(((Long) literal).shortValue());
    case INT:
      return new IntWritable(((Long) literal).intValue());
    case LONG:
      return new LongWritable((Long) literal);
    case FLOAT:
      return new FloatWritable(((Double) literal).floatValue());
    case DOUBLE:
      return new DoubleWritable((Double) literal);
    case TIMESTAMP:
      return new TimestampWritable((Timestamp) literal);
    case DATE:
      return (DateWritable) literal;
    case CHAR:
      stringLiteral = (String) literal;
      return new HiveCharWritable(new HiveChar(stringLiteral, stringLiteral.length()));
    case VARCHAR:
      stringLiteral = (String) literal;
      return new HiveVarcharWritable(new HiveVarchar(stringLiteral, stringLiteral.length()));
    case DECIMAL:
      return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
    default:
      throw new IllegalArgumentException("Unsupported category: " + category);
  }
}
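Example 14 wraps CHAR literals in a HiveCharWritable before handing them to the comparison machinery. The round trip can be sketched as below; note that HiveCharWritable.getHiveChar() is our assumption about the serde2 API rather than something the example itself shows.

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;

public class HiveCharWritableDemo {
  public static void main(String[] args) {
    // Wrap a HiveChar in its Writable form, mirroring the CHAR branch of toComparable above.
    HiveCharWritable writable = new HiveCharWritable(new HiveChar("foo", 3));
    // getHiveChar() is assumed to hand back the wrapped value (see note above).
    HiveChar roundTripped = writable.getHiveChar();
    System.out.println(roundTripped.getValue());
  }
}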
Example 15: toJava
import org.apache.hadoop.hive.common.type.HiveChar; // import the class demonstrated in this example

@Test
public void toJava() throws UnexpectedTypeException {
  StructTypeInfo nested = new StructTypeInfoBuilder().add("char1", TypeInfoFactory.getCharTypeInfo(1)).build();
  TypeInfo typeInfo = new StructTypeInfoBuilder()
      .add("char1", TypeInfoFactory.getCharTypeInfo(1))
      .add("struct_char1", nested)
      .build();
  SettableStructObjectInspector inspector = (SettableStructObjectInspector) OrcStruct.createObjectInspector(typeInfo);
  Object struct = inspector.create();
  inspector.setStructFieldData(struct, inspector.getStructFieldRef("char1"),
      new HiveCharWritable(new HiveChar("a", -1)));
  SettableStructObjectInspector nestedInspector = (SettableStructObjectInspector) OrcStruct
      .createObjectInspector(nested);
  Object nestedStruct = inspector.create();
  nestedInspector.setStructFieldData(nestedStruct, nestedInspector.getStructFieldRef("char1"),
      new HiveCharWritable(new HiveChar("b", -1)));
  inspector.setStructFieldData(struct, inspector.getStructFieldRef("struct_char1"), nestedStruct);
  List<Object> list = new ArrayList<>();
  list.add(new HiveChar("a", -1));
  list.add(Arrays.asList(new HiveChar("b", -1)));
  Converter converter = factory.newConverter(inspector);
  Object convertedList = converter.toJavaObject(struct);
  assertThat(convertedList, is((Object) list));
  Object convertedStruct = converter.toWritableObject(list);
  assertThat(convertedStruct, is(struct));
}