This article collects typical usage examples of the Java class org.apache.hadoop.hive.common.type.HiveVarchar. If you have been wondering what HiveVarchar is for, how to use it, or what real-world code that uses it looks like, the curated examples below should help.
The HiveVarchar class belongs to the org.apache.hadoop.hive.common.type package. Fifteen code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
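Before the individual examples, here is a minimal, self-contained sketch of the behavior the snippets below rely on: constructing a HiveVarchar with a declared maximum length, reading the value back with getValue(), and the MAX_VARCHAR_LENGTH constant used when no explicit length is known. This is illustrative only; the truncation noted in the comments reflects how the examples below use the class and is an assumption about the constructor semantics, not something spelled out in this article.

import org.apache.hadoop.hive.common.type.HiveVarchar;

public class HiveVarcharSketch {
  public static void main(String[] args) {
    // A value shorter than the declared maximum length is kept as-is.
    HiveVarchar greeting = new HiveVarchar("string to test", 14);
    System.out.println(greeting.getValue()); // "string to test"

    // A value longer than the declared length is expected to be truncated
    // (compare Example 13, where varchar(1) values come back as "h" and "w").
    HiveVarchar truncated = new HiveVarchar("world", 1);
    System.out.println(truncated.getValue());

    // Upper bound used when no explicit length is available (see Examples 10 and 14).
    System.out.println(HiveVarchar.MAX_VARCHAR_LENGTH);
  }
}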
Example 1: getTransformedWritable
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  HiveVarchar value = columnType.getPrimitiveJavaObject(object.get());
  if (value != null) {
    String transformedValue = transformer.transform(value.getValue());
    if (transformedValue != null) {
      writable.set(transformedValue);
      return writable;
    }
  }
  return null;
}
Example 2: convertClobType
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
private Object convertClobType(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  ClobRef cr = (ClobRef) val;
  String s = cr.isExternal() ? cr.toString() : cr.getData();
  if (hfsType == HCatFieldSchema.Type.STRING) {
    return s;
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(s, vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hc = new HiveChar(s, cti.getLength());
    return hc;
  }
  return null;
}
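A brief aside on the type objects used above: the VarcharTypeInfo obtained from the HCat field schema carries the declared maximum length, and the same kind of type info can also be built directly through TypeInfoFactory (Example 13 below does this for its ORC test). A minimal sketch, assuming the standard Hive serde2 typeinfo classes; the class name is just for illustration:

import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

public class VarcharTypeInfoSketch {
  public static void main(String[] args) {
    // Build a varchar(20) type info and use its declared length to bound a value,
    // mirroring what convertClobType does with the schema's type info.
    VarcharTypeInfo vti = TypeInfoFactory.getVarcharTypeInfo(20);
    HiveVarchar hvc = new HiveVarchar("some clob contents", vti.getLength());
    System.out.println(hvc.getValue());
  }
}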
Example 3: testStringTypes
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testStringTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.CHAR, 14, 0,
      new HiveChar("string to test", 14), "string to test",
      KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.VARCHAR, 14, 0,
      new HiveVarchar("string to test", 14), "string to test",
      KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example 4: testTableCreation
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testTableCreation() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      new HiveVarchar("1", 20), "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      new HiveVarchar("2", 20), "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols,
      null, true, false);
}
Example 5: testTableCreationWithPartition
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testTableCreationWithPartition() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col1");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("2");
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
Example 6: testTableCreationWithMultipleStaticPartKeys
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testTableCreationWithMultipleStaticPartKeys() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
Example 7: testTableCreationWithStorageStanza
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testTableCreationWithStorageStanza() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col1");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("2");
  addlArgsArray.add("--create-hcatalog-table");
  addlArgsArray.add("--hcatalog-storage-stanza");
  addlArgsArray.add(HCatalogTestUtils.STORED_AS_TEXT);
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
Example 8: testCreateTableWithPreExistingTable
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testCreateTableWithPreExistingTable() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  try {
    // Precreate table
    utils.createHCatTable(CreateMode.CREATE, TOTAL_RECORDS, table, cols);
    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols,
        null, true, false);
    fail("HCatalog job with --create-hcatalog-table and pre-existing"
        + " table should fail");
  } catch (Exception e) {
    LOG.debug("Caught expected exception while running "
        + " create-hcatalog-table with pre-existing table test", e);
  }
}
Example 9: testStringTypes
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testStringTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.CHAR, 14, 0,
      new HiveChar("string to test", 14), "string to test",
      KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.VARCHAR, 14, 0,
      new HiveVarchar("string to test", 14), "string to test",
      KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example 10: testStringColumnsMetadata
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
@Test
public void testStringColumnsMetadata() throws Exception {
  String query = "select varchar_field, char_field, string_field from hive.readtest";

  Map<String, Integer> expectedResult = Maps.newHashMap();
  expectedResult.put("varchar_field", 50);
  expectedResult.put("char_field", 10);
  expectedResult.put("string_field", HiveVarchar.MAX_VARCHAR_LENGTH);

  verifyColumnsMetadata(client.createPreparedStatement(query).get()
      .getPreparedStatement().getColumnsList(), expectedResult);

  try {
    test("alter session set `%s` = true", ExecConstants.EARLY_LIMIT0_OPT_KEY);
    verifyColumnsMetadata(client.createPreparedStatement(String.format("select * from (%s) t limit 0", query)).get()
        .getPreparedStatement().getColumnsList(), expectedResult);
  } finally {
    test("alter session reset `%s`", ExecConstants.EARLY_LIMIT0_OPT_KEY);
  }
}
Example 11: getJavaObjectFromPrimitiveData
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
private static Object getJavaObjectFromPrimitiveData(Object data, ObjectInspector objInsp) {
  assert (objInsp.getCategory() == Category.PRIMITIVE);
  if (data == null) {
    return null;
  }
  if (data instanceof BytesWritable && objInsp instanceof WritableHiveDecimalObjectInspector) {
    // BytesWritable cannot be directly cast to HiveDecimalWritable
    WritableHiveDecimalObjectInspector oi = (WritableHiveDecimalObjectInspector) objInsp;
    data = oi.create(((BytesWritable) data).getBytes(), oi.scale());
  }
  Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
  if (obj instanceof HiveDecimal) {
    obj = ((HiveDecimal) obj).bigDecimalValue();
  } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
    obj = obj.toString();
  } else if (obj instanceof byte[]) {
    obj = Hex.encodeHexString((byte[]) obj);
  }
  return obj;
}
Example 12: getJavaObjectFromFieldData
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
private static Object getJavaObjectFromFieldData(Object data, ObjectInspector objInsp) {
  if (data == null) {
    return null;
  }
  if (objInsp.getCategory() == Category.PRIMITIVE) {
    Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
    if (obj instanceof HiveDecimal) {
      obj = ((HiveDecimal) obj).bigDecimalValue();
    } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
      obj = obj.toString();
    } else if (obj instanceof byte[]) {
      obj = Hex.encodeHexString((byte[]) obj);
    }
    return obj;
  } else if (objInsp.getCategory() == Category.LIST) {
    return getJsonArrayFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
  } else {
    return getJsonObjectFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
  }
}
Example 13: writeVarchar
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
@Test
public void writeVarchar() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add("hello");
  values.add(new HiveVarchar("world", 1));
  values.add(null);

  write(TypeInfoFactory.getVarcharTypeInfo(1), values);

  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveVarchar) reader.next().get(0)).getValue(), is("h"));
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveVarchar) reader.next().get(0)).getValue(), is("w"));
    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));
    assertThat(reader.hasNext(), is(false));
  }
}
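Note how in this test both the plain String "hello" and the HiveVarchar("world", 1) come back from ORC truncated to the declared varchar(1) length, which is exactly what the assertions on "h" and "w" verify.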
Example 14: getPrimitiveJavaObject
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
// Note: the <#if> / <#else> / </#if> lines are FreeMarker template directives;
// this snippet comes from a code-generation template (the "Optional" mode is the nullable variant).
@Override
public HiveVarchar getPrimitiveJavaObject(Object o) {
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableVarCharHolder h = (NullableVarCharHolder) o;
<#else>
  final VarCharHolder h = (VarCharHolder) o;
</#if>
  final String s = StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer);
  return new HiveVarchar(s, HiveVarchar.MAX_VARCHAR_LENGTH);
}
Example 15: convertStringTypes
import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      str = FieldFormatter.hiveStringReplaceDelims(str,
          hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      HiveChar hc = new HiveChar(val.toString(), cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}