This page collects typical usage examples of the Java class org.apache.hadoop.hive.ql.metadata.HiveException. If you are wondering what the HiveException class does, how to use it, or where to find real-world examples, the curated code samples below should help.
HiveException belongs to the org.apache.hadoop.hive.ql.metadata package. A total of 15 code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code samples.
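Before diving in: HiveException is the checked exception thrown throughout Hive's query-execution and metadata APIs, most visibly by GenericUDF.evaluate() and GenericUDTF.process(). As a minimal, self-contained sketch (the class name ToUpperUDF is illustrative, not taken from any project below), a custom UDF typically declares and propagates it like this:

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;

public class ToUpperUDF extends GenericUDF {
  private StringObjectInspector inputOI;

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1 || !(arguments[0] instanceof StringObjectInspector)) {
      throw new UDFArgumentException("to_upper expects a single string argument");
    }
    inputOI = (StringObjectInspector) arguments[0];
    return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // DeferredObject.get() itself declares HiveException, so failures in
    // lazy deserialization propagate to the engine automatically.
    Object arg = arguments[0].get();
    if (arg == null) {
      return null;
    }
    return inputOI.getPrimitiveJavaObject(arg).toUpperCase();
  }

  @Override
  public String getDisplayString(String[] children) {
    return "to_upper(" + children[0] + ")";
  }
}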
Example 1: constructAvroTable
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
}
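Note the pairing of table.setTableType(TableType.EXTERNAL_TABLE) with the "EXTERNAL" = "TRUE" table parameter: Hive treats external tables as metadata-only references, so dropping the table in the metastore does not delete the connector-written files under tablePath. setInputFormatClass and setOutputFormatClass resolve the given class names reflectively, which is why they can fail with a HiveException when the Avro storage classes are not on the classpath.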
Example 2: constructParquetTable
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(getHiveParquetSerde());
  try {
    table.setInputFormatClass(getHiveParquetInputFormat());
    table.setOutputFormatClass(getHiveParquetOutputFormat());
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  // convert the Copycat schema to Hive columns
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  return table;
}
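The Parquet variant mirrors the Avro one almost line for line; the notable difference is the missing AVRO_SCHEMA_LITERAL parameter. Parquet files embed their schema in the file footer, so the table only needs the Hive Parquet serde and input/output format classes, while the Avro table above must additionally carry the Avro schema as a table property.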
Example 3: process
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public void process(Object[] record) throws HiveException {
  final String document = (String) stringOI.getPrimitiveJavaObject(record[0]);
  if (document == null) {
    return;
  }
  // Input rows look like "<rowKey>,<value1> <value2> ...".
  String[] tokens = document.split(",");
  String[] results = tokens[1].split(" ");
  try {
    // Check whether the row key already exists in the HBase table "bi".
    hTable = new HTable(conf, "bi");
    Get get = new Get(Bytes.toBytes(tokens[0]));
    result = hTable.exists(get);
  } catch (Exception e) {
    e.printStackTrace();
  }
  // Only forward rows whose key is not yet present in HBase.
  if (!result) {
    for (String r : results) {
      forward(new Object[]{tokens[0], r});
    }
  }
}
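Opening a new HTable on every input row is expensive, and the handle is never closed. A minimal sketch of a safer arrangement (hypothetical; the original project does not show this) would create the table handle once, e.g. at the end of the UDTF's initialize(), and release it in the close() hook:

// created once elsewhere: hTable = new HTable(conf, "bi");
@Override
public void close() throws HiveException {
  try {
    if (hTable != null) {
      hTable.close(); // release the HBase connection when the UDTF is done
    }
  } catch (IOException e) { // java.io.IOException
    throw new HiveException("Failed to close HBase table", e);
  }
}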
Example 4: evaluate
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  int dayOfWeekInt;
  if (isDayOfWeekConst) {
    dayOfWeekInt = dayOfWeekIntConst;
  } else {
    String dayOfWeek = getStringValue(arguments, 1, converters);
    dayOfWeekInt = getIntDayOfWeek(dayOfWeek);
  }
  if (dayOfWeekInt == -1) {
    return null;
  }
  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }
  nextDay(date, dayOfWeekInt);
  Date newDate = calendar.getTime();
  output.set(BackportUtils.getDateFormat().format(newDate));
  return output;
}
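The nextDay helper and the shared calendar field are not part of the excerpt. A plausible sketch of what nextDay does, assuming the Oracle-style semantics of Hive's next_day (the returned date is strictly after the input date):

private void nextDay(Date date, int dayOfWeek) {
  calendar.setTime(date);
  int daysToAdd = (dayOfWeek - calendar.get(Calendar.DAY_OF_WEEK) + 7) % 7;
  if (daysToAdd == 0) {
    daysToAdd = 7; // same weekday: jump to next week's occurrence
  }
  calendar.add(Calendar.DAY_OF_MONTH, daysToAdd);
}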
Example 5: evaluate
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (formatter == null) {
    return null;
  }
  // The function should support both the short date and the full timestamp
  // format; the time part of a timestamp must not be dropped.
  Date date = getTimestampValue(arguments, 0, tsConverters);
  if (date == null) {
    date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
    if (date == null) {
      return null;
    }
  }
  String res = formatter.format(date);
  if (res == null) {
    return null;
  }
  output.set(res);
  return output;
}
Example 6: evaluate
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (digest == null) {
    return null;
  }
  digest.reset();
  if (isStr) {
    Text n = getTextValue(arguments, 0, converters);
    if (n == null) {
      return null;
    }
    // Hash only the valid prefix of the backing array, not its full capacity.
    digest.update(n.getBytes(), 0, n.getLength());
  } else {
    BytesWritable bWr = getBinaryValue(arguments, 0, converters);
    if (bWr == null) {
      return null;
    }
    digest.update(bWr.getBytes(), 0, bWr.getLength());
  }
  byte[] resBin = digest.digest();
  String resStr = Hex.encodeHexString(resBin);
  output.set(resStr);
  return output;
}
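The digest, isStr, and converters fields are initialized outside the excerpt. The null guard at the top suggests a setup along these lines (a sketch; the algorithm name is illustrative):

try {
  digest = MessageDigest.getInstance("SHA-256"); // java.security.MessageDigest
} catch (NoSuchAlgorithmException e) {
  digest = null; // evaluate() then degrades gracefully by returning null
}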
Example 7: evaluate
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Integer numMonthV;
  if (isNumMonthsConst) {
    numMonthV = numMonthsConst;
  } else {
    numMonthV = getIntValue(arguments, 1, converters);
  }
  if (numMonthV == null) {
    return null;
  }
  int numMonthInt = numMonthV.intValue();
  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }
  addMonth(date, numMonthInt);
  Date newDate = calendar.getTime();
  output.set(getDateFormat().format(newDate));
  return output;
}
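As with next_day above, the addMonth helper is assumed rather than shown; presumably it just delegates to the shared Calendar, which clamps the day of month when the target month is shorter (e.g. Jan 31 plus one month becomes Feb 28/29):

private void addMonth(Date date, int numMonths) {
  calendar.setTime(date);
  calendar.add(Calendar.MONTH, numMonths);
}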
Example 8: getTransformedWritable
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  Byte value = (Byte) columnType.getPrimitiveJavaObject(object.get());
  if (value != null) {
    Byte transformedValue = transformer.transform(value);
    if (transformedValue != null) {
      writable.set(transformedValue);
      return writable;
    }
  }
  return null;
}
Example 9: evaluate
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object obj0 = arguments[0].get();
  if (obj0 == null) {
    return null;
  }
  String str0 = textConverters[0].convert(obj0).toString();
  String soundexCode;
  try {
    soundexCode = soundex.soundex(str0);
  } catch (IllegalArgumentException e) {
    // Input contains characters Soundex cannot encode; return SQL NULL.
    return null;
  }
  output.set(soundexCode);
  return output;
}
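The soundex field appears to be an instance of org.apache.commons.codec.language.Soundex from Apache Commons Codec (the same library that provides the Hex.encodeHexString used in Example 6); for instance, soundex.soundex("Robert") yields "R163". The library throws IllegalArgumentException when the input contains characters it cannot map, which this UDF deliberately converts into a SQL NULL instead of failing the query.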
Example 10: testSingleColumn2
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testSingleColumn2() throws HiveException {
  final JsonQueryUDTF sut = new JsonQueryUDTF();
  final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
      toConstantOI("struct<name:string,offset:int>"),
  });
  assertEquals("struct<col1:struct<name:string,offset:int>>", oi.getTypeName());
  final List<Object> results = evaluate(sut, toObject(TEST_JSON));
  assertEquals(3, results.size());
  final HivePath namePath = new HivePath(oi, ".col1.name");
  final HivePath offsetPath = new HivePath(oi, ".col1.offset");
  assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
  assertEquals(540, offsetPath.extract(results.get(0)).asInt());
  assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
  assertEquals(480, offsetPath.extract(results.get(1)).asInt());
  assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
  assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
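The toConstantOI and evaluate helpers used by these tests are not shown in the excerpt. toConstantOI presumably wraps a string literal in a constant ObjectInspector, roughly like this (a sketch against Hive's public factory API; the exact helper may differ):

// imports assumed: org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory,
// org.apache.hadoop.io.Text
private static ObjectInspector toConstantOI(final String text) {
  return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
      TypeInfoFactory.stringTypeInfo, new Text(text));
}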
Example 11: testMultiColumn
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMultiColumn() throws HiveException {
  final JsonQueryUDTF sut = new JsonQueryUDTF();
  final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
      toConstantOI("name:string"),
      toConstantOI("offset:int"),
  });
  assertEquals("struct<name:string,offset:int>", oi.getTypeName());
  final List<Object> results = evaluate(sut, toObject(TEST_JSON));
  assertEquals(3, results.size());
  final HivePath namePath = new HivePath(oi, ".name");
  final HivePath offsetPath = new HivePath(oi, ".offset");
  assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
  assertEquals(540, offsetPath.extract(results.get(0)).asInt());
  assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
  assertEquals(480, offsetPath.extract(results.get(1)).asInt());
  assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
  assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
Example 12: testAbortOnError
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testAbortOnError() throws HiveException {
  final JsonQueryUDTF sut = new JsonQueryUDTF();
  @SuppressWarnings("unused")
  final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      toConstantOI("if $error then error($error.message) else . end"),
      toConstantOI("string"),
  });
  try {
    evaluate(sut, toObject("\"corrupt \"string"));
    fail("should fail");
  } catch (final HiveException e) {
    assertTrue(e.getMessage().contains("Unrecognized token 'string'"));
  }
}
Example 13: testMoreOnStringOutputConversions
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMoreOnStringOutputConversions() throws HiveException {
  final JsonQueryUDTF sut = new JsonQueryUDTF();
  final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      toConstantOI("{foo: {a: 1}, bar: null, baz: \"baz\"}"),
      toConstantOI("foo:string"),
      toConstantOI("bar:string"),
      toConstantOI("baz:string"),
  });
  final List<Object> results = evaluate(sut, toObject("null"));
  assertEquals(1, results.size());
  final Object obj = results.get(0);
  assertEquals("{\"a\":1}", new HivePath(oi, ".foo").extract(obj).asString());
  assertTrue(new HivePath(oi, ".bar").extract(obj).isNull());
  assertEquals("baz", new HivePath(oi, ".baz").extract(obj).asString());
}
Example 14: testNullInputs
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testNullInputs() throws HiveException {
  final JsonQueryUDTF sut = new JsonQueryUDTF();
  final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      toConstantOI("."),
      toConstantOI("string"),
  });
  final List<Object> results = evaluate(sut, toObject("null"), null, toObject(null));
  assertEquals(3, results.size());
  assertTrue(new HivePath(oi, ".col1").extract(results.get(0)).isNull());
  assertTrue(new HivePath(oi, ".col1").extract(results.get(1)).isNull());
  assertTrue(new HivePath(oi, ".col1").extract(results.get(2)).isNull());
}
Example 15: testMissingFieldsInConversions
import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMissingFieldsInConversions() throws HiveException {
  final JsonQueryUDTF sut = new JsonQueryUDTF();
  final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      toConstantOI("{foo: 10}"),
      toConstantOI("foo:int"),
      toConstantOI("bar:int"),
  });
  final List<Object> results = evaluate(sut, toObject(null));
  assertEquals(1, results.size());
  assertEquals(10, new HivePath(oi, ".foo").extract(results.get(0)).asInt());
  assertTrue(new HivePath(oi, ".bar").extract(results.get(0)).isNull());
}