Java HiveException Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.metadata.HiveException. If you have been wondering what exactly the HiveException class does, how to use it, or what working examples look like, the curated code examples below should help.


The HiveException class belongs to the org.apache.hadoop.hive.ql.metadata package. Fifteen code examples of the class are shown below, ordered by popularity by default.
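
Before the individual examples, here is a minimal sketch of the pattern they all share: HiveException is a checked exception thrown by Hive's metadata API, so callers either handle it or rethrow it wrapped in their own exception type. The class and method names below are ours for illustration, not taken from any of the projects cited in this article:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

public final class HiveExceptionSketch {
  private HiveExceptionSketch() {}

  // Table.setInputFormatClass/setOutputFormatClass load the named classes and
  // throw the checked HiveException when they cannot be resolved; callers
  // typically rethrow it wrapped in a domain-specific exception.
  static void configureFormats(Table table, String inputFormat, String outputFormat) {
    try {
      table.setInputFormatClass(inputFormat);
      table.setOutputFormatClass(outputFormat);
    } catch (HiveException e) {
      throw new IllegalStateException("Cannot find input/output format", e);
    }
  }
}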

Example 1: constructAvroTable

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
}
 
Developer ID: jiangxiluning, project: kafka-connect-hdfs, lines of code: 21, source file: AvroHiveUtil.java

Example 2: constructParquetTable

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(getHiveParquetSerde());
  try {
    table.setInputFormatClass(getHiveParquetInputFormat());
    table.setOutputFormatClass(getHiveParquetOutputFormat());
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  // convert the Copycat schema to Hive columns
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  return table;
}
 
Developer ID: jiangxiluning, project: kafka-connect-hdfs, lines of code: 20, source file: ParquetHiveUtil.java

Example 3: process

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public void process(Object[] record) throws HiveException {
    final String document = (String) stringOI.getPrimitiveJavaObject(record[0]);

    if (document == null) {
        return;
    }

    String[] tokens = document.split(",");
    String[] results = tokens[1].split(" ");

    try {
        hTable = new HTable(conf, "bi");
        Get get = new Get(Bytes.toBytes(tokens[0]));
        result = hTable.exists(get);
    } catch (Exception e) {
        e.printStackTrace();
    }

    if (!result) {
        for (String r : results) {
            forward(new Object[]{tokens[0], r});
        }
    }
}
 
Developer ID: Transwarp-DE, project: Transwarp-Sample-Code, lines of code: 26, source file: udtfCheck.java
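
A note on this example: it builds a new HTable for every input row and never closes it, which is costly, and direct HTable construction is deprecated in current HBase clients. Below is a hedged sketch of the same existence check using the modern Connection/Table API, opening the connection once and reusing it; the class and method names are ours, while the "bi" table name comes from the example above:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class RowExistenceChecker implements AutoCloseable {
    private final Connection connection; // heavyweight: create once, share across rows

    public RowExistenceChecker(Configuration conf) throws IOException {
        this.connection = ConnectionFactory.createConnection(conf);
    }

    // Returns true if the given row key already exists in the "bi" table.
    public boolean exists(String rowKey) throws IOException {
        // Table handles are lightweight and not thread-safe; open one per use.
        try (Table table = connection.getTable(TableName.valueOf("bi"))) {
            return table.exists(new Get(Bytes.toBytes(rowKey)));
        }
    }

    @Override
    public void close() throws IOException {
        connection.close();
    }
}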

Example 4: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  int dayOfWeekInt;
  if (isDayOfWeekConst) {
    dayOfWeekInt = dayOfWeekIntConst;
  } else {
    String dayOfWeek = getStringValue(arguments, 1, converters);
    dayOfWeekInt = getIntDayOfWeek(dayOfWeek);
  }
  if (dayOfWeekInt == -1) {
    return null;
  }

  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }

  nextDay(date, dayOfWeekInt);
  Date newDate = calendar.getTime();
  output.set(BackportUtils.getDateFormat().format(newDate));
  return output;
}
 
Developer ID: myui, project: hive-udf-backports, lines of code: 24, source file: GenericUDFNextDay.java
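
To exercise a GenericUDF such as this one outside of a query, you can call initialize() and evaluate() directly. The driver below is hypothetical (it is not part of hive-udf-backports, and the sample date and day-of-week inputs are made up); it assumes GenericUDFNextDay is in scope and accepts two string-typed arguments, which matches how the non-constant branch above reads them:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class NextDayDriver {
    public static void main(String[] args) throws HiveException {
        GenericUDFNextDay udf = new GenericUDFNextDay(); // from the project above
        ObjectInspector[] argOIs = {
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        };
        udf.initialize(argOIs);

        DeferredObject[] arguments = {
            new DeferredJavaObject("2015-01-14"), // a Wednesday
            new DeferredJavaObject("TU"),         // "TU" = Tuesday
        };
        // expected: the first Tuesday strictly after 2015-01-14, i.e. 2015-01-20
        System.out.println(udf.evaluate(arguments));
    }
}

The same driver pattern applies to the GenericUDFDateFormat and GenericUDFAddMonths examples below.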

Example 5: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (formatter == null) {
    return null;
  }
  // the function should support both short date and full timestamp format
  // time part of the timestamp should not be skipped
  Date date = getTimestampValue(arguments, 0, tsConverters);
  if (date == null) {
    date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
    if (date == null) {
      return null;
    }
  }

  String res = formatter.format(date);
  if (res == null) {
    return null;
  }
  output.set(res);
  return output;
}
 
Developer ID: myui, project: hive-udf-backports, lines of code: 23, source file: GenericUDFDateFormat.java

Example 6: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (digest == null) {
    return null;
  }

  digest.reset();
  if (isStr) {
    Text n = getTextValue(arguments, 0, converters);
    if (n == null) {
      return null;
    }
    digest.update(n.getBytes(), 0, n.getLength());
  } else {
    BytesWritable bWr = getBinaryValue(arguments, 0, converters);
    if (bWr == null) {
      return null;
    }
    digest.update(bWr.getBytes(), 0, bWr.getLength());
  }
  byte[] resBin = digest.digest();
  String resStr = Hex.encodeHexString(resBin);

  output.set(resStr);
  return output;
}
 
Developer ID: myui, project: hive-udf-backports, lines of code: 27, source file: GenericUDFSha2.java
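
Stripped of the ObjectInspector handling, the string branch of this evaluate() is plain java.security plus commons-codec. A standalone sketch of the same hashing path, assuming the UDF was initialized for sha2(str, 256), i.e. a SHA-256 digest:

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

import org.apache.commons.codec.binary.Hex;

public class Sha2Sketch {
    public static void main(String[] args) throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        digest.reset();
        byte[] input = "hello".getBytes();
        digest.update(input, 0, input.length);
        // prints 2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
        System.out.println(Hex.encodeHexString(digest.digest()));
    }
}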

Example 7: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Integer numMonthV;
  if (isNumMonthsConst) {
    numMonthV = numMonthsConst;
  } else {
    numMonthV = getIntValue(arguments, 1, converters);
  }

  if (numMonthV == null) {
    return null;
  }

  int numMonthInt = numMonthV.intValue();
  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }

  addMonth(date, numMonthInt);
  Date newDate = calendar.getTime();
  output.set(getDateFormat().format(newDate));
  return output;
}
 
Developer ID: myui, project: hive-udf-backports, lines of code: 25, source file: GenericUDFAddMonths.java

Example 8: getTransformedWritable

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  Byte value = (Byte)columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    Byte transformedValue = transformer.transform(value);

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Developer ID: myui, project: hive-udf-backports, lines of code: 17, source file: BaseMaskUDF.java

Example 9: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object obj0;
  if ((obj0 = arguments[0].get()) == null) {
    return null;
  }

  String str0 = textConverters[0].convert(obj0).toString();
  String soundexCode;
  try {
    soundexCode = soundex.soundex(str0);
  } catch (IllegalArgumentException e) {
    return null;
  }
  output.set(soundexCode);
  return output;
}
 
Developer ID: myui, project: hive-udf-backports, lines of code: 18, source file: GenericUDFSoundex.java

Example 10: testSingleColumn2

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testSingleColumn2() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
			toConstantOI("struct<name:string,offset:int>"),
	});
	assertEquals("struct<col1:struct<name:string,offset:int>>", oi.getTypeName());

	final List<Object> results = evaluate(sut, toObject(TEST_JSON));
	assertEquals(3, results.size());

	final HivePath namePath = new HivePath(oi, ".col1.name");
	final HivePath offsetPath = new HivePath(oi, ".col1.offset");

	assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
	assertEquals(540, offsetPath.extract(results.get(0)).asInt());

	assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
	assertEquals(480, offsetPath.extract(results.get(1)).asInt());

	assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
	assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
 
Developer ID: CyberAgent, project: hive-jq-udtf, lines of code: 26, source file: JsonQueryUDTFTest.java

Example 11: testMultiColumn

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMultiColumn() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
			toConstantOI("name:string"),
			toConstantOI("offset:int"),
	});
	assertEquals("struct<name:string,offset:int>", oi.getTypeName());

	final List<Object> results = evaluate(sut, toObject(TEST_JSON));
	assertEquals(3, results.size());

	final HivePath namePath = new HivePath(oi, ".name");
	final HivePath offsetPath = new HivePath(oi, ".offset");

	assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
	assertEquals(540, offsetPath.extract(results.get(0)).asInt());

	assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
	assertEquals(480, offsetPath.extract(results.get(1)).asInt());

	assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
	assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
 
Developer ID: CyberAgent, project: hive-jq-udtf, lines of code: 27, source file: JsonQueryUDTFTest.java

Example 12: testAbortOnError

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testAbortOnError() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	@SuppressWarnings("unused")
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("if $error then error($error.message) else . end"),
			toConstantOI("string"),
	});

	try {
		evaluate(sut, toObject("\"corrupt \"string"));
		fail("should fail");
	} catch (final HiveException e) {
		assertTrue(e.getMessage().contains("Unrecognized token 'string'"));
	}
}
 
Developer ID: CyberAgent, project: hive-jq-udtf, lines of code: 18, source file: JsonQueryUDTFTest.java

Example 13: testMoreOnStringOutputConversions

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMoreOnStringOutputConversions() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("{foo: {a: 1}, bar: null, baz: \"baz\"}"),
			toConstantOI("foo:string"),
			toConstantOI("bar:string"),
			toConstantOI("baz:string"),
	});

	final List<Object> results = evaluate(sut, toObject("null"));
	assertEquals(1, results.size());

	final Object obj = results.get(0);
	assertEquals("{\"a\":1}", new HivePath(oi, ".foo").extract(obj).asString());
	assertTrue(new HivePath(oi, ".bar").extract(obj).isNull());
	assertEquals("baz", new HivePath(oi, ".baz").extract(obj).asString());
}
 
Developer ID: CyberAgent, project: hive-jq-udtf, lines of code: 21, source file: JsonQueryUDTFTest.java

Example 14: testNullInputs

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testNullInputs() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("."),
			toConstantOI("string"),
	});

	final List<Object> results = evaluate(sut, toObject("null"), null, toObject(null));
	assertEquals(3, results.size());

	assertTrue(new HivePath(oi, ".col1").extract(results.get(0)).isNull());
	assertTrue(new HivePath(oi, ".col1").extract(results.get(1)).isNull());
	assertTrue(new HivePath(oi, ".col1").extract(results.get(2)).isNull());
}
 
Developer ID: CyberAgent, project: hive-jq-udtf, lines of code: 18, source file: JsonQueryUDTFTest.java

Example 15: testMissingFieldsInConversions

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMissingFieldsInConversions() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("{foo: 10}"),
			toConstantOI("foo:int"),
			toConstantOI("bar:int"),
	});

	final List<Object> results = evaluate(sut, toObject(null));
	assertEquals(1, results.size());

	assertEquals(10, new HivePath(oi, ".foo").extract(results.get(0)).asInt());
	assertTrue(new HivePath(oi, ".bar").extract(results.get(0)).isNull());
}
 
Developer ID: CyberAgent, project: hive-jq-udtf, lines of code: 18, source file: JsonQueryUDTFTest.java


Note: The org.apache.hadoop.hive.ql.metadata.HiveException examples in this article were compiled by 纯净天空 from open-source code hosted on platforms such as GitHub and MSDocs. The snippets are drawn from community-contributed open-source projects; copyright of the source code remains with the original authors, and any distribution or use should follow the corresponding project's License. Do not reproduce without permission.