

Java HiveException Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.metadata.HiveException. If you are wondering what HiveException is for, how to use it, or where to find working examples, the curated snippets below should help.


HiveException belongs to the org.apache.hadoop.hive.ql.metadata package. Fifteen code examples of the class are shown below, sorted by popularity by default.
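
Before the examples, it helps to see the basic pattern: HiveException is the checked exception declared by most of Hive's per-row callbacks (GenericUDF.evaluate, GenericUDTF.process, and so on), and lower-level failures are typically wrapped into it. A minimal sketch of that pattern (the helper parseRecord is hypothetical, not from the examples below):

import org.apache.hadoop.hive.ql.metadata.HiveException;

// Minimal sketch: declare HiveException and wrap lower-level failures in it.
// The helper parseRecord is hypothetical.
public Object evaluateSafely(String raw) throws HiveException {
  try {
    return parseRecord(raw);
  } catch (RuntimeException e) {
    // Surface the failure as a query-time Hive error, keeping the cause.
    throw new HiveException("Failed to parse record: " + raw, e);
  }
}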

Example 1: constructAvroTable

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
}
 
Developer: jiangxiluning, Project: kafka-connect-hdfs, Lines: 21, Source: AvroHiveUtil.java
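
Both construct*Table examples wrap setInputFormatClass/setOutputFormatClass in a try/catch because the String overloads on Hive's Table resolve the named class reflectively, so a missing format jar surfaces as a HiveException at table-construction time rather than at query time. A standalone hedged sketch of that behavior (database, table name, and the standard Hive Avro container format classes are assumed here):

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

// Sketch: the String overloads call Class.forName under the hood, so an
// unresolvable class name fails here instead of later during a query.
Table table = new Table("default", "avro_events");
try {
  table.setInputFormatClass("org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat");
  table.setOutputFormatClass("org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat");
} catch (HiveException e) {
  throw new IllegalStateException("Avro format classes not on the classpath", e);
}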

Example 2: constructParquetTable

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(getHiveParquetSerde());
  try {
    table.setInputFormatClass(getHiveParquetInputFormat());
    table.setOutputFormatClass(getHiveParquetOutputFormat());
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  // convert the Copycat (Kafka Connect) schema to Hive columns
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  return table;
}
 
Developer: jiangxiluning, Project: kafka-connect-hdfs, Lines: 20, Source: ParquetHiveUtil.java

Example 3: process

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public void process(Object[] record) throws HiveException {
    final String document = (String) stringOI.getPrimitiveJavaObject(record[0]);

    if (document == null) {
        return;
    }

    String[] tokens = document.split(",");
    String[] results = tokens[1].split(" ");

    try {
        // NOTE: a new HTable connection is opened for every input row and never
        // closed; production code would create it once in initialize(). If the
        // lookup fails, `result` keeps its value from the previous row.
        hTable = new HTable(conf, "bi");
        Get get = new Get(Bytes.toBytes(tokens[0]));
        result = hTable.exists(get);
    } catch (Exception e) {
        e.printStackTrace();
    }

    if (!result) {
        for (String r : results) {
            forward(new Object[]{tokens[0], r});
        }
    }
}
 
Developer: Transwarp-DE, Project: Transwarp-Sample-Code, Lines: 26, Source: udtfCheck.java
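
The fields stringOI, conf, hTable, and result used by process() are wired up elsewhere in the UDTF. A plausible sketch of the matching initialize(), assuming one string argument and two string output columns (the column names key/value are illustrative):

import java.util.Arrays;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;

@Override
public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
    // Keep the inspector for the single string argument read in process().
    stringOI = (StringObjectInspector) args[0];
    // HBase client configuration used when probing the "bi" table.
    conf = HBaseConfiguration.create();
    // Declare the two string columns emitted through forward().
    return ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("key", "value"),
            Arrays.<ObjectInspector>asList(
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector));
}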

Example 4: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  int dayOfWeekInt;
  if (isDayOfWeekConst) {
    dayOfWeekInt = dayOfWeekIntConst;
  } else {
    String dayOfWeek = getStringValue(arguments, 1, converters);
    dayOfWeekInt = getIntDayOfWeek(dayOfWeek);
  }
  if (dayOfWeekInt == -1) {
    return null;
  }

  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }

  nextDay(date, dayOfWeekInt);
  Date newDate = calendar.getTime();
  output.set(BackportUtils.getDateFormat().format(newDate));
  return output;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 24, Source: GenericUDFNextDay.java
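
To drive an evaluate() like this outside a running query, the arguments can be wrapped in GenericUDF.DeferredJavaObject. A hedged harness sketch, assuming a udf instance whose initialize() was already called with string inspectors for both arguments:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.io.Text;

// Sketch: 2015-01-14 is a Wednesday, so asking for "TU" should yield the
// following Tuesday. The udf variable is assumed to be initialized already.
DeferredObject[] args = new DeferredObject[] {
    new DeferredJavaObject(new Text("2015-01-14")),
    new DeferredJavaObject(new Text("TU")),
};
Object result = udf.evaluate(args);  // expected text output: "2015-01-20"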

Example 5: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (formatter == null) {
    return null;
  }
  // the function should support both short date and full timestamp format
  // time part of the timestamp should not be skipped
  Date date = getTimestampValue(arguments, 0, tsConverters);
  if (date == null) {
    date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
    if (date == null) {
      return null;
    }
  }

  String res = formatter.format(date);
  if (res == null) {
    return null;
  }
  output.set(res);
  return output;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 23, Source: GenericUDFDateFormat.java
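
The formatter field checked at the top of evaluate() is built once in the UDF's initialize() from the constant pattern argument. A simplified, hedged sketch of that setup (the pattern literal is illustrative; the backport's actual initialization handles more cases):

import java.text.SimpleDateFormat;

// Sketch: an invalid pattern leaves formatter null, and evaluate() then
// returns null for every row instead of throwing per record.
try {
    formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
} catch (IllegalArgumentException e) {
    formatter = null;
}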

Example 6: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (digest == null) {
    return null;
  }

  digest.reset();
  if (isStr) {
    Text n = getTextValue(arguments, 0, converters);
    if (n == null) {
      return null;
    }
    digest.update(n.getBytes(), 0, n.getLength());
  } else {
    BytesWritable bWr = getBinaryValue(arguments, 0, converters);
    if (bWr == null) {
      return null;
    }
    digest.update(bWr.getBytes(), 0, bWr.getLength());
  }
  byte[] resBin = digest.digest();
  String resStr = Hex.encodeHexString(resBin);

  output.set(resStr);
  return output;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 27, Source: GenericUDFSha2.java
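
The digest field comes from the UDF's initialize(), which maps the constant bit-length argument to a JDK MessageDigest. A simplified sketch of that setup, assuming the 256-bit case:

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Sketch: pick the digest once per UDF instance; evaluate() then calls
// reset()/update()/digest() for each row. SHA-256 is assumed here.
try {
    digest = MessageDigest.getInstance("SHA-256");
} catch (NoSuchAlgorithmException e) {
    digest = null;  // evaluate() returns null when no digest is available
}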

Example 7: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Integer numMonthV;
  if (isNumMonthsConst) {
    numMonthV = numMonthsConst;
  } else {
    numMonthV = getIntValue(arguments, 1, converters);
  }

  if (numMonthV == null) {
    return null;
  }

  int numMonthInt = numMonthV.intValue();
  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }

  addMonth(date, numMonthInt);
  Date newDate = calendar.getTime();
  output.set(getDateFormat().format(newDate));
  return output;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 25, Source: GenericUDFAddMonths.java

Example 8: getTransformedWritable

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  Byte value = (Byte)columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    Byte transformedValue = transformer.transform(value);

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 17, Source: BaseMaskUDF.java
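
Here transformer and writable are fields of the concrete mask UDF. To make the null-propagating flow concrete, a hypothetical minimal transformer (illustrative only, not part of Hive's API):

// Hypothetical transformer shape (illustrative, not a Hive interface):
interface ByteTransformer {
  Byte transform(Byte value);  // null means "this value cannot be masked"
}

// Example: mask ASCII digits to '9' and pass every other byte through.
ByteTransformer maskDigits = b -> (b >= '0' && b <= '9') ? (byte) '9' : b;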

Example 9: evaluate

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object obj0;
  if ((obj0 = arguments[0].get()) == null) {
    return null;
  }

  String str0 = textConverters[0].convert(obj0).toString();
  String soundexCode;
  try {
    soundexCode = soundex.soundex(str0);
  } catch (IllegalArgumentException e) {
    return null;
  }
  output.set(soundexCode);
  return output;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 18, Source: GenericUDFSoundex.java

Example 10: testSingleColumn2

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testSingleColumn2() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
			toConstantOI("struct<name:string,offset:int>"),
	});
	assertEquals("struct<col1:struct<name:string,offset:int>>", oi.getTypeName());

	final List<Object> results = evaluate(sut, toObject(TEST_JSON));
	assertEquals(3, results.size());

	final HivePath namePath = new HivePath(oi, ".col1.name");
	final HivePath offsetPath = new HivePath(oi, ".col1.offset");

	assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
	assertEquals(540, offsetPath.extract(results.get(0)).asInt());

	assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
	assertEquals(480, offsetPath.extract(results.get(1)).asInt());

	assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
	assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
 
Developer: CyberAgent, Project: hive-jq-udtf, Lines: 26, Source: JsonQueryUDTFTest.java
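
The JsonQueryUDTF tests rely on toConstantOI and evaluate helpers that the snippets do not show. A plausible sketch of toConstantOI, assuming it wraps the literal in a constant writable-string inspector, which is how constant arguments reach a UDTF's initialize():

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

// Plausible sketch of the helper used by the tests above and below.
private static ObjectInspector toConstantOI(final String text) {
	return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
			TypeInfoFactory.stringTypeInfo, new Text(text));
}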

Example 11: testMultiColumn

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMultiColumn() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
			toConstantOI("name:string"),
			toConstantOI("offset:int"),
	});
	assertEquals("struct<name:string,offset:int>", oi.getTypeName());

	final List<Object> results = evaluate(sut, toObject(TEST_JSON));
	assertEquals(3, results.size());

	final HivePath namePath = new HivePath(oi, ".name");
	final HivePath offsetPath = new HivePath(oi, ".offset");

	assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
	assertEquals(540, offsetPath.extract(results.get(0)).asInt());

	assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
	assertEquals(480, offsetPath.extract(results.get(1)).asInt());

	assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
	assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
 
Developer: CyberAgent, Project: hive-jq-udtf, Lines: 27, Source: JsonQueryUDTFTest.java
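
Likewise, a plausible sketch of the tests' evaluate helper, assuming it registers a Collector, feeds each value to process(), and returns the rows the UDTF forwarded:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;

// Plausible sketch: collect everything the UDTF forwards for the given inputs.
private static List<Object> evaluate(final GenericUDTF sut, final Object... values)
		throws HiveException {
	final List<Object> results = new ArrayList<>();
	sut.setCollector(results::add);  // each forward()ed row lands here
	for (final Object value : values) {
		sut.process(new Object[] { value });
	}
	sut.close();
	return results;
}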

Example 12: testAbortOnError

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testAbortOnError() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	@SuppressWarnings("unused")
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("if $error then error($error.message) else . end"),
			toConstantOI("string"),
	});

	try {
		evaluate(sut, toObject("\"corrupt \"string"));
		fail("should fail");
	} catch (final HiveException e) {
		assertTrue(e.getMessage().contains("Unrecognized token 'string'"));
	}
}
 
Developer: CyberAgent, Project: hive-jq-udtf, Lines: 18, Source: JsonQueryUDTFTest.java

Example 13: testMoreOnStringOutputConversions

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMoreOnStringOutputConversions() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("{foo: {a: 1}, bar: null, baz: \"baz\"}"),
			toConstantOI("foo:string"),
			toConstantOI("bar:string"),
			toConstantOI("baz:string"),
	});

	final List<Object> results = evaluate(sut, toObject("null"));
	assertEquals(1, results.size());

	final Object obj = results.get(0);
	assertEquals("{\"a\":1}", new HivePath(oi, ".foo").extract(obj).asString());
	assertTrue(new HivePath(oi, ".bar").extract(obj).isNull());
	assertEquals("baz", new HivePath(oi, ".baz").extract(obj).asString());
}
 
Developer: CyberAgent, Project: hive-jq-udtf, Lines: 21, Source: JsonQueryUDTFTest.java

Example 14: testNullInputs

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testNullInputs() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("."),
			toConstantOI("string"),
	});

	final List<Object> results = evaluate(sut, toObject("null"), null, toObject(null));
	assertEquals(3, results.size());

	assertTrue(new HivePath(oi, ".col1").extract(results.get(0)).isNull());
	assertTrue(new HivePath(oi, ".col1").extract(results.get(1)).isNull());
	assertTrue(new HivePath(oi, ".col1").extract(results.get(2)).isNull());
}
 
Developer: CyberAgent, Project: hive-jq-udtf, Lines: 18, Source: JsonQueryUDTFTest.java

Example 15: testMissingFieldsInConversions

import org.apache.hadoop.hive.ql.metadata.HiveException; // import the required package/class
@Test
public void testMissingFieldsInConversions() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("{foo: 10}"),
			toConstantOI("foo:int"),
			toConstantOI("bar:int"),
	});

	final List<Object> results = evaluate(sut, toObject(null));
	assertEquals(1, results.size());

	assertEquals(10, new HivePath(oi, ".foo").extract(results.get(0)).asInt());
	assertTrue(new HivePath(oi, ".bar").extract(results.get(0)).isNull());
}
 
Developer: CyberAgent, Project: hive-jq-udtf, Lines: 18, Source: JsonQueryUDTFTest.java


Note: the org.apache.hadoop.hive.ql.metadata.HiveException examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective authors, who retain copyright of the source code; consult each project's license before redistributing or reusing it, and do not repost without permission.