This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.SerDeException. If you are wondering what the SerDeException class is for, how to use it, or want to see it in context, the curated class code examples below may help.
The SerDeException class belongs to the org.apache.hadoop.hive.serde2 package. 15 code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code examples.
Example 1: createMultiStripeFile
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
private static void createMultiStripeFile(File file)
throws IOException, ReflectiveOperationException, SerDeException
{
FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, OrcTester.Compression.NONE, javaLongObjectInspector);
@SuppressWarnings("deprecation") Serializer serde = new OrcSerde();
SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", javaLongObjectInspector);
Object row = objectInspector.create();
StructField field = objectInspector.getAllStructFieldRefs().get(0);
for (int i = 0; i < 300; i += 3) {
if ((i > 0) && (i % 60 == 0)) {
flushWriter(writer);
}
objectInspector.setStructFieldData(row, field, (long) i);
Writable record = serde.serialize(row, objectInspector);
writer.write(record);
}
writer.close(false);
}
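A minimal sketch of how this helper might be driven, assuming it is invoked from within the same test class as the private method above; the temp-directory handling and the printed size are illustrative, not from the original source:
import java.io.File;
import java.nio.file.Files;

// Hypothetical driver: createMultiStripeFile writes 100 rows (0, 3, ..., 297) and flushes
// the writer every 20 rows, so the resulting ORC file contains several stripes.
File tempDir = Files.createTempDirectory("orc-test").toFile();
File orcFile = new File(tempDir, "multi-stripe.orc");
createMultiStripeFile(orcFile);
System.out.println("ORC file size: " + orcFile.length() + " bytes");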
Example 2: initialize
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
@Override
public void initialize( final Configuration conf, final Properties table , final Properties part ) throws SerDeException{
LOG.info( table.toString() );
if( part != null ){
LOG.info( part.toString() );
}
String columnNameProperty = table.getProperty(serdeConstants.LIST_COLUMNS);
String columnTypeProperty = table.getProperty(serdeConstants.LIST_COLUMN_TYPES);
String projectionColumnNames = conf.get( ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR , "" );
StructTypeInfo rootType;
if( projectionColumnNames.isEmpty() ){
rootType = getAllReadTypeInfo( columnNameProperty , columnTypeProperty );
}
else{
rootType = getColumnProjectionTypeInfo( columnNameProperty , columnTypeProperty , projectionColumnNames );
}
inspector = MDSObjectInspectorFactory.craeteObjectInspectorFromTypeInfo( rootType );
}
Example 3: initialize
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
tableProperties = tbl;
if (LOG.isDebugEnabled()) {
LOG.debug("<<<<<<<<<< serde initialize : " + tbl.getProperty("name"));
}
serdeParams = new LazySerDeParameters(conf, tbl, getClass().getName());
objectInspector = createLazyPhoenixInspector(conf, tbl);
String inOutWork = tbl.getProperty(PhoenixStorageHandlerConstants.IN_OUT_WORK);
if (inOutWork == null) {
return;
}
serializer = new PhoenixSerializer(conf, tbl);
row = new PhoenixRow(Lists.transform(serdeParams.getColumnNames(), new Function<String, String>() {
@Override
public String apply(String input) {
return input.toUpperCase();
}
}));
}
Example 4: initialize
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
LOG.debug("Initializing");
// get an ObjectInspector via reflection for the EthereumBlock class to reuse existing functionality
ethereumBlockObjectInspector = ObjectInspectorFactory
.getReflectionObjectInspector(EthereumBlock.class,
ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
// pass tbl properties to Configuration
String maxBlockSizeStr=tbl.getProperty(EthereumBlockSerde.CONF_MAXBLOCKSIZE);
if (maxBlockSizeStr!=null) {
conf.setInt(EthereumBlockSerde.CONF_MAXBLOCKSIZE, Integer.parseInt(maxBlockSizeStr));
LOG.info("Setting max block size: "+maxBlockSizeStr);
}
String useDirectBufferStr=tbl.getProperty(EthereumBlockSerde.CONF_USEDIRECTBUFFER);
if (useDirectBufferStr!=null) {
conf.setBoolean(EthereumBlockSerde.CONF_USEDIRECTBUFFER, Boolean.parseBoolean(useDirectBufferStr));
LOG.info("Use direct buffer: "+useDirectBufferStr);
}
LOG.debug("Finish initializion");
}
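A minimal sketch of driving this initialize method; the property keys are the ones read in the code above, while the concrete values are illustrative assumptions:
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;

// Hypothetical setup: limit the maximum block size to 1 MiB and disable direct buffers.
Properties tbl = new Properties();
tbl.setProperty(EthereumBlockSerde.CONF_MAXBLOCKSIZE, String.valueOf(1024 * 1024));
tbl.setProperty(EthereumBlockSerde.CONF_USEDIRECTBUFFER, "false");

EthereumBlockSerde serde = new EthereumBlockSerde();
serde.initialize(new Configuration(), tbl);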
Example 5: deserialize
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
@Test
public void deserialize() throws IOException, EthereumBlockReadException, SerDeException{
EthereumBlockSerde testSerde = new EthereumBlockSerde();
// read an EthereumBlock from the test data for block 1346406
ClassLoader classLoader = getClass().getClassLoader();
String fileName="eth1346406.bin";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();
File file = new File(fullFileNameString);
FileInputStream fin = new FileInputStream(file);
boolean direct=false;
EthereumBlockReader ebr = new EthereumBlockReader(fin,EthereumHiveSerdeTest.DEFAULT_MAXSIZE_ETHEREUMBLOCK,EthereumHiveSerdeTest.DEFAULT_BUFFERSIZE,direct );
EthereumBlock block = ebr.readBlock();
Object deserializedObject = testSerde.deserialize(block);
assertTrue( deserializedObject instanceof EthereumBlock,"Deserialized Object is of type EthereumBlock");
EthereumBlock deserializedBitcoinBlockStruct = (EthereumBlock)deserializedObject;
assertEquals( 6, deserializedBitcoinBlockStruct.getEthereumTransactions().size(),"Block contains 6 transactions");
assertEquals(0, deserializedBitcoinBlockStruct.getUncleHeaders().size(),"Block contains 0 uncleHeaders");
byte[] expectedParentHash = new byte[] {(byte)0xBA,(byte)0x6D,(byte)0xD2,(byte)0x60,(byte)0x12,(byte)0xB3,(byte)0x71,(byte)0x90,(byte)0x48,(byte)0xF3,(byte)0x16,(byte)0xC6,(byte)0xED,(byte)0xB3,(byte)0x34,(byte)0x9B,(byte)0xDF,(byte)0xBD,(byte)0x61,(byte)0x31,(byte)0x9F,(byte)0xA9,(byte)0x7C,(byte)0x61,(byte)0x6A,(byte)0x61,(byte)0x31,(byte)0x18,(byte)0xA1,(byte)0xAF,(byte)0x30,(byte)0x67};
assertArrayEquals( expectedParentHash, deserializedBitcoinBlockStruct.getEthereumBlockHeader().getParentHash(),"Block contains a correct 32 byte parent hash");
}
Example 6: deserialize
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
@Override
public Object deserialize(Writable writable)
throws SerDeException {
Row row = (Row) writable;
// Since this implementation uses a StructObjectInspector, return a list of deserialized values
// in the same order as the original properties.
int i = 0;
for (Map.Entry<String, TypeInfo> column : _columns) {
String columnName = column.getKey();
TypeInfo type = column.getValue();
// Get the raw value from traversing the JSON map
Object rawValue = getRawValue(columnName, row);
// Deserialize the value to the expected type
Object value = deserialize(type, rawValue);
_values.set(i++, value);
}
return _values;
}
Example 7: deserializePrimitive
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
/**
* Deserializes a primitive to its corresponding Java type, doing a best-effort conversion when necessary.
*/
private Object deserializePrimitive(PrimitiveTypeInfo type, Object value)
throws SerDeException {
switch (type.getPrimitiveCategory()) {
case VOID:
return null;
case STRING:
return deserializeString(value);
case BOOLEAN:
return deserializeBoolean(value);
case BYTE:
case SHORT:
case INT:
case LONG:
case FLOAT:
case DOUBLE:
return deserializeNumber(value, type);
case DATE:
case TIMESTAMP:
return deserializeDate(value, type);
default:
throw new SerDeException("Unsupported type: " + type.getPrimitiveCategory());
}
}
Example 8: deserializeNumber
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
private Object deserializeNumber(Object value, PrimitiveTypeInfo type)
throws SerDeException {
// Note that only numbers and booleans are supported. All other types cannot be deserialized. In particular
// String representations of numbers are not parsed.
Number number;
if (value instanceof Number) {
number = (Number) value;
} else if (value instanceof Boolean) {
number = ((Boolean) value) ? (byte) 1 : 0;
} else {
throw new SerDeException("Value is not a " + type + ": " + value);
}
switch (type.getPrimitiveCategory()) {
case BYTE: return number.byteValue();
case SHORT: return number.shortValue();
case INT: return number.intValue();
case LONG: return number.longValue();
case FLOAT: return number.floatValue();
case DOUBLE: return number.doubleValue();
}
throw new SerDeException("Primitive number did not match any expected categories"); // Unreachable
}
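To make the coercion rules explicit outside the private method, here is a standalone restatement; the helper name is hypothetical and not part of the original class:
// Hypothetical helper mirroring the rules above: Numbers pass through, Booleans map to 1/0,
// and everything else (including String representations of numbers) is rejected.
static Number coerceToNumber(Object value) {
    if (value instanceof Number) {
        return (Number) value;
    }
    if (value instanceof Boolean) {
        return ((Boolean) value) ? (byte) 1 : (byte) 0;
    }
    throw new IllegalArgumentException("Value is not a number: " + value); // e.g. "42" as a String
}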
Example 9: deserializeDate
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
private Object deserializeDate(Object value, PrimitiveTypeInfo type)
throws SerDeException {
long ts;
// Dates can be either ISO8601 Strings or numeric timestamps. Any other data type or format cannot be
// deserialized.
if (value instanceof String) {
try {
ts = JsonHelper.parseTimestamp((String) value).getTime();
} catch (Exception e) {
throw new SerDeException("Invalid time string: " + value);
}
} else if (value instanceof Number) {
ts = ((Number) value).longValue();
} else if (value instanceof java.util.Date) {
ts = ((java.util.Date) value).getTime();
} else {
throw new SerDeException("Invalid time value: " + value);
}
if (type.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE) {
return new Date(ts);
} else {
return new Timestamp(ts);
}
}
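A simplified standalone sketch of the same date handling; JsonHelper.parseTimestamp is replaced here by strict ISO-8601 parsing via java.time, so the original helper may accept more string formats than this version does:
import java.sql.Date;
import java.sql.Timestamp;
import java.time.Instant;

// Hypothetical helper: accepts ISO-8601 strings (e.g. "2016-05-01T00:00:00Z"), epoch-millisecond
// numbers, and java.util.Date values, and returns either a java.sql.Date or a java.sql.Timestamp.
static Object toHiveDate(Object value, boolean asDate) {
    long ts;
    if (value instanceof String) {
        ts = Instant.parse((String) value).toEpochMilli();
    } else if (value instanceof Number) {
        ts = ((Number) value).longValue();
    } else if (value instanceof java.util.Date) {
        ts = ((java.util.Date) value).getTime();
    } else {
        throw new IllegalArgumentException("Invalid time value: " + value);
    }
    return asDate ? new Date(ts) : new Timestamp(ts);
}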
Example 10: deserializeStruct
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
private Object deserializeStruct(StructTypeInfo type, Object data)
throws SerDeException {
if (!(data instanceof Map)) {
throw new SerDeException("Value not of type map");
}
//noinspection unchecked
Map<String, Object> map = (Map<String, Object>) data;
List<String> fieldNames = type.getAllStructFieldNames();
List<TypeInfo> fieldTypes = type.getAllStructFieldTypeInfos();
// When deserializing a struct the returned value is a list of values in the same order as the field names.
List<Object> values = Lists.newArrayListWithCapacity(fieldNames.size());
for (int i=0; i < fieldNames.size(); i++) {
Object rawValue = getRawValueOrNullIfAbsent(fieldNames.get(i), map);
Object value = deserialize(fieldTypes.get(i), rawValue);
values.add(value);
}
return values;
}
Example 11: deserializeMap
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
private Object deserializeMap(MapTypeInfo type, Object data)
throws SerDeException {
if (!(data instanceof Map)) {
throw new SerDeException("Value not of type map");
}
//noinspection unchecked
Map<String, Object> map = (Map<String, Object>) data;
Map<Object, Object> values = Maps.newHashMap();
for (Map.Entry<String, Object> entry : map.entrySet()) {
Object key = deserialize(type.getMapKeyTypeInfo(), entry.getKey());
Object value = deserialize(type.getMapValueTypeInfo(), entry.getValue());
values.put(key, value);
}
return values;
}
Example 12: deserializeList
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
private Object deserializeList(ListTypeInfo type, Object data)
throws SerDeException {
if (!(data instanceof List)) {
throw new SerDeException("Value not of type list");
}
//noinspection unchecked
List<Object> list = (List<Object>) data;
List<Object> values = Lists.newArrayListWithCapacity(list.size());
for (Object entry : list) {
Object value = deserialize(type.getListElementTypeInfo(), entry);
values.add(value);
}
return values;
}
Example 13: initialize
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
serdeParams = ShimsLoader.getHiveShims()
.getSerDeParametersShim(conf, tbl, getClass().getName());
String specifiedColumnMapping = tbl.getProperty(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING);
for (TypeInfo type : serdeParams.getColumnTypes()) {
if (HiveDynamoDBTypeFactory.getTypeObjectFromHiveType(type.getTypeName()) == null) {
throw new SerDeException("Unsupported type: " + type.getTypeName());
}
}
log.info("Provided column mapping: " + specifiedColumnMapping);
columnMappings = Maps.newHashMap();
if (!Strings.isNullOrEmpty(specifiedColumnMapping)) {
columnMappings = HiveDynamoDBUtil.getHiveToDynamoDBSchemaMapping(specifiedColumnMapping);
}
addDefaultColumnMappings(serdeParams.getColumnNames());
log.info("Final column mapping: " + columnMappings);
objectInspector = new DynamoDBObjectInspector(serdeParams.getColumnNames(), serdeParams
.getColumnTypes(), columnMappings);
}
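A sketch of the table properties this initialize method expects. The mapping property key comes from the code above, while the column lists, the mapping value format (Hive column to DynamoDB attribute pairs), and the SerDe class name are assumptions for illustration:
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;

Properties tbl = new Properties();
tbl.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "bigint,string");
// Assumed mapping format: hiveColumn:dynamoDbAttribute pairs, comma separated.
tbl.setProperty(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "id:Id,name:Name");

DynamoDBSerDe serde = new DynamoDBSerDe(); // hypothetical SerDe class name for this snippet
serde.initialize(new Configuration(), tbl);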
Example 14: getLineSerde
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
public static LazySimpleSerDe getLineSerde(@Nonnull final PrimitiveObjectInspector... OIs)
throws SerDeException {
if (OIs.length == 0) {
throw new IllegalArgumentException("OIs must be specified");
}
LazySimpleSerDe serde = new LazySimpleSerDe();
Configuration conf = new Configuration();
Properties tbl = new Properties();
StringBuilder columnNames = new StringBuilder();
StringBuilder columnTypes = new StringBuilder();
for (int i = 0; i < OIs.length; i++) {
columnNames.append('c').append(i + 1).append(',');
columnTypes.append(OIs[i].getTypeName()).append(',');
}
columnNames.deleteCharAt(columnNames.length() - 1);
columnTypes.deleteCharAt(columnTypes.length() - 1);
tbl.setProperty("columns", columnNames.toString());
tbl.setProperty("columns.types", columnTypes.toString());
serde.initialize(conf, tbl);
return serde;
}
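A minimal usage sketch for this helper, assuming it is visible to the caller; the input line and column types are illustrative:
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

// Columns are auto-named c1, c2, ...; LazySimpleSerDe's default field delimiter is Ctrl-A (\u0001).
LazySimpleSerDe serde = getLineSerde(
    PrimitiveObjectInspectorFactory.javaIntObjectInspector,
    PrimitiveObjectInspectorFactory.javaStringObjectInspector);
Object row = serde.deserialize(new Text("42\u0001hello"));
System.out.println(serde.getObjectInspector().getTypeName()); // struct<c1:int,c2:string>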
Example 15: serialize
import org.apache.hadoop.hive.serde2.SerDeException; // import the dependent package/class
@Override
public Writable serialize(Object obj, ObjectInspector objectInspector) throws SerDeException {
if (!objectInspector.getCategory().equals(ObjectInspector.Category.STRUCT)) {
throw new SerDeException("Cannot serialize " + objectInspector.getCategory() + ". Can only serialize a struct");
}
StructObjectInspector inspector = (StructObjectInspector) objectInspector;
List<? extends StructField> fields = inspector.getAllStructFieldRefs();
Writable[] arr = new Writable[fields.size()];
for (int i = 0; i < fields.size(); i++) {
StructField field = fields.get(i);
Object subObj = inspector.getStructFieldData(obj, field);
ObjectInspector subInspector = field.getFieldObjectInspector();
arr[i] = createPrimitive(subObj, (PrimitiveObjectInspector) subInspector);
}
serdeSize = arr.length;
return new ArrayWritable(Writable.class, arr);
}