本文整理汇总了Java中org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable类的典型用法代码示例。如果您正苦于以下问题:Java AvroGenericRecordWritable类的具体用法?Java AvroGenericRecordWritable怎么用?Java AvroGenericRecordWritable使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
AvroGenericRecordWritable类属于org.apache.hadoop.hive.serde2.avro包,在下文中一共展示了AvroGenericRecordWritable类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: map
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Maps one decoded input record: valid records are hashed and forwarded as
// Avro values on the main output; malformed records are counted and their
// source is routed to a text side-output via MultipleOutputs.
@Override
protected void map(RecordKey key, AvroGenericRecordWritable value,
org.apache.hadoop.mapreduce.Mapper<RecordKey, AvroGenericRecordWritable, RecordKey, AvroValue<Record>>.Context
context)
throws IOException, InterruptedException {
if (key.isValid()) {
// NOTE(review): this branch hashes the field 'recordValue' and writes the
// fields 'recordKey'/'recordWrapped' rather than the 'key'/'value'
// parameters — presumably these instance fields alias or are populated from
// the parameters elsewhere in the mapper; verify against the enclosing class.
key.setHash(recordValue.hashCode());
recordWrapped.datum((Record) value.getRecord());
context.write(recordKey, recordWrapped);
} else {
// Malformed record: bump both counters, then emit the record's source text
// under a per-batch malformed path. 'string' is a reused StringBuilder,
// cleared before building the output path.
context.getCounter(RecordCounter.RECORDS).increment(1);
context.getCounter(RecordCounter.RECORDS_MALFORMED).increment(1);
textValue.set(key.getSource());
string.setLength(0);
multipleOutputs.write(OUTPUT_TEXT, NullWritable.get(), textValue,
string.append(MALFORMED_PATH_PREFIX).append(key.getBatch()).toString());
}
}
示例2: decode
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
/**
 * Decodes a raw Kafka payload into an {@link AvroGenericRecordWritable}.
 *
 * <p>The registry-backed helper resolves the target schema plus the offset and
 * length of the Avro-encoded bytes inside the payload; the binary decoder then
 * reads the record, which is wrapped with the latest known file schema.
 *
 * @param payload the raw message bytes received from Kafka
 * @return the decoded record wrapped for Hive consumption
 * @throws MessageDecoderException if the Avro binary read fails
 */
public AvroGenericRecordWritable decode(byte[] payload) {
  try {
    MessageDecoderHelper decoderHelper = new MessageDecoderHelper(registry,
        topicName, payload).invoke();
    DatumReader<Record> datumReader = new GenericDatumReader<Record>(decoderHelper.getTargetSchema());
    log.debug("Trying to read kafka payload");
    log.debug("buffer: " + decoderHelper.getBuffer());
    log.debug("start: " + decoderHelper.getStart());
    log.debug("length: " + decoderHelper.getLength());
    log.debug("target schema: " + decoderHelper.getTargetSchema());
    log.debug("schema: " + decoderHelper.getSchema());
    GenericRecord decoded = datumReader.read(null,
        decoderFactory.binaryDecoder(decoderHelper.getBuffer().array(),
            decoderHelper.getStart(), decoderHelper.getLength(), null));
    log.debug("Read kafka payload as " + decoded);
    AvroGenericRecordWritable writable = new AvroGenericRecordWritable(decoded);
    writable.setFileSchema(latestSchema);
    return writable;
  } catch (IOException e) {
    throw new MessageDecoderException(e);
  }
}
示例3: getRecordSequenceInputFormat
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
/**
 * Look up the {@link SequenceFileInputFormat} registered for a <code>type</code>.
 *
 * @param type the type key to resolve
 * @return the {@link SequenceFileInputFormat} class registered for that type
 * @throws IOException if no input format is registered for <code>type</code>
 */
public static Class<? extends SequenceFileInputFormat<RecordKey, AvroGenericRecordWritable>> getRecordSequenceInputFormat(String type)
    throws IOException {
  if (RECORD_INPUT_FORMATS.containsKey(type)) {
    return RECORD_INPUT_FORMATS.get(type);
  }
  throw new IOException("Could not find [RecordSequenceInputFormat] for type [" + type + "]");
}
示例4: setup
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Creates the MultipleOutputs helper used by map()/cleanup() to emit
// side-channel output (e.g. malformed-record text files) alongside the main
// map output. Raw types are suppressed because MultipleOutputs is used
// generically across output types.
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
protected void setup(
org.apache.hadoop.mapreduce.Mapper<RecordKey, AvroGenericRecordWritable, RecordKey, AvroValue<Record>>.Context context)
throws IOException, InterruptedException {
multipleOutputs = new MultipleOutputs(context);
}
示例5: getWrappedRecord
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
/**
 * Decodes a raw payload into a writable record.
 *
 * <p>When {@code skipSchemaErrors} is set, decode failures are tolerated and
 * {@code null} is returned so the caller can skip the message; otherwise the
 * failure is rethrown wrapped in an {@link IOException}.
 *
 * @param topicName the Kafka topic the payload came from
 * @param payload the raw message bytes
 * @return the decoded writable, or {@code null} when decoding failed and
 *         schema errors are being skipped
 * @throws IOException if decoding fails and schema errors are not skipped
 */
private AvroGenericRecordWritable getWrappedRecord(String topicName, byte[] payload) throws IOException {
  try {
    return decoder.decode(payload);
  } catch (Exception e) {
    if (skipSchemaErrors) {
      // Best-effort mode: tolerate the bad message and signal "no record".
      return null;
    }
    throw new IOException(e);
  }
}
示例6: convertRecord
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
/**
 * Wraps an Avro {@link GenericRecord} in a Hive-compatible
 * {@link AvroGenericRecordWritable}, tagging it with the output schema and
 * this converter's record-reader ID.
 */
@Override
public Iterable<AvroGenericRecordWritable> convertRecord(Schema outputSchema, GenericRecord inputRecord,
    WorkUnitState workUnit) throws DataConversionException {
  AvroGenericRecordWritable writable = new AvroGenericRecordWritable();
  writable.setRecordReaderID(this.uid);
  writable.setFileSchema(outputSchema);
  writable.setRecord(inputRecord);
  return Lists.newArrayList(writable);
}
示例7: getCurrentValue
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Returns the reader's current record writable. The field is returned
// directly — presumably the same instance is refilled on each advance of the
// reader; callers should not retain it across records (verify against the
// enclosing RecordReader).
@Override
public AvroGenericRecordWritable getCurrentValue() throws IOException, InterruptedException {
return recordWriteable;
}
示例8: createRecordReader
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Creates a reader that parses CSV text splits into (RecordKey,
// AvroGenericRecordWritable) pairs.
@Override
public RecordReader<RecordKey, AvroGenericRecordWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
throws IOException {
return new RecordReaderTextCsv(split, context);
}
示例9: createRecordReader
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Creates a reader for CSV records stored in sequence files. Unlike the text
// variant, this reader takes no constructor arguments — the framework
// initializes it later via initialize(split, context).
@Override
public RecordReader<RecordKey, AvroGenericRecordWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
throws IOException {
return new RecordReaderSequenceCsv();
}
示例10: createRecordReader
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Creates a reader for XML records stored in sequence files; the framework
// initializes it via initialize(split, context).
@Override
public RecordReader<RecordKey, AvroGenericRecordWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
throws IOException {
return new RecordReaderSequenceXml();
}
示例11: createRecordReader
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Creates a reader that parses XML text splits into (RecordKey,
// AvroGenericRecordWritable) pairs.
@Override
public RecordReader<RecordKey, AvroGenericRecordWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
throws IOException {
return new RecordReaderTextXml(split, context);
}
示例12: cleanup
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Closes the MultipleOutputs helper so all side-output files are flushed and
// committed when the task finishes.
@Override
protected void cleanup(
org.apache.hadoop.mapreduce.Mapper<RecordKey, AvroGenericRecordWritable, RecordKey, AvroValue<Record>>.Context context)
throws IOException, InterruptedException {
multipleOutputs.close();
}
示例13: getRecordReader
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Old-API (mapred) factory method: creates a reader that pulls messages from
// Kafka and surfaces them as (KafkaKey, AvroGenericRecordWritable) pairs.
@Override
public RecordReader<KafkaKey, AvroGenericRecordWritable> getRecordReader(
InputSplit split, JobConf conf, Reporter reporter) throws IOException {
return new KafkaRecordReader(split, conf, reporter);
}
示例14: createValue
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
// Old-API (mapred) hook: creates an empty value writable for the framework to
// populate on each call to next(key, value).
@Override
public AvroGenericRecordWritable createValue() {
return new AvroGenericRecordWritable();
}
示例15: decode
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable; //导入依赖的package包/类
/**
 * Decodes a raw message of type {@code M} into a Hive-compatible Avro
 * writable. Implementations define the wire format (e.g. schema-registry
 * framed Avro binary).
 *
 * @param message the raw message to decode
 * @return the decoded record wrapped in an {@link AvroGenericRecordWritable}
 */
public abstract AvroGenericRecordWritable decode(M message) ;