

Java DecoderFactory Class Code Examples

This article collects typical usage examples of the Java class org.apache.avro.io.DecoderFactory. If you are wondering what DecoderFactory is for or how to use it in practice, the curated class examples below should help.


The DecoderFactory class belongs to the org.apache.avro.io package. Fifteen code examples of the class are presented below, ordered roughly by popularity.
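Before the numbered examples, here is a minimal, self-contained sketch of the usual pattern: obtain the shared factory via DecoderFactory.get(), create a decoder over previously encoded bytes, and hand it to a DatumReader. The Greeting schema and class name below are illustrative assumptions, not taken from any example on this page.

import java.io.ByteArrayOutputStream;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;

public class DecoderFactoryQuickStart {
    public static void main(String[] args) throws Exception {
        // Illustrative schema: a record with a single string field.
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"Greeting\","
                + "\"fields\":[{\"name\":\"text\",\"type\":\"string\"}]}");

        // Serialize one record to raw Avro binary (no container-file header).
        GenericRecord record = new GenericData.Record(schema);
        record.put("text", "hello");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
        encoder.flush();

        // DecoderFactory.get() returns a shared factory; binaryDecoder() may reuse
        // the decoder passed as the second argument (null here creates a new one).
        Decoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        GenericRecord copy = new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
        System.out.println(copy); // {"text": "hello"}
    }
}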

Example 1: convertAvroToJson

import org.apache.avro.io.DecoderFactory; // import the required package/class
static void convertAvroToJson(InputStream inputStream, OutputStream outputStream, Schema schema)
        throws IOException {
    DatumReader<Object> reader = new GenericDatumReader<>(schema);
    DatumWriter<Object> writer = new GenericDatumWriter<>(schema);

    BinaryDecoder binaryDecoder = DecoderFactory.get().binaryDecoder(inputStream, null);

    JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, outputStream, true);
    Object datum = null;
    while (!binaryDecoder.isEnd()) {
        datum = reader.read(datum, binaryDecoder);
        writer.write(datum, jsonEncoder);
        jsonEncoder.flush();
    }
    outputStream.flush();
}
 
Author: rkluszczynski, Project: avro-cli, Lines: 17, Source: RawConverterUtil.java
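For orientation, one possible way to invoke the helper above (a sketch only: the file names are placeholders, and the input must contain raw binary-encoded records without an Avro container-file header):

Schema schema = new Schema.Parser().parse(new File("record.avsc"));
try (InputStream in = new FileInputStream("records.bin");
     OutputStream out = new FileOutputStream("records.json")) {
    // Streams each binary-encoded datum back out as pretty-printed JSON.
    convertAvroToJson(in, out, schema);
}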

Example 2: initialize

import org.apache.avro.io.DecoderFactory; // import the required package/class
private void initialize(InputStream in) throws IOException {
    this.vin = DecoderFactory.get().binaryDecoder(in, vin);
    byte[] magic = new byte[DataFileConstants.MAGIC.length];
    try {
        vin.readFixed(magic);
    } catch (IOException e) {
        throw new IOException("Not a data file.", e);
    }
    if (!Arrays.equals(DataFileConstants.MAGIC, magic))
        throw new IOException("Not a data file.");

    long l = vin.readMapStart();
    if (l > 0) {
        do {
            for (long i = 0; i < l; i++) {
                vin.skipString();
                vin.skipBytes();
            }
        } while ((l = vin.mapNext()) != 0);
    }
    vin.readFixed(expectedSync);
}
 
Author: jwoschitz, Project: avrocount, Lines: 23, Source: CountableSkipDataFileStream.java

Example 3: jsonReadWriteExample

import org.apache.avro.io.DecoderFactory; // import the required package/class
public void jsonReadWriteExample() throws IOException {
	Employee employee = Employee.newBuilder().setFirstName("Gaurav")
			.setLastName("Mazra").setSex(SEX.MALE).build();

	DatumWriter<Employee> employeeWriter = new SpecificDatumWriter<>(Employee.class);
	byte[] data;
	try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
		Encoder jsonEncoder = EncoderFactory.get().jsonEncoder(Employee.getClassSchema(), baos);
		employeeWriter.write(employee, jsonEncoder);
		jsonEncoder.flush();
		data = baos.toByteArray();
	}
	
	// serialized data
	System.out.println(new String(data));
	
	DatumReader<Employee> employeeReader = new SpecificDatumReader<>(Employee.class);
	Decoder decoder = DecoderFactory.get().jsonDecoder(Employee.getClassSchema(), new String(data));
	employee = employeeReader.read(null, decoder);
	//data after deserialization
	System.out.println(employee);
}
 
Author: gauravrmazra, Project: gauravbytes, Lines: 23, Source: AvroDatumExample.java

Example 4: binaryReadWriteExample

import org.apache.avro.io.DecoderFactory; // import the required package/class
public void binaryReadWriteExample() throws IOException {
	Employee employee = Employee.newBuilder().setFirstName("Gaurav")
			.setLastName("Mazra").setSex(SEX.MALE).build();

	DatumWriter<Employee> employeeWriter = new SpecificDatumWriter<>(Employee.class);
	byte[] data;
	try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
		Encoder binaryEncoder = EncoderFactory.get().binaryEncoder(baos, null);
		employeeWriter.write(employee, binaryEncoder);
		binaryEncoder.flush();
		data = baos.toByteArray();
	}
	
	// serialized data (printed via Arrays.toString so the bytes are readable)
	System.out.println(Arrays.toString(data));
	
	DatumReader<Employee> employeeReader = new SpecificDatumReader<>(Employee.class);
	Decoder binaryDecoder = DecoderFactory.get().binaryDecoder(data, null);
	employee = employeeReader.read(null, binaryDecoder);
	//data after deserialization
	System.out.println(employee);
}
 
Author: gauravrmazra, Project: gauravbytes, Lines: 23, Source: AvroDatumExample.java

Example 5: processSinglex

import org.apache.avro.io.DecoderFactory; // import the required package/class
/**
 * Process singlex.
 *
 * @throws Exception the exception
 */
public void processSinglex() throws Exception {
	int base = (int) System.currentTimeMillis();
	User user = User.newBuilder().setName("name" + base).setFavoriteColor("color" + base).setFavoriteNumber(base)
			.build();
	DatumWriter<GenericRecord> datumWriterUser = new GenericDatumWriter<GenericRecord>(User.getClassSchema());
	ByteArrayOutputStream baos = new ByteArrayOutputStream();
	byte[] byteData = null;
	try {
		BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(baos, null);
		datumWriterUser.write(user, binaryEncoder);
		binaryEncoder.flush();
		byteData = baos.toByteArray();
	} finally {
		baos.close();
	}
	System.out.println(byteData.length);
	
	DatumReader<GenericRecord> datumReaderUser = new GenericDatumReader<GenericRecord>( User.getClassSchema());
	GenericRecord genericRecord = datumReaderUser.read(null, DecoderFactory.get().binaryDecoder(byteData, null) );
	System.out.println(genericRecord);
	System.out.println( genericRecord.get("name"));
}
 
Author: petezybrick, Project: iote2e, Lines: 28, Source: AvroByteArray.java

Example 6: deserialize

import org.apache.avro.io.DecoderFactory; // import the required package/class
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
	try {
		T result = null;

		if (data != null) {
			LOGGER.debug("data='{}'", DatatypeConverter.printHexBinary(data));

			DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(
					targetType.newInstance().getSchema());
			Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);

			result = (T) datumReader.read(null, decoder);
			LOGGER.debug("deserialized data='{}'", result);
		}
		return result;
	} catch (Exception ex) {
		throw new SerializationException(
				"Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
	}
}
 
Author: italia, Project: daf-replicate-ingestion, Lines: 23, Source: AvroDeserializer.java
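One hypothetical way to wire this deserializer into a Kafka consumer is sketched below; the constructor taking the target class and the Employee record type are illustrative assumptions and do not appear in the snippet above.

Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(ConsumerConfig.GROUP_ID_CONFIG, "avro-demo");
// Deserializer instances are passed directly, so no value.deserializer class name is needed.
KafkaConsumer<String, Employee> consumer = new KafkaConsumer<>(props,
        new StringDeserializer(), new AvroDeserializer<>(Employee.class)); // hypothetical constructor
consumer.subscribe(Collections.singletonList("employees"));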

Example 7: deserializeValue

import org.apache.avro.io.DecoderFactory; // import the required package/class
private Event deserializeValue(byte[] value, boolean parseAsFlumeEvent) throws IOException {
  Event e;
  if (parseAsFlumeEvent) {
    ByteArrayInputStream in =
            new ByteArrayInputStream(value);
    decoder = DecoderFactory.get().directBinaryDecoder(in, decoder);
    if (!reader.isPresent()) {
      reader = Optional.of(
              new SpecificDatumReader<AvroFlumeEvent>(AvroFlumeEvent.class));
    }
    AvroFlumeEvent event = reader.get().read(null, decoder);
    e = EventBuilder.withBody(event.getBody().array(),
            toStringMap(event.getHeaders()));
  } else {
    e = EventBuilder.withBody(value, Collections.EMPTY_MAP);
  }
  return e;
}
 
Author: moueimei, Project: flume-release-1.7.0, Lines: 19, Source: KafkaChannel.java

Example 8: serialize

import org.apache.avro.io.DecoderFactory; // import the required package/class
/**
 * Converts the avro binary data to the json format
 */
@Override
public XContentBuilder serialize(Event event) {
    XContentBuilder builder = null;
    try {
        if (datumReader != null) {
            Decoder decoder = new DecoderFactory().binaryDecoder(event.getBody(), null);
            GenericRecord data = datumReader.read(null, decoder);
            logger.trace("Record in event " + data);
            XContentParser parser = XContentFactory
                    .xContent(XContentType.JSON)
                    .createParser(NamedXContentRegistry.EMPTY, data.toString());
            builder = jsonBuilder().copyCurrentStructure(parser);
            parser.close();
        } else {
            logger.error("Schema File is not configured");
        }
    } catch (IOException e) {
        logger.error("Exception in parsing avro format data but continuing serialization to process further records",
                e.getMessage(), e);
    }
    return builder;
}
 
Author: cognitree, Project: flume-elasticsearch-sink, Lines: 26, Source: AvroSerializer.java

Example 9: testReflect

import org.apache.avro.io.DecoderFactory; // import the required package/class
public static void testReflect(Object value, Type type, String schema)
  throws Exception {

  // check that schema matches expected
  Schema s = ReflectData.get().getSchema(type);
  assertEquals(Schema.parse(schema), s);

  // check that value is serialized correctly
  ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
  ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
  Object after =
    reader.read(null,
                DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
  assertEquals(value, after);
}
 
Author: naver, Project: hadoop, Lines: 18, Source: AvroTestUtil.java

Example 10: EventReader

import org.apache.avro.io.DecoderFactory; // import the required package/class
/**
 * Create a new Event Reader
 * @param in
 * @throws IOException
 */
@SuppressWarnings("deprecation")
public EventReader(DataInputStream in) throws IOException {
  this.in = in;
  this.version = in.readLine();

  Schema myschema = new SpecificData(Event.class.getClassLoader()).getSchema(Event.class);
  Schema.Parser parser = new Schema.Parser();
  this.schema = parser.parse(in.readLine());
  this.reader = new SpecificDatumReader(schema, myschema);
  if (EventWriter.VERSION.equals(version)) {
    this.decoder = DecoderFactory.get().jsonDecoder(schema, in);
  } else if (EventWriter.VERSION_BINARY.equals(version)) {
    this.decoder = DecoderFactory.get().binaryDecoder(in, null);
  } else {
    throw new IOException("Incompatible event log version: " + version);
  }
}
 
Author: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 23, Source: EventReader.java

Example 11: convertJsonToAvro

import org.apache.avro.io.DecoderFactory; // import the required package/class
static void convertJsonToAvro(InputStream inputStream, OutputStream outputStream, Schema schema)
        throws IOException {
    DatumReader<Object> reader = new GenericDatumReader<>(schema);
    DatumWriter<Object> writer = new GenericDatumWriter<>(schema);

    Encoder binaryEncoder = EncoderFactory.get().binaryEncoder(outputStream, null);

    JsonDecoder jsonDecoder = DecoderFactory.get().jsonDecoder(schema, inputStream);
    Object datum = null;
    while (true) {
        try {
            datum = reader.read(datum, jsonDecoder);
        } catch (EOFException eofException) {
            break;
        }
        writer.write(datum, binaryEncoder);
        binaryEncoder.flush();
    }
    outputStream.flush();
}
 
Author: rkluszczynski, Project: avro-cli, Lines: 21, Source: RawConverterUtil.java

Example 12: fromBytes

import org.apache.avro.io.DecoderFactory; // import the required package/class
public Object fromBytes(Schema schema, byte data[]) throws GoraException {
  Schema fromSchema = null;
  if (schema.getType() == Type.UNION) {
    try {
      Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
      int unionIndex = decoder.readIndex();
      List<Schema> possibleTypes = schema.getTypes();
      fromSchema = possibleTypes.get(unionIndex);
      Schema effectiveSchema = possibleTypes.get(unionIndex);
      if (effectiveSchema.getType() == Type.NULL) {
        decoder.readNull();
        return null;
      } else {
        data = decoder.readBytes(null).array();
      }
    } catch (IOException e) {
      LOG.error(e.getMessage());
      throw new GoraException("Error decoding union type: ", e);
    }
  } else {
    fromSchema = schema;
  }
  return fromBytes(encoder, fromSchema, data);
}
 
Author: jianglibo, Project: gora-boot, Lines: 25, Source: AccumuloStore.java

Example 13: processAvroMessage

import org.apache.avro.io.DecoderFactory; // import the required package/class
/**
 * Processes an Avro Blob containing a single message and with no embedded
 * schema. This is the pattern when Avro objects are passed over messaging
 * infrastructure such as Apache Kafka.
 * 
 * @param avroMessage
 *            The Blob that holds the single Avro message object
 * @param avroKey
 *            The Blob that holds the single Avro key object (if passed)
 * @param outStream
 *            The stream to which the JSON string must be submitted
 * @param outTuple
 *            The tuple holding the JSON string
 * @param messageSchema
 *            The schema of the Avro message object
 * @param keySchema
 *            The schema of the Avro key object
 * @throws Exception
 */
private void processAvroMessage(Blob avroMessage, Blob avroKey, StreamingOutput<OutputTuple> outStream,
		OutputTuple outTuple, Schema messageSchema, Schema keySchema) throws Exception {
	// Deserialize message
	GenericDatumReader<GenericRecord> consumer = new GenericDatumReader<GenericRecord>(messageSchema);
	ByteArrayInputStream consumedByteArray = new ByteArrayInputStream(avroMessage.getData());
	Decoder consumedDecoder = DecoderFactory.get().binaryDecoder(consumedByteArray, null);
	GenericRecord consumedDatum = consumer.read(null, consumedDecoder);
	if (LOGGER.isTraceEnabled())
		LOGGER.log(TraceLevel.TRACE, "JSON representation of Avro message: " + consumedDatum.toString());
	outTuple.setString(outputJsonMessage, consumedDatum.toString());
	// Deserialize key (if specified)
	if (avroKey != null) {
		consumer = new GenericDatumReader<GenericRecord>(keySchema);
		consumedByteArray = new ByteArrayInputStream(avroKey.getData());
		consumedDecoder = DecoderFactory.get().binaryDecoder(consumedByteArray, null);
		consumedDatum = consumer.read(null, consumedDecoder);
		if (LOGGER.isTraceEnabled())
			LOGGER.log(TraceLevel.TRACE, "JSON representation of Avro key: " + consumedDatum.toString());
		if (outputJsonKey != null)
			outTuple.setString(outputJsonKey, consumedDatum.toString());
	}
	// Submit new tuple to output port 0
	outStream.submit(outTuple);
}
 
Author: IBMStreams, Project: streamsx.avro, Lines: 44, Source: AvroToJSON.java

Example 14: decode

import org.apache.avro.io.DecoderFactory; // import the required package/class
@Override
public <T> T decode(byte[] bytes, Class<T> type) throws IOException {
	Assert.notNull(bytes, "'bytes' cannot be null");
	Assert.notNull(type, "Class can not be null");
	ByteBuffer buf = ByteBuffer.wrap(bytes);
	byte[] payload = new byte[bytes.length-4];
	Integer schemaId = buf.getInt();
	buf.get(payload);
	Schema schema = schemaRegistryClient.fetch(schemaId);
	DatumReader reader = getDatumReader(type,schema);
	Decoder decoder = DecoderFactory.get().binaryDecoder(payload,null);
	return (T) reader.read(null,decoder);
}
 
Author: viniciusccarvalho, Project: schema-evolution-samples, Lines: 14, Source: AvroCodec.java
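For context, here is a sketch of the matching encode side under the same framing (a 4-byte schema id followed by the Avro binary body). The register call on the schema registry client is a hypothetical API used only to show where the id would come from, and the sketch assumes the datum is Avro generic data.

public byte[] encode(Object datum, Schema schema) throws IOException {
	Integer schemaId = schemaRegistryClient.register(schema); // hypothetical registry API
	ByteArrayOutputStream payload = new ByteArrayOutputStream();
	Encoder encoder = EncoderFactory.get().binaryEncoder(payload, null);
	new GenericDatumWriter<Object>(schema).write(datum, encoder);
	encoder.flush();
	// Same layout decode() expects: 4-byte schema id, then the Avro binary payload.
	ByteBuffer buf = ByteBuffer.allocate(4 + payload.size());
	buf.putInt(schemaId);
	buf.put(payload.toByteArray());
	return buf.array();
}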

Example 15: AvroBlock

import org.apache.avro.io.DecoderFactory; // import the required package/class
AvroBlock(
    byte[] data,
    long numRecords,
    Mode<T> mode,
    String writerSchemaString,
    String codec)
    throws IOException {
  this.mode = mode;
  this.numRecords = numRecords;
  checkNotNull(writerSchemaString, "writerSchemaString");
  Schema writerSchema = internOrParseSchemaString(writerSchemaString);
  Schema readerSchema =
      internOrParseSchemaString(
          MoreObjects.firstNonNull(mode.readerSchemaString, writerSchemaString));
  this.reader =
      (mode.type == GenericRecord.class)
          ? new GenericDatumReader<T>(writerSchema, readerSchema)
          : new ReflectDatumReader<T>(writerSchema, readerSchema);
  this.decoder = DecoderFactory.get().binaryDecoder(decodeAsInputStream(data, codec), null);
}
 
Author: apache, Project: beam, Lines: 21, Source: AvroSource.java


Note: The org.apache.avro.io.DecoderFactory class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their original authors; copyright in the source code remains with those authors, and further use or distribution should follow the license of the corresponding project. Do not reproduce this article without permission.