This article collects typical usage examples of the Java class org.apache.kafka.common.errors.SerializationException. If you are wondering what SerializationException is for, how to use it, or what real-world code that uses it looks like, the curated examples below should help.
The SerializationException class belongs to the org.apache.kafka.common.errors package. Fifteen code examples are shown below, ordered by popularity.
Example 1: send
import org.apache.kafka.common.errors.SerializationException; // required import
public Status send(String key, Task data) {
    try {
        log.debug("Publishing request data to Kafka. Topic: {}, key: {}, value: {}", topic, key, data);
        kafkaProducer.send(topic, key, data).get(); // block on the future so the send is synchronous
        log.info("Publishing SUCCESSFUL");
    } catch (InterruptException ie) {
        log.error("Publisher thread interrupted. Value: {}", data, ie);
        return Status.FAILURE;
    } catch (SerializationException se) {
        log.error("Supplied object could not be published due to serialization issues.", se);
        return Status.FAILURE;
    } catch (Exception e) {
        log.error("Error occurred while publishing task on Kafka. Key: {}. Value: {}", key, data, e);
        return Status.FAILURE;
    }
    return Status.SUCCESS;
}
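A detail worth knowing with this pattern: with the stock KafkaProducer (the send(topic, key, data) call above is presumably a project-local wrapper), a SerializationException is thrown synchronously from send() itself and never reaches the returned Future. A minimal sketch, assuming a configured KafkaProducer<String, Task> named producer:

try {
    producer.send(new ProducerRecord<>(topic, key, data)); // serialization happens inside send()
} catch (SerializationException se) {
    // thrown synchronously when the configured serializer rejects the record,
    // so catching around send() itself is the right place
}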
Example 2: deserialize
import org.apache.kafka.common.errors.SerializationException; // required import
@Override
public T deserialize(String topic, byte[] data) {
    if (data == null) {
        return null; // Kafka passes null bytes for tombstone records
    }
    try {
        return JsonUtil.decode(data, type);
    } catch (Exception ex) {
        throw new SerializationException(ex);
    }
}
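A sketch of plugging such a deserializer into a consumer, assuming the class is named JsonDeserializer, exposes a constructor taking the target type, and that Order is your payload class (JsonUtil above is project-local):

Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(ConsumerConfig.GROUP_ID_CONFIG, "orders");
KafkaConsumer<String, Order> consumer =
        new KafkaConsumer<>(props, new StringDeserializer(), new JsonDeserializer<>(Order.class));

Passing instances to the constructor sidesteps the reflective instantiation Kafka would otherwise do, which matters here because the deserializer needs the target type at construction time.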
Example 3: serialize
import org.apache.kafka.common.errors.SerializationException; // required import
@Override
public byte[] serialize(String topic, T payload) {
    try {
        byte[] result = null;
        if (payload != null) {
            LOGGER.debug("data='{}'", payload);
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
            DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(payload.getSchema());
            datumWriter.write(payload, binaryEncoder);
            binaryEncoder.flush();
            byteArrayOutputStream.close();
            result = byteArrayOutputStream.toByteArray();
            LOGGER.debug("serialized data='{}'", DatatypeConverter.printHexBinary(result));
        }
        return result;
    } catch (IOException ex) {
        throw new SerializationException("Can't serialize payload='" + payload + "' for topic='" + topic + "'", ex);
    }
}
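Note that raw Avro binary encoding like this embeds no schema in the bytes; the reader must supply a compatible schema on its own, which is exactly what the deserializer in Example 4 does by recovering it from the target type. If the serializer is registered by class name, it needs a public no-arg constructor (the class name AvroSerializer is assumed here):

props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroSerializer.class.getName());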
Example 4: deserialize
import org.apache.kafka.common.errors.SerializationException; // required import
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
    try {
        T result = null;
        if (data != null) {
            LOGGER.debug("data='{}'", DatatypeConverter.printHexBinary(data));
            DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(
                    targetType.newInstance().getSchema());
            Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
            result = (T) datumReader.read(null, decoder);
            LOGGER.debug("deserialized data='{}'", result);
        }
        return result;
    } catch (Exception ex) {
        throw new SerializationException(
                "Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
    }
}
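A quick round trip, under the assumption that User is an Avro-generated SpecificRecord class and that the classes from Examples 3 and 4 are called AvroSerializer and AvroDeserializer with a Class-taking constructor on the latter:

byte[] bytes = new AvroSerializer<User>().serialize("users", user);
User copy = new AvroDeserializer<>(User.class).deserialize("users", bytes);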
Example 5: serialize
import org.apache.kafka.common.errors.SerializationException; // required import
@Override
public byte[] serialize(final String topic, final CoffeeEvent event) {
    try {
        if (event == null)
            return null;

        final JsonbConfig config = new JsonbConfig()
                .withAdapters(new UUIDAdapter())
                .withSerializers(new EventJsonbSerializer());
        final Jsonb jsonb = JsonbBuilder.create(config);
        return jsonb.toJson(event, CoffeeEvent.class).getBytes(StandardCharsets.UTF_8);
    } catch (Exception e) {
        logger.severe("Could not serialize event: " + e.getMessage());
        throw new SerializationException("Could not serialize event", e);
    }
}
Example 6: deserialize
import org.apache.kafka.common.errors.SerializationException; // required import
@Override
public T deserialize(String topic, byte[] data) {
    if (this.reader == null) {
        this.reader = this.objectMapper.readerFor(this.targetType);
    }
    try {
        T result = null;
        if (data != null) {
            result = this.reader.readValue(data);
        }
        return result;
    }
    catch (IOException e) {
        throw new SerializationException("Can't deserialize data [" + Arrays.toString(data) +
                "] from topic [" + topic + "]", e);
    }
}
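A design note on the lazy initialization above: the ObjectReader is built without synchronization, which is fine here because Jackson's ObjectReader is immutable and thread-safe once created, and a Kafka Deserializer instance is only ever invoked from its single consumer thread.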
Example 7: toConnectData
import org.apache.kafka.common.errors.SerializationException; // required import
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    JsonNode jsonValue;
    try {
        jsonValue = deserializer.deserialize(topic, value);
    } catch (SerializationException e) {
        throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e);
    }

    if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2
            || !jsonValue.has("schema") || !jsonValue.has("payload")))
        throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." +
                " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration.");

    // The deserialized data should either be an envelope object containing the schema and the payload or the schema
    // was stripped during serialization and we need to fill in an all-encompassing schema.
    if (!enableSchemas) {
        ObjectNode envelope = JsonNodeFactory.instance.objectNode();
        envelope.set("schema", null);
        envelope.set("payload", jsonValue);
        jsonValue = envelope;
    }

    return jsonToConnect(jsonValue);
}
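For reference, the two-field envelope this converter insists on when schemas.enable=true looks like:

{"schema": {"type": "int32", "optional": false}, "payload": 42}

Anything else, including bare JSON, trips the DataException above; hence the advice in the error message to set schemas.enable=false for plain JSON data.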
Example 8: serializer
import org.apache.kafka.common.errors.SerializationException; // required import
@Override
public Serializer<T> serializer() {
    return new Serializer<T>() {
        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {
        }

        @Override
        public byte[] serialize(String topic, T data) {
            try {
                return mapper.writeValueAsBytes(data);
            } catch (Exception e) {
                throw new SerializationException("Error serializing JSON message", e);
            }
        }

        @Override
        public void close() {
        }
    };
}
Example 9: deserializer
import org.apache.kafka.common.errors.SerializationException; // required import
@Override
public Deserializer<T> deserializer() {
    return new Deserializer<T>() {
        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {
        }

        @Override
        public T deserialize(String topic, byte[] data) {
            T result;
            try {
                result = mapper.readValue(data, cls);
            } catch (Exception e) {
                throw new SerializationException(e);
            }
            return result;
        }

        @Override
        public void close() {
        }
    };
}
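A Serde built this way (Examples 8 and 9 together) drops straight into Kafka Streams; a sketch assuming the class is named JsonSerde and Order is the payload type:

StreamsBuilder builder = new StreamsBuilder();
KStream<String, Order> orders =
        builder.stream("orders", Consumed.with(Serdes.String(), new JsonSerde<>(Order.class)));

Alternatively, Serdes.serdeFrom(serializer, deserializer) wraps a matching serializer/deserializer pair without hand-writing the Serde class at all.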
Example 10: serializeVal
import org.apache.kafka.common.errors.SerializationException; // required import
private byte[] serializeVal(String topic, Mutation op) {
    Struct record = getRecord(op);
    byte[] bytes = null;
    if (record != null) {
        try {
            bytes = converter.fromConnectData(topic, record.schema(), record);
        } catch (Exception e) {
            logger.error("KafkaAvroSerializer serialization error: ", e);
            throw new SerializationException("Failed to serialize Avro object", e);
        }
    }
    return bytes;
}
Example 11: deserialize
import org.apache.kafka.common.errors.SerializationException; // required import
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
    if (data == null) {
        return null; // guard: Kafka passes null for tombstones, and printHexBinary(null) would NPE
    }
    LOGGER.debug("data to deserialize='{}'", DatatypeConverter.printHexBinary(data));
    try {
        // recover the reader schema from the target SpecificRecord type
        Schema schema = targetType.newInstance().getSchema();
        Injection<GenericRecord, byte[]> genericRecordInjection = GenericAvroCodecs.toBinary(schema);
        GenericRecord genericRecord = genericRecordInjection.invert(data).get();
        T result = (T) SpecificData.get().deepCopy(schema, genericRecord);
        LOGGER.debug("data='{}'", result);
        return result;
    } catch (Exception e) {
        throw new SerializationException(
                "Can't deserialize data [" + Arrays.toString(data) + "] from topic [" + topic + "]", e);
    }
}
Example 12: deserialize
import org.apache.kafka.common.errors.SerializationException; // required import
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
    try {
        T result = null;
        if (data != null) {
            LOGGER.debug("data='{}'", DatatypeConverter.printHexBinary(data));
            DatumReader<GenericRecord> datumReader =
                    new SpecificDatumReader<>(targetType.newInstance().getSchema());
            Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
            result = (T) datumReader.read(null, decoder);
            LOGGER.debug("deserialized data='{}'", result);
        }
        return result;
    } catch (Exception ex) {
        throw new SerializationException(
                "Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
    }
}
Example 13: deserialize
import org.apache.kafka.common.errors.SerializationException; // required import
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
    if (data == null)
        return null;

    ByteArrayInputStream b = new ByteArrayInputStream(data);
    ObjectInputStream o = null;
    try {
        o = new ObjectInputStream(b);
        return (T) o.readObject();
    } catch (Exception e) {
        throw new SerializationException("Error when deserializing", e);
    } finally {
        try {
            b.close();
            if (o != null) {
                o.close();
            }
        } catch (IOException ioEx) {
            // nothing sensible to do if close() fails
        }
    }
}
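Two caveats with this pattern: plain Java serialization will deserialize whatever bytes arrive, so it should only be used on trusted topics, and the manual close dance can be tightened with try-with-resources. A sketch of the same method body:

try (ObjectInputStream o = new ObjectInputStream(new ByteArrayInputStream(data))) {
    return (T) o.readObject();
} catch (Exception e) {
    throw new SerializationException("Error when deserializing", e);
}

Closing the ObjectInputStream also closes the underlying ByteArrayInputStream, so one resource declaration is enough.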
Example 14: serialize
import org.apache.kafka.common.errors.SerializationException; // required import
@Override
public byte[] serialize(final String topic, final GenericRow genericRow) {
    if (genericRow == null) {
        return null;
    }
    try {
        GenericRecord avroRecord = new GenericData.Record(avroSchema);
        for (int i = 0; i < genericRow.getColumns().size(); i++) {
            if (fields.get(i).schema().getType() == Schema.Type.ARRAY) {
                avroRecord.put(fields.get(i).name(), Arrays.asList((Object[]) genericRow.getColumns().get(i)));
            } else {
                avroRecord.put(fields.get(i).name(), genericRow.getColumns().get(i));
            }
        }
        return kafkaAvroSerializer.serialize(topic, avroRecord);
    } catch (Exception e) {
        throw new SerializationException(e);
    }
}
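The kafkaAvroSerializer delegated to here is presumably Confluent's KafkaAvroSerializer, which needs a schema registry configured before use; a minimal, assumed setup:

Map<String, Object> config = new HashMap<>();
config.put("schema.registry.url", "http://localhost:8081");
KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer();
kafkaAvroSerializer.configure(config, false); // false = configuring as a value serializer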
Example 15: toConnectData
import org.apache.kafka.common.errors.SerializationException; // required import
/** {@inheritDoc} */
@Override public SchemaAndValue toConnectData(String topic, byte[] bytes) {
    CacheEvent evt;

    try {
        evt = deserializer.deserialize(topic, bytes);
    }
    catch (SerializationException e) {
        throw new DataException("Failed to convert to Kafka Connect data due to a serialization error", e);
    }

    if (evt == null) {
        return SchemaAndValue.NULL;
    }

    return new SchemaAndValue(null, evt);
}
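Returning SchemaAndValue.NULL for a null payload keeps tombstone records (null-valued messages used for deletions in compacted topics) flowing through Connect instead of failing the conversion.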