

Java SerializationException Class Code Examples

This article collects typical usage examples of the Java class org.apache.kafka.common.errors.SerializationException. If you are wondering what the SerializationException class is for, how it is used, or are looking for concrete examples of it, the curated code samples below may help.


The SerializationException class belongs to the org.apache.kafka.common.errors package. A total of 15 code examples of the class are shown below, ordered by popularity.

Example 1: send

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
public Status send(String key, Task data) {
    try {
        log.debug("Publishing Request data to Kafka. Topic:{}, Key: {}, value: {}", topic, key, data);
        kafkaProducer.send(topic, key, data).get(); // send data right now.
        log.info("Publishing SUCCESSFUL");
    } catch (InterruptException ie) {
        log.error("Publisher thread interrupted. Exception: {}. Value: {}", ie, data);
        return Status.FAILURE;
    } catch (SerializationException se) {
        log.error("Supplied object could not be published due to serialization issues. Exception: {}", se);
        return Status.FAILURE;
    } catch (Exception e) {
        log.error("Error occurred while publishing task on Kafka. Exception: {}. Key: {}. Value{}", e, key, data);
        return Status.FAILURE;
    }
    return Status.SUCCESS;
}
 
Author: dixantmittal, Project: scalable-task-scheduler, Lines of code: 18, Source file: RequestProducer.java
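For context on why the method above guards send() with these particular catch blocks: SerializationException is raised from the send() call itself when the configured key or value serializer fails, whereas most broker-side problems only surface later from the returned Future. The following is a minimal, self-contained sketch of that behaviour; the topic name, broker address and the plain StringSerializer are illustrative assumptions, not code from the project above.

import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.StringSerializer;

public class ProducerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            try {
                // Serialization happens inside send(); a failing serializer throws here,
                // before the record is handed to the I/O thread.
                producer.send(new ProducerRecord<>("demo-topic", "key", "value")).get();
            } catch (SerializationException se) {
                System.err.println("Record could not be serialized: " + se.getMessage());
            } catch (InterruptedException | ExecutionException e) {
                // Broker or network failures are reported through the Future instead.
                System.err.println("Send failed: " + e.getMessage());
            }
        }
    }
}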

Example 2: deserialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@Override
public T deserialize(String topic, byte[] data) {
	if (data == null) {
		return null;
	}
	try {
		return JsonUtil.decode(data, type);
	} catch (Exception ex) {
		throw new SerializationException(ex);
	}
}
 
Author: jiumao-org, Project: wechat-mall, Lines of code: 12, Source file: GenericDeserializer.java

Example 3: serialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@Override
public byte[] serialize(String topic, T payload) {
	try {
		byte[] result = null;

		if (payload != null) {
			LOGGER.debug("data='{}'", payload);

			ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
			BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);

			DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(payload.getSchema());
			datumWriter.write(payload, binaryEncoder);

			binaryEncoder.flush();
			byteArrayOutputStream.close();

			result = byteArrayOutputStream.toByteArray();
			
			LOGGER.debug("serialized data='{}'", DatatypeConverter.printHexBinary(result));
		}
		return result;
	} catch (IOException ex) {
		throw new SerializationException("Can't serialize payload='" + payload + "' for topic='" + topic + "'", ex);
	}
}
 
Author: italia, Project: daf-replicate-ingestion, Lines of code: 27, Source file: AvroSerializer.java
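A rough sketch of how a serializer like this can be handed to a producer as an instance instead of a class name. It assumes the AvroSerializer above accepts any Avro GenericRecord and has a no-argument constructor (its class declaration is not shown in the snippet); the schema, topic and broker address are made-up placeholders.

import java.util.Properties;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class AvroProducerSketch {
    public static void main(String[] args) {
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
                        + "[{\"name\":\"name\",\"type\":\"string\"}]}");

        GenericRecord user = new GenericData.Record(schema);
        user.put("name", "alice");

        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        // Serializer instances are passed directly, so no *_SERIALIZER_CLASS_CONFIG
        // entries are needed; AvroSerializer is the class from the example above
        // (assumed generic over GenericRecord, with a no-arg constructor).
        try (KafkaProducer<String, GenericRecord> producer =
                     new KafkaProducer<>(props, new StringSerializer(), new AvroSerializer<GenericRecord>())) {
            producer.send(new ProducerRecord<>("users", "user-1", user));
        }
    }
}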

Example 4: deserialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
	try {
		T result = null;

		if (data != null) {
			LOGGER.debug("data='{}'", DatatypeConverter.printHexBinary(data));

			DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(
					targetType.newInstance().getSchema());
			Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);

			result = (T) datumReader.read(null, decoder);
			LOGGER.debug("deserialized data='{}'", result);
		}
		return result;
	} catch (Exception ex) {
		throw new SerializationException(
				"Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
	}
}
 
Author: italia, Project: daf-replicate-ingestion, Lines of code: 23, Source file: AvroDeserializer.java

Example 5: serialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@Override
public byte[] serialize(final String topic, final CoffeeEvent event) {
    try {
        if (event == null)
            return null;

        final JsonbConfig config = new JsonbConfig()
                .withAdapters(new UUIDAdapter())
                .withSerializers(new EventJsonbSerializer());

        final Jsonb jsonb = JsonbBuilder.create(config);

        return jsonb.toJson(event, CoffeeEvent.class).getBytes(StandardCharsets.UTF_8);
    } catch (Exception e) {
        logger.severe("Could not serialize event: " + e.getMessage());
        throw new SerializationException("Could not serialize event", e);
    }
}
 
Author: sdaschner, Project: scalable-coffee-shop, Lines of code: 19, Source file: EventSerializer.java

Example 6: deserialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
public T deserialize(String topic, byte[] data) {
	if (this.reader == null) {
		this.reader = this.objectMapper.readerFor(this.targetType);
	}
	try {
		T result = null;
		if (data != null) {
			result = this.reader.readValue(data);
		}
		return result;
	}
	catch (IOException e) {
		throw new SerializationException("Can't deserialize data [" + Arrays.toString(data) +
				"] from topic [" + topic + "]", e);
	}
}
 
Author: kloiasoft, Project: eventapis, Lines of code: 17, Source file: JsonDeserializer.java
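Because the target type and ObjectMapper of this deserializer live in instance fields, it is typically handed to the consumer as an instance rather than configured by class name. Below is a minimal sketch under those assumptions; it also assumes a 2.x-or-newer client for poll(Duration) and a constructor taking the target class. The Order POJO, topic, group id and broker address are invented for illustration.

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class JsonConsumerSketch {

    public static class Order {          // hypothetical payload type
        public String id;
        public double amount;
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "orders-reader");

        // JsonDeserializer is the class from the example above; the (Class<T>)
        // constructor is an assumption, it is not shown in the snippet.
        try (KafkaConsumer<String, Order> consumer =
                     new KafkaConsumer<>(props, new StringDeserializer(), new JsonDeserializer<>(Order.class))) {
            consumer.subscribe(Collections.singletonList("orders"));
            ConsumerRecords<String, Order> records = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, Order> record : records) {
                System.out.println(record.value().id);
            }
        }
    }
}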

Example 7: toConnectData

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    JsonNode jsonValue;
    try {
        jsonValue = deserializer.deserialize(topic, value);
    } catch (SerializationException e) {
        throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e);
    }

    if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload")))
        throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." +
                " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration.");

    // The deserialized data should either be an envelope object containing the schema and the payload or the schema
    // was stripped during serialization and we need to fill in an all-encompassing schema.
    if (!enableSchemas) {
        ObjectNode envelope = JsonNodeFactory.instance.objectNode();
        envelope.set("schema", null);
        envelope.set("payload", jsonValue);
        jsonValue = envelope;
    }

    return jsonToConnect(jsonValue);
}
 
Author: YMCoding, Project: kafka-0.11.0.0-src-with-comment, Lines of code: 25, Source file: JsonConverter.java
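The schemas.enable behaviour checked above can be exercised without a Connect worker, because a converter is just configured and then called directly. A small standalone sketch; the topic name and JSON payload are placeholders.

import java.nio.charset.StandardCharsets;
import java.util.Collections;

import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.json.JsonConverter;

public class JsonConverterSketch {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();

        // With schemas.enable=false the raw JSON is wrapped in a schema-less envelope,
        // which is the branch at the end of toConnectData above.
        converter.configure(Collections.singletonMap("schemas.enable", "false"), false);

        byte[] value = "{\"name\":\"alice\"}".getBytes(StandardCharsets.UTF_8);
        SchemaAndValue connectData = converter.toConnectData("demo-topic", value);
        System.out.println(connectData.value());
    }
}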

Example 8: serializer

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@Override
public Serializer<T> serializer() {
    return new Serializer<T>() {

        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {

        }

        @Override
        public byte[] serialize(String topic, T data) {
            try {
                return mapper.writeValueAsBytes(data);
            } catch (Exception e) {
                throw new SerializationException("Error serializing JSON message", e);
            }
        }

        @Override
        public void close() {

        }
    };

}
 
Author: amient, Project: hello-kafka-streams, Lines of code: 26, Source file: JsonPOJOSerde.java

Example 9: deserializer

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@Override
public Deserializer<T> deserializer() {
    return new Deserializer<T>() {
        @Override
        public void configure(Map<String, ?> configs, boolean isKey) {

        }

        @Override
        public T deserialize(String topic, byte[] data) {
            T result;
            try {
                result = mapper.readValue(data, cls);
            } catch (Exception e) {
                throw new SerializationException(e);
            }

            return result;
        }

        @Override
        public void close() {

        }
    };
}
 
Author: amient, Project: hello-kafka-streams, Lines of code: 27, Source file: JsonPOJOSerde.java
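Since serializer() and deserializer() above are built from the same ObjectMapper, a quick way to see the Serde in action is a local round trip. The Greeting POJO and the (Class<T>) constructor are assumptions about the surrounding JsonPOJOSerde class, which is not shown in full here.

public class SerdeRoundTripSketch {

    public static class Greeting {       // hypothetical payload type
        public String message;
    }

    public static void main(String[] args) {
        // JsonPOJOSerde is the class the two methods above belong to; a constructor
        // taking the target class is assumed.
        JsonPOJOSerde<Greeting> serde = new JsonPOJOSerde<>(Greeting.class);

        Greeting in = new Greeting();
        in.message = "hello";

        byte[] bytes = serde.serializer().serialize("demo-topic", in);
        Greeting out = serde.deserializer().deserialize("demo-topic", bytes);

        System.out.println(out.message);   // prints "hello"
    }
}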

Example 10: serializeVal

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
private byte[] serializeVal(String topic, Mutation op) {

    Struct record = getRecord(op);
    byte[] bytes = null;

    if (record != null) {
        try {
            bytes = converter.fromConnectData(topic, record.schema(), record);
        } catch (Exception e) {
            logger.error("KafkaAvroSerializer serialization error: ", e);
            throw new SerializationException("Failed to serialize Avro object, with error: ", e);
        }
    }
    return bytes;
}
 
Author: rogers, Project: change-data-capture, Lines of code: 18, Source file: SpecificAvroMutationSerializer.java

Example 11: deserialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
  LOGGER.debug("data to deserialize='{}'", DatatypeConverter.printHexBinary(data));
  try {
    // get the schema
    Schema schema = targetType.newInstance().getSchema();

    Injection<GenericRecord, byte[]> genericRecordInjection = GenericAvroCodecs.toBinary(schema);
    GenericRecord genericRecord = genericRecordInjection.invert((byte[]) data).get();
    T result = (T) SpecificData.get().deepCopy(schema, genericRecord);

    LOGGER.debug("data='{}'", result);
    return result;
  } catch (Exception e) {
    throw new SerializationException(
        "Can't deserialize data [" + Arrays.toString(data) + "] from topic [" + topic + "]", e);
  }
}
 
Author: code-not-found, Project: spring-kafka, Lines of code: 20, Source file: AvroDeserializer.java

Example 12: deserialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@SuppressWarnings("unchecked")
@Override
public T deserialize(String topic, byte[] data) {
  try {
    T result = null;

    if (data != null) {
      LOGGER.debug("data='{}'", DatatypeConverter.printHexBinary(data));

      DatumReader<GenericRecord> datumReader =
          new SpecificDatumReader<>(targetType.newInstance().getSchema());
      Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);

      result = (T) datumReader.read(null, decoder);
      LOGGER.debug("deserialized data='{}'", result);
    }
    return result;
  } catch (Exception ex) {
    throw new SerializationException(
        "Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
  }
}
 
Author: code-not-found, Project: spring-kafka, Lines of code: 23, Source file: AvroDeserializer.java

Example 13: deserialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@Override
public T deserialize(String topic, byte[] data) {

    if (data == null)
        return null;

    ByteArrayInputStream b = new ByteArrayInputStream(data);
    ObjectInputStream o = null;

    try {
        o = new ObjectInputStream(b);
        return (T) o.readObject();
    } catch (Exception e) {
        throw new SerializationException("Error when deserializing", e);
    } finally {

        try {
            b.close();
            if (o != null) {
                o.close();
            }
        } catch (IOException ioEx) {
            // ignore failures while closing the streams
        }
    }
}
 
Author: strimzi, Project: amqp-kafka-bridge, Lines of code: 27, Source file: DefaultDeserializer.java
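The class above only covers the consuming side. A matching producer-side counterpart based on plain Java serialization could look roughly like this; the class name and error message are our own choices, not taken from the project.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.Map;

import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;

public class DefaultSerializer<T> implements Serializer<T> {

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to configure
    }

    @Override
    public byte[] serialize(String topic, T data) {
        if (data == null) {
            return null;
        }
        try (ByteArrayOutputStream b = new ByteArrayOutputStream();
             ObjectOutputStream o = new ObjectOutputStream(b)) {
            o.writeObject(data);
            o.flush();
            return b.toByteArray();
        } catch (IOException e) {
            throw new SerializationException("Error when serializing", e);
        }
    }

    @Override
    public void close() {
        // nothing to close
    }
}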

Example 14: serialize

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
@Override
public byte[] serialize(final String topic, final GenericRow genericRow) {
  if (genericRow == null) {
    return null;
  }
  try {
    GenericRecord avroRecord = new GenericData.Record(avroSchema);
    for (int i = 0; i < genericRow.getColumns().size(); i++) {
      if (fields.get(i).schema().getType() == Schema.Type.ARRAY) {
        avroRecord.put(fields.get(i).name(), Arrays.asList((Object[]) genericRow.getColumns().get(i)));
      } else {
        avroRecord.put(fields.get(i).name(), genericRow.getColumns().get(i));
      }
    }
    return kafkaAvroSerializer.serialize(topic, avroRecord);
  } catch (Exception e) {
    throw new SerializationException(e);
  }
}
 
Author: confluentinc, Project: ksql, Lines of code: 20, Source file: KsqlGenericRowAvroSerializer.java

Example 15: toConnectData

import org.apache.kafka.common.errors.SerializationException; // import the required package/class
/** {@inheritDoc} */
@Override public SchemaAndValue toConnectData(String topic, byte[] bytes) {
    CacheEvent evt;

    try {
        evt = deserializer.deserialize(topic, bytes);
    }
    catch (SerializationException e) {
        throw new DataException("Failed to convert to Kafka Connect data due to a serialization error", e);
    }

    if (evt == null) {
        return SchemaAndValue.NULL;
    }
    return new SchemaAndValue(null, evt);
}
 
Author: apache, Project: ignite, Lines of code: 17, Source file: CacheEventConverter.java


Note: the org.apache.kafka.common.errors.SerializationException examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective authors; copyright in the source code remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.