本文整理汇总了Java中io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient类的典型用法代码示例。如果您正苦于以下问题:Java CachedSchemaRegistryClient类的具体用法?Java CachedSchemaRegistryClient怎么用?Java CachedSchemaRegistryClient使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
CachedSchemaRegistryClient类属于io.confluent.kafka.schemaregistry.client包,在下文中一共展示了CachedSchemaRegistryClient类的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: configureKafkaProducer
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
/**
 * Builds a KafkaProducer whose key and value serializers are Confluent Avro
 * serializers backed by a schema registry.
 *
 * @param producerProps producer configuration; must contain
 *        {@code KAFKA_SCHEMA_REGISTRY_URL_FIELD}, and may contain
 *        {@code KAFKA_SCHEMA_REGISTRY_IDENTITY_MAP_CAPACITY_FIELD} (defaults to 100)
 * @throws IllegalArgumentException if the schema registry url field is absent
 */
@Override public KafkaProducer configureKafkaProducer(Properties producerProps) {
  // Fail fast: a registry url is mandatory for Avro serialization.
  if (!producerProps.containsKey(KAFKA_SCHEMA_REGISTRY_URL_FIELD)) {
    throw new IllegalArgumentException("Field " + KAFKA_SCHEMA_REGISTRY_URL_FIELD + " required.");
  }
  final String registryUrl = (String) producerProps.get(KAFKA_SCHEMA_REGISTRY_URL_FIELD);
  // Identity-map capacity is optional; fall back to a modest default.
  final Integer capacity =
      producerProps.containsKey(KAFKA_SCHEMA_REGISTRY_IDENTITY_MAP_CAPACITY_FIELD)
          ? (Integer) producerProps.get(KAFKA_SCHEMA_REGISTRY_IDENTITY_MAP_CAPACITY_FIELD)
          : Integer.valueOf(100);
  final CachedSchemaRegistryClient registryClient =
      new CachedSchemaRegistryClient(registryUrl, capacity);
  // One serializer instance is shared for both keys and values.
  final KafkaAvroSerializer avroSerializer = new KafkaAvroSerializer(registryClient);
  return new KafkaProducer<Object, Object>(producerProps, avroSerializer, avroSerializer);
}
示例2: AvroSchemaHelper
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
/**
 * Creates an AvroSchemaHelper from DataFactory settings.
 *
 * @param settings DataFactory settings used for parsing; supplies the schema
 *        source, the register-schema flag, and the schema repository URLs.
 */
public AvroSchemaHelper(DataFactory.Settings settings) {
  final List<String> repoUrls = settings.getConfig(SCHEMA_REPO_URLS_KEY);
  final Object source = settings.getConfig(SCHEMA_SOURCE_KEY);
  final boolean register = settings.getConfig(REGISTER_SCHEMA_KEY);
  final boolean readsFromRegistry =
      source == DestinationAvroSchemaSource.REGISTRY || source == OriginAvroSchemaSource.REGISTRY;
  // KafkaTargetConfig passes schema repo URLs in SCHEMA_REPO_URLS_KEY regardless of
  // whether they are for schema source or schema registration, since the two are
  // mutually exclusive. Only build a client when the registry is actually needed.
  registryClient = (readsFromRegistry || register) && !repoUrls.isEmpty()
      ? new CachedSchemaRegistryClient(repoUrls, 1000)
      : null;
  // Small cache so we don't hit the schema repository on every lookup.
  schemaIdCache = CacheBuilder.newBuilder().maximumSize(100).build();
}
示例3: deserialize
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
/**
 * Decodes an Avro-encoded Kafka message into its string representation.
 * The registry-backed decoder is created lazily on first use.
 * NOTE(review): the lazy init is not synchronized — presumably this deserializer
 * instance is confined to a single thread; confirm with the caller.
 */
@Override
public String deserialize(byte[] message) {
  if (this.kafkaAvroDecoder == null) {
    this.kafkaAvroDecoder = new KafkaAvroDecoder(
        new CachedSchemaRegistryClient(this.schemaRegistryUrl, this.identityMapCapacity));
  }
  final Object decoded = this.kafkaAvroDecoder.fromBytes(message);
  return decoded.toString();
}
示例4: KafkaRegistry
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
/**
 * Creates a KafkaRegistry configured from global property management,
 * falling back to sensible defaults when a property is absent.
 */
public KafkaRegistry() {
  super();
  final PropertyManagement props = PropertyManagement.getProperties();
  // Registry endpoint, defaulting to a local Confluent schema registry.
  targetURL = props.getProperty(KafkaPublisherPropertyValues.KAFKA_REGISTRY_URL, "http://localhost:8081");
  maxSchemas = props.asInt(KafkaPublisherPropertyValues.KAFKA_REGISTRY_MAX_SCHEMAS, "1000");
  // Leading byte of the Confluent wire format, configurable for compatibility.
  magic_byte = (byte) props.asInt(KafkaPublisherPropertyValues.KAFKA_REGISTRY_MAGIC_BYTE, "0");
  idSize = props.asInt(KafkaPublisherPropertyValues.KAFKA_REGISTRY_ID_SIZE, "4");
  client = new CachedSchemaRegistryClient(targetURL, maxSchemas);
}
示例5: configure
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
/**
 * Configures this schema retriever from the supplied connector properties,
 * building the registry client and the Avro data converter.
 *
 * @param properties raw connector configuration, parsed by
 *        {@code SchemaRegistrySchemaRetrieverConfig}
 */
@Override
public void configure(Map<String, String> properties) {
SchemaRegistrySchemaRetrieverConfig config =
new SchemaRegistrySchemaRetrieverConfig(properties);
// NOTE(review): identity-map capacity of 0 looks suspicious — this value caps how
// many schemas the client may cache/register per subject; confirm 0 is intended.
schemaRegistryClient =
new CachedSchemaRegistryClient(config.getString(config.LOCATION_CONFIG), 0);
avroData = new AvroData(config.getInt(config.AVRO_DATA_CACHE_SIZE_CONFIG));
}
示例6: AvroProducer
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
public AvroProducer(KsqlConfig ksqlConfig) {
if (ksqlConfig.getString(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY) == null) {
throw new KsqlException("Schema registry url is not set.");
}
this.ksqlConfig = ksqlConfig;
this.schemaRegistryClient = new CachedSchemaRegistryClient(ksqlConfig.getString(KsqlConfig
.SCHEMA_REGISTRY_URL_PROPERTY), 100);
}
示例7: SecorSchemaRegistryClient
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
/**
 * Creates a schema-registry client for Secor, pointed at the registry url
 * from the given config, then runs subclass initialization.
 *
 * @param config Secor configuration supplying the schema registry url
 * @throws RuntimeException wrapping any failure during client construction
 *         or {@code init}; the client is unusable without a registry
 */
public SecorSchemaRegistryClient(SecorConfig config) {
  try {
    // Fix: the original built a Properties object with "schema.registry.url"
    // that was never passed anywhere — dead code, removed.
    schemaRegistryClient = new CachedSchemaRegistryClient(config.getSchemaRegistryUrl(), 30);
    init(config);
  } catch (Exception e) {
    LOG.error("Error initializing schema registry", e);
    // Rethrow with cause preserved so startup fails fast.
    throw new RuntimeException(e);
  }
}
示例8: ConfluentKafkaSchemaRegistry
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
/**
 * Creates a registry wrapper from raw properties, delegating to the
 * client-accepting constructor with a {@code CachedSchemaRegistryClient}.
 * Max schemas per subject defaults to {@code Integer.MAX_VALUE} when
 * {@code CONFLUENT_MAX_SCHEMAS_PER_SUBJECT} is unset.
 * NOTE(review): a missing KAFKA_SCHEMA_REGISTRY_URL yields a null url passed
 * straight to the client — presumably validated upstream; confirm.
 */
public ConfluentKafkaSchemaRegistry(Properties props) {
this(props, new CachedSchemaRegistryClient(props.getProperty(KAFKA_SCHEMA_REGISTRY_URL),
Integer.parseInt(props.getProperty(CONFLUENT_MAX_SCHEMAS_PER_SUBJECT, String.valueOf(Integer.MAX_VALUE)))));
}
示例9: KsqlEngine
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; //导入依赖的package包/类
/**
 * Convenience constructor: builds the engine with a schema registry client
 * created from the configured registry url, delegating to the full
 * constructor. Identity-map capacity is fixed at 1000 schemas.
 */
public KsqlEngine(final KsqlConfig ksqlConfig, final KafkaTopicClient topicClient) {
this(ksqlConfig, topicClient, new CachedSchemaRegistryClient((String) ksqlConfig.get(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY), 1000));
}