

Java StringSerializer Class Code Examples

This article collects typical usage examples of the Java class org.apache.kafka.common.serialization.StringSerializer. If you are wondering what StringSerializer does, how to use it, or what it looks like in real code, the curated examples below should help.


The StringSerializer class belongs to the org.apache.kafka.common.serialization package. Fifteen code examples of the class are shown below, ordered by popularity.
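
Before diving into the examples, here is a minimal, self-contained sketch of the most common use of StringSerializer: wiring it into a producer whose keys and values are plain strings. The broker address localhost:9092 and the topic name demo-topic are placeholders chosen for this sketch, not values taken from any of the projects below.

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class StringSerializerQuickStart {

    public static void main(String[] args) {
        Properties props = new Properties();
        // placeholder broker address for this sketch
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // StringSerializer encodes String keys and values as UTF-8 bytes on the wire
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // try-with-resources closes the producer and flushes any buffered records
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>("demo-topic", "key", "value"));
        }
    }
}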

Example 1: init

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
@Override
public void init(AbstractConfiguration config, String brokerId, BrokerListenerFactory factory) {
    init(config);

    BROKER_TOPIC = BROKER_TOPIC_PREFIX + "." + brokerId;

    logger.trace("Initializing Kafka consumer ...");

    // consumer config
    Properties props = new Properties();
    props.put("bootstrap.servers", config.getString("bootstrap.servers"));
    props.put("group.id", UUIDs.shortUuid());
    props.put("enable.auto.commit", "true");
    props.put("key.serializer", StringSerializer.class.getName());
    props.put("value.serializer", InternalMessageSerializer.class.getName());

    // consumer
    this.consumer = new KafkaConsumer<>(props);

    // consumer worker
    this.worker = new KafkaBrokerWorker(this.consumer, BROKER_TOPIC, factory.newListener());
    this.executor.submit(this.worker);
}
 
Developer: 12315jack, Project: j1st-mqtt, Lines: 24, Source: KafkaBrokerCommunicator.java

Example 2: run

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
public void run(Configuration configuration, Environment environment) throws Exception {
  final CollectorRegistry collectorRegistry = new CollectorRegistry();
  collectorRegistry.register(new DropwizardExports(environment.metrics()));
  environment.admin()
      .addServlet("metrics", new MetricsServlet(collectorRegistry))
      .addMapping("/metrics");

  final PrometheusMetricsReporter reporter = PrometheusMetricsReporter.newMetricsReporter()
      .withCollectorRegistry(collectorRegistry)
      .withConstLabel("service", getName())
      .build();

  final Tracer tracer = getTracer();
  final Tracer metricsTracer = io.opentracing.contrib.metrics.Metrics.decorate(tracer, reporter);
  GlobalTracer.register(metricsTracer);

  final DynamicFeature tracing = new ServerTracingDynamicFeature.Builder(metricsTracer).build();
  environment.jersey().register(tracing);

  final Properties producerConfigs = new Properties();
  producerConfigs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "tweets-kafka:9092");
  producerConfigs.put(ProducerConfig.ACKS_CONFIG, "all");
  producerConfigs.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
  final KafkaProducer<Long, String> kafkaProducer =
      new KafkaProducer<>(producerConfigs, new LongSerializer(), new StringSerializer());
  final Producer<Long, String> tracingKafkaProducer =
      new TracingKafkaProducer<>(kafkaProducer, metricsTracer);
  final ObjectMapper objectMapper = environment.getObjectMapper();
  final TweetEventRepository tweetRepository = new KafkaTweetEventRepository(tracingKafkaProducer, objectMapper);
  final TweetsService tweetsService = new TweetsService(tweetRepository);
  final TweetsResource tweetsResource = new TweetsResource(tweetsService);
  environment.jersey().register(tweetsResource);
}
 
Developer: jeqo, Project: talk-observing-distributed-systems, Lines: 34, Source: WorkerServiceApplication.java

Example 3: createProducer

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
public void createProducer(String bootstrapServer) {
  long numberOfEvents = 5;

  Properties props = new Properties();
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

  KafkaProducer<String, String> producer = new KafkaProducer<>(
      props);

  for (int i = 0; i < numberOfEvents; i++) {
    String key = "testContainers";
    String value = "AreAwesome";
    ProducerRecord<String, String> record = new ProducerRecord<>(
        "hello_world_topic", key, value);
    try {
      producer.send(record).get();
    } catch (InterruptedException | ExecutionException e) {
      e.printStackTrace();
    }
    System.out.printf("key = %s, value = %s\n", key, value);
  }

  producer.close();
}
 
Developer: gAmUssA, Project: testcontainers-java-module-confluent-platform, Lines: 27, Source: HelloProducer.java

Example 4: initialize

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
public void initialize(String servers) {
    if (isInitialized.get()) {
        logger.warn("Already initialized");
        return;
    }

    Properties props = new Properties();
    props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, servers);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, SixtPartitioner.class.getName());
    props.put(ProducerConfig.RETRIES_CONFIG, "3");
    props.put(ProducerConfig.ACKS_CONFIG, "all");

    properties.forEach(props::put);

    realProducer = new KafkaProducer<>(props);
    isInitialized.set(true);
}
 
Developer: Sixt, Project: ja-micro, Lines: 20, Source: KafkaPublisher.java

Example 5: getMachineMetric

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
/**
 * Metrics for a machine.
 *
 * @param machine the machine identifier to look up
 * @return the metrics for that machine
 */
@GET
@Path("{machine}")
@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
public Response getMachineMetric(@PathParam("machine") String machine) {
    LOGGER.log(Level.INFO, "Fetching metrics for machine {0}", machine);

    KafkaStreams ks = GlobalAppState.getInstance().getKafkaStreams();
    HostInfo thisInstance = GlobalAppState.getInstance().getHostPortInfo();

    Metrics metrics = null;

    StreamsMetadata metadataForMachine = ks.metadataForKey(storeName, machine, new StringSerializer());

    if (metadataForMachine.host().equals(thisInstance.host()) && metadataForMachine.port() == thisInstance.port()) {
        LOGGER.log(Level.INFO, "Querying local store for machine {0}", machine);
        metrics = getLocalMetrics(machine);
    } else {
        //LOGGER.log(Level.INFO, "Querying remote store for machine {0}", machine);
        String url = "http://" + metadataForMachine.host() + ":" + metadataForMachine.port() + "/metrics/remote/" + machine;
        metrics = Utils.getRemoteStoreState(url, 2, TimeUnit.SECONDS);
        LOGGER.log(Level.INFO, "Metric from remote store at {0} == {1}", new Object[]{url, metrics});
    }

    return Response.ok(metrics).build();
}
 
Developer: abhirockzz, Project: docker-kafka-streams, Lines: 32, Source: MetricsResource.java

Example 6: init

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
@Override
public void init(AbstractConfiguration config, ApplicationListenerFactory factory) {
    init(config);

    logger.trace("Initializing Kafka consumer ...");

    // consumer config
    Properties props = new Properties();
    props.put("bootstrap.servers", config.getString("bootstrap.servers"));
    props.put("group.id", config.getString("group.id"));
    props.put("enable.auto.commit", "true");
    props.put("key.serializer", StringSerializer.class.getName());
    props.put("value.serializer", InternalMessageSerializer.class.getName());

    // consumer
    this.consumer = new KafkaConsumer<>(props);

    // consumer worker
    this.worker = new KafkaApplicationWorker(this.consumer, APPLICATION_TOPIC, factory.newListener());
    this.executor.submit(this.worker);
}
 
Developer: 12315jack, Project: j1st-mqtt, Lines: 22, Source: KafkaApplicationCommunicator.java

Example 7: init

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
protected void init(AbstractConfiguration config) {
    BROKER_TOPIC_PREFIX = config.getString("communicator.broker.topic");
    APPLICATION_TOPIC = config.getString("communicator.application.topic");

    logger.trace("Initializing Kafka producer ...");

    // producer config
    Properties props = new Properties();
    props.put("bootstrap.servers", config.getString("bootstrap.servers"));
    props.put("acks", config.getString("acks"));
    props.put("key.serializer", StringSerializer.class.getName());
    props.put("value.serializer", InternalMessageSerializer.class.getName());

    // producer
    this.producer = new KafkaProducer<>(props);

    // consumer executor
    this.executor = Executors.newSingleThreadExecutor();
}
 
Developer: 12315jack, Project: j1st-mqtt, Lines: 20, Source: KafkaCommunicator.java

Example 8: publishDummyData

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
public void publishDummyData() {
    final String topic = "TestTopic";

    // Create publisher
    final Map<String, Object> config = new HashMap<>();
    config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

    final KafkaProducer<String, String> producer = new KafkaProducer<>(config);
    for (int charCode = 65; charCode < 91; charCode++) {
        final char[] key = new char[1];
        key[0] = (char) charCode;

        producer.send(new ProducerRecord<>(topic, new String(key), new String(key)));
    }
    producer.flush();
    producer.close();
}
 
Developer: SourceLabOrg, Project: kafka-webview, Lines: 20, Source: WebKafkaConsumerTest.java

Example 9: main

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
public static void main(String[] args) {
    Configuration config = ConfigurationProvider.getConfiguration();

    String bootstrapServers = config.getOrDefault("kafka.bootstrap_servers", "localhost:9092");

    Properties properties = new Properties();
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    Producer<String, String> producer = new KafkaProducer<>(properties);

    IntStream.rangeClosed(1, 100)
            .boxed()
            .map(number -> new ProducerRecord<>(
                    "topic-1",
                    number.toString(),
                    number.toString()))
            .map(record -> producer.send(record))
            .forEach(result -> printMetadata(result));
    producer.close();
}
 
Developer: jeqo, Project: post-kafka-rewind-consumer-offset, Lines: 23, Source: KafkaSimpleProducer.java

Example 10: produceRecords

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
private static void produceRecords(String bootstrapServers) {
    Properties properties = new Properties();
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());

    Producer<String, byte[]> producer = new KafkaProducer<>(properties);

    IntStream.rangeClosed(1, 100).boxed()
            .map(number -> new ProducerRecord<>(
                    TOPIC, //topic
                    number.toString(), //key
                    UserAvroSerdes.serialize(new User(String.format("user-%s", number.toString()))))) //value
            .forEach(record -> producer.send(record));
    producer.close();
}
 
Developer: jeqo, Project: talk-kafka-messaging-logs, Lines: 17, Source: ProduceConsumeStringAvroRecord.java

Example 11: produceRecords

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
private static void produceRecords(String bootstrapServers) {
    Properties properties = new Properties();
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    Producer<Integer, String> producer = new KafkaProducer<>(properties);

    IntStream.rangeClosed(1, 100).boxed()
            .map(number ->
                    new ProducerRecord<>(
                            TOPIC,
                            number, //Key
                            String.format("record-%s", number))) //Value
            .forEach(record -> producer.send(record));
    producer.close();
}
 
Developer: jeqo, Project: talk-kafka-messaging-logs, Lines: 18, Source: ProduceConsumeIntegerStringRecord.java

Example 12: main

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
public static void main(String[] args) {
    final ActorSystem system = ActorSystem.create("KafkaProducerSystem");

    final Materializer materializer = ActorMaterializer.create(system);

    final ProducerSettings<byte[], String> producerSettings =
            ProducerSettings
                    .create(system, new ByteArraySerializer(), new StringSerializer())
                    .withBootstrapServers("localhost:9092");

    CompletionStage<Done> done =
            Source.range(1, 100)
                    .map(n -> n.toString())
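                    // note: the third argument of this ProducerRecord constructor is a timestamp in
                    // milliseconds since the epoch; getEpochSecond() yields seconds, so
                    // Instant.now().toEpochMilli() would be the conventional value here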
                    .map(elem ->
                            new ProducerRecord<byte[], String>(
                                    "topic1-ts",
                                    0,
                                    Instant.now().getEpochSecond(),
                                    null,
                                    elem))
                    .runWith(Producer.plainSink(producerSettings), materializer);

    done.whenComplete((d, ex) -> System.out.println("sent"));
}
 
Developer: jeqo, Project: talk-kafka-messaging-logs, Lines: 25, Source: KafkaProducer.java

Example 13: produceRecords

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
private static void produceRecords(String bootstrapServers) {
    Properties properties = new Properties();
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    Producer<Integer, String> producer = new KafkaProducer<>(properties);

    IntStream.rangeClosed(1, 10000).boxed()
            .map(number ->
                    new ProducerRecord<>(
                            TOPIC,
                            1, //Key
                            String.format("record-%s", number))) //Value
            .forEach(record -> producer.send(record));
    producer.close();
}
 
Developer: jeqo, Project: talk-kafka-messaging-logs, Lines: 18, Source: Compaction.java

Example 14: produceRecords

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
private static void produceRecords(String bootstrapServers) {
    Properties properties = new Properties();
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    Producer<Integer, String> producer = new KafkaProducer<>(properties);

    IntStream
            .rangeClosed(1, 100000).boxed()
            .map(number ->
                    new ProducerRecord<>(
                            TOPIC,
                            1, //Key
                            String.format("record-%s", number))) //Value
            .forEach(record -> producer.send(record));
    producer.close();
}
 
Developer: jeqo, Project: talk-kafka-messaging-logs, Lines: 19, Source: Retention.java

Example 15: produceRecords

import org.apache.kafka.common.serialization.StringSerializer; // import the required package/class
private static void produceRecords(String bootstrapServers) {
    Properties properties = new Properties();
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    properties.put(ProducerConfig.ACKS_CONFIG, "all");
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName());
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    Producer<Integer, String> producer = new KafkaProducer<>(properties);

    IntStream.rangeClosed(1, 100).boxed()
            .map(number ->
                    new ProducerRecord<>(
                            TOPIC,
                            number, //Key
                            String.format("record-%s", number))) //Value
            .forEach(record -> {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                producer.send(record);
            });
    producer.close();
}
 
Developer: jeqo, Project: talk-kafka-messaging-logs, Lines: 26, Source: KafkaSlowProducer.java


Note: The org.apache.kafka.common.serialization.StringSerializer examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution and use should follow each project's license. Do not reproduce without permission.