当前位置: 首页>>代码示例>>Java>>正文


Java Producer.close方法代码示例

本文整理汇总了Java中kafka.javaapi.producer.Producer.close方法的典型用法代码示例。如果您正苦于以下问题:Java Producer.close方法的具体用法?Java Producer.close怎么用?Java Producer.close使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在kafka.javaapi.producer.Producer的用法示例。


在下文中一共展示了Producer.close方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: produceMessages

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static void produceMessages(String brokerList, String topic, int msgCount, String msgPayload) throws JSONException, IOException {

    // Build the producer from the supplied broker list.
    ProducerConfig config = new ProducerConfig(setKafkaBrokerProps(brokerList));
    Producer<String, String> producer = new Producer<String, String>(config);

    LOG.info("KAFKA: Preparing To Send " + msgCount + " Events.");
    try {
        for (int i = 0; i < msgCount; i++) {

            // JSON payload: sequential id, caller-supplied message, random day.
            JSONObject obj = new JSONObject();
            obj.put("id", String.valueOf(i));
            obj.put("msg", msgPayload);
            obj.put("dt", GenerateRandomDay.genRandomDay());
            String payload = obj.toString();

            // Null key: partition selection is left to the producer.
            KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, null, payload);
            producer.send(data);
            LOG.info("Sent message: " + data.toString());
        }
        LOG.info("KAFKA: Sent " + msgCount + " Events.");
    } finally {
        // Always release the producer's connections, even if a send throws.
        producer.close();
    }
}
 
开发者ID:sakserv,项目名称:storm-topology-examples,代码行数:26,代码来源:KafkaProducerTest.java

示例2: sendMulitThread

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static void sendMulitThread() {
	// One shared (thread-safe) producer for 20 sender threads of 10k messages each.
	Producer<String, String> producer = buildSyncProducer();
	Random random = new Random();
	try {
		List<Thread> produceThreads = IntStream.range(0, 20).mapToObj(i -> {
			return new Thread(() -> {
				final String threadName = Thread.currentThread().getName();
				for (int j = 0; j < 10000; j++) {
					// Random key spreads messages across partitions.
					sendMessage(producer, Constants.TOPIC_NAME, random.nextInt(10000) + "", threadName + " message " + j);
				}
			});
		}).peek(Thread::start).collect(toList());

		// Wait for every sender before closing the shared producer.
		for (Thread t : produceThreads) {
			try {
				t.join();
			} catch (InterruptedException e) {
				// Restore the interrupt status instead of swallowing it, and stop waiting.
				Thread.currentThread().interrupt();
				break;
			}
		}
	} finally {
		producer.close();
	}
}
 
开发者ID:walle-liao,项目名称:jaf-examples,代码行数:23,代码来源:ProducerDemo.java

示例3: main

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static void main(String[] args) throws IOException {
    BasicConfigurator.configure();
    // Topic name defaults to the Marathon application id.
    String topic = Marathon.App.DEFAULT_ID;

    Properties props = new Properties();
    props.put("metadata.broker.list", "master:9092");
    Producer<byte[], byte[]> producer = new Producer<>(new ProducerConfig(props));

    try {
        // Send 10 batches, each containing 10,000 identical GET requests.
        for (int i = 0; i < 10; i++) {
            Requests requests = new Requests();
            for (int j = 0; j < 10000; j++) {
                Requests.Request request = new Requests.Request();
                request.method = "GET";
                request.url = "http://192.168.3.1:80";
                requests.add(request);
            }

            // Key = batch index as bytes; value = the batch serialized to bytes.
            KeyedMessage<byte[], byte[]> message = new KeyedMessage<>(topic, (i + "").getBytes(), requests.toByteArray());
            producer.send(message);
        }
    } finally {
        // Close even if a send fails so the process can exit cleanly.
        producer.close();
    }
}
 
开发者ID:stealthly,项目名称:punxsutawney,代码行数:24,代码来源:SampleEmitter.java

示例4: sendMessage

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public void sendMessage(String topic, String message) {
    // Refuse to send to topics that don't exist yet (no auto-create here).
    if (!doesTopicExist(topic)) {
        log.debug("Cannot send message {}. Topic {} does not exist!", message, topic);
        return;
    }

    Properties properties = new Properties();
    properties.put("metadata.broker.list", "localhost:9092");
    properties.put("serializer.class", "kafka.serializer.StringEncoder");
    properties.put("partitioner.class", "com.javabilities.producer.SimplePartitioner");
    properties.put("request.required.acks", "1");
    ProducerConfig config = new ProducerConfig(properties);
    // NOTE(review): building a new producer per message is expensive;
    // consider caching a single instance if this is called frequently.
    Producer<String, String> producer = new Producer<>(config);
    try {
        KeyedMessage<String, String> data = new KeyedMessage<>(topic, message);
        producer.send(data);
    } finally {
        // Guarantee the connection is released even if the send throws.
        producer.close();
    }
}
 
开发者ID:javabilities,项目名称:producer,代码行数:18,代码来源:MessageService.java

示例5: run

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
@Override
public void run() {
  long relayed = 0;

  LOG.info("Starting relay");
  final ConsumerConnector consumer = Consumer.createJavaConsumerConnector(createConsumerConfig());
  final KafkaStream<byte[], byte[]> stream = createConsumerStream(consumer);

  final Producer<byte[], byte[]> producer = new Producer<>(createProducerConfig());
  try {
    // Blocking iterator: hasNext() waits for the next consumed record.
    final ConsumerIterator<byte[], byte[]> it = stream.iterator();
    while (it.hasNext()) {
      final MessageAndMetadata<byte[], byte[]> rx = it.next();
      relayed++;
      // \u221E renders as the infinity sign when no record cap is configured.
      if (LOG.isTraceEnabled()) LOG.trace("Relaying {}/{}: key={}, value={}",
                                          relayed,
                                          maxRecords != 0 ? maxRecords : "\u221E",
                                          new String(rx.key()),
                                          new String(rx.message()));
      // Forward key and value unchanged to the sink topic.
      final KeyedMessage<byte[], byte[]> tx = new KeyedMessage<>(config.sink.topic, rx.key(), rx.message());
      producer.send(tx);

      // maxRecords == 0 means relay forever.
      if (maxRecords != 0 && relayed >= maxRecords) {
        LOG.info("Shutting down");
        break;
      }
    }
  } finally {
    // Tear down both ends even if the relay loop exits via an exception.
    producer.close();
    consumer.shutdown();
  }
}
 
开发者ID:William-Hill-Community,项目名称:rekafka,代码行数:31,代码来源:Relay.java

示例6: syncProducerBatchSend

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static void syncProducerBatchSend() {
	// Synchronously send messages keyed "0".."8" on a single producer.
	Producer<String, String> producer = buildSyncProducer();
	try {
		IntStream.range(0, 9).forEach(x -> {
			sendMessage(producer, Constants.TOPIC_NAME, x + "", "message : syncProducerBatchSend " + x);
		});
	} finally {
		// Close even if a send fails mid-batch.
		producer.close();
	}
}
 
开发者ID:walle-liao,项目名称:jaf-examples,代码行数:8,代码来源:ProducerDemo.java

示例7: asyncProducerBatchSend

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static void asyncProducerBatchSend() {
	// Asynchronously send messages keyed "0".."19" on a single producer.
	Producer<String, String> producer = buildAsyncProducer();
	try {
		IntStream.range(0, 20).forEach(x -> {
			sendMessage(producer, Constants.TOPIC_NAME, x + "", "message : asyncProducerBatchSend " + x);
		});
	} finally {
		// close() flushes any queued async messages before releasing resources.
		producer.close();
	}
}
 
开发者ID:walle-liao,项目名称:jaf-examples,代码行数:8,代码来源:ProducerDemo.java

示例8: shutdown

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public void shutdown() {
	// Close the cached producer if one was ever created.
	Producer<String, String> cached = getProducer();
	System.out.println("send <- producer:" + cached);
	if (cached == null) {
		return;
	}
	cached.close();
}
 
开发者ID:osswangxining,项目名称:another-rule-based-analytics-on-spark,代码行数:8,代码来源:KafkaProducer.java

示例9: changed

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
@Override
public void changed(BrokerService brokerService) {
  // Ignore late callbacks once the listener has been cancelled.
  if (listenerCancelled.get()) {
    return;
  }

  String newBrokerList = brokerService.getBrokerList();
  if (newBrokerList.isEmpty()) {
    // Without any broker there is nothing to connect to; keep the old producer (if any).
    LOG.warn("Broker list is empty. No Kafka producer is created.");
    return;
  }

  // No-op when the broker list hasn't actually changed.
  if (Objects.equal(brokerList, newBrokerList)) {
    return;
  }

  Properties props = new Properties();
  props.put("metadata.broker.list", newBrokerList);
  props.put("serializer.class", ByteBufferEncoder.class.getName());
  props.put("key.serializer.class", IntegerEncoder.class.getName());
  props.put("partitioner.class", IntegerPartitioner.class.getName());
  props.put("request.required.acks", Integer.toString(ack.getAck()));
  props.put("compression.codec", compression.getCodec());

  ProducerConfig config = new ProducerConfig(props);
  // Atomically swap in the new producer first, then close the one it replaced,
  // so concurrent senders never observe a closed producer.
  Producer<Integer, ByteBuffer> oldProducer = producer.getAndSet(new Producer<Integer, ByteBuffer>(config));
  if (oldProducer != null) {
    oldProducer.close();
  }

  LOG.info("Update Kafka producer broker list: {}", newBrokerList);
  brokerList = newBrokerList;
}
 
开发者ID:apache,项目名称:twill,代码行数:34,代码来源:SimpleKafkaPublisher.java

示例10: run

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
@Override
public void run() {
  // Call from cancel() through executor only.
  cancelChangeListener.cancel();
  // The producer may never have been created (e.g. the broker list stayed
  // empty, so changed() returned before the getAndSet) — guard against NPE.
  Producer<Integer, ByteBuffer> kafkaProducer = producer.get();
  if (kafkaProducer != null) {
    kafkaProducer.close();
  }
  executor.shutdownNow();
}
 
开发者ID:apache,项目名称:twill,代码行数:9,代码来源:SimpleKafkaPublisher.java

示例11: main

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static void main(String[] args) {
        // Number of synthetic page-visit events to emit.
        long events = 100;

        Properties props = new Properties();
        props.put("metadata.broker.list", "10.50.12.11:9092,10.50.12.12:9092,10.50.12.13:9092");
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        //props.put("partitioner.class", "org.n3r.sandbox.kafka.KafkaProducerDemo$SimplePartitioner");
        props.put("request.required.acks", "1");
        props.put("client.id", "bingoo.producer");
//        props.put("retry.backoff.ms", "30000");
//        props.put("reconnect.backoff.ms", "30000");

        ProducerConfig config = new ProducerConfig(props);

        Producer<String, String> producer = new Producer<>(config);
        long start = System.currentTimeMillis();

        try {
            for (long nEvents = 0; nEvents < events; nEvents++) {
                long runtime = new Date().getTime();
                // Synthetic source "IP" derived from the start timestamp (not a real octet).
                String ip = "192.168.2." + (nEvents + start);
                String msg = runtime + ",www.example.com," + ip;
                KeyedMessage<String, String> data = new KeyedMessage<>("bingoo-visits", msg);
                producer.send(data);
            }
        } finally {
            // Close even if a send fails so the demo JVM can exit.
            producer.close();
        }
    }
 
开发者ID:bingoohuang,项目名称:javacode-demo,代码行数:28,代码来源:KafkaProducerDemo.java

示例12: main

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static void main(String[] args) {
    Properties props = new Properties();
    // NOTE(review): "wxb-12181" looks like a typo for "wxb-3:2181" (missing
    // host/port separator) — confirm against the real ZooKeeper ensemble.
    props.put("zookeeper.connect",
            "wxb-1:2181,wxb-1:2181,wxb-12181");
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    // Async producer with compression codec 1 (per broker config semantics).
    props.put("producer.type", "async");
    props.put("compression.codec", "1");
    props.put(
            "metadata.broker.list",
            "wxb-1:6667,wxb-2:6667,wxb-3:6667");

    ProducerConfig config = new ProducerConfig(props);
    Producer<String, String> producer = new Producer<String, String>(config);

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    Random r = new Random();
    try {
        // Emit 1000 random tab-separated "order" records.
        for (int i = 0; i < 1000; i++) {
            int id = r.nextInt(10000000);
            int memberid = r.nextInt(100000);
            int totalprice = r.nextInt(1000) + 100;
            int preferential = r.nextInt(100);
            int sendpay = r.nextInt(3);

            // StringBuilder: no synchronization needed in this single-threaded loop.
            StringBuilder data = new StringBuilder();
            data.append(String.valueOf(id)).append("\t")
                    .append(String.valueOf(memberid)).append("\t")
                    .append(String.valueOf(totalprice)).append("\t")
                    .append(String.valueOf(preferential)).append("\t")
                    .append(String.valueOf(sendpay)).append("\t")
                    .append(df.format(new Date()));
            System.out.println(data.toString());
            producer.send(new KeyedMessage<String, String>("order", data
                    .toString()));
        }
    } finally {
        // Flush/close even if a send fails (async producer buffers messages).
        producer.close();
    }
    System.out.println("send over ------------------");
}
 
开发者ID:realxujiang,项目名称:storm-kafka-examples,代码行数:38,代码来源:SendMessageKafka.java

示例13: sendOne

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static void sendOne(Producer<String, String> producer, String topic) throws InterruptedException {
  // When true, pause 5s after each send to make per-message delivery observable.
  boolean sleepFlag = false;
  // Send nine keyed test messages ("0".."8" -> "test 0".."test 8").
  // Replaces the original nine copy-pasted send blocks with one loop.
  for (int i = 0; i <= 8; i++) {
    KeyedMessage<String, String> message =
        new KeyedMessage<String, String>(topic, String.valueOf(i), "test " + i);
    producer.send(message);
    if (sleepFlag) Thread.sleep(5000);
  }
  producer.close();
}
 
开发者ID:habren,项目名称:KafkaExample,代码行数:32,代码来源:ProducerDemo.java

示例14: run

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
/**
 * The MapReduce driver - setup and launch the job.
 *
 * @param args the command-line arguments
 * @return the process exit code
 * @throws Exception if something goes wrong
 */
public int run(final String[] args) throws Exception {

  Cli cli = Cli.builder().setArgs(args).addOptions(Options.values()).build();
  int result = cli.runCmd();

  // Propagate CLI parse/validation failures unchanged.
  if (result != 0) {
    return result;
  }

  File inputFile = new File(cli.getArgValueAsString(Options.STOCKSFILE));
  String brokerList = cli.getArgValueAsString(Options.BROKER_LIST);
  String kTopic = cli.getArgValueAsString(Options.TOPIC);

  Properties props = new Properties();
  props.put("metadata.broker.list", brokerList);
  props.put("serializer.class", kafka.serializer.DefaultEncoder.class.getName());

  ProducerConfig config = new ProducerConfig(props);

  Producer<Integer, byte[]> producer = new Producer<Integer, byte[]>(config);

  try {
    // Publish each stock record as bytes; no key, so partitioning is producer-default.
    for (Stock stock : AvroStockUtils.fromCsvFile(inputFile)) {
      KeyedMessage<Integer, byte[]> msg = new KeyedMessage<Integer, byte[]>(kTopic, toBytes(stock));
      System.out.println("Sending " + msg + " to kafka @ topic " + kTopic);
      producer.send(msg);
    }
  } finally {
    // Close even if a send fails so the driver can exit cleanly.
    producer.close();
  }
  System.out.println("done!");
  return 0;
}
 
开发者ID:Hanmourang,项目名称:hiped2,代码行数:39,代码来源:KafkaAvroWriter.java

示例15: getKafkaProducer

import kafka.javaapi.producer.Producer; //导入方法依赖的package包/类
public static Producer<String, String> getKafkaProducer(String actorId, ProducerConfig producerConfig, boolean refreshProducer){
	// NOTE(review): the get/add sequence on the pool is not atomic; concurrent
	// callers may briefly create duplicate producers for the same actorId.
	Producer<String, String> producer = kafkaProducerPool.get(actorId);
	if (producer == null) {
		// First request for this actor: create and cache a producer.
		producer = new Producer<>(producerConfig);
		addKafkaProducer(actorId, producer);
	} else if (refreshProducer) {
		// Tear down the cached producer and replace it with a fresh one.
		// (Removed the original dead `producer = null` assignment.)
		System.out.println("### refreshProducer for actorId: " + actorId);
		producer.close();
		producer = new Producer<>(producerConfig);
		addKafkaProducer(actorId, producer);
	}
	return producer;
}
 
开发者ID:trieu,项目名称:kafka-producer-util,代码行数:16,代码来源:KafkaProducerUtil.java


注:本文中的kafka.javaapi.producer.Producer.close方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。