

Java Producer.send Method Code Examples

This article collects typical usage examples of the Java method kafka.javaapi.producer.Producer.send. If you are wondering what Producer.send does, how to call it, or what it looks like in real code, the curated examples below should help. You can also browse further usage examples of the enclosing class, kafka.javaapi.producer.Producer.


The following presents 15 code examples of the Producer.send method, ordered by popularity.
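Before the individual examples, here is a minimal, self-contained sketch of the typical call pattern (not taken from any of the projects below; it assumes the Kafka 0.8.x "old" producer API that kafka.javaapi.producer.Producer belongs to, and the broker address, topic, key, and payload are placeholders): build a Properties object, wrap it in a ProducerConfig, construct the Producer, call send with a KeyedMessage, and close the producer when done.

import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class ProducerSendSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("metadata.broker.list", "localhost:9092");             // placeholder broker list
        props.put("serializer.class", "kafka.serializer.StringEncoder"); // String keys and values
        props.put("request.required.acks", "1");                         // wait for the leader's ack

        Producer<String, String> producer = new Producer<String, String>(new ProducerConfig(props));
        // send(KeyedMessage) publishes a single message; the key ("key-0") drives partition selection.
        producer.send(new KeyedMessage<String, String>("demo-topic", "key-0", "hello kafka"));
        producer.close();
    }
}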

Example 1: produceMessages

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void produceMessages(String brokerList, String topic, int msgCount, String msgPayload) throws JSONException, IOException {
    
    // Add producer properties and create the Producer
    ProducerConfig config = new ProducerConfig(setKafkaBrokerProps(brokerList));
    Producer<String, String> producer = new Producer<String, String>(config);

    LOG.info("KAFKA: Preparing To Send " + msgCount + " Events.");
    for (int i=0; i<msgCount; i++){

        // Create the JSON object
        JSONObject obj = new JSONObject();
        obj.put("id", String.valueOf(i));
        obj.put("msg", msgPayload);
        obj.put("dt", GenerateRandomDay.genRandomDay());
        String payload = obj.toString();

        KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, null, payload);
        producer.send(data);
        LOG.info("Sent message: " + data.toString());
    }
    LOG.info("KAFKA: Sent " + msgCount + " Events.");

    // Stop the producer
    producer.close();
}
 
Author: sakserv, Project: storm-topology-examples, Lines: 26, Source: KafkaProducerTest.java
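The example above relies on helpers that are not shown here (setKafkaBrokerProps and GenerateRandomDay.genRandomDay). A plausible minimal version of setKafkaBrokerProps, an assumption rather than the project's actual code, would simply populate the required producer properties:

// Hypothetical reconstruction of the unshown helper; the real project may configure more options.
private static Properties setKafkaBrokerProps(String brokerList) {
    Properties props = new Properties();
    props.put("metadata.broker.list", brokerList);                    // comma-separated host:port pairs
    props.put("serializer.class", "kafka.serializer.StringEncoder");  // plain String payloads
    props.put("request.required.acks", "1");
    return props;
}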

Example 2: main

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void main(String[] args) {
    Properties props = new Properties();
    props.put("metadata.broker.list", "127.0.0.1:9092");
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("key.serializer.class", "kafka.serializer.StringEncoder");
    props.put("request.required.acks","-1");

    Producer<String, String> producer = new Producer<String, String>(new ProducerConfig(props));

    int messageNo = 100;
    final int COUNT = 1000;
    while (messageNo < COUNT) {
        String key = String.valueOf(messageNo);
        String data = "hello kafka message " + key;
        producer.send(new KeyedMessage<String, String>("TestTopic", key ,data));
        System.out.println(data);
        messageNo ++;
    }
}
 
Author: javahongxi, Project: whatsmars, Lines: 20, Source: KafkaProducer.java

Example 3: sample

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
@Override
public SampleResult sample(Entry entry) {
	SampleResult result = new SampleResult();
	result.setSampleLabel(getName());
	try {
		result.sampleStart();
		Producer<String, String> producer = getProducer();
		KeyedMessage<String, String> msg = new KeyedMessage<String, String>(getTopic(), getMessage());
		producer.send(msg);
		result.sampleEnd(); 
		result.setSuccessful(true);
		result.setResponseCodeOK();
	} catch (Exception e) {
		result.sampleEnd(); // stop stopwatch
		result.setSuccessful(false);
		result.setResponseMessage("Exception: " + e);
		// get stack trace as a String to return as document data
		java.io.StringWriter stringWriter = new java.io.StringWriter();
		e.printStackTrace(new java.io.PrintWriter(stringWriter));
		result.setResponseData(stringWriter.toString(), null);
		result.setDataType(org.apache.jmeter.samplers.SampleResult.TEXT);
		result.setResponseCode("FAILED");
	}
	return result;
}
 
Author: XMeterSaaSService, Project: kafka_jmeter, Lines: 26, Source: KafkaSampler.java

Example 4: main

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void main(String[] args) {
	String brokers = "localhost:9092";
	Producer<String, String> producer = KafkaProducer.getInstance(brokers).getProducer();

	KafkaDataProducer instance = new KafkaDataProducer();

	String topic = "test-topic";

	for (int i = 0; i < 100; i++) {
		String message = instance.get(i);
		KeyedMessage<String, String> keyedMessage = new KeyedMessage<String, String>(topic, "device001", message);
		producer.send(keyedMessage);
		System.out.println("message[" + (i + 1) + "] is sent.");
		try {
			Thread.sleep(1000);
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}
}
 
Author: osswangxining, Project: another-rule-based-analytics-on-spark, Lines: 21, Source: KafkaDataProducer.java

Example 5: send

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
@Override
public ListenableFuture<Integer> send() {
  try {
    int size = messages.size();
    Producer<Integer, ByteBuffer> kafkaProducer = producer.get();
    if (kafkaProducer == null) {
      return Futures.immediateFailedFuture(new IllegalStateException("No kafka producer available."));
    }
    kafkaProducer.send(messages);
    return Futures.immediateFuture(size);
  } catch (Exception e) {
    return Futures.immediateFailedFuture(e);
  } finally {
    messages.clear();
  }
}
 
Author: apache, Project: twill, Lines: 17, Source: SimpleKafkaPublisher.java
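Unlike the other examples, this one uses the overload Producer.send(List<KeyedMessage<K, V>>), which publishes an entire batch in a single call. A minimal illustration of building such a batch (not from the Twill code base; String keys and values and the topic name are placeholders):

import java.util.ArrayList;
import java.util.List;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;

// Illustrative only: assembles a small batch and hands it to the List-based send overload.
static void sendBatch(Producer<String, String> producer) {
    List<KeyedMessage<String, String>> batch = new ArrayList<KeyedMessage<String, String>>();
    for (int i = 0; i < 3; i++) {
        batch.add(new KeyedMessage<String, String>("demo-topic", "key-" + i, "payload-" + i));
    }
    producer.send(batch); // the whole batch goes out in one call
}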

Example 6: main

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void main(String[] args) {
    Properties props = new Properties();
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("metadata.broker.list", "localhost:9092");

    Producer<String,String> producer = new Producer<String, String>(new ProducerConfig(props));

    int number = 1;
    for(; number < MESSAGES_NUMBER; number++)
    {
        String messageStr =
                String.format("{\"message\": %d, \"uid\":\"%s\"}",
                        number, uId.get(rand.nextInt(uNum)));

        producer.send(new KeyedMessage<String, String>(SparkStreamingConsumer.KAFKA_TOPIC,
                null, messageStr));
        if (number % 10000 == 0)
            System.out.println("Messages pushed: " + number);
    }
    System.out.println("Messages pushed: " + number);
}
 
Author: rssdev10, Project: spark-kafka-streaming, Lines: 22, Source: KafkaDataProducer.java

Example 7: send

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
/** Send a single message to the specified topic.
 * 
 * @param topicName topic (queue) name
 * @param msg message body
 * @param partKey partitioning object; if null, the message body itself is used as the partition key
 * */
static void send(String topicName, String msg, Object partKey) {
	Producer<Object, String> producer = KafkaHelper.getProducer();
	
	KeyedMessage<Object, String> message = null;
	if (partKey == null) {
		// use the message body as the partition key
		message = new KeyedMessage<Object, String>(topicName, null, msg, msg);
	} else {
		message = new KeyedMessage<Object, String>(topicName, null, partKey, msg);
	}
	
	// send to a single topic, synchronously or asynchronously; partitioning can be driven by the key
	long start = System.currentTimeMillis();
	producer.send(message);
	if(LOG.isDebugEnabled()){
		long end = System.currentTimeMillis();
		LOG.debug("Sent [" + message + "]" + ", cost = [" + (end-start) + "]");
	}
}
 
Author: linzhaoming, Project: easyframe-msg, Lines: 26, Source: KafkaHelper.java
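KafkaHelper.getProducer() is not shown on this page. A plausible shape for such a helper, purely an assumption about this project, is a lazily created, shared producer instance:

import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.ProducerConfig;

// Hypothetical stand-in for the unshown KafkaHelper; the broker list is a placeholder.
public final class KafkaHelperSketch {
    private static Producer<Object, String> producer;

    public static synchronized Producer<Object, String> getProducer() {
        if (producer == null) {
            Properties props = new Properties();
            props.put("metadata.broker.list", "localhost:9092");
            props.put("serializer.class", "kafka.serializer.StringEncoder");
            producer = new Producer<Object, String>(new ProducerConfig(props));
        }
        return producer;
    }
}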

Example 8: main

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void main(String[] args) throws IOException {
    BasicConfigurator.configure();
    String topic = Marathon.App.DEFAULT_ID;

    Properties props = new Properties();
    props.put("metadata.broker.list", "master:9092");
    Producer<byte[], byte[]> producer = new Producer<>(new ProducerConfig(props));

    for (int i = 0; i < 10; i++) {
        Requests requests = new Requests();
        for (int j = 0; j < 10000; j++) {
            Requests.Request request = new Requests.Request();
            request.method = "GET";
            request.url = "http://192.168.3.1:80";
            requests.add(request);
        }

        KeyedMessage<byte[], byte[]> message = new KeyedMessage<>(topic, (i + "").getBytes(), requests.toByteArray());
        producer.send(message);
    }

    producer.close();
}
 
Author: stealthly, Project: punxsutawney, Lines: 24, Source: SampleEmitter.java

Example 9: sendMessage

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public void sendMessage(String topic, String message) {
    if (!doesTopicExist(topic)) {
        log.debug("Cannot send message {}. Topic {} does not exist!", message, topic);
        return;
    }

    Properties properties = new Properties();
    properties.put("metadata.broker.list", "localhost:9092");
    properties.put("serializer.class", "kafka.serializer.StringEncoder");
    properties.put("partitioner.class", "com.javabilities.producer.SimplePartitioner");
    properties.put("request.required.acks", "1");
    ProducerConfig config = new ProducerConfig(properties);
    Producer<String, String> producer = new Producer<>(config);
    KeyedMessage<String, String> data = new KeyedMessage<>(topic, message);
    producer.send(data);
    producer.close();
}
 
Author: javabilities, Project: producer, Lines: 18, Source: MessageService.java

Example 10: run

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
@Override
public void run() {
  long relayed = 0;
  
  LOG.info("Starting relay");
  final ConsumerConnector consumer = Consumer.createJavaConsumerConnector(createConsumerConfig());
  final KafkaStream<byte[], byte[]> stream = createConsumerStream(consumer);
  
  final Producer<byte[], byte[]> producer = new Producer<>(createProducerConfig());
  final ConsumerIterator<byte[], byte[]> it = stream.iterator();
  while (it.hasNext()) {
    final MessageAndMetadata<byte[], byte[]> rx = it.next();
    relayed++;
    if (LOG.isTraceEnabled()) LOG.trace("Relaying {}/{}: key={}, value={}",
                                        relayed,
                                        maxRecords != 0 ? maxRecords : "\u221E",
                                        new String(rx.key()),
                                        new String(rx.message()));
    final KeyedMessage<byte[], byte[]> tx = new KeyedMessage<>(config.sink.topic, rx.key(), rx.message());
    producer.send(tx);
    
    if (maxRecords != 0 && relayed >= maxRecords) {
      LOG.info("Shutting down");
      break;
    }
  }

  producer.close();
  consumer.shutdown();
}
 
Author: William-Hill-Community, Project: rekafka, Lines: 31, Source: Relay.java
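The relay's helper methods (createConsumerConfig, createConsumerStream, createProducerConfig) are omitted from this snippet. For the producer side, a minimal createProducerConfig sketch, an assumption rather than the actual rekafka code, would configure pass-through byte[] serialization so keys and values are forwarded unchanged:

// Hypothetical sketch of the unshown helper; the real project reads these values from its config.
private ProducerConfig createProducerConfig() {
    Properties props = new Properties();
    props.put("metadata.broker.list", "localhost:9092");                  // sink cluster, placeholder address
    props.put("serializer.class", "kafka.serializer.DefaultEncoder");     // byte[] values pass through as-is
    props.put("key.serializer.class", "kafka.serializer.DefaultEncoder"); // byte[] keys pass through as-is
    props.put("request.required.acks", "1");
    return new ProducerConfig(props);
}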

Example 11: publishMessagesToKafka

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public void publishMessagesToKafka(Producer producer, byte[] message) {
    try {
        List<KeyedMessage<String, byte[]>> keyedMessageList = Lists.newArrayListWithCapacity(1);
        String topic = config.getDefaultProducerKafkaTopicName();
        keyedMessageList.add(new KeyedMessage<>(topic, message));
        producer.send(keyedMessageList);
    } catch (Exception e) {
        logger.error("Error occurred while publishing to error kafka queue {}", e);
    }
}
 
Author: ameyamk, Project: spark-streaming-direct-kafka, Lines: 11, Source: ProcessStreamingData.java

Example 12: main

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void main(String[] args) {
	String brokers = "localhost:9092";
	Producer<String, String> producer = KafkaProducer.getInstance(brokers).getProducer();

	KafkaRuleDataProducer instance = new KafkaRuleDataProducer();

	String topic = "metadata_rule";

	String message = instance.get();
	KeyedMessage<String, String> keyedMessage = new KeyedMessage<String, String>(topic, "device001", message);
	producer.send(keyedMessage);
	System.out.println("message[rule] is sent.");
}
 
Author: osswangxining, Project: another-rule-based-analytics-on-spark, Lines: 14, Source: KafkaRuleDataProducer.java

Example 13: main

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void main(String[] args) {
        long events = 100;

        Properties props = new Properties();
        props.put("metadata.broker.list", "10.50.12.11:9092,10.50.12.12:9092,10.50.12.13:9092");
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        //props.put("partitioner.class", "org.n3r.sandbox.kafka.KafkaProducerDemo$SimplePartitioner");
        props.put("request.required.acks", "1");
        props.put("client.id", "bingoo.producer");
//        props.put("retry.backoff.ms", "30000");
//        props.put("reconnect.backoff.ms", "30000");

        ProducerConfig config = new ProducerConfig(props);

        Producer<String, String> producer = new Producer<>(config);
        long start = System.currentTimeMillis();

        for (long nEvents = 0; nEvents < events; nEvents++) {
            long runtime = new Date().getTime();
            String ip = "192.168.2." + (nEvents + start);
            String msg = runtime + ",www.example.com," + ip;
            KeyedMessage<String, String> data = new KeyedMessage<>("bingoo-visits", msg);
            producer.send(data);
        }

        producer.close();
    }
 
Author: bingoohuang, Project: javacode-demo, Lines: 28, Source: KafkaProducerDemo.java

Example 14: main

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void main(String[] args) {
    Properties props = new Properties();
    props.put("zookeeper.connect",
            "wxb-1:2181,wxb-1:2181,wxb-12181");
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("producer.type", "async");
    props.put("compression.codec", "1");
    props.put(
            "metadata.broker.list",
            "wxb-1:6667,wxb-2:6667,wxb-3:6667");

    ProducerConfig config = new ProducerConfig(props);
    Producer<String, String> producer = new Producer<String, String>(config);

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    Random r = new Random();
    for (int i = 0; i < 1000; i++) {
        int id = r.nextInt(10000000);
        int memberid = r.nextInt(100000);
        int totalprice = r.nextInt(1000) + 100;
        int preferential = r.nextInt(100);
        int sendpay = r.nextInt(3);

        StringBuffer data = new StringBuffer();
        data.append(String.valueOf(id)).append("\t")
                .append(String.valueOf(memberid)).append("\t")
                .append(String.valueOf(totalprice)).append("\t")
                .append(String.valueOf(preferential)).append("\t")
                .append(String.valueOf(sendpay)).append("\t")
                .append(df.format(new Date()));
        System.out.println(data.toString());
        producer.send(new KeyedMessage<String, String>("order", data
                .toString()));
    }
    producer.close();
    System.out.println("send over ------------------");
}
 
Author: realxujiang, Project: storm-kafka-examples, Lines: 38, Source: SendMessageKafka.java

Example 15: sendOne

import kafka.javaapi.producer.Producer; // import the class/package this method depends on
public static void sendOne(Producer<String, String> producer, String topic) throws InterruptedException {
    boolean sleepFlag = false;
    KeyedMessage<String, String> message1 = new KeyedMessage<String, String>(topic, "0", "test 0");
    producer.send(message1);
    if (sleepFlag) Thread.sleep(5000);
    KeyedMessage<String, String> message2 = new KeyedMessage<String, String>(topic, "1", "test 1");
    producer.send(message2);
    if (sleepFlag) Thread.sleep(5000);
    KeyedMessage<String, String> message3 = new KeyedMessage<String, String>(topic, "2", "test 2");
    producer.send(message3);
    if (sleepFlag) Thread.sleep(5000);
    KeyedMessage<String, String> message4 = new KeyedMessage<String, String>(topic, "3", "test 3");
    producer.send(message4);
    if (sleepFlag) Thread.sleep(5000);
    KeyedMessage<String, String> message5 = new KeyedMessage<String, String>(topic, "4", "test 4");
    producer.send(message5);
    if (sleepFlag) Thread.sleep(5000);
    KeyedMessage<String, String> message6 = new KeyedMessage<String, String>(topic, "5", "test 5");
    producer.send(message6);
    if (sleepFlag) Thread.sleep(5000);
    KeyedMessage<String, String> message7 = new KeyedMessage<String, String>(topic, "6", "test 6");
    producer.send(message7);
    if (sleepFlag) Thread.sleep(5000);
    KeyedMessage<String, String> message8 = new KeyedMessage<String, String>(topic, "7", "test 7");
    producer.send(message8);
    if (sleepFlag) Thread.sleep(5000);
    KeyedMessage<String, String> message9 = new KeyedMessage<String, String>(topic, "8", "test 8");
    producer.send(message9);
    if (sleepFlag) Thread.sleep(5000);
    producer.close();
}
 
Author: habren, Project: KafkaExample, Lines: 32, Source: ProducerDemo.java
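The nine repetitive send calls above can be collapsed into a loop with identical behaviour; a compact equivalent sketch (sleepFlag stays false, so the sleeps never run, exactly as in the original):

public static void sendOne(Producer<String, String> producer, String topic) throws InterruptedException {
    boolean sleepFlag = false;
    for (int i = 0; i <= 8; i++) {
        // keys "0".."8" with payloads "test 0".."test 8", matching the original message1..message9
        producer.send(new KeyedMessage<String, String>(topic, String.valueOf(i), "test " + i));
        if (sleepFlag) Thread.sleep(5000);
    }
    producer.close();
}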


Note: The kafka.javaapi.producer.Producer.send examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers, and copyright remains with the original authors; consult each project's License before distributing or using the code. Do not reproduce this article without permission.