This article collects typical usage examples of the Java class org.springframework.kafka.core.DefaultKafkaProducerFactory. If you are wondering what DefaultKafkaProducerFactory is used for and how to use it, the curated code examples below should help.
The DefaultKafkaProducerFactory class belongs to the org.springframework.kafka.core package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
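Before the individual examples, here is a minimal, self-contained sketch of the usage pattern most of the snippets below share: build a DefaultKafkaProducerFactory from a configuration map, wrap it in a KafkaTemplate, and send a record. The broker address "localhost:9092" and the topic "demo-topic" are illustrative placeholders, not values taken from any of the examples.
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

public class ProducerFactoryQuickStart {

    public static void main(String[] args) {
        // minimal producer configuration; broker address and topic are placeholders
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // the factory creates and caches the underlying KafkaProducer instances
        ProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(props);

        // KafkaTemplate wraps the factory and offers convenient send methods
        KafkaTemplate<String, String> template = new KafkaTemplate<>(factory);
        template.setDefaultTopic("demo-topic");
        template.sendDefault("key", "hello kafka");
        template.flush();
    }
}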
Example 1: setUp
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
@Before
public void setUp() throws Exception {
// set up the Kafka producer properties
Map<String, Object> senderProperties =
KafkaTestUtils.senderProps(embeddedKafka.getBrokersAsString());
// create a Kafka producer factory
ProducerFactory<String, String> producerFactory =
new DefaultKafkaProducerFactory<String, String>(senderProperties);
// create a Kafka template
template = new KafkaTemplate<>(producerFactory);
// set the default topic to send to
template.setDefaultTopic(RECEIVER_TOPIC);
// wait until the partitions are assigned
for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry
.getListenerContainers()) {
ContainerTestUtils.waitForAssignment(messageListenerContainer,
embeddedKafka.getPartitionsPerTopic());
}
}
Example 2: receiveAndValidateFoo
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("foos");
template.sendDefault("{\"id\":\"123\"}");
ConsumerRecord<Integer, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
assertThat(cr.key()).isEqualTo(123);
ObjectMapper om = new ObjectMapper();
Long aLong = om.readValue(cr.value(), Long.class);
assertThat(aLong).isEqualTo(1L);
}
Author: spring-cloud, Project: spring-cloud-stream-binder-kafka, Lines: 14, Source: KStreamBinderPojoInputAndPrimitiveTypeOutputTests.java
Example 3: setUp
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
@Before
public void setUp() throws Exception {
// set up the Kafka producer properties
Map<String, Object> senderProperties =
KafkaTestUtils.senderProps(AllSpringKafkaTests.embeddedKafka.getBrokersAsString());
// create a Kafka producer factory
ProducerFactory<String, String> producerFactory =
new DefaultKafkaProducerFactory<String, String>(senderProperties);
// create a Kafka template
template = new KafkaTemplate<>(producerFactory);
// set the default topic to send to
template.setDefaultTopic(AllSpringKafkaTests.RECEIVER_TOPIC);
// wait until the partitions are assigned
for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry
.getListenerContainers()) {
ContainerTestUtils.waitForAssignment(messageListenerContainer,
AllSpringKafkaTests.embeddedKafka.getPartitionsPerTopic());
}
}
Example 4: producerFactory
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
/**
 * Creates the Kafka producer factory from which producer instances are obtained.
 *
 * @return the producer factory
 */
@SuppressWarnings("rawtypes")
@Bean
public ProducerFactory producerFactory() {
return new DefaultKafkaProducerFactory<>(producerConfigs());
}
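The bean above delegates to a producerConfigs() method that is not part of the snippet. A hedged sketch of what such a configuration method, plus a matching KafkaTemplate bean, commonly looks like is shown below; the broker address and serializer choices are assumptions, not values from the original project.
// Hypothetical companion beans for Example 4; the concrete property values are assumptions.
@Bean
public Map<String, Object> producerConfigs() {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder address
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    return props;
}

@SuppressWarnings({ "rawtypes", "unchecked" })
@Bean
public KafkaTemplate kafkaTemplate() {
    // the template reuses the factory declared above
    return new KafkaTemplate(producerFactory());
}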
Example 5: setup
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
@Before
public void setup() throws Exception {
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("testT", "false", embeddedKafka);
DefaultKafkaConsumerFactory<String, String> cf =
new DefaultKafkaConsumerFactory<>(consumerProps);
ContainerProperties containerProperties = new ContainerProperties(TEST_TOPIC);
container = new KafkaMessageListenerContainer<>(cf, containerProperties);
final BlockingQueue<ConsumerRecord<String, String>> records = new LinkedBlockingQueue<>();
container.setupMessageListener((MessageListener<String, String>) record -> {
log.error("Message received: " + record);
records.add(record);
});
container.start();
ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
Map<String, Object> senderProps = KafkaTestUtils.senderProps(embeddedKafka.getBrokersAsString());
ProducerFactory<String, String> pf =
new DefaultKafkaProducerFactory<>(senderProps);
template = new KafkaTemplate<>(pf);
template.setDefaultTopic(TEST_TOPIC);
}
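The setup above only wires the sender and the listener container. A hypothetical test method that builds on it could send a record with the template and pull it back from the BlockingQueue; this assumes records is promoted from a local variable to an instance field, and uses the AssertJ style seen elsewhere in this article.
// Hypothetical test method building on setup(); assumes 'records' is an instance field.
@Test
public void sendAndReceive() throws Exception {
    template.sendDefault("hello");
    // wait for the listener container to hand the record to the queue
    ConsumerRecord<String, String> received = records.poll(10, TimeUnit.SECONDS);
    assertThat(received).isNotNull();
    assertThat(received.value()).isEqualTo("hello");
}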
Example 6: producerFactory
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
public ProducerFactory<String, String> producerFactory() {
// set the producer properties
Map<String, Object> properties = new HashMap<String, Object>();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
properties.put(ProducerConfig.RETRIES_CONFIG, 0);
properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 4096);
properties.put(ProducerConfig.LINGER_MS_CONFIG, 1);
properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 40960);
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
return new DefaultKafkaProducerFactory<String, String>(properties);
}
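A factory built this way is normally handed to a KafkaTemplate rather than used directly. The following sketch shows one plausible way to consume it; the topic name "my-topic" is a placeholder.
// Hedged sketch of consuming the factory from Example 6; "my-topic" is a placeholder.
public void send(String key, String payload) {
    KafkaTemplate<String, String> template = new KafkaTemplate<>(producerFactory());
    template.send("my-topic", key, payload);
    template.flush();
}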
Example 7: start
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
@Override
public void start() {
if (null == this.keyStrategy) {
this.keyStrategy = new DefaultKeyStrategy();
}
this.addProducerConfigValue(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
this.addProducerConfigValue(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
ProducerFactory<byte[], byte[]> producerFactory = new DefaultKafkaProducerFactory<>(this.producerConfig);
this.kafkaTemplate = new KafkaTemplate<>(producerFactory);
this.kafkaTemplate.setDefaultTopic(this.topic.trim());
super.start();
}
Example 8: createTemplate
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
private <T> KafkaTemplate<Integer, T> createTemplate(Object keySer, Object valSer) {
Map<String, Object> senderProps = senderProps(keySer, valSer);
ProducerFactory<Integer, T> pf =
new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, T> template = new KafkaTemplate<>(pf);
return template;
}
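The senderProps(keySer, valSer) helper referenced above is not shown. A plausible, hedged implementation simply builds the producer property map with the given serializer classes; the broker address here is a placeholder (the original tests obtain it from an embedded Kafka broker).
// Hypothetical implementation of the senderProps(keySer, valSer) helper used above.
private Map<String, Object> senderProps(Object keySer, Object valSer) {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder address
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySer);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valSer);
    return props;
}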
Example 9: getProducerFactory
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
protected DefaultKafkaProducerFactory<byte[], byte[]> getProducerFactory(String transactionIdPrefix,
ExtendedProducerProperties<KafkaProducerProperties> producerProperties) {
Map<String, Object> props = new HashMap<>();
props.put(ProducerConfig.RETRIES_CONFIG, 0);
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
props.put(ProducerConfig.ACKS_CONFIG, String.valueOf(this.configurationProperties.getRequiredAcks()));
if (!ObjectUtils.isEmpty(configurationProperties.getProducerConfiguration())) {
props.putAll(configurationProperties.getProducerConfiguration());
}
if (ObjectUtils.isEmpty(props.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))) {
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configurationProperties.getKafkaConnectionString());
}
if (ObjectUtils.isEmpty(props.get(ProducerConfig.BATCH_SIZE_CONFIG))) {
props.put(ProducerConfig.BATCH_SIZE_CONFIG,
String.valueOf(producerProperties.getExtension().getBufferSize()));
}
if (ObjectUtils.isEmpty(props.get(ProducerConfig.LINGER_MS_CONFIG))) {
props.put(ProducerConfig.LINGER_MS_CONFIG,
String.valueOf(producerProperties.getExtension().getBatchTimeout()));
}
if (ObjectUtils.isEmpty(props.get(ProducerConfig.COMPRESSION_TYPE_CONFIG))) {
props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG,
producerProperties.getExtension().getCompressionType().toString());
}
if (!ObjectUtils.isEmpty(producerProperties.getExtension().getConfiguration())) {
props.putAll(producerProperties.getExtension().getConfiguration());
}
DefaultKafkaProducerFactory<byte[], byte[]> producerFactory = new DefaultKafkaProducerFactory<>(props);
if (transactionIdPrefix != null) {
producerFactory.setTransactionIdPrefix(transactionIdPrefix);
}
return producerFactory;
}
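When transactionIdPrefix is non-null, the returned factory creates transactional producers. The sketch below is one hedged way such a factory could be exercised directly; the prefix "tx-", the topic, and the payload are placeholders, and producerProperties is assumed to be an already-populated ExtendedProducerProperties<KafkaProducerProperties>.
// Hedged sketch: exercising the transactional factory returned by getProducerFactory(..).
protected void sendTransactionally(ExtendedProducerProperties<KafkaProducerProperties> producerProperties) {
    DefaultKafkaProducerFactory<byte[], byte[]> pf = getProducerFactory("tx-", producerProperties);
    KafkaTemplate<byte[], byte[]> template = new KafkaTemplate<>(pf);
    // executeInTransaction begins and commits (or aborts on exception) a Kafka transaction
    template.executeInTransaction(t -> t.send("demo-topic", "hello".getBytes()));
}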
Author: spring-cloud, Project: spring-cloud-stream-binder-kafka, Lines: 36, Source: KafkaMessageChannelBinder.java
Example 10: testProducerRunsInTx
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testProducerRunsInTx() {
KafkaProperties kafkaProperties = new KafkaProperties();
kafkaProperties.setBootstrapServers(Collections.singletonList(embeddedKafka.getBrokersAsString()));
KafkaBinderConfigurationProperties configurationProperties = new KafkaBinderConfigurationProperties();
configurationProperties.getTransaction().setTransactionIdPrefix("foo-");
KafkaTopicProvisioner provisioningProvider = new KafkaTopicProvisioner(configurationProperties, kafkaProperties);
provisioningProvider.setMetadataRetryOperations(new RetryTemplate());
final Producer mockProducer = mock(Producer.class);
willReturn(Collections.singletonList(new TopicPartition("foo", 0))).given(mockProducer).partitionsFor(anyString());
KafkaMessageChannelBinder binder = new KafkaMessageChannelBinder(configurationProperties, provisioningProvider) {
@Override
protected DefaultKafkaProducerFactory<byte[], byte[]> getProducerFactory(String transactionIdPrefix,
ExtendedProducerProperties<KafkaProducerProperties> producerProperties) {
DefaultKafkaProducerFactory<byte[], byte[]> producerFactory =
spy(super.getProducerFactory(transactionIdPrefix, producerProperties));
willReturn(mockProducer).given(producerFactory).createProducer();
return producerFactory;
}
};
GenericApplicationContext applicationContext = new GenericApplicationContext();
applicationContext.refresh();
binder.setApplicationContext(applicationContext);
DirectChannel channel = new DirectChannel();
KafkaProducerProperties extension = new KafkaProducerProperties();
ExtendedProducerProperties<KafkaProducerProperties> properties = new ExtendedProducerProperties<>(extension);
binder.bindProducer("foo", channel, properties);
channel.send(new GenericMessage<>("foo".getBytes()));
InOrder inOrder = inOrder(mockProducer);
inOrder.verify(mockProducer).beginTransaction();
inOrder.verify(mockProducer).send(any(ProducerRecord.class), any(Callback.class));
inOrder.verify(mockProducer).commitTransaction();
inOrder.verify(mockProducer).close();
inOrder.verifyNoMoreInteractions();
}
Example 11: receiveAndValidateFoo
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("foos");
template.sendDefault("{\"id\":\"123\"}");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
}
Author: spring-cloud, Project: spring-cloud-stream-binder-kafka, Lines: 10, Source: KstreamBinderPojoInputStringOutputIntegrationTests.java
Example 12: receiveAndValidate
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
private void receiveAndValidate(ConfigurableApplicationContext context) throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("words");
template.sendDefault("foobar");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts");
assertThat(cr.value().contains("\"word\":\"foobar\",\"count\":1")).isTrue();
}
Author: spring-cloud, Project: spring-cloud-stream-binder-kafka, Lines: 10, Source: KStreamBinderWordCountIntegrationTests.java
Example 13: receiveAndValidateFoo
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
private void receiveAndValidateFoo(ConfigurableApplicationContext context) throws Exception {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
DefaultKafkaProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf, true);
template.setDefaultTopic("foos");
template.sendDefault("{\"id\":\"123\"}");
ConsumerRecord<String, String> cr = KafkaTestUtils.getSingleRecord(consumer, "counts-id");
assertThat(cr.value().contains("Count for product with ID 123: 1")).isTrue();
ProductCountApplication.Foo foo = context.getBean(ProductCountApplication.Foo.class);
assertThat(foo.getProductStock(123)).isEqualTo(1L);
}
Author: spring-cloud, Project: spring-cloud-stream-binder-kafka, Lines: 13, Source: KStreamInteractiveQueryIntegrationTests.java
Example 14: producerFactory
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
@Bean
public ProducerFactory<String, String> producerFactory() {
Map<String, Object> props = new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configProperties.getBrokerAddress());
props.put(ProducerConfig.RETRIES_CONFIG, 0);
props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
return new DefaultKafkaProducerFactory<>(props);
}
Example 15: producerFactory
import org.springframework.kafka.core.DefaultKafkaProducerFactory; //import the required package/class
@Bean
public ProducerFactory<String, Foo> producerFactory() {
Map<String, Object> props = new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.configProperties.getBrokerAddress());
props.put(ProducerConfig.RETRIES_CONFIG, 0);
props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
return new DefaultKafkaProducerFactory<>(props);
}
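A factory bean like this is usually paired with a KafkaTemplate bean of the same generic types. A minimal companion sketch, assuming the Foo value type shown above:
// Hedged companion bean for Example 15: a template typed to the same key/value pair.
@Bean
public KafkaTemplate<String, Foo> kafkaTemplate() {
    return new KafkaTemplate<>(producerFactory());
}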