This article collects typical usage examples of the Scala class org.apache.kafka.clients.producer.ProducerConfig. If you have been wondering what ProducerConfig is for in Scala and how to use it, the curated class code examples below should help.
Fifteen code examples of the ProducerConfig class are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
Example 1: KafkaProducer
// Package declaration and imported dependencies
package org.hpi.esb.flink.kafka
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010.writeToKafkaWithTimestamps
import org.apache.flink.streaming.util.serialization.SimpleStringSchema
import org.apache.kafka.clients.producer.ProducerConfig
class KafkaProducer(producerTopic: String) extends KafkaConnector {
val uuid: String = java.util.UUID.randomUUID.toString
// TODO: read properties from file
props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS)
props.setProperty(ProducerConfig.CLIENT_ID_CONFIG, s"$producerTopic - $uuid")
def produce(stream: DataStream[String]): Unit = {
val config = writeToKafkaWithTimestamps(stream.javaStream, producerTopic, new SimpleStringSchema(), props)
config.setWriteTimestampToKafka(true)
}
}
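A minimal usage sketch for the producer above, assuming the KafkaConnector trait supplies the `props` Properties instance and the BOOTSTRAP_SERVERS constant referenced in the class, and that a local Flink environment is available; the topic name is illustrative.
import org.apache.flink.streaming.api.scala._
import org.hpi.esb.flink.kafka.KafkaProducer
object KafkaProducerExample {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val stream: DataStream[String] = env.fromElements("message-1", "message-2")
    // "benchmark-topic" is an illustrative topic name
    new KafkaProducer("benchmark-topic").produce(stream)
    env.execute("kafka-producer-example")
  }
}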
Example 2: DataDriver
// Package declaration and imported dependencies
package org.hpi.esb.datasender
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig}
import org.hpi.esb.commons.config.Configs
import org.hpi.esb.commons.util.Logging
import org.hpi.esb.datasender.config._
import org.hpi.esb.datasender.output.writers.DatasenderRunResultWriter
import scala.io.Source
class DataDriver() extends Logging {
private val topics = Configs.benchmarkConfig.sourceTopics
private val config = ConfigHandler.config
private val dataReader = createDataReader(config.dataReaderConfig)
private val kafkaProducerProperties = createKafkaProducerProperties(config.kafkaProducerConfig)
private val kafkaProducer = new KafkaProducer[String, String](kafkaProducerProperties)
private val resultHandler = new DatasenderRunResultWriter(config, Configs.benchmarkConfig, kafkaProducer)
private val dataProducer = createDataProducer(kafkaProducer, dataReader, resultHandler)
def run(): Unit = {
dataProducer.execute()
}
def createKafkaProducerProperties(kafkaProducerConfig: KafkaProducerConfig): Properties = {
val props = new Properties()
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProducerConfig.bootstrapServers.get)
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, kafkaProducerConfig.keySerializerClass.get)
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, kafkaProducerConfig.valueSerializerClass.get)
props.put(ProducerConfig.ACKS_CONFIG, kafkaProducerConfig.acks.get)
props.put(ProducerConfig.BATCH_SIZE_CONFIG, kafkaProducerConfig.batchSize.get.toString)
props.put(ProducerConfig.LINGER_MS_CONFIG, kafkaProducerConfig.lingerTime.toString)
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, kafkaProducerConfig.bufferMemorySize.toString)
props
}
def createDataReader(dataReaderConfig: DataReaderConfig): DataReader = {
new DataReader(Source.fromFile(dataReaderConfig.dataInputPath.get),
dataReaderConfig.columns.get,
dataReaderConfig.columnDelimiter.get,
dataReaderConfig.dataColumnStart.get,
dataReaderConfig.readInRam)
}
def createDataProducer(kafkaProducer: KafkaProducer[String, String], dataReader: DataReader,
resultHandler: DatasenderRunResultWriter): DataProducer = {
val numberOfThreads = config.dataSenderConfig.numberOfThreads.get
val sendingInterval = Configs.benchmarkConfig.sendingInterval
val sendingIntervalTimeUnit = Configs.benchmarkConfig.getSendingIntervalTimeUnit()
val duration = Configs.benchmarkConfig.duration
val durationTimeUnit = Configs.benchmarkConfig.getDurationTimeUnit()
val singleColumnMode = config.dataSenderConfig.singleColumnMode
new DataProducer(resultHandler, kafkaProducer, dataReader, topics, numberOfThreads,
sendingInterval, sendingIntervalTimeUnit, duration, durationTimeUnit, singleColumnMode)
}
}
Example 3: EmbeddedVKitM
// Package declaration and imported dependencies
package com.github.mmolimar.vkitm.embedded
import java.util.Properties
import com.github.mmolimar.vkitm.server.{VKitMConfig, VKitMServer}
import com.github.mmolimar.vkitm.utils.TestUtils
import kafka.server.KafkaConfig
import kafka.utils.Logging
import org.apache.kafka.clients.producer.ProducerConfig
class EmbeddedVKitM(zkConnection: String,
brokerList: String,
port: Int = TestUtils.getAvailablePort) extends Logging {
private var vkitmServer: VKitMServer = null
def startup() {
info("Starting up VKitM server")
val serverProps = new Properties
serverProps.setProperty(KafkaConfig.ZkConnectProp, zkConnection)
serverProps.setProperty(KafkaConfig.HostNameProp, "localhost")
serverProps.setProperty(KafkaConfig.PortProp, port.toString)
serverProps.setProperty(KafkaConfig.ListenersProp, "PLAINTEXT://localhost:" + port)
val producerProps = new Properties
producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
val brokerPort = brokerList.split(":")(1)
val consumerProps = new Properties
consumerProps.setProperty(KafkaConfig.ZkConnectProp, zkConnection)
consumerProps.setProperty(KafkaConfig.HostNameProp, "localhost")
consumerProps.setProperty(KafkaConfig.PortProp, brokerPort)
consumerProps.setProperty(KafkaConfig.ListenersProp, "PLAINTEXT://localhost:" + brokerPort)
vkitmServer = new VKitMServer(VKitMConfig.fromProps(serverProps, producerProps, consumerProps))
vkitmServer.startup()
info("Started embedded VKitM server")
}
def shutdown() {
vkitmServer.shutdown()
}
def getPort: Int = port
def getBrokerList: String = "localhost:" + getPort
def getServer: VKitMServer = vkitmServer
override def toString: String = {
val sb: StringBuilder = StringBuilder.newBuilder
sb.append("VKitM{")
sb.append("config='").append(vkitmServer.config).append('\'')
sb.append('}')
sb.toString
}
}
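A short start/stop sketch for the embedded server above; the ZooKeeper and broker addresses are illustrative and must point at running services.
import com.github.mmolimar.vkitm.embedded.EmbeddedVKitM
object EmbeddedVKitMExample extends App {
  // Connection strings are assumptions for illustration
  val vkitm = new EmbeddedVKitM("localhost:2181", "localhost:9092")
  vkitm.startup()
  println(s"VKitM proxy available at ${vkitm.getBrokerList}")
  vkitm.shutdown()
}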
Example 4: KProducer
// Package declaration and imported dependencies
package org.parsec
import java.util.Properties
import java.util.concurrent.Future
import com.sksamuel.avro4s.{FromRecord, RecordFormat, ToRecord}
import io.confluent.kafka.serializers.KafkaAvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata}
// Convenient Kafka producer using avro4s
class KProducer[K <: Product, V <: Product] {
val kafkaProps = new Properties()
kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
kafkaProps.put("schema.registry.url", "http://localhost:8081")
private lazy val producer = new KafkaProducer[GenericRecord, GenericRecord](kafkaProps)
def produce(topic: String, key: K, value: V, partition: Int = 0)(implicit toRecordKey: ToRecord[K], fromRecordKey: FromRecord[K], toRecord: ToRecord[V], fromRecord: FromRecord[V]): Future[RecordMetadata] = {
val keyRec = RecordFormat[K].to(key)
val valueRec = RecordFormat[V].to(value)
val data: ProducerRecord[GenericRecord, GenericRecord] = new ProducerRecord(topic, partition, keyRec, valueRec)
producer.send(data)
}
}
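A hedged usage sketch for the producer above; the case classes and topic name are illustrative assumptions, and the broker and schema registry configured in the class must be reachable.
import org.parsec.KProducer
// Illustrative key/value types; avro4s derives the Avro record conversions for case classes
case class SensorKey(id: String)
case class SensorReading(temperature: Double, timestamp: Long)
object KProducerExample extends App {
  val producer = new KProducer[SensorKey, SensorReading]
  // Sends an Avro-encoded record to partition 0 of the hypothetical "sensors" topic
  val metadata = producer.produce("sensors", SensorKey("s-1"), SensorReading(21.5, System.currentTimeMillis))
  println(metadata.get())
}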
Example 5: Module
// Package declaration and imported dependencies
package com.github.dnvriend
import com.google.inject.{AbstractModule, Provides}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.springframework.kafka.core.{DefaultKafkaConsumerFactory, DefaultKafkaProducerFactory, KafkaTemplate}
import org.springframework.kafka.listener.config.ContainerProperties
import org.springframework.kafka.listener.{KafkaMessageListenerContainer, MessageListener}
import scala.collection.JavaConversions._
class Module extends AbstractModule {
protected def configure(): Unit = {
}
@Provides
def createProducerTemplate: KafkaTemplate[String, String] = {
val senderProps: java.util.Map[String, Any] = Map(
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG -> "localhost:9092",
ProducerConfig.RETRIES_CONFIG -> 0,
ProducerConfig.BATCH_SIZE_CONFIG -> 16384,
ProducerConfig.LINGER_MS_CONFIG -> 1,
ProducerConfig.BUFFER_MEMORY_CONFIG -> 33554432,
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG -> classOf[StringSerializer],
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG -> classOf[StringSerializer]
)
val producerFactory = new DefaultKafkaProducerFactory[String, String](senderProps.mapValues(_.asInstanceOf[AnyRef]))
new KafkaTemplate[String, String](producerFactory)
}
@Provides
def createKafkaMessageListenerContainer(messageListener: MessageListener[String, String]): KafkaMessageListenerContainer[String, String] = {
val consumerProps: java.util.Map[String, Any] = Map(
ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "localhost:9092",
ConsumerConfig.GROUP_ID_CONFIG -> "group",
ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> true,
ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG -> "100",
ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG -> "15000",
ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer]
)
val containerProperties = new ContainerProperties("test")
containerProperties.setMessageListener(messageListener)
val consumerFactory = new DefaultKafkaConsumerFactory[String, String](consumerProps.mapValues(_.asInstanceOf[AnyRef]))
val container = new KafkaMessageListenerContainer[String, String](consumerFactory, containerProperties)
container.setBeanName("testAuto")
container.start()
container
}
@Provides
def messageListener: MessageListener[String, String] = new MessageListener[String, String] {
override def onMessage(message: ConsumerRecord[String, String]): Unit = {
println(s"received: $message")
}
}
}
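A sketch of bootstrapping the module above with Guice and sending a message through the provided KafkaTemplate; the topic "test" matches the ContainerProperties configured above, and a broker at localhost:9092 is assumed to be running.
import com.google.inject.Guice
import org.springframework.kafka.core.KafkaTemplate
object ModuleExample extends App {
  val injector = Guice.createInjector(new Module)
  // Guice resolves the template via the @Provides method createProducerTemplate above
  val template = injector.getInstance(classOf[KafkaTemplate[String, String]])
  template.send("test", "hello from KafkaTemplate")
}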
Example 6: Producer
// Package declaration and imported dependencies
package co.coinsmith.kafka.cryptocoin.producer
import java.util.Properties
import com.typesafe.config.ConfigFactory
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
object Producer {
val conf = ConfigFactory.load
val brokers = conf.getString("kafka.cryptocoin.bootstrap-servers")
val schemaRegistryUrl = conf.getString("kafka.cryptocoin.schema-registry-url")
val props = new Properties
props.put("bootstrap.servers", brokers)
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "io.confluent.kafka.serializers.KafkaAvroSerializer")
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "io.confluent.kafka.serializers.KafkaAvroSerializer")
props.put("schema.registry.url", schemaRegistryUrl)
val producer = new KafkaProducer[Object, Object](props)
def send(topic: String, msg: Object) {
val data = new ProducerRecord[Object, Object](topic, msg)
producer.send(data)
}
}
Example 7: KafkaProducerFactory
// Package declaration and imported dependencies
package service
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig}
object KafkaProducerFactory {
def create(kafkaConnectionString: String) : KafkaProducer[String, String] = {
val props = new java.util.Properties()
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConnectionString)
props.put(ProducerConfig.ACKS_CONFIG, "all")
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
new KafkaProducer[String, String](props)
}
}
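A short usage sketch for the factory above; the broker address, topic, and payload are illustrative.
import org.apache.kafka.clients.producer.ProducerRecord
import service.KafkaProducerFactory
object KafkaProducerFactoryExample extends App {
  val producer = KafkaProducerFactory.create("localhost:9092")
  // With acks=all the broker acknowledges only after all in-sync replicas have the record
  producer.send(new ProducerRecord[String, String]("example-topic", "key-1", "hello"))
  producer.close()
}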
Example 8: KafkaProducerWrapper
// Package declaration and imported dependencies
package articlestreamer.shared.kafka
import java.util.Properties
import articlestreamer.shared.configuration.ConfigLoader
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.producer.{ProducerConfig, ProducerRecord}
import org.apache.kafka.common.config.SslConfigs
class KafkaProducerWrapper(config: ConfigLoader, factory: KafkaFactory[String, String]) {
private val producer = factory.getProducer(KafkaProducerWrapper.getProperties(config))
def send(record: ProducerRecord[String, String]) = producer.send(record, new RecordCallback)
def stopProducer() = {
producer.close()
}
}
object KafkaProducerWrapper {
def getProperties(config: ConfigLoader): Properties = {
import config._
val properties = new Properties()
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBrokers)
properties.put(ProducerConfig.ACKS_CONFIG, "1")
properties.put(ProducerConfig.RETRIES_CONFIG, 0.asInstanceOf[AnyRef])
properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384.asInstanceOf[AnyRef])
properties.put(ProducerConfig.LINGER_MS_CONFIG, 1.asInstanceOf[AnyRef])
properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432.asInstanceOf[AnyRef])
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
properties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 2000.asInstanceOf[AnyRef])
properties.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 10000.asInstanceOf[AnyRef])
if (kafkaSSLMode) {
properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL")
properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, s"$kafkaTrustStore/truststore.jks")
properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "test1234")
properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, s"$kafkaTrustStore/keystore.jks")
properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "test1234")
properties.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "test1234")
}
properties
}
}
Example 9: KafkaFactorySpec
// Package declaration and imported dependencies
package articlestreamer.shared.kafka
import java.util.Properties
import articlestreamer.shared.BaseSpec
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig}
class KafkaFactorySpec extends BaseSpec {
val factory = new KafkaFactory[String, String]
val serverProps = new Properties()
serverProps.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "http://localhost:8080")
serverProps.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
serverProps.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
val consumerProps = new Properties()
consumerProps.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "http://localhost:8080")
consumerProps.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
consumerProps.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
"Factory" should "provide a producer" in {
val producer = factory.getProducer(serverProps)
producer.close()
producer shouldBe a [KafkaProducer[_, _]]
}
"Factory" should "provide a consumer" in {
val consumer = factory.getConsumer(consumerProps)
consumer.close()
consumer shouldBe a [KafkaConsumer[_, _]]
}
}
Example 10: KProducer
// Package declaration and imported dependencies
package org.parsec
import java.util.Properties
import java.util.concurrent.Future
import com.sksamuel.avro4s.{FromRecord, RecordFormat, ToRecord}
import io.confluent.kafka.serializers.KafkaAvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata}
// Convenient Kafka producer using avro4s
class KProducer[K <: Product, V <: Product] {
val kafkaProps = new Properties()
kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "parsec.playground.landoop.com:49092")
kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
kafkaProps.put("schema.registry.url", "http://parsec.playground.landoop.com:48081")
private lazy val producer = new KafkaProducer[GenericRecord, GenericRecord](kafkaProps)
def produce(topic: String, key: K, value: V, partition: Int = 0)(implicit toRecordKey: ToRecord[K], fromRecordKey: FromRecord[K], toRecord: ToRecord[V], fromRecord: FromRecord[V]): Future[RecordMetadata] = {
val keyRec = RecordFormat[K].to(key)
val valueRec = RecordFormat[V].to(value)
val data: ProducerRecord[GenericRecord, GenericRecord] = new ProducerRecord(topic, partition, keyRec, valueRec)
producer.send(data)
}
}
Example 11: KafkaClusterService
// Package declaration and imported dependencies
package services
import java.util.Properties
import javax.inject.Inject
import crawlerConfig.CrawlerConfig
import crawlerglobal.Global
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
class KafkaClusterService @Inject() () {
private var producerOpt: Option[KafkaProducer[String, String]] = None
initKafkaCluster()
@inline
private def getBootstrapServers(default: String = "localhost:9002"): String = {
CrawlerConfig.getValue("Kafka.BootstrapServers", default)
}
@inline
private def getAcksConfig(default: String = "0"): String = {
CrawlerConfig.getValue("Kafka.Required.acks", default)
}
private def getProperties: Properties = {
val props = new Properties()
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, getBootstrapServers())
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[org.apache.kafka.common.serialization.StringSerializer])
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[org.apache.kafka.common.serialization.StringSerializer])
props.put(ProducerConfig.ACKS_CONFIG, getAcksConfig())
props
}
private def initKafkaCluster() = {
val producer = new KafkaProducer[String, String](getProperties)
producerOpt = Some(producer)
Global.KafkaClusterOpt = Some(this)
}
def send(topic: String, data: String) = {
producerOpt match {
case Some(producer) =>
producer.send(new ProducerRecord(topic, data))
true
case None =>
false
}
}
def stop() = {
producerOpt match {
case Some(producer) => producer.close()
case None =>
}
}
}
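A usage sketch assuming CrawlerConfig resolves the Kafka settings read above; the topic and payload are illustrative.
import services.KafkaClusterService
object KafkaClusterServiceExample {
  def main(args: Array[String]): Unit = {
    val service = new KafkaClusterService()
    // send() returns false when the underlying producer was never initialized
    val accepted = service.send("crawl-results", """{"url": "http://example.com"}""")
    println(s"accepted=$accepted")
    service.stop()
  }
}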
Example 12: OutputManager
// Package declaration and imported dependencies
package iomanager
import java.util
import com.typesafe.config.Config
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.streaming.Time
import scala.collection.JavaConversions._
import scala.collection.parallel.mutable.ParArray
object OutputManager {
var producer: KafkaProducer[String, String] = null
var predictionWindow = 0
def prepareOutputStream(config: Config) = {
predictionWindow = config.getInt("output.predictionWindow")*1000
val brokers = config.getStringList("output.kafka.brokers").reduce(_ + "," + _)
val props = new util.HashMap[String, Object]()
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer")
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer")
producer = new KafkaProducer[String, String](props)
}
def sendPredictions(predictions: (
ParArray[(String, Double, String, String)],
ParArray[(String, Double, String, String)]), time: Time) = {
val simplePredictions =
"{\"predictionStart\":"+time.milliseconds+
",\"predictionEnd\":"+(time.milliseconds+predictionWindow)+
",\"positive\":["+predictions._1.map(_._3).mkString(",")+
"],\"negative\":["+predictions._2.map(_._3).mkString(",")+"]}"
val advancedPredictions =
"{\"predictionStart\":"+time.milliseconds+
",\"predictionEnd\":"+(time.milliseconds+predictionWindow)+
",\"positive\":["+predictions._1.map(_._4).mkString(",")+
"],\"negative\":["+predictions._2.map(_._4).mkString(",")+"]}"
val simpleMess =
new ProducerRecord[String, String]("simple-predictions",simplePredictions)
val advancedMess =
new ProducerRecord[String, String]("advanced-predictions",advancedPredictions)
producer.send(simpleMess)
producer.send(advancedMess)
}
}
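A sketch of initializing the manager above from an inline Typesafe Config; the keys mirror those read in prepareOutputStream and the values are illustrative.
import com.typesafe.config.ConfigFactory
import iomanager.OutputManager
object OutputManagerExample extends App {
  val config = ConfigFactory.parseString(
    """
      |output.predictionWindow = 30
      |output.kafka.brokers = ["localhost:9092"]
    """.stripMargin)
  // Creates the shared KafkaProducer used later by sendPredictions
  OutputManager.prepareOutputStream(config)
}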
Example 13: KafkaWordCountProducer
// Package declaration and imported dependencies
package com.jcode.spark.streaming
import java.util.HashMap
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka._
object KafkaWordCountProducer {
def main(args: Array[String]) {
if (args.length < 4) {
System.err.println("Usage: KafkaWordCountProducer <metadataBrokerList> <topic> " +
"<messagesPerSec> <wordsPerMessage>")
// System.exit(1)
}
// val Array(brokers, topic, messagesPerSec, wordsPerMessage) = args
val Array(brokers, topic, messagesPerSec, wordsPerMessage) = Array("192.168.1.234:9092","sparkStreamingTest","1","5")
// Kafka producer connection properties
val props = new HashMap[String, Object]()
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer")
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer")
val producer = new KafkaProducer[String, String](props)
// Send some messages
while(true) {
(1 to messagesPerSec.toInt).foreach { messageNum =>
val str = (1 to wordsPerMessage.toInt).map(x => scala.util.Random.nextInt(10).toString)
.mkString(" ")
val message = new ProducerRecord[String, String](topic, null, str)
producer.send(message)
}
Thread.sleep(1000)
}
}
}
Example 14: KafkaConfig
// Package declaration and imported dependencies
package com.github.dnvriend
import java.util.Properties
import io.confluent.kafka.serializers.KafkaAvroSerializer
import org.apache.kafka.clients.producer.ProducerConfig
// see: http://docs.confluent.io/3.1.2/streams/developer-guide.html#overview
object KafkaConfig {
def configAsMap = Map(
ProducerConfig.BOOTSTRAP_SERVERS_CONFIG -> "localhost:9092",
ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG -> classOf[KafkaAvroSerializer],
ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG -> classOf[KafkaAvroSerializer],
"schema.registry.url" -> "http://localhost:8081"
)
def config(): Properties = {
import scala.collection.JavaConverters._
val settings = new Properties
settings.putAll(configAsMap.asJava)
settings
}
}
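A sketch of feeding the Properties above into a generic Avro producer; building the GenericRecord payloads is omitted, and the schema registry at the configured URL is assumed to be running.
import com.github.dnvriend.KafkaConfig
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.clients.producer.KafkaProducer
object KafkaConfigExample extends App {
  // Both key and value are serialized with KafkaAvroSerializer per the config above
  val producer = new KafkaProducer[GenericRecord, GenericRecord](KafkaConfig.config())
  // Build GenericRecord instances (e.g. via avro4s RecordFormat, see Examples 4 and 10) and send them here
  producer.close()
}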
Example 15: KafkaWSContext
// Package declaration and imported dependencies
package com.landoop.kafka.ws
import java.util.Properties
import com.landoop.kafka.ws.core.decoders.DecoderType
import io.confluent.kafka.serializers.KafkaAvroSerializer
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig}
import org.apache.kafka.common.serialization.{ByteArraySerializer, IntegerSerializer, LongSerializer, StringSerializer}
case class KafkaWSContext(config: KafkaWSConfig) {
require(config != null, "Null instance for config parameter")
val KafkaProducer: KafkaProducer[String, String] = {
val props = new Properties()
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.brokers)
new KafkaProducer[String, String](props)
}
def getProducer[K, V](keyDecoder: DecoderType, valueDecoder: DecoderType): KafkaProducer[K, V] = {
val props = new Properties()
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, getSerializer(keyDecoder).getCanonicalName)
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, getSerializer(valueDecoder).getCanonicalName)
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.brokers)
props.put("schema.registry.url", config.schemaRegistryUrl)
new KafkaProducer[K, V](props)
}
def getSerializer(decoderType: DecoderType): Class[_] = {
decoderType match {
case DecoderType.AVRO => classOf[KafkaAvroSerializer]
case DecoderType.BINARY => classOf[ByteArraySerializer]
case DecoderType.LONG => classOf[LongSerializer]
case DecoderType.INT => classOf[IntegerSerializer]
case DecoderType.STRING | DecoderType.JSON => classOf[StringSerializer]
case other => throw new IllegalArgumentException(s"Decoder type '$other' is not recognized.")
}
}
}
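A usage sketch for the context above; constructing KafkaWSConfig is project-specific and left to the caller, and the topic and payload are illustrative.
import com.landoop.kafka.ws.{KafkaWSConfig, KafkaWSContext}
import com.landoop.kafka.ws.core.decoders.DecoderType
import org.apache.kafka.clients.producer.ProducerRecord
object KafkaWSContextExample {
  def run(config: KafkaWSConfig): Unit = {
    val ctx = KafkaWSContext(config)
    // Eagerly created String producer
    ctx.KafkaProducer.send(new ProducerRecord[String, String]("ws-topic", "key", "value"))
    // Producer whose serializers are resolved from decoder types
    val longStringProducer = ctx.getProducer[java.lang.Long, String](DecoderType.LONG, DecoderType.STRING)
    longStringProducer.close()
    ctx.KafkaProducer.close()
  }
}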