

Golang sarama.NewSyncProducer function code examples

This article collects typical usage examples of the Golang function github.com/Shopify/sarama.NewSyncProducer. If you have been wondering what NewSyncProducer does, how to call it, or what it looks like in real projects, the curated code examples below should help.


A total of 15 code examples of the NewSyncProducer function are shown below, sorted by popularity by default.
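
For quick reference, sarama declares the function as func NewSyncProducer(addrs []string, config *Config) (SyncProducer, error). Before the examples, here is a minimal, self-contained sketch of the usual call pattern; the broker address "localhost:9092" and the topic "example-topic" are placeholders, not values taken from the examples below.

package main

import (
	"log"

	"github.com/Shopify/sarama"
)

func main() {
	// Passing nil as the config makes sarama fall back to its defaults.
	producer, err := sarama.NewSyncProducer([]string{"localhost:9092"}, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer producer.Close()

	partition, offset, err := producer.SendMessage(&sarama.ProducerMessage{
		Topic: "example-topic", // placeholder topic
		Value: sarama.StringEncoder("hello kafka"),
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("message stored at partition %d, offset %d", partition, offset)
}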

Example 1: main

func main() {
	var conf Conf
	err := envdecode.Decode(&conf)
	if err != nil {
		log.Fatal(err)
	}

	stripe.Key = conf.StripeKey
	//stripe.LogLevel = 1 // errors only

	producer, err := sarama.NewSyncProducer(strings.Split(conf.SeedBroker, ","), nil)
	if err != nil {
		log.Fatal(err)
	}
	defer func() {
		if err := producer.Close(); err != nil {
			log.Fatal(err)
		}
	}()

	log.Printf("Tailing the log")
	err = tailLog(producer, conf.KafkaTopic)
	if err != nil {
		log.Fatal(err)
	}
}
Author: brandur, Project: stripe-warehouse, Lines: 26, Source: main.go

Example 2: NewProducer

func NewProducer(brokerAddrs []string, conf *sarama.Config) (*Producer, error) {
	producer, err := sarama.NewSyncProducer(brokerAddrs, conf)
	if err != nil {
		return nil, errors.Trace(err)
	}
	return &Producer{producer}, nil
}
Author: ChinaLongGanHu, Project: wqs, Lines: 7, Source: producer.go
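
The Producer struct being wrapped here is not shown in the snippet. The sketch below is only an assumption of how such a wrapper might expose sending; the field name and the SendString method are illustrative and not taken from the wqs project (imports omitted, as in the other examples on this page).

// Hypothetical wrapper around sarama.SyncProducer, shown for illustration.
type Producer struct {
	producer sarama.SyncProducer
}

// SendString is an assumed convenience method, not part of the wqs source.
func (p *Producer) SendString(topic, value string) (partition int32, offset int64, err error) {
	return p.producer.SendMessage(&sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.StringEncoder(value),
	})
}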

Example 3: Connect

func (k *Kafka) Connect() error {
	config := sarama.NewConfig()

	config.Producer.RequiredAcks = sarama.RequiredAcks(k.RequiredAcks)
	config.Producer.Compression = sarama.CompressionCodec(k.CompressionCodec)
	config.Producer.Retry.Max = k.MaxRetry

	// Legacy SSL config support
	if k.Certificate != "" {
		k.SSLCert = k.Certificate
		k.SSLCA = k.CA
		k.SSLKey = k.Key
	}

	tlsConfig, err := internal.GetTLSConfig(
		k.SSLCert, k.SSLKey, k.SSLCA, k.InsecureSkipVerify)
	if err != nil {
		return err
	}

	if tlsConfig != nil {
		config.Net.TLS.Config = tlsConfig
		config.Net.TLS.Enable = true
	}

	producer, err := sarama.NewSyncProducer(k.Brokers, config)
	if err != nil {
		return err
	}
	k.producer = producer
	return nil
}
Author: jeichorn, Project: telegraf, Lines: 32, Source: kafka.go
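
Example 3 casts raw integers from the plugin configuration to sarama types. For reference, a short sketch with the named constants those integers map to; the mapping is sarama's own, while the function name is just for illustration (imports omitted, as elsewhere on this page).

func exampleKafkaConfig() *sarama.Config {
	config := sarama.NewConfig()
	// RequiredAcks: 0 = sarama.NoResponse, 1 = sarama.WaitForLocal, -1 = sarama.WaitForAll
	config.Producer.RequiredAcks = sarama.WaitForAll
	// Compression: 0 = sarama.CompressionNone, 1 = sarama.CompressionGZIP, 2 = sarama.CompressionSnappy
	config.Producer.Compression = sarama.CompressionSnappy
	config.Producer.Retry.Max = 3
	return config
}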

Example 4: Connect

func (k *Kafka) Connect() error {
	config := sarama.NewConfig()
	// Wait for all in-sync replicas to ack the message
	config.Producer.RequiredAcks = sarama.WaitForAll
	// Retry up to 10 times to produce the message
	config.Producer.Retry.Max = 10

	// Legacy SSL config support
	if k.Certificate != "" {
		k.SSLCert = k.Certificate
		k.SSLCA = k.CA
		k.SSLKey = k.Key
	}

	tlsConfig, err := internal.GetTLSConfig(
		k.SSLCert, k.SSLKey, k.SSLCA, k.InsecureSkipVerify)
	if err != nil {
		return err
	}

	if tlsConfig != nil {
		config.Net.TLS.Config = tlsConfig
		config.Net.TLS.Enable = true
	}

	producer, err := sarama.NewSyncProducer(k.Brokers, config)
	if err != nil {
		return err
	}
	k.producer = producer
	return nil
}
Author: sepulworld, Project: telegraf, Lines: 32, Source: kafka.go

Example 5: pubKafkaLoop

func pubKafkaLoop(seq int) {
	cf := sarama.NewConfig()
	cf.Producer.RequiredAcks = sarama.WaitForLocal
	cf.Producer.Partitioner = sarama.NewHashPartitioner
	cf.Producer.Timeout = time.Second
	//cf.Producer.Compression = sarama.CompressionSnappy
	cf.Producer.Retry.Max = 3
	producer, err := sarama.NewSyncProducer([]string{"localhost:9092"}, cf)
	if err != nil {
		stress.IncCounter("fail", 1)
		log.Println(err)
		return
	}

	defer producer.Close()
	msg := strings.Repeat("X", sz)
	for i := 0; i < loops; i++ {
		_, _, err := producer.SendMessage(&sarama.ProducerMessage{
			Topic: topic,
			Value: sarama.StringEncoder(msg),
		})
		if err == nil {
			stress.IncCounter("ok", 1)
		} else {
			stress.IncCounter("fail", 1)
		}
	}
}
Author: chendx79, Project: gafka, Lines: 29, Source: bench.go
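
Example 5 configures sarama.NewHashPartitioner but sends messages without a Key, in which case sarama's hash partitioner falls back to picking a random partition. A sketch of a keyed send, which is what the hash partitioner is meant for, follows; "user-42" is a placeholder key, and producer, topic and msg are as in the example above.

// With sarama.NewHashPartitioner, messages that share the same Key
// are always routed to the same partition.
_, _, err := producer.SendMessage(&sarama.ProducerMessage{
	Topic: topic,
	Key:   sarama.StringEncoder("user-42"), // placeholder key
	Value: sarama.StringEncoder(msg),
})
if err != nil {
	stress.IncCounter("fail", 1)
}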

Example 6: Start

func (k *KafkaProducer) Start(host string, port int) {

	connection := host + ":" + strconv.Itoa(port)

	k.Log.Notice("Connecting to Kafka on " + connection + "...")

	config := sarama.NewConfig()
	config.Metadata.Retry.Backoff = 10 * time.Second

	// Producer config: no compression, and since we are just streaming
	// metrics, don't wait for any Kafka acks.
	config.Producer.Compression = sarama.CompressionNone
	config.Producer.RequiredAcks = sarama.NoResponse

	producer, err := sarama.NewSyncProducer([]string{connection}, config)
	if err != nil {
		k.Log.Error("Error connecting to Kafka: ", err.Error())
		return
	}
	k.Log.Notice("Connection to Kafka successful")

	go k.produce(producer)

}
Author: hlm, Project: vamp-router, Lines: 29, Source: kafka_producer.go

Example 7: InitKafka

func InitKafka(kafkaAddrs []string) (err error) {
	config := sarama.NewConfig()
	config.Producer.RequiredAcks = sarama.WaitForAll
	config.Producer.Partitioner = sarama.NewHashPartitioner
	producer, err = sarama.NewSyncProducer(kafkaAddrs, config)
	return
}
Author: huweixuan, Project: goim, Lines: 7, Source: kafka.go
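
InitKafka only assigns the package-level producer; goim presumably sends on it elsewhere. A hedged sketch of what such a call site might look like follows; the function name, topic and key are placeholders, not taken from goim (imports omitted, as elsewhere on this page).

// Hypothetical call site for the package-level producer set by InitKafka.
func pushMessage(key string, value []byte) error {
	_, _, err := producer.SendMessage(&sarama.ProducerMessage{
		Topic: "push-topic", // placeholder topic
		Key:   sarama.StringEncoder(key),
		Value: sarama.ByteEncoder(value),
	})
	return err
}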

Example 8: main

func main() {
	flag.Parse()
	if *host == "" || *topic == "" || *logfile == "" {
		fmt.Printf("pararm error,host=%s,topic=%s,logfile=%s\n", *host, *topic, *logfile)
		os.Exit(0)
	}

	hosts := strings.Split(*host, ",")
	producer, err := sarama.NewSyncProducer(hosts, nil)
	if err != nil {
		fmt.Printf("create kafka syncproducer fail. %+v\n", err)
		os.Exit(-1)
	}
	defer producer.Close()

	file, err1 := os.Open(*logfile)
	if err1 != nil {
		fmt.Printf("open logfile %s fail. %+v\n", *logfile, err1)
		os.Exit(-2)
	}
	defer file.Close()

	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		msg := &sarama.ProducerMessage{Topic: *topic, Value: sarama.StringEncoder(scanner.Text())}
		_, _, err := producer.SendMessage(msg)
		if err != nil {
			fmt.Printf("FAILED to send message: %s\n", err)
		}
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}
Author: orange-jacky, Project: send_kafka, Lines: 34, Source: main.go

Example 9: newProducer

func newProducer(brokers []string, kafkaVersion sarama.KafkaVersion, retryOptions config.Retry) Producer {
	var p sarama.SyncProducer
	var err error
	brokerConfig := newBrokerConfig(kafkaVersion, rawPartition)

	repeatTick := time.NewTicker(retryOptions.Period)
	panicTick := time.NewTicker(retryOptions.Stop)
	defer repeatTick.Stop()
	defer panicTick.Stop()

loop:
	for {
		select {
		case <-panicTick.C:
			panic(fmt.Errorf("Failed to create Kafka producer: %v", err))
		case <-repeatTick.C:
			logger.Debug("Connecting to Kafka cluster:", brokers)
			p, err = sarama.NewSyncProducer(brokers, brokerConfig)
			if err == nil {
				break loop
			}
		}
	}

	logger.Debug("Connected to the Kafka cluster")
	return &producerImpl{producer: p}
}
Author: hyperledger, Project: fabric, Lines: 27, Source: producer.go

Example 10: NewServer

func NewServer() Server {
	producer, err := sarama.NewSyncProducer([]string{"localhost:9092"}, nil)
	if err != nil {
		log.Fatalln(err)
	}
	return Server{producer}
}
Author: gulyasm, Project: spark-workshop, Lines: 7, Source: produce-kafka.go

Example 11: newDataCollector

func newDataCollector(brokerList []string) sarama.SyncProducer {

	// For the data collector, we are looking for strong consistency semantics.
	// Because we don't change the flush settings, sarama will try to produce messages
	// as fast as possible to keep latency low.
	config := sarama.NewConfig()
	config.Producer.RequiredAcks = sarama.WaitForAll // Wait for all in-sync replicas to ack the message
	config.Producer.Retry.Max = 10                   // Retry up to 10 times to produce the message
	tlsConfig := createTlsConfiguration()
	if tlsConfig != nil {
		config.Net.TLS.Config = tlsConfig
		config.Net.TLS.Enable = true
	}

	// On the broker side, you may want to change the following settings to get
	// stronger consistency guarantees:
	// - For your broker, set `unclean.leader.election.enable` to false
	// - For the topic, you could increase `min.insync.replicas`.

	producer, err := sarama.NewSyncProducer(brokerList, config)
	if err != nil {
		log.Fatalln("Failed to start Sarama producer:", err)
	}

	return producer
}
Author: ChongFeng, Project: beats, Lines: 26, Source: http_server.go
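
createTlsConfiguration is not shown in this snippet. Below is a hedged sketch of what such a helper typically looks like using only the standard library; the crypto/tls, crypto/x509, io/ioutil and log imports are assumed, the cert/key/CA paths are placeholders, and the helper returns nil when TLS is not configured.

// Sketch of a TLS helper; certFile, keyFile and caFile are placeholder paths.
func createTlsConfiguration(certFile, keyFile, caFile string, verifySsl bool) *tls.Config {
	if certFile == "" || keyFile == "" || caFile == "" {
		return nil // TLS not configured
	}
	cert, err := tls.LoadX509KeyPair(certFile, keyFile)
	if err != nil {
		log.Fatal(err)
	}
	caCert, err := ioutil.ReadFile(caFile)
	if err != nil {
		log.Fatal(err)
	}
	caCertPool := x509.NewCertPool()
	caCertPool.AppendCertsFromPEM(caCert)
	return &tls.Config{
		Certificates:       []tls.Certificate{cert},
		RootCAs:            caCertPool,
		InsecureSkipVerify: !verifySsl,
	}
}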

Example 12: main

func main() {

	config := sarama.NewConfig()
	config.Producer.RequiredAcks = sarama.WaitForAll
	config.Producer.Retry.Max = 5

	// brokers := []string{"192.168.59.103:9092"}
	brokers := []string{"localhost:9092"}
	producer, err := sarama.NewSyncProducer(brokers, config)
	if err != nil {
		// Should not reach here
		panic(err)
	}

	defer func() {
		if err := producer.Close(); err != nil {
			// Should not reach here
			panic(err)
		}
	}()

	topic := "important"
	msg := &sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.StringEncoder("Something Cool"),
	}

	partition, offset, err := producer.SendMessage(msg)
	if err != nil {
		panic(err)
	}

	fmt.Printf("Message is stored in topic(%s)/partition(%d)/offset(%d)\n", topic, partition, offset)
}
Author: hyndio, Project: go-kafka, Lines: 34, Source: main.go

Example 13: TestReadsMetricsFromKafka

func TestReadsMetricsFromKafka(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}

	brokerPeers := []string{testutil.GetLocalHost() + ":9092"}
	zkPeers := []string{testutil.GetLocalHost() + ":2181"}
	testTopic := fmt.Sprintf("telegraf_test_topic_%d", time.Now().Unix())

	// Send a Kafka message to the kafka host
	msg := "cpu_load_short,direction=in,host=server01,region=us-west value=23422.0 1422568543702900257"
	producer, err := sarama.NewSyncProducer(brokerPeers, nil)
	require.NoError(t, err)
	_, _, err = producer.SendMessage(
		&sarama.ProducerMessage{
			Topic: testTopic,
			Value: sarama.StringEncoder(msg),
		})
	require.NoError(t, err)
	defer producer.Close()

	// Start the Kafka Consumer
	k := &Kafka{
		ConsumerGroup:  "telegraf_test_consumers",
		Topics:         []string{testTopic},
		ZookeeperPeers: zkPeers,
		PointBuffer:    100000,
		Offset:         "oldest",
	}
	if err := k.Start(); err != nil {
		t.Fatal(err.Error())
	} else {
		defer k.Stop()
	}

	waitForPoint(k, t)

	// Verify that we can now gather the sent message
	var acc testutil.Accumulator
	// Sanity check
	assert.Equal(t, 0, len(acc.Points), "There should not be any points")

	// Gather points
	err = k.Gather(&acc)
	require.NoError(t, err)
	if len(acc.Points) == 1 {
		point := acc.Points[0]
		assert.Equal(t, "cpu_load_short", point.Measurement)
		assert.Equal(t, map[string]interface{}{"value": 23422.0}, point.Fields)
		assert.Equal(t, map[string]string{
			"host":      "server01",
			"direction": "in",
			"region":    "us-west",
		}, point.Tags)
		assert.Equal(t, time.Unix(0, 1422568543702900257).Unix(), point.Time.Unix())
	} else {
		t.Errorf("expected 1 point in accumulator, found %d", len(acc.Points))
	}
}
Author: timjwright, Project: telegraf, Lines: 59, Source: kafka_consumer_integration_test.go

Example 14: Connect

func (k *Kafka) Connect() error {
	producer, err := sarama.NewSyncProducer(k.Brokers, nil)
	if err != nil {
		return err
	}
	k.producer = producer
	return nil
}
Author: ronaldslc, Project: telegraf, Lines: 8, Source: kafka.go
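
This is the most minimal variant on this page: passing nil as the config makes sarama fall back to the defaults produced by sarama.NewConfig() (at the time of these examples, RequiredAcks defaulted to sarama.WaitForLocal and Retry.Max to 3). As a sketch, the call above is effectively equivalent to:

func (k *Kafka) Connect() error {
	config := sarama.NewConfig() // same defaults sarama applies when nil is passed
	producer, err := sarama.NewSyncProducer(k.Brokers, config)
	if err != nil {
		return err
	}
	k.producer = producer
	return nil
}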

Example 15: NewProducer

// NewProducer creates a new Kafka producer
func (k *Producer) NewProducer(brokerList, topics []string, config *sarama.Config) error {
	var err error
	k.producer, err = sarama.NewSyncProducer(brokerList, config)
	if err != nil {
		log.Println("Failed to start producer:", err)
		return err
	}
	k.InitKafka(topics)
	return nil
}
Author: NexwayGroup, Project: nx-lib, Lines: 11, Source: producer.go


Note: The github.com/Shopify/sarama.NewSyncProducer examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, and copyright of the source code remains with those authors; consult each project's license before redistributing or reusing the code. Do not republish without permission.