

Java VerifiableProperties Class Code Examples

This article collects typical usage examples of the Java class kafka.utils.VerifiableProperties. If you are unsure what VerifiableProperties is for, how to use it, or what real-world usage looks like, the curated class examples below should help.


The VerifiableProperties class belongs to the kafka.utils package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
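Before working through the examples, here is a minimal sketch of the basic pattern the class supports: wrap a plain java.util.Properties object, read typed values with defaults, and call verify() to log which keys were actually used. The property names below are made up for illustration; the accessors (getString, getInt, getBoolean) are the same ones the examples in this article rely on.

import java.util.Properties;
import kafka.utils.VerifiableProperties;

public class VerifiablePropertiesBasics {
    public static void main(String[] args) {
        Properties raw = new Properties();
        raw.put("my.app.host", "localhost"); // hypothetical keys, for illustration only
        raw.put("my.app.port", "8080");

        // Wrap the raw properties; VerifiableProperties tracks which keys are read
        VerifiableProperties props = new VerifiableProperties(raw);

        String host = props.getString("my.app.host", "127.0.0.1"); // typed read with a default
        int port = props.getInt("my.app.port", 9090);
        boolean enabled = props.getBoolean("my.app.enabled", false); // key absent, default used

        // Logs all properties and warns about any that were never accessed
        props.verify();

        System.out.println(host + ":" + port + " enabled=" + enabled);
    }
}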

Example 1: run

import kafka.utils.VerifiableProperties; // import the required package/class
@Override
public void run() {
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(transducer_topic, new Integer(1));

    StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
    StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

    Map<String, List<KafkaStream<String, String>>> consumerMap =
            consumer.createMessageStreams(topicCountMap,keyDecoder,valueDecoder);
    KafkaStream<String, String> stream = consumerMap.get(transducer_topic).get(0);
    ConsumerIterator<String, String> it = stream.iterator();
    while (it.hasNext() && bStartConsume){
        transducerDataProcessor.newData(it.next().message());

        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
 
Developer ID: unrealinux, Project: DataProcessPlatformKafkaJavaSDK, Lines: 23, Source: KafkaConsumerTransducer.java

Example 2: init

import kafka.utils.VerifiableProperties; // import the required package/class
@Override
public synchronized void init(VerifiableProperties props) {
    if (!initialized) {
        KafkaMetricsConfig metricsConfig = new KafkaMetricsConfig(props);
        influxDBAddress = props.getString("kafka.influxdb.metrics.address", INFLUXDB_DEFAULT_ADDRESS);
        influxDBUsername = props.getString("kafka.influxdb.metrics.username", INFLUXDB_DEFAULT_USERNAME);
        influxDBPassword = props.getString("kafka.influxdb.metrics.password", INFLUXDB_DEFAULT_PASSWORD);
        influxDBConsistency = props.getString("kafka.influxdb.metrics.consistency", INFLUXDB_DEFAULT_CONSISTENCY);
        influxDBDatabase = props.getString("kafka.influxdb.metrics.database", INFLUXDB_DEFAULT_DATABASE);
        influxDBRetentionPolicy = props.getString("kafka.influxdb.metrics.retentionPolicy", INFLUXDB_DEFAULT_RETENTIONPOLICY);
        influxDBTags = props.getString("kafka.influxdb.metrics.tags", INFLUXDB_DEFAULT_TAGS);
        metricDimensions = Dimension.fromProperties(props.props(), "kafka.influxdb.dimension.enabled.");

        LOG.debug("Initialize InfluxDBReporter [{},{},{}]", influxDBAddress, influxDBDatabase, influxDBRetentionPolicy);

        reporter = buildInfluxDBReporter();

        if (props.getBoolean("kafka.influxdb.metrics.reporter.enabled", false)) {
            initialized = true;
            startReporter(metricsConfig.pollingIntervalSecs());
            LOG.debug("InfluxDBReporter started.");
        }
    }
}
 
Developer ID: jasper-zhang, Project: kafka-influxdb, Lines: 25, Source: KafkaInfluxDBMetricsReporter.java
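How a reporter like this gets wired in: the Kafka broker instantiates the classes listed under kafka.metrics.reporters and hands each one the broker configuration as VerifiableProperties, which is where the kafka.influxdb.* keys read in init above come from. A hedged server.properties sketch follows; the fully qualified reporter class name is an assumption here, so check the project's README for the real one.

# hypothetical server.properties fragment; the reporter class name below is assumed
kafka.metrics.reporters=com.github.jasper.kafka.KafkaInfluxDBMetricsReporter
kafka.metrics.polling.interval.secs=10
kafka.influxdb.metrics.reporter.enabled=true
kafka.influxdb.metrics.address=http://localhost:8086
kafka.influxdb.metrics.database=kafka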

Example 3: consume

import kafka.utils.VerifiableProperties; // import the required package/class
void consume() throws Exception {
	// specify the number of consumer threads
	Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
	topicCountMap.put(KafkaProducer.TOPIC, new Integer(threadsNum));

	// specify data decoder
	StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
	StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

	Map<String, List<KafkaStream<String, String>>> consumerMap = consumer
			.createMessageStreams(topicCountMap, keyDecoder, valueDecoder); // the three String type parameters are the topic, key, and value

	// acquire data
	List<KafkaStream<String, String>> streams = consumerMap.get(KafkaProducer.TOPIC);

	// multi-threaded consume
	executor = Executors.newFixedThreadPool(threadsNum);    //create a thread pool
	for (final KafkaStream<String, String> stream : streams) {
		executor.submit(new ConsumerThread(stream));        // run thread
	}
}
 
Developer ID: thulab, Project: iotdb-jdbc, Lines: 22, Source: KafkaConsumer.java
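The ConsumerThread submitted to the pool above is not shown in this example. A minimal sketch of what such a worker typically looks like, reconstructed from how KafkaStream is drained in the other examples in this article (the class body here is hypothetical, not the project's actual code):

import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;

// Hypothetical worker: drains one KafkaStream on its own thread
class ConsumerThread implements Runnable {
    private final KafkaStream<String, String> stream;

    ConsumerThread(KafkaStream<String, String> stream) {
        this.stream = stream;
    }

    @Override
    public void run() {
        ConsumerIterator<String, String> it = stream.iterator();
        while (it.hasNext()) { // hasNext() blocks until a message arrives
            System.out.println("received: " + it.next().message());
        }
    }
}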

Example 4: collectMq

import kafka.utils.VerifiableProperties; // import the required package/class
public void collectMq() {
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(Constants.kfTopic, new Integer(1));

    StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
    StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

    Map<String, List<KafkaStream<String, String>>> consumerMap =
            consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);

    KafkaStream<String, String> stream = consumerMap.get(Constants.kfTopic).get(0);
    ConsumerIterator<String, String> it = stream.iterator();
    MessageAndMetadata<String, String> msgMeta;
    while (it.hasNext()) {
        msgMeta = it.next();
        super.mqTimer.parseMqText(msgMeta.key(), msgMeta.message());
        //System.out.println(msgMeta.key() + "\t" + msgMeta.message());
    }
}
 
Developer ID: lrtdc, Project: light_drtc, Lines: 20, Source: KafkaMqCollect.java

Example 5: consumeMessages

import kafka.utils.VerifiableProperties; // import the required package/class
private void consumeMessages() {
    final Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(TOPIC, 1);
    final StringDecoder decoder =
            new StringDecoder(new VerifiableProperties());
    final Map<String, List<KafkaStream<String, String>>> consumerMap =
            consumer.createMessageStreams(topicCountMap, decoder, decoder);
    final KafkaStream<String, String> stream =
            consumerMap.get(TOPIC).get(0);
    final ConsumerIterator<String, String> iterator = stream.iterator();

    Thread kafkaMessageReceiverThread = new Thread(
            () -> {
                while (iterator.hasNext()) {
                    String msg = iterator.next().message();
                    msg = msg == null ? "<null>" : msg;
                    System.out.println("got message: " + msg);
                    messagesReceived.add(msg);
                }
            },
            "kafkaMessageReceiverThread"
    );
    kafkaMessageReceiverThread.start();

}
 
Developer ID: hubrick, Project: vertx-kafka-service, Lines: 26, Source: KafkaProducerServiceIntegrationTest.java

Example 6: init

import kafka.utils.VerifiableProperties; // import the required package/class
@Override
public void init(VerifiableProperties props) {

    if(!initialized) {
        KafkaMetricsConfig metricsConfig = new KafkaMetricsConfig(props);

        InfluxDBMetricsConfig config = new InfluxDBMetricsConfig(props);
        config.addTag("brokerId", props.getString("broker.id"));

        this.reporter = new InfluxReporter(Metrics.defaultRegistry(), DEFAULT_NAME
                ,new InfluxDBClient(config), new MetricsPredicate(config.getPredicates()));

        if (props.getBoolean(InfluxDBMetricsConfig.KAFKA_INFLUX_METRICS_ENABLE, false)) {
            initialized = true;
            startReporter(metricsConfig.pollingIntervalSecs());
            LOG.info("KafkaInfluxMetricsReporter initialized.");
        }
    }
}
 
Developer ID: fhussonnois, Project: kafka-influxdb-reporter, Lines: 20, Source: KafkaInfluxMetricsReporter.java

Example 7: open

import kafka.utils.VerifiableProperties; // import the required package/class
public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
    _collector = spoutOutputCollector;
    Properties props = new Properties();
    props.put("zookeeper.connect", conf.get(OSMIngest.ZOOKEEPERS));
    props.put("group.id", groupId);
    props.put("zookeeper.sync.time.ms", "200");
    props.put("auto.commit.interval.ms", "1000");
    ConsumerConfig consumerConfig = new ConsumerConfig(props);
    ConsumerConnector consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<String, String>>> consumerMap = consumer.createMessageStreams(topicCountMap, new StringDecoder(new VerifiableProperties()), new StringDecoder(new VerifiableProperties()));
    List<KafkaStream<String, String>> streams = consumerMap.get(topic);
    KafkaStream<String, String> stream;
    if (streams.size() == 1) {
        stream = streams.get(0);
    } else {
        log.error("Streams should be of size 1");
        // fail fast instead of hitting a NullPointerException on stream.iterator() below
        throw new IllegalStateException("Expected exactly one stream for topic " + topic);
    }
    kafkaIterator = stream.iterator();
}
 
Developer ID: geomesa, Project: geomesa-tutorials, Lines: 22, Source: OSMKafkaSpout.java

Example 8: kafkaStream

import kafka.utils.VerifiableProperties; // import the required package/class
@Bean
protected KafkaStream<String, float[]> kafkaStream() {

    final String topicName = retrieveTopicNameFromGatewayAddress(gatewayUrl());

    ConsumerConnector consumerConnector =
            Consumer.createJavaConsumerConnector(consumerConfig());
    Map<String, Integer> topicCounts = new HashMap<>();
    topicCounts.put(topicName, 1);
    VerifiableProperties emptyProps = new VerifiableProperties();
    StringDecoder keyDecoder = new StringDecoder(emptyProps);
    FeatureVectorDecoder valueDecoder = new FeatureVectorDecoder();
    Map<String, List<KafkaStream<String, float[]>>> streams =
            consumerConnector.createMessageStreams(topicCounts, keyDecoder, valueDecoder);
    List<KafkaStream<String, float[]>> streamsByTopic = streams.get(topicName);
    Preconditions.checkNotNull(streamsByTopic, String.format("Topic %s not found in streams map.", topicName));
    Preconditions.checkElementIndex(0, streamsByTopic.size(),
            String.format("List of streams of topic %s is empty.", topicName));
    return streamsByTopic.get(0);
}
 
Developer ID: trustedanalytics, Project: space-shuttle-demo, Lines: 21, Source: KafkaConfiguration.java
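FeatureVectorDecoder's implementation is not shown above. Custom decoders like it (and like LongSupport/StringSupport in Example 15) implement the kafka.serializer.Decoder<T> interface, whose single method turns the raw message bytes into a typed value. A hypothetical float[] decoder might look like the following; the packed little-endian layout is an assumption for illustration, not taken from the space-shuttle-demo project:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import kafka.serializer.Decoder;

// Hypothetical decoder: interprets the payload as a packed array of 4-byte floats
public class FeatureVectorDecoder implements Decoder<float[]> {
    @Override
    public float[] fromBytes(byte[] bytes) {
        ByteBuffer buf = ByteBuffer.wrap(bytes).order(ByteOrder.LITTLE_ENDIAN);
        float[] vector = new float[bytes.length / 4];
        for (int i = 0; i < vector.length; i++) {
            vector[i] = buf.getFloat();
        }
        return vector;
    }
}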

Example 9: testKafkaLogAppender

import kafka.utils.VerifiableProperties; // import the required package/class
@Test
public void testKafkaLogAppender() {
    Properties consumerProps = new Properties();
    consumerProps.put("zookeeper.connect", zookeeper);
    consumerProps.put("group.id", "kafka-log-appender-test");
    consumerProps.put("auto.offset.reset", "smallest");
    consumerProps.put("schema.registry.url", schemaRegistry);

    Map<String, Integer> topicMap = new HashMap<String, Integer>();
    topicMap.put(topic, 1);

    ConsumerIterator<String, Object> iterator = Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProps))
            .createMessageStreams(topicMap, new StringDecoder(null), new KafkaAvroDecoder(new VerifiableProperties(consumerProps)))
            .get(topic).get(0).iterator();

    String testMessage = "I am a test message";
    logger.info(testMessage);

    MessageAndMetadata<String, Object> messageAndMetadata = iterator.next();
    GenericRecord logLine = (GenericRecord) messageAndMetadata.message();
    assertEquals(logLine.get("line").toString(), testMessage);
    assertEquals(logLine.get("logtypeid"), KafkaLogAppender.InfoLogTypeId);
    assertNotNull(logLine.get("source"));
    assertEquals(((Map<CharSequence, Object>) logLine.get("timings")).size(), 1);
    assertEquals(((Map<CharSequence, Object>) logLine.get("tag")).size(), 2);
}
 
Developer ID: elodina, Project: java-kafka, Lines: 27, Source: KafkaLogAppenderTest.java

Example 10: getSerde

import kafka.utils.VerifiableProperties; // import the required package/class
@Override
public AvroSerde getSerde(String s, Config config) {
    final String registryUrl = config.get(CFG_SCHEMA_REGISTRY_URL);
    if (registryUrl == null) {
        throw new ConfigException("Missing property: " + CFG_SCHEMA_REGISTRY_URL);
    }
    final String registryMasterUrl = config.get(CFG_SCHEMA_REGISTRY_MASTER_URL, registryUrl);
    final String specificReader = config.get(CFG_AVRO_SPECIFIC_DATA, "true");
    final Properties encoderProps = new Properties();
    encoderProps.setProperty("schema.registry.url", registryMasterUrl);
    logger.info("Avro encoder registry: " + registryMasterUrl);
    final Properties decoderProps = new Properties();
    decoderProps.setProperty("schema.registry.url", registryUrl);
    logger.info("Avro decoder registry: " + registryUrl);
    decoderProps.setProperty("specific.avro.reader", specificReader);
    return new AvroSerde(new VerifiableProperties(encoderProps), new VerifiableProperties(decoderProps));
}
 
Developer ID: quantiply, Project: rico, Lines: 18, Source: AvroSerdeFactory.java
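Note the design choice in this factory: the encoder, which may register new schemas, is pointed at the registry master URL, while the read-only decoder can be pointed at a replica. When no master URL is configured, registryMasterUrl falls back to registryUrl and both sides talk to the same registry.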

Example 11: testSerde

import kafka.utils.VerifiableProperties; // import the required package/class
@Test
public void testSerde() throws IOException {
  Properties props = new Properties();
  props.setProperty(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "bogus");
  props.setProperty(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, "true");
  AvroSerde avroSerde = new AvroSerde(new MockSchemaRegistryClient(), new VerifiableProperties(props));
  User user = User.newBuilder()
      .setName("Cornhoolio")
      .setAge(12)
      .build();

  byte[] bytes = avroSerde.toBytes(user);

  User userRead = (User) avroSerde.fromBytes(bytes);
  assertEquals(user, userRead);

  SimpleUser simpleUser = (SimpleUser) avroSerde.fromBytes(bytes, SimpleUser.getClassSchema());
  assertEquals("Cornhoolio", simpleUser.getName().toString());
}
 
Developer ID: quantiply, Project: rico, Lines: 20, Source: AvroSerdeTest.java

Example 12: testSerdeWithGenericData

import kafka.utils.VerifiableProperties; // import the required package/class
@Test
public void testSerdeWithGenericData() throws IOException {
  Properties props = new Properties();
  props.setProperty(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "bogus");
  props.setProperty(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, "false");
  AvroSerde avroSerde = new AvroSerde(new MockSchemaRegistryClient(), new VerifiableProperties(props));
  User user = User.newBuilder()
      .setName("Cornhoolio")
      .setAge(12)
      .build();

  byte[] bytes = avroSerde.toBytes(user);

  GenericData.Record userRead = (GenericData.Record) avroSerde.fromBytes(bytes);
  assertNotNull(userRead.get("age"));

  GenericData.Record simpleUser = (GenericData.Record) avroSerde.fromBytes(bytes, SimpleUser.getClassSchema());
  //Projection masked the age field
  assertNull(simpleUser.get("age"));
}
 
Developer ID: quantiply, Project: rico, Lines: 21, Source: AvroSerdeTest.java

Example 13: init

import kafka.utils.VerifiableProperties; // import the required package/class
@Override
public void init(VerifiableProperties verifiableProperties) {

    if (!initialized) {
        // get configured metrics from kafka
        KafkaMetricsConfig metricsConfig = new KafkaMetricsConfig(verifiableProperties);

        // get the configured properties from kafka to set the bindAddress and port.
        bindAddress = verifiableProperties.getProperty("kafka.http.metrics.host");
        port = Integer.parseInt(verifiableProperties.getProperty("kafka.http.metrics.port"));
        enabled = Boolean.parseBoolean(verifiableProperties.getProperty("kafka.http.metrics.reporter.enabled"));

        // construct the Metrics Server
        metricsServer = new KafkaHttpMetricsServer(bindAddress, port);
        initialized = true;

        // call the method startReporter
        startReporter(metricsConfig.pollingIntervalSecs());
    } else {
        LOG.error("Kafka Http Metrics Reporter already initialized");
    }
}
 
Developer ID: arnobroekhof, Project: kafka-http-metrics-reporter, Lines: 23, Source: KafkaHttpMetricsReporter.java

Example 14: init

import kafka.utils.VerifiableProperties; // import the required package/class
@Override
public synchronized void init(VerifiableProperties props) {
    if (!initialized) {
        KafkaMetricsConfig metricsConfig = new KafkaMetricsConfig(props);
        graphiteHost = props.getString("kafka.graphite.metrics.host", GRAPHITE_DEFAULT_HOST);
        graphitePort = props.getInt("kafka.graphite.metrics.port", GRAPHITE_DEFAULT_PORT);
        metricPrefix = props.getString("kafka.graphite.metrics.group", GRAPHITE_DEFAULT_PREFIX);
        String excludeRegex = props.getString("kafka.graphite.metrics.exclude.regex", null);
        metricDimensions = Dimension.fromProperties(props.props(), "kafka.graphite.dimension.enabled.");

        LOG.debug("Initialize GraphiteReporter [{},{},{}]", graphiteHost, graphitePort, metricPrefix);

        if (excludeRegex != null) {
            LOG.debug("Using regex [{}] for GraphiteReporter", excludeRegex);
            metricPredicate = new FilterMetricPredicate(excludeRegex);
        }
        reporter = buildGraphiteReporter();

        if (props.getBoolean("kafka.graphite.metrics.reporter.enabled", false)) {
            initialized = true;
            startReporter(metricsConfig.pollingIntervalSecs());
            LOG.debug("GraphiteReporter started.");
        }
    }
}
 
Developer ID: damienclaveau, Project: kafka-graphite, Lines: 26, Source: KafkaGraphiteMetricsReporter.java

Example 15: setup

import kafka.utils.VerifiableProperties; // import the required package/class
private void setup(String groupId) {
    consumer = kafka.consumer.Consumer.createJavaConsumerConnector(
            createConsumerConfig(groupId));

    // Request a single connection that gathers messages from all partitions
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, new Integer(1));

    // Construct the decoders for the message keys and values
    Decoder<Long> keyDecoder = new LongSupport(new VerifiableProperties());
    Decoder<String> messageDecoder = new StringSupport(new VerifiableProperties());

    // Configure and request the desired streams using the topic map and decoders
    Map<String, List<KafkaStream<Long, String>>> consumerMap =
            consumer.createMessageStreams(topicCountMap, keyDecoder, messageDecoder);

    // We only get back one stream
    KafkaStream<Long, String> stream = consumerMap.get(topic).get(0);

    // Capture the stream's iterator
    it = stream.iterator();
}
 
Developer ID: IntersysConsulting, Project: ingestive, Lines: 23, Source: IngestionHighLevelConsumer.java


Note: The kafka.utils.VerifiableProperties class examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright in the source code belongs to the original authors, and distribution and use are subject to each project's License. Please do not reproduce without permission.