

Java OutputCollector Class Code Examples

This article collects typical usage examples of the Java class backtype.storm.task.OutputCollector. If you are wondering what the OutputCollector class is for, how to use it, or what working examples look like, the curated code examples below should help.


The OutputCollector class belongs to the backtype.storm.task package. A total of 15 code examples of the class are shown below, sorted by popularity by default.
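Before looking at the collected examples, here is a minimal, self-contained bolt showing the typical OutputCollector lifecycle: Storm hands the collector to the bolt in prepare(), and execute() uses it to emit tuples anchored to the input and to ack that input. This is a sketch for orientation only; the class name UpperCaseBolt and the output field "word" are made up for the illustration.

import java.util.Map;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class UpperCaseBolt extends BaseRichBolt {

    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        // Storm calls prepare() once per task; keep the collector for later use in execute().
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        String word = input.getString(0);
        // Anchoring the emitted tuple to the input lets Storm replay it if processing fails downstream.
        collector.emit(input, new Values(word.toUpperCase()));
        collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}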

Example 1: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    //for backward compatibility.
    if(mapper == null) {
        this.mapper = new FieldNameBasedTupleToKafkaMapper<K,V>();
    }

    //for backward compatibility.
    if(topicSelector == null) {
        this.topicSelector = new DefaultTopicSelector((String) stormConf.get(TOPIC));
    }

    Map configMap = (Map) stormConf.get(KAFKA_BROKER_PROPERTIES);
    Properties properties = new Properties();
    properties.putAll(configMap);
    ProducerConfig config = new ProducerConfig(properties);
    producer = new Producer<K, V>(config);
    this.collector = collector;
}
 
Developer: redBorder, Project: rb-bi, Lines: 20, Source: KafkaBolt.java
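The prepare() above only wires up the mapper, the topic selector and the Kafka producer. A hedged sketch of a matching execute(), assuming the getKeyFromTuple/getMessageFromTuple methods of the storm-kafka TupleToKafkaMapper, the getTopics method of KafkaTopicSelector, and kafka.producer.KeyedMessage (the LOG field is hypothetical), could look roughly like this:

@Override
public void execute(Tuple input) {
    try {
        K key = mapper.getKeyFromTuple(input);
        V message = mapper.getMessageFromTuple(input);
        String topic = topicSelector.getTopics(input);
        if (topic != null) {
            // KeyedMessage carries topic, key and payload for the 0.8-style Kafka producer.
            producer.send(new KeyedMessage<K, V>(topic, key, message));
        }
    } catch (Exception e) {
        LOG.error("Could not send tuple to Kafka: " + input, e);
    } finally {
        // Ack unconditionally so the spout does not keep replaying tuples the bolt has already seen.
        collector.ack(input);
    }
}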

Example 2: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
public void prepare(Map map, TopologyContext topologyContext, OutputCollector collector) {
    this.collector = collector;
    final Configuration hbConfig = HBaseConfiguration.create();
    
    Map<String, Object> conf = (Map<String, Object>)map.get(this.configKey);
    if(conf == null) {
        throw new IllegalArgumentException("HBase configuration not found using key '" + this.configKey + "'");
    }
    if(conf.get("hbase.rootdir") == null) {
        LOG.warn("No 'hbase.rootdir' value found in configuration! Using HBase defaults.");
    }
    for(String key : conf.keySet()) {
        hbConfig.set(key, String.valueOf(conf.get(key)));
    }

    this.hBaseClient = new HBaseClient(conf, hbConfig, tableName);
}
 
Developer: mengzhiyi, Project: storm-hbase-1.0.x, Lines: 18, Source: AbstractHBaseBolt.java

Example 3: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
public void prepare(Map conf, TopologyContext ctx, OutputCollector collector) {
	
	this.collector = collector;
	this.myId = ctx.getThisComponentId() + "-" + ctx.getThisTaskId();
	
	this.summary = new Summary();
	
	this.publisher = new ZkPublisher();
	try {
		this.publisher.init(conf);
	} catch (Exception e) {
		throw new RuntimeException(e);
	}
	
	this.lastPublishedTimestamp = 0;
}
 
Developer: pathbreak, Project: reddit-sentiment-storm, Lines: 17, Source: SummarizerBolt.java

Example 4: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@SuppressWarnings("rawtypes")
@Override
public void prepare(Map globalConfig, TopologyContext arg1, OutputCollector outputCollector) {
	this.outputCollector = outputCollector;
	String karmaHomeDirectory = null;
	if(karmaHomeStrategy != null){

		karmaHomeStrategy.prepare(globalConfig);
		karmaHomeDirectory = karmaHomeStrategy.getKarmaHomeDirectory();	
	}
	karma = new BaseKarma();
	karma.setup(karmaHomeDirectory, (String)localConfig.get("karma.input.type"), (String)localConfig.get("model.uri"), (String)localConfig.get("model.file"), 
			(String)localConfig.get("base.uri"), (String)localConfig.get("context.uri"), 
			(String)localConfig.get("rdf.generation.root"), (String)localConfig.get("rdf.generation.selection"));
	
}
 
Developer: therelaxist, Project: spring-usc, Lines: 17, Source: KarmaBolt.java

Example 5: execute

import backtype.storm.task.OutputCollector; // import the required package/class
public SpringBolt.ExecuteResult execute(Tuple input, OutputCollector collector) {
  String docId = input.getString(0);
  Map<String,Object> values = (Map<String,Object>)input.getValue(1);

  Map<String,Object> json = new HashMap<String,Object>(10);
  json.put("id", docId);
  List fieldList = new ArrayList();
  for (String field : values.keySet())
    fieldList.add(buildField(field, values.get(field)));
  json.put("fields", fieldList);

  try {
    fusionPipelineClient.postBatchToPipeline(Collections.singletonList(json));
  } catch (Exception e) {
    log.error("Failed to send doc "+docId+" to Fusion due to: "+e);
    throw new RuntimeException(e);
  }

  return SpringBolt.ExecuteResult.ACK;
}
 
Developer: lucidworks, Project: storm-solr, Lines: 21, Source: FusionBoltAction.java

Example 6: execute

import backtype.storm.task.OutputCollector; // import the required package/class
public ExecuteResult execute(Tuple input, OutputCollector outputCollector) {

    if (tuplesReceived != null) {
      tuplesReceived.inc();
    }

    String docId = input.getString(0);
    Object docObj = input.getValue(1);
    if (docId == null || docObj == null) {

      log.warn("Ignored tuple: "+input);

      return ExecuteResult.IGNORED; // nothing to index
    }

    try {
      return processInputDoc(docId, docObj);
    } catch (Exception exc) {
      log.error("Failed to process "+docId+" due to: "+exc);
      if (exc instanceof RuntimeException) {
        throw (RuntimeException)exc;
      } else {
        throw new RuntimeException(exc);
      }
    }
  }
 
Developer: lucidworks, Project: storm-solr, Lines: 27, Source: SolrBoltAction.java

Example 7: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@Override
public void prepare(@SuppressWarnings("rawtypes") Map stormConf, TopologyContext context, OutputCollector collector) {
	String fileName = (String)stormConf.get(OUTPUT_FILE_NAME);
	if(fileName != null) {
		this.outputFileName = fileName;
	}
	
	String dirName = (String)stormConf.get(OUTPUT_DIR_NAME);
	if(dirName != null) {
		this.outputDirName = dirName;
	}
	
	try {
		new File(this.outputDirName).mkdirs();
		this.writer = new BufferedWriter(new FileWriter(this.outputDirName + File.separator + this.outputFileName));
	} catch(IOException e) {
		logger.error("Could not open output file <{}> for writing.", this.outputDirName + File.separator
			+ this.outputFileName);
	}
	
	this.collector = collector;
}
 
Developer: mjsax, Project: aeolus, Lines: 23, Source: AbstractFileOutputBolt.java
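A hypothetical execute() to go with the prepare() above would format each tuple, append it to the writer and ack it. The real AbstractFileOutputBolt in aeolus presumably delegates the formatting to a subclass, so treat this only as a sketch:

public void execute(Tuple input) {
	try {
		// Hypothetical formatting: one tuple per line, using the default List.toString() rendering.
		this.writer.write(input.getValues().toString());
		this.writer.newLine();
		this.writer.flush();
	} catch(IOException e) {
		logger.error("Could not write tuple <{}> to output file.", input);
	}
	this.collector.ack(input);
}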

Example 8: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@Override
public void prepare(Map stormConf, TopologyContext context,
                    OutputCollector collector) {

  this.collector = collector;
  try {
    this.connection = HConnectionManager.createConnection(constructConfiguration());
    this.dangerousEventsTable = connection.getTable(DANGEROUS_EVENTS_TABLE_NAME);
    this.eventsCountTable = connection.getTable(EVENTS_COUNT_TABLE_NAME);
    this.eventsTable = connection.getTable(EVENTS_TABLE_NAME);

  } catch (Exception e) {
    String errMsg = "Error retrieving connection and access to dangerousEventsTable";
    LOG.error(errMsg, e);
    throw new RuntimeException(errMsg, e);
  }
}
 
Developer: DhruvKumar, Project: iot-masterclass, Lines: 18, Source: TruckHBaseBolt.java

Example 9: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {
    this.collector = collector;
    
    GenericObjectPoolConfig pconf = new GenericObjectPoolConfig();
    pconf.setMaxWaitMillis(2000);
    pconf.setMaxTotal(1000);
    pconf.setTestOnBorrow(false);
    pconf.setTestOnReturn(false);
    pconf.setTestWhileIdle(true);
    pconf.setMinEvictableIdleTimeMillis(120000);
    pconf.setTimeBetweenEvictionRunsMillis(60000);
    pconf.setNumTestsPerEvictionRun(-1);
    
    pool = new JedisPool(pconf, redisHost, redisPort, timeout);
}
 
Developer: kkllwww007, Project: jstrom, Lines: 18, Source: RedisSinkBolt.java
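The JedisPool built in prepare() is then drawn from once per tuple in execute(). A sketch under assumptions (the tuple field names "key" and "value" are made up; Jedis and JedisException come from the Jedis client library) might look like this:

public void execute(Tuple input) {
    Jedis jedis = null;
    try {
        jedis = pool.getResource();
        // Assumed tuple layout: a string key and a string value to write to Redis.
        jedis.set(input.getStringByField("key"), input.getStringByField("value"));
        collector.ack(input);
    } catch (JedisException e) {
        collector.fail(input);
    } finally {
        if (jedis != null) {
            // In recent Jedis versions close() returns the connection to the pool.
            jedis.close();
        }
    }
}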

Example 10: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {
    super.prepare(conf, context, collector);
    _collector = collector;

    indexName = ConfUtils.getString(conf, IndexerBolt.ESIndexNameParamName,
            "fetcher");
    docType = ConfUtils.getString(conf, IndexerBolt.ESDocTypeParamName,
            "doc");
    create = ConfUtils.getBoolean(conf, IndexerBolt.ESCreateParamName,
            false);

    try {
        connection = ElasticSearchConnection
                .getConnection(conf, ESBoltType);
    } catch (Exception e1) {
        LOG.error("Can't connect to ElasticSearch", e1);
        throw new RuntimeException(e1);
    }

    this.eventCounter = context.registerMetric("ElasticSearchIndexer",
            new MultiCountMetric(), 10);
}
 
Developer: zaizi, Project: alfresco-apache-storm-demo, Lines: 26, Source: IndexerBolt.java

Example 11: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@Override
public void prepare(Map stormConf, TopologyContext context,
		OutputCollector collector) {
	try {
		_metricsConsumer = (IMetricsConsumer) Class.forName(
				_consumerClassName).newInstance();
	} catch (Exception e) {
		throw new RuntimeException(
				"Could not instantiate a class listed in config under section "
						+ Config.TOPOLOGY_METRICS_CONSUMER_REGISTER
						+ " with fully qualified name "
						+ _consumerClassName, e);
	}
	_metricsConsumer.prepare(stormConf, _registrationArgument, context,
			collector);
	_collector = collector;
}
 
Developer: zhangjunfang, Project: jstorm-0.9.6.3-, Lines: 18, Source: MetricsConsumerBolt.java

Example 12: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@Override
public void prepare(@SuppressWarnings("rawtypes") Map arg0, TopologyContext arg1, OutputCollector arg2) {
	// for each logical input stream (ie, each producer bolt), we get an input partition for each of its tasks
	LinkedList<Integer> taskIds = new LinkedList<Integer>();
	for(Entry<GlobalStreamId, Grouping> inputStream : arg1.getThisSources().entrySet()) {
		taskIds.addAll(arg1.getComponentTasks(inputStream.getKey().get_componentId()));
	}
	
	logger.debug("Detected producer tasks: {}", taskIds);
	
	if(this.tsIndex != -1) {
		assert (this.tsAttributeName == null && this.tsExtractor == null);
		this.merger = new StreamMerger<Tuple>(taskIds, this.tsIndex);
	} else if(this.tsAttributeName != null) {
		assert (this.tsExtractor == null);
		this.merger = new StreamMerger<Tuple>(taskIds, this.tsAttributeName);
	} else {
		assert (this.tsExtractor != null);
		this.merger = new StreamMerger<Tuple>(taskIds, this.tsExtractor);
	}
	
	this.wrappedBolt.prepare(arg0, arg1, arg2);
}
 
Developer: mjsax, Project: aeolus, Lines: 24, Source: TimestampMerger.java

Example 13: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
@Override
public void prepare(Map conf, TopologyContext context,
        OutputCollector collector) {

    // get the implementation to use
    // and instantiate it
    String className = ConfUtils.getString(conf,
            "stormcrawler.indexer.class");

    if (StringUtils.isBlank(className)) {
        throw new RuntimeException("No configuration found for indexing");
    }

    try {
        final Class<BaseRichBolt> implClass = (Class<BaseRichBolt>) Class
                .forName(className);
        endpoint = implClass.newInstance();
    } catch (final Exception e) {
        throw new RuntimeException("Couldn't create " + className, e);
    }

    if (endpoint != null)
        endpoint.prepare(conf, context, collector);
}
 
Developer: zaizi, Project: alfresco-apache-storm-demo, Lines: 25, Source: IndexerBolt.java

Example 14: testExecute

import backtype.storm.task.OutputCollector; // import the required package/class
@Test
public void testExecute() {
	ForwardBolt bolt = new ForwardBolt(new Fields("dummy"));
	
	TestOutputCollector collector = new TestOutputCollector();
	bolt.prepare(null, null, new OutputCollector(collector));
	
	LinkedList<Tuple> tuples = new LinkedList<Tuple>();
	List<List<Object>> result = new LinkedList<List<Object>>();
	
	for(int i = 0; i < 3; ++i) {
		ArrayList<Object> attributes = new ArrayList<Object>();
		attributes.add(new Integer(i));
		
		tuples.add(mock(Tuple.class));
		when(tuples.get(i).getValues()).thenReturn(attributes);
		result.add(attributes);
		
		bolt.execute(tuples.get(i));
		Assert.assertEquals(tuples, collector.acked);
	}
	
	Assert.assertEquals(result, collector.output.get(Utils.DEFAULT_STREAM_ID));
}
 
Developer: mjsax, Project: aeolus, Lines: 25, Source: ForwardBoltTest.java

Example 15: prepare

import backtype.storm.task.OutputCollector; // import the required package/class
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
    _collector = collector;
    liveFilters = new HashMap<String, Long>();
    jsonParser = new JsonParser();
    filterMaxTsAnalayzed = new HashMap<String, Long>();

    // Active analyzers
    analyzers = new ArrayList<ITimeserieAnalyzer>();
    analyzers.add(new NoopTimeserieAnalyzer());
    analyzers.add(new NormalDistributionTimeserieAnalyzer());
    analyzers.add(new LogNormalDistributionTimeserieAnalyzer());
    analyzers.add(new SimpleRegressionTimeserieAnalyzer());
    analyzers.add(new MovingAverageTimeserieAnalyzer());
    analyzers.add(new PolynomialRegressionTimeserieAnalyzer());
    analyzers.add(new IntervalInterceptorTimeserieAnalyzer());
    analyzers.add(new RandomWalkRegressionTimeserieAnalyzer());
    analyzers.add(new OneClassSVMTimeserieAnalyzer());
    analyzers.add(new TimeBucketSimpleRegressionTimeserieAnalyzer());
    analyzers.add(new MultipleLinearRegressionTimeserieAnalyzer());
    analyzers.add(new SimpleExponentialSmoothingTimeserieAnalyzer());

    // Start time
    startTime = now();
}
 
Developer: RobinUS2, Project: cloudpelican-lsd, Lines: 25, Source: OutlierDetectionBolt.java


Note: The backtype.storm.task.OutputCollector class examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their respective authors, and the copyright of the source code remains with those authors; please consult the corresponding project's License before distributing or using the code, and do not reproduce this article without permission.