当前位置: 首页>>代码示例>>Java>>正文


Java DAG.addOperator方法代码示例

本文整理汇总了Java中com.datatorrent.api.DAG.addOperator方法的典型用法代码示例。如果您正苦于以下问题:Java DAG.addOperator方法的具体用法?Java DAG.addOperator怎么用?Java DAG.addOperator使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在com.datatorrent.api.DAG的用法示例。


在下文中一共展示了DAG.addOperator方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration configuration) {
    // Wiring: NiFi input -> windowed log-level count -> NiFi output.
    final LogLevelProperties properties = new LogLevelProperties(configuration);

    // Source: receives data from NiFi; the no-op manager persists no window state.
    final WindowDataManager inputStateManager = new WindowDataManager.NoopWindowDataManager();
    final NiFiSinglePortInputOperator nifiInput = getNiFiInput(dag, properties, inputStateManager);

    // Counting stage: tallies log levels over the configured application window.
    final String attributeName = properties.getLogLevelAttribute();
    final LogLevelWindowCount count = dag.addOperator("count", new LogLevelWindowCount(attributeName));
    dag.setAttribute(count, Context.OperatorContext.APPLICATION_WINDOW_COUNT, properties.getAppWindowCount());

    // Sink: sends the aggregated counts back to NiFi.
    final WindowDataManager outputStateManager = new WindowDataManager.NoopWindowDataManager();
    final NiFiSinglePortOutputOperator nifiOutput = getNiFiOutput(dag, properties, outputStateManager);

    // Streams: nifi-in -> count -> nifi-out.
    dag.addStream("nifi-in-count", nifiInput.outputPort, count.input);
    dag.addStream("count-nifi-out", count.output, nifiOutput.inputPort);
}
 
开发者ID:bbende,项目名称:nifi-streaming-examples,代码行数:24,代码来源:LogLevelApplication.java

示例2: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Pipeline: S3 records -> CSV parse -> transform -> CSV format -> bytes -> Redshift.
  S3RecordReaderModule s3Reader = dag.addModule("S3Input", new S3RecordReaderModule());
  setS3FilesToInput(s3Reader, conf);

  CsvParser parser = dag.addOperator("csvParser", CsvParser.class);

  // Transform stage: upper-cases the "name" field of each POJO.
  TransformOperator transformer = dag.addOperator("transform", new TransformOperator());
  Map<String, String> expressions = Maps.newHashMap();
  expressions.put("name", "{$.name}.toUpperCase()");
  transformer.setExpressionMap(expressions);

  CsvFormatter csvFormatter = dag.addOperator("formatter", new CsvFormatter());
  StringToByteArrayConverterOperator converter = dag.addOperator("converter", new StringToByteArrayConverterOperator());
  RedshiftOutputModule redshiftSink = dag.addModule("RedshiftOutput", new RedshiftOutputModule());

  // Wire the stages; formatter -> converter stays thread-local for efficiency.
  dag.addStream("data", s3Reader.records, parser.in);
  dag.addStream("pojo", parser.out, transformer.input);
  dag.addStream("transformed", transformer.output, csvFormatter.in);
  dag.addStream("string", csvFormatter.out, converter.input).setLocality(DAG.Locality.THREAD_LOCAL);
  dag.addStream("writeToJDBC", converter.output, redshiftSink.input);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:24,代码来源:Application.java

示例3: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
public void populateDAG(DAG dag, Configuration conf)
{
  // Pipeline: Kafka -> JSON parse -> filter -> transform -> JSON format -> file.
  KafkaSinglePortInputOperator kafkaSource = dag.addOperator("kafkaInput", KafkaSinglePortInputOperator.class);
  JsonParser jsonParser = dag.addOperator("parser", JsonParser.class);
  TransformOperator transformer = dag.addOperator("transform", new TransformOperator());
  FilterOperator filter = dag.addOperator("filter", new FilterOperator());
  JsonFormatter jsonFormatter = dag.addOperator("formatter", JsonFormatter.class);
  StringFileOutputOperator fileSink = dag.addOperator("fileOutput", new StringFileOutputOperator());

  // Connect the stages; only tuples passing the filter (truePort) are transformed.
  dag.addStream("data", kafkaSource.outputPort, jsonParser.in);
  dag.addStream("pojo", jsonParser.out, filter.input);
  dag.addStream("filtered", filter.truePort, transformer.input);
  dag.addStream("transformed", transformer.output, jsonFormatter.in);
  dag.addStream("string", jsonFormatter.out, fileSink.input);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:17,代码来源:Application.java

示例4: testInjectionOfOperatorName

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Test
public void testInjectionOfOperatorName() throws Exception
{
  // Build a DAG with a single operator named "input"; the runtime should inject
  // that name into the operator's context.
  StreamingApplication app = new StreamingApplication()
  {
    @Override
    public void populateDAG(DAG dag, Configuration conf)
    {
      dag.addOperator("input", new MockInputOperator());
    }
  };
  LocalMode localMode = LocalMode.newInstance();
  localMode.prepareDAG(app, new Configuration());
  LocalMode.Controller controller = localMode.getController();
  controller.runAsync();
  // Block until the operator has reported its injected name, then verify it.
  latch.await();
  Assert.assertEquals("operator name", "input", operatorName);
  controller.shutdown();
}
 
开发者ID:apache,项目名称:apex-core,代码行数:20,代码来源:OperatorContextTest.java

示例5: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Pipeline: generated JSON -> parse to POJO -> CSV format -> HDFS.
  JsonGenerator jsonGenerator = dag.addOperator("JsonGenerator", JsonGenerator.class);
  JsonParser parser = dag.addOperator("jsonParser", JsonParser.class);

  CsvFormatter csvFormatter = dag.addOperator("formatter", CsvFormatter.class);
  csvFormatter.setSchema(SchemaUtils.jarResourceFileToString(filename));
  // Declare the tuple class carried on the formatter's input port.
  dag.setInputPortAttribute(csvFormatter.in, PortContext.TUPLE_CLASS, PojoEvent.class);

  HDFSOutputOperator<String> hdfsSink = dag.addOperator("HDFSOutputOperator", HDFSOutputOperator.class);
  hdfsSink.setLineDelimiter("");

  dag.addStream("parserStream", jsonGenerator.out, parser.in);
  dag.addStream("formatterStream", parser.out, csvFormatter.in);
  dag.addStream("outputStream", csvFormatter.out, hdfsSink.input);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:19,代码来源:Application.java

示例6: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Site-to-site client for a local NiFi instance, pulling from port "Apex"
  // in batches of five flow files per request.
  final SiteToSiteClientConfig config = new SiteToSiteClient.Builder()
      .url("http://localhost:8080/nifi")
      .portName("Apex")
      .requestBatchCount(5)
      .buildConfig();
  final SiteToSiteClient.Builder clientBuilder = new SiteToSiteClient.Builder().fromConfig(config);

  // No-op manager: no window state is persisted for recovery in this test app.
  final WindowDataManager stateManager = new WindowDataManager.NoopWindowDataManager();

  // Wire NiFi source straight into a console sink.
  NiFiSinglePortInputOperator nifiSource = dag.addOperator("nifi", new NiFiSinglePortInputOperator(clientBuilder, stateManager));
  ConsoleOutputOperator consoleSink = dag.addOperator("console", new ConsoleOutputOperator());
  dag.addStream("nifi_console", nifiSource.outputPort, consoleSink.input).setLocality(null);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:18,代码来源:TestNiFiInputApplication.java

示例7: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Two Level1Module instances with distinct resource settings plus one operator,
  // all fed from a single fan-out stream.
  Level1Module firstModule = dag.addModule("M1", new Level1Module());
  firstModule.setMemory(1024);
  firstModule.setPortMemory(1);
  firstModule.setLevel1ModuleProp(level2ModuleAProp1);

  Level1Module secondModule = dag.addModule("M2", new Level1Module());
  secondModule.setMemory(2048);
  secondModule.setPortMemory(2);
  secondModule.setLevel1ModuleProp(level2ModuleAProp2);

  DummyOperator dummy = dag.addOperator("O1", new DummyOperator());
  dummy.setOperatorProp(level2ModuleAProp3);

  // One stream fans out from M1 to both M2 and O1, kept container-local.
  dag.addStream("M1_M2&O1", firstModule.mOut, secondModule.mIn, dummy.in).setLocality(DAG.Locality.CONTAINER_LOCAL);

  // Expose the inner ports through the enclosing module's proxy ports.
  mIn.set(firstModule.mIn);
  mOut1.set(secondModule.mOut);
  mOut2.set(dummy.out1);
}
 
开发者ID:apache,项目名称:apex-core,代码行数:23,代码来源:TestModuleExpansion.java

示例8: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Benchmark DAG: generator -> store, sharing one stats listener that can
  // adjust the generation rate when dt.hdsbench.adjustRate is enabled.
  TestStatsListener statsListener = new TestStatsListener();
  statsListener.adjustRate = conf.getBoolean("dt.hdsbench.adjustRate", false);

  TestGenerator generator = dag.addOperator("Generator", new TestGenerator());
  dag.setAttribute(generator, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)statsListener));

  TestStoreOperator storeOperator = dag.addOperator("Store", new TestStoreOperator());
  dag.setAttribute(storeOperator, OperatorContext.STATS_LISTENERS, Lists.newArrayList((StatsListener)statsListener));

  // HFile-backed file access rooted at this test's simple class name.
  FileAccessFSImpl fileAccess = new HFileImpl();
  fileAccess.setBasePath(this.getClass().getSimpleName());
  storeOperator.setFileStore(fileAccess);

  dag.setInputPortAttribute(storeOperator.input, PortContext.PARTITION_PARALLEL, true);
  // Aggregate per-bucket IO counters on the store operator.
  dag.getOperatorMeta("Store").getAttributes().put(Context.OperatorContext.COUNTERS_AGGREGATOR,
      new HDHTWriter.BucketIOStatAggregator());
  dag.addStream("Events", generator.data, storeOperator.input).setLocality(Locality.THREAD_LOCAL);
}
 
开发者ID:DataTorrent,项目名称:Megh,代码行数:18,代码来源:HDHTBenchmarkTest.java

示例9: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Benchmark DAG: integer source -> stream duplicater -> two DevNull sinks.
  IntegerOperator intInput = dag.addOperator("intInput", new IntegerOperator());
  StreamDuplicater stream = dag.addOperator("stream", new StreamDuplicater());
  // Enlarge the duplicater's input queue so the source is not throttled.
  dag.getMeta(stream).getMeta(stream.data).getAttributes().put(PortContext.QUEUE_CAPACITY, QUEUE_CAPACITY);
  dag.addStream("streamdup1", intInput.integer_data, stream.data).setLocality(locality);
  // Explicit type arguments (was raw `new DevNull()`) — avoids the unchecked
  // assignment to DevNull<Integer> and the resulting compiler warning.
  DevNull<Integer> dev1 = dag.addOperator("dev1", new DevNull<Integer>());
  DevNull<Integer> dev2 = dag.addOperator("dev2", new DevNull<Integer>());
  dag.addStream("streamdup2", stream.out1, dev1.data).setLocality(locality);
  dag.addStream("streamdup3", stream.out2, dev2.data).setLocality(locality);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:19,代码来源:StreamDuplicaterApp.java

示例10: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
public void populateDAG(DAG dag, Configuration conf)
{
  // HDFS input module fanned out to three sinks: file-metadata writer,
  // file-data writer, and a DevNull that discards block metadata.
  FSInputModule reader = dag.addModule("hdfsInputModule", FSInputModule.class);

  AbstractFileOutputOperator<FileMetadata> metadataSink = new MetadataWriter(FSInputModuleAppTest.OUT_METADATA_FILE);
  metadataSink.setFilePath(FSInputModuleAppTest.outputDir);
  dag.addOperator("FileMetadataWriter", metadataSink);

  AbstractFileOutputOperator<ReaderRecord<Slice>> dataSink = new HDFSFileWriter(FSInputModuleAppTest.OUT_DATA_FILE);
  dataSink.setFilePath(FSInputModuleAppTest.outputDir);
  dag.addOperator("FileDataWriter", dataSink);

  // Block metadata is not verified by this test, so it is discarded.
  DevNull<FileBlockMetadata> blockMetadataSink = dag.addOperator("devNull", DevNull.class);

  dag.addStream("FileMetaData", reader.filesMetadataOutput, metadataSink.input);
  dag.addStream("data", reader.messages, dataSink.input);
  dag.addStream("blockMetadata", reader.blocksMetadataOutput, blockMetadataSink.data);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:19,代码来源:FSInputModuleAppTest.java

示例11: getNiFiInput

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
private NiFiSinglePortInputOperator getNiFiInput(DAG dag, LogLevelProperties props, WindowDataManager windowDataManager) {
    // Build a site-to-site client from the application properties and register the
    // NiFi input operator with the DAG under the name "nifi-in".
    final SiteToSiteClient.Builder clientBuilder = new SiteToSiteClient.Builder()
            .url(props.getNifiUrl())
            .portName(props.getNifiInputPort())
            .requestBatchCount(props.getNifiRequestBatch());
    return dag.addOperator("nifi-in", new NiFiSinglePortInputOperator(clientBuilder, windowDataManager));
}
 
开发者ID:bbende,项目名称:nifi-streaming-examples,代码行数:9,代码来源:LogLevelApplication.java

示例12: getNiFiOutput

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
private NiFiSinglePortOutputOperator getNiFiOutput(DAG dag, LogLevelProperties props, WindowDataManager windowDataManager) {
    // Site-to-site client targeting the configured NiFi output port.
    final SiteToSiteClient.Builder clientBuilder = new SiteToSiteClient.Builder()
            .url(props.getNifiUrl())
            .portName(props.getNifiOutputPort());

    // Each data packet is sent individually (batch size of one).
    final int batchSize = 1;
    final NiFiDataPacketBuilder<LogLevels> packetBuilder = new DictionaryBuilder(
            props.getWindowMillis(), props.getLogLevelThreshold());

    // Register the NiFi output operator with the DAG under the name "nifi-out".
    return dag.addOperator("nifi-out", new NiFiSinglePortOutputOperator(
            clientBuilder, packetBuilder, windowDataManager, batchSize));
}
 
开发者ID:bbende,项目名称:nifi-streaming-examples,代码行数:13,代码来源:LogLevelApplication.java

示例13: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
public void populateDAG(DAG dag, Configuration conf)
{
  // Pipeline: Kafka -> JSON parse -> filter -> transform -> Cassandra.
  KafkaSinglePortInputOperator kafkaSource = dag.addOperator("kafkaInput", KafkaSinglePortInputOperator.class);

  // Parses each JSON string against the configured schema and emits a POJO.
  JsonParser parser = dag.addOperator("JsonParser", new JsonParser());

  // Drops tuples that do not satisfy the configured condition.
  FilterOperator filter = dag.addOperator("filter", new FilterOperator());

  // Upper-cases the "name" field; edit the expression map to change the logic.
  TransformOperator transformer = dag.addOperator("transform", new TransformOperator());
  Map<String, String> expressions = Maps.newHashMap();
  expressions.put("name", "{$.name}.toUpperCase()");
  transformer.setExpressionMap(expressions);

  // Writes the transformed POJOs to Cassandra through a transactional store.
  CassandraTransactionalStore store = new CassandraTransactionalStore();
  CassandraPOJOOutputOperator cassandraSink = dag.addOperator("CassandraOutput", new CassandraPOJOOutputOperator());
  cassandraSink.setStore(store);

  // Connect the stages; only tuples passing the filter (truePort) are transformed.
  dag.addStream("KafkaToJsonParser", kafkaSource.outputPort, parser.in);
  dag.addStream("JsonParserToFilter", parser.out, filter.input);
  dag.addStream("FilterToTransform", filter.truePort, transformer.input);
  dag.addStream("TransformToCassandraDB", transformer.output, cassandraSink.input);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:29,代码来源:Application.java

示例14: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // HDFS input module feeding a part-file writer.
  FSInputModule reader = dag.addModule("HDFSInputModule", new FSInputModule());
  PartFileWriter writer = dag.addOperator("PartFileCopy", new PartFileWriter());

  // Partition the writer in parallel with the upstream reader partitions.
  dag.setInputPortAttribute(writer.input, Context.PortContext.PARTITION_PARALLEL, true);
  dag.setInputPortAttribute(writer.blockMetadataInput, Context.PortContext.PARTITION_PARALLEL, true);

  // Block data and block metadata stay container-local; file metadata may cross containers.
  dag.addStream("BlocksMetaData", reader.blocksMetadataOutput, writer.blockMetadataInput).setLocality(DAG.Locality.CONTAINER_LOCAL);
  dag.addStream("BlocksData", reader.messages, writer.input).setLocality(DAG.Locality.CONTAINER_LOCAL);
  dag.addStream("FileMetaData", reader.filesMetadataOutput, writer.fileMetadataInput);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:16,代码来源:Application.java

示例15: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
public void populateDAG(DAG dag, Configuration conf)
{
  // Pipeline: Kafka -> JSON parse -> filter -> transform -> JSON format -> Kafka.
  KafkaSinglePortInputOperator kafkaSource = dag.addOperator("kafkaInput", KafkaSinglePortInputOperator.class);

  // Parses each JSON string against the configured schema and emits a POJO.
  JsonParser parser = dag.addOperator("JsonParser", JsonParser.class);

  // Drops tuples that do not satisfy the configured condition.
  FilterOperator filter = dag.addOperator("filter", new FilterOperator());

  // Upper-cases the "name" field; edit the expression map to change the logic.
  TransformOperator transformer = dag.addOperator("transform", new TransformOperator());
  Map<String, String> expressions = Maps.newHashMap();
  expressions.put("name", "{$.name}.toUpperCase()");
  transformer.setExpressionMap(expressions);

  // Serializes the transformed POJOs back to JSON.
  JsonFormatter jsonFormatter = dag.addOperator("JsonFormatter", JsonFormatter.class);

  // Publishes the formatted JSON to downstream Kafka consumers.
  KafkaSinglePortOutputOperator kafkaSink = dag.addOperator("kafkaOutput", KafkaSinglePortOutputOperator.class);

  // Connect the stages; only tuples passing the filter (truePort) are transformed.
  dag.addStream("KafkaToJsonParser", kafkaSource.outputPort, parser.in);
  dag.addStream("JsonParserToFilter", parser.out, filter.input);
  dag.addStream("FilterToTransform", filter.truePort, transformer.input);
  dag.addStream("TransformToJsonFormatter", transformer.output, jsonFormatter.in);
  dag.addStream("JsonFormatterToKafka", jsonFormatter.out, kafkaSink.inputPort);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:31,代码来源:Application.java


注:本文中的com.datatorrent.api.DAG.addOperator方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。