当前位置: 首页>>代码示例>>Java>>正文


Java DAG.addModule方法代码示例

本文整理汇总了Java中com.datatorrent.api.DAG.addModule方法的典型用法代码示例。如果您正苦于以下问题:Java DAG.addModule方法的具体用法?Java DAG.addModule怎么用?Java DAG.addModule使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在com.datatorrent.api.DAG的用法示例。


在下文中一共展示了DAG.addModule方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // HDFS acts as the source module and S3 as the sink module.
  FSInputModule hdfsSource = dag.addModule("HDFSInputModule", new FSInputModule());
  S3OutputModule s3Sink = dag.addModule("S3OutputModule", new S3OutputModule());

  // File-level metadata flows from the reader to the S3 writer.
  dag.addStream("FileMetaData", hdfsSource.filesMetadataOutput, s3Sink.filesMetadataInput);

  // Block metadata and block data are pinned CONTAINER_LOCAL so tuples
  // skip serialization/deserialization between the two modules.
  dag.addStream("BlocksMetaData", hdfsSource.blocksMetadataOutput, s3Sink.blocksMetadataInput)
      .setLocality(DAG.Locality.CONTAINER_LOCAL);
  dag.addStream("BlocksData", hdfsSource.messages, s3Sink.blockData)
      .setLocality(DAG.Locality.CONTAINER_LOCAL);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:25,代码来源:Application.java

示例2: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // S3 is the source of raw records; Redshift is the final destination.
  S3RecordReaderModule s3Reader = dag.addModule("S3Input", new S3RecordReaderModule());
  setS3FilesToInput(s3Reader, conf);

  CsvParser parser = dag.addOperator("csvParser", CsvParser.class);

  // Transform step: upper-case the "name" field via an expression map.
  TransformOperator transformOp = dag.addOperator("transform", new TransformOperator());
  Map<String, String> expressions = Maps.newHashMap();
  expressions.put("name", "{$.name}.toUpperCase()");
  transformOp.setExpressionMap(expressions);

  CsvFormatter csvFormatter = dag.addOperator("formatter", new CsvFormatter());
  StringToByteArrayConverterOperator toBytes = dag.addOperator("converter", new StringToByteArrayConverterOperator());
  RedshiftOutputModule redshiftSink = dag.addModule("RedshiftOutput", new RedshiftOutputModule());

  // Pipeline: S3 -> parse -> transform -> format -> bytes -> Redshift.
  dag.addStream("data", s3Reader.records, parser.in);
  dag.addStream("pojo", parser.out, transformOp.input);
  dag.addStream("transformed", transformOp.output, csvFormatter.in);
  // Formatter and converter share a thread to avoid a tuple hand-off.
  dag.addStream("string", csvFormatter.out, toBytes.input).setLocality(DAG.Locality.THREAD_LOCAL);
  dag.addStream("writeToJDBC", toBytes.output, redshiftSink.input);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:24,代码来源:Application.java

示例3: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
public void populateDAG(DAG dag, Configuration conf)
{
  S3InputModule s3Source = dag.addModule("s3InputModule", S3InputModule.class);

  // Persists discovered file metadata so the test can inspect it.
  AbstractFileOutputOperator<AbstractFileSplitter.FileMetadata> metaWriter =
      new S3InputModuleAppTest.MetadataWriter(S3InputModuleAppTest.OUT_METADATA_FILE);
  metaWriter.setFilePath(S3InputModuleAppTest.outputDir);
  dag.addOperator("FileMetadataWriter", metaWriter);

  // Persists the raw block contents read from S3.
  AbstractFileOutputOperator<AbstractBlockReader.ReaderRecord<Slice>> blockWriter =
      new S3InputModuleAppTest.HDFSFileWriter(S3InputModuleAppTest.OUT_DATA_FILE);
  blockWriter.setFilePath(S3InputModuleAppTest.outputDir);
  dag.addOperator("FileDataWriter", blockWriter);

  // Block metadata is irrelevant to this test; route it to a sink.
  DevNull<BlockMetadata.FileBlockMetadata> discard = dag.addOperator("devNull", DevNull.class);

  dag.addStream("FileMetaData", s3Source.filesMetadataOutput, metaWriter.input);
  dag.addStream("data", s3Source.messages, blockWriter.input);
  dag.addStream("blockMetadata", s3Source.blocksMetadataOutput, discard.data);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:19,代码来源:S3InputModuleAppTest.java

示例4: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // First operator, configured from the enclosing module's property.
  DummyOperator first = dag.addOperator("O1", new DummyOperator());
  first.setOperatorProp(level2ModuleBProp1);

  // Nested level-1 module with fixed memory settings.
  Level1Module nested = dag.addModule("M1", new Level1Module());
  nested.setMemory(4096);
  nested.setPortMemory(3);
  nested.setLevel1ModuleProp(level2ModuleBProp2);

  DummyOperator second = dag.addOperator("O2", new DummyOperator());
  second.setOperatorProp(level2ModuleBProp3);

  dag.addStream("O1_M1", first.out1, nested.mIn).setLocality(DAG.Locality.THREAD_LOCAL);
  dag.addStream("O1_O2", first.out2, second.in).setLocality(DAG.Locality.RACK_LOCAL);

  // Expose inner operator ports through this module's proxy ports.
  mIn.set(first.in);
  mOut1.set(nested.mOut);
  mOut2.set(second.out1);
}
 
开发者ID:apache,项目名称:apex-core,代码行数:22,代码来源:TestModuleExpansion.java

示例5: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
public void populateDAG(DAG dag, Configuration conf)
{
  FSInputModule fsSource = dag.addModule("hdfsInputModule", FSInputModule.class);

  // Persists discovered file metadata so the test can inspect it.
  AbstractFileOutputOperator<FileMetadata> metaWriter = new MetadataWriter(FSInputModuleAppTest.OUT_METADATA_FILE);
  metaWriter.setFilePath(FSInputModuleAppTest.outputDir);
  dag.addOperator("FileMetadataWriter", metaWriter);

  // Persists the raw block contents read from HDFS.
  AbstractFileOutputOperator<ReaderRecord<Slice>> blockWriter = new HDFSFileWriter(FSInputModuleAppTest.OUT_DATA_FILE);
  blockWriter.setFilePath(FSInputModuleAppTest.outputDir);
  dag.addOperator("FileDataWriter", blockWriter);

  // Block metadata is irrelevant to this test; route it to a sink.
  DevNull<FileBlockMetadata> discard = dag.addOperator("devNull", DevNull.class);

  dag.addStream("FileMetaData", fsSource.filesMetadataOutput, metaWriter.input);
  dag.addStream("data", fsSource.messages, blockWriter.input);
  dag.addStream("blockMetadata", fsSource.blocksMetadataOutput, discard.data);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:19,代码来源:FSInputModuleAppTest.java

示例6: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Read records, parse them, then split by a filter predicate.
  FSRecordReaderModule reader = dag.addModule("recordReader", FSRecordReaderModule.class);
  CsvParser parser = dag.addOperator("csvParser", CsvParser.class);
  FilterOperator filter = dag.addOperator("filterOperator", new FilterOperator());

  // One formatter/output pair for records that pass the filter, one for the rest.
  CsvFormatter passFormatter = dag.addOperator("selectedFormatter", new CsvFormatter());
  CsvFormatter failFormatter = dag.addOperator("rejectedFormatter", new CsvFormatter());
  StringFileOutputOperator passWriter = dag.addOperator("selectedOutput", new StringFileOutputOperator());
  StringFileOutputOperator failWriter = dag.addOperator("rejectedOutput", new StringFileOutputOperator());

  dag.addStream("record", reader.records, parser.in);
  dag.addStream("pojo", parser.out, filter.input);

  dag.addStream("pojoSelected", filter.truePort, passFormatter.in);
  dag.addStream("pojoRejected", filter.falsePort, failFormatter.in);

  dag.addStream("csvSelected", passFormatter.out, passWriter.input);
  dag.addStream("csvRejected", failFormatter.out, failWriter.input);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:24,代码来源:Application.java

示例7: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Copy files from S3 into HDFS: S3 reader feeds the HDFS copy module.
  S3InputModule source = dag.addModule("S3InputModule", new S3InputModule());
  HDFSFileCopyModule sink = dag.addModule("HDFSFileCopyModule", new HDFSFileCopyModule());

  dag.addStream("FileMetaData", source.filesMetadataOutput, sink.filesMetadataInput);
  // Block metadata and data streams stay THREAD_LOCAL for throughput.
  dag.addStream("BlocksMetaData", source.blocksMetadataOutput, sink.blocksMetadataInput)
      .setLocality(Locality.THREAD_LOCAL);
  dag.addStream("BlocksData", source.messages, sink.blockData).setLocality(Locality.THREAD_LOCAL);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:13,代码来源:Application.java

示例8: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // HDFS input feeds a part-file writer.
  FSInputModule source = dag.addModule("HDFSInputModule", new FSInputModule());
  PartFileWriter writer = dag.addOperator("PartFileCopy", new PartFileWriter());

  // Partition the writer's ports in parallel with the upstream partitions.
  dag.setInputPortAttribute(writer.input, Context.PortContext.PARTITION_PARALLEL, true);
  dag.setInputPortAttribute(writer.blockMetadataInput, Context.PortContext.PARTITION_PARALLEL, true);

  // Wire block data, block metadata and file metadata into the writer;
  // the block streams are CONTAINER_LOCAL to avoid serialization cost.
  dag.addStream("BlocksMetaData", source.blocksMetadataOutput, writer.blockMetadataInput)
      .setLocality(DAG.Locality.CONTAINER_LOCAL);
  dag.addStream("BlocksData", source.messages, writer.input).setLocality(DAG.Locality.CONTAINER_LOCAL);
  dag.addStream("FileMetaData", source.filesMetadataOutput, writer.fileMetadataInput);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:16,代码来源:Application.java

示例9: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Intra-HDFS copy: read blocks from one location, write them to another.
  FSInputModule source = dag.addModule("HDFSInputModule", new FSInputModule());
  HDFSFileCopyModule sink = dag.addModule("HDFSFileCopyModule", new HDFSFileCopyModule());

  dag.addStream("FileMetaData", source.filesMetadataOutput, sink.filesMetadataInput);
  // Block metadata and data streams stay THREAD_LOCAL for throughput.
  dag.addStream("BlocksMetaData", source.blocksMetadataOutput, sink.blocksMetadataInput)
      .setLocality(Locality.THREAD_LOCAL);
  dag.addStream("BlocksData", source.messages, sink.blockData).setLocality(Locality.THREAD_LOCAL);
}
 
开发者ID:DataTorrent,项目名称:app-templates,代码行数:13,代码来源:Application.java

示例10: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  LOG.debug("Application - PopulateDAG");
  DummyInputOperator source = dag.addOperator("DummyInputOperator", new DummyInputOperator());
  DummyOperatorAfterInput passThrough = dag.addOperator("DummyOperatorAfterInput",
      new DummyOperatorAfterInput());
  // Two module instances so operator->module, module->module and
  // module->operator stream variants are all exercised.
  Module firstModule = dag.addModule("TestModule1", new TestModule());
  Module secondModule = dag.addModule("TestModule2", new TestModule());
  DummyOutputOperator terminal = dag.addOperator("DummyOutputOperator", new DummyOutputOperator());

  dag.addStream("Operator To Operator", source.output, passThrough.input);
  dag.addStream("Operator To Module", passThrough.output, ((TestModule)firstModule).moduleInput);
  dag.addStream("Module To Module", ((TestModule)firstModule).moduleOutput, ((TestModule)secondModule).moduleInput);
  dag.addStream("Module To Operator", ((TestModule)secondModule).moduleOutput, terminal.input);
}
 
开发者ID:apache,项目名称:apex-core,代码行数:16,代码来源:ModuleAppTest.java

示例11: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Minimal two-operator pipeline: random numbers printed to stdout.
  RandomNumberGenerator generator = dag.addOperator("rand", RandomNumberGenerator.class);
  StdoutOperator console = dag.addOperator("stdout", new StdoutOperator());

  // Deliberately left unconnected; present only to verify module handling.
  Module unconnected = dag.addModule("testModule", com.example.mydtapp.TestModule.class);

  dag.addStream("data", generator.out, console.in).setLocality(Locality.CONTAINER_LOCAL);
}
 
开发者ID:apache,项目名称:apex-core,代码行数:13,代码来源:Application.java

示例12: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Read lines -> parse CSV -> re-format -> write strings to files.
  FSRecordReaderModule reader = dag.addModule("recordReader", FSRecordReaderModule.class);
  CsvParser parser = dag.addOperator("csvParser", CsvParser.class);
  CsvFormatter outputFormatter = dag.addOperator("formatter", new CsvFormatter());
  StringFileOutputOperator fileWriter = dag.addOperator("fileOutput", new StringFileOutputOperator());

  dag.addStream("record", reader.records, parser.in);
  dag.addStream("pojo", parser.out, outputFormatter.in);
  dag.addStream("string", outputFormatter.out, fileWriter.input);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:13,代码来源:Application.java

示例13: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // HDFS source module feeding an S3 sink module.
  FSInputModule hdfsSource = dag.addModule("HDFSInputModule", new FSInputModule());
  S3OutputModule s3Sink = dag.addModule("S3OutputModule", new S3OutputModule());

  dag.addStream("FileMetaData", hdfsSource.filesMetadataOutput, s3Sink.filesMetadataInput);
  // Block streams are CONTAINER_LOCAL to avoid serialization overhead.
  dag.addStream("BlocksMetaData", hdfsSource.blocksMetadataOutput, s3Sink.blocksMetadataInput)
      .setLocality(DAG.Locality.CONTAINER_LOCAL);
  dag.addStream("BlocksData", hdfsSource.messages, s3Sink.blockData)
      .setLocality(DAG.Locality.CONTAINER_LOCAL);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:12,代码来源:Application.java

示例14: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
public void populateDAG(DAG dag, Configuration conf)
{
  // Stream whole records from the filesystem straight into S3.
  FSRecordReaderModule lineReader = dag.addModule("lineInput", FSRecordReaderModule.class);
  S3BytesOutputModule s3Sink = dag.addModule("s3TupleOutput", S3BytesOutputModule.class);

  dag.addStream("data", lineReader.records, s3Sink.input);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:8,代码来源:Application.java

示例15: populateDAG

import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Convert Avro files into POJOs and print them on the console.
  AvroFileToPojoModule avroToPojo = dag.addModule("avroFileToPojoModule", getAvroFileToPojoModule());
  ConsoleOutputOperator console = dag.addOperator("console", new ConsoleOutputOperator());

  dag.addStream("POJO", avroToPojo.output, console.input);
}
 
开发者ID:apache,项目名称:apex-malhar,代码行数:9,代码来源:AvroFileToPojoModuleTest.java


注:本文中的com.datatorrent.api.DAG.addModule方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。