本文整理汇总了Java中com.datatorrent.api.DAG.setOutputPortAttribute方法的典型用法代码示例。如果您正苦于以下问题:Java DAG.setOutputPortAttribute方法的具体用法?Java DAG.setOutputPortAttribute怎么用?Java DAG.setOutputPortAttribute使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类com.datatorrent.api.DAG的用法示例。
在下文中一共展示了DAG.setOutputPortAttribute方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: populateDAG
import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  pojoDataGenerator = dag.addOperator("Input", new POJOGenerator());
  TransformOperator transform = dag.addOperator("Process", new TransformOperator());

  // Expressions that derive each output field from the incoming POJO.
  Map<String, String> expressions = new HashMap<>();
  expressions.put("name", "{$.firstName}.concat(\" \").concat({$.lastName})");
  expressions.put("age", "(new java.util.Date()).getYear() - {$.dateOfBirth}.getYear()");
  expressions.put("address", "{$.address}.toLowerCase()");
  transform.setExpressionMap(expressions);

  FunctionOperator.MapFunctionOperator<Object, ?> output =
      dag.addOperator("out", new FunctionOperator.MapFunctionOperator<>(outputFn));

  // Wire: generator -> transform -> function sink.
  dag.addStream("InputToTransform", pojoDataGenerator.output, transform.input);
  dag.addStream("TransformToOutput", transform.output, output.input);

  // Declare the concrete tuple classes flowing through the transform's ports.
  dag.setInputPortAttribute(transform.input, Context.PortContext.TUPLE_CLASS, CustomerEvent.class);
  dag.setOutputPortAttribute(transform.output, Context.PortContext.TUPLE_CLASS, CustomerInfo.class);

  setPartitioner(dag, conf, transform);
}
示例2: populateDAG
import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  // Reader: emits Avro GenericRecords from container files.
  AvroFileInputOperator fileInput = dag.addOperator("AvroFileInputOperator", this.avroFileInputOperator);
  // Converter: maps each GenericRecord onto the configured POJO class.
  AvroToPojo converter = dag.addOperator("AvroGenericObjectToPojo", new AvroToPojo());
  dag.setOutputPortAttribute(converter.output, Context.PortContext.TUPLE_CLASS, pojoClass);

  // Keep reader and converter in one container to avoid inter-container serialization.
  dag.addStream("avroFileContainerToPojo", fileInput.output, converter.data)
      .setLocality(DAG.Locality.CONTAINER_LOCAL);

  // Expose the inner operators' ports through the module's proxy ports.
  output.set(converter.output);
  errorPort.set(converter.errorPort);
  completedAvroFilesPort.set(fileInput.completedFilesPort);
  avroErrorRecordsPort.set(fileInput.errorRecordsPort);
}
示例3: populateDAG
import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  TestGenerator gen1 = dag.addOperator("Generator1", new TestGenerator());
  TestGenerator gen2 = dag.addOperator("Generator2", new TestGenerator());

  // Join both streams on "id", keeping id and eventTime from each side;
  // join state for a key expires after 10 seconds.
  joinOp = dag.addOperator("Join", new PartitionTestJoinOperator());
  joinOp.setLeftKeyExpression("id");
  joinOp.setRightKeyExpression("id");
  joinOp.setIncludeFieldStr("id,eventTime;id,eventTime");
  joinOp.setExpiryTime(10000L);

  ConsoleOutputOperator console = dag.addOperator("Console", new ConsoleOutputOperator());
  dag.addStream("Gen1ToJoin", gen1.output, joinOp.input1);
  dag.addStream("Gen2ToJoin", gen2.output, joinOp.input2);
  dag.addStream("JoinToConsole", joinOp.outputPort, console.input);

  // FIX: reference TUPLE_CLASS via Context.PortContext (its declaring scope) rather than
  // DAG.InputPortMeta — the old form was misleading, especially on the *output* port,
  // and inconsistent with the other applications in this project.
  dag.setInputPortAttribute(joinOp.input1, Context.PortContext.TUPLE_CLASS, TestEvent.class);
  dag.setInputPortAttribute(joinOp.input2, Context.PortContext.TUPLE_CLASS, TestEvent.class);
  dag.setOutputPortAttribute(joinOp.outputPort, Context.PortContext.TUPLE_CLASS, TestEvent.class);

  // Run the join with a fixed number of stateless static partitions.
  dag.setAttribute(joinOp, Context.OperatorContext.PARTITIONER,
      new StatelessPartitioner<PartitionTestJoinOperator>(NUM_OF_PARTITIONS));
}
示例4: populateDAG
import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TestGenerator generator = dag.addOperator("Generator", new TestGenerator());

  // Deduplicate on "id", bucketing tuples by their event time.
  dedup = dag.addOperator("Deduper", new TestDeduper());
  dedup.setKeyExpression("id");
  dedup.setTimeExpression("eventTime.getTime()");
  dedup.setBucketSpan(60);
  dedup.setExpireBefore(600);

  ConsoleOutputOperator sink = dag.addOperator("Console", new ConsoleOutputOperator());
  dag.addStream("Generator to Dedup", generator.output, dedup.input);
  dag.addStream("Dedup to Console", dedup.unique, sink.input);

  // Declare the tuple class on both sides of the deduper.
  dag.setInputPortAttribute(dedup.input, Context.PortContext.TUPLE_CLASS, TestEvent.class);
  dag.setOutputPortAttribute(dedup.unique, Context.PortContext.TUPLE_CLASS, TestEvent.class);

  // Static partitioning of the deduper.
  dag.setAttribute(dedup, Context.OperatorContext.PARTITIONER,
      new StatelessPartitioner<TimeBasedDedupOperator>(NUM_DEDUP_PARTITIONS));
}
示例5: populateDAG
import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  TestGenerator generator = dag.addOperator("Generator", new TestGenerator());
  TestDeduper deduper = dag.addOperator("Deduper", new TestDeduper());

  // Bucket manager: HDFS-backed store, keyed on id, time-bucketed on eventTime.
  TimeBasedBucketManagerPOJOImpl manager = new TimeBasedBucketManagerPOJOImpl();
  ExpirableBucketStore<Object> bucketStore = new ExpirableHdfsBucketStore<>();
  manager.setBucketStore(bucketStore);
  manager.setKeyExpression("$.id");
  manager.setTimeExpression("$.eventTime");
  deduper.setBucketManager(manager);

  ConsoleOutputOperator sink = dag.addOperator("Console", new ConsoleOutputOperator());
  dag.addStream("Generator to Dedup", generator.output, deduper.input);
  dag.addStream("Dedup to Console", deduper.output, sink.input);

  // Declare the tuple class on both sides of the deduper.
  dag.setInputPortAttribute(deduper.input, Context.PortContext.TUPLE_CLASS, TestEvent.class);
  dag.setOutputPortAttribute(deduper.output, Context.PortContext.TUPLE_CLASS, TestEvent.class);

  // Static partitioning of the deduper.
  dag.setAttribute(deduper, Context.OperatorContext.PARTITIONER,
      new StatelessPartitioner<DeduperPOJOImpl>(NUM_DEDUP_PARTITIONS));
}
示例6: populateDAG
import com.datatorrent.api.DAG; //导入方法依赖的package包/类
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  ServerLogGenerator generator = dag.addOperator("logGenerator", ServerLogGenerator.class);

  // Parse raw log lines into ServerLog POJOs; declare the parsed tuple class.
  RegexParser parser = dag.addOperator("regexParser", RegexParser.class);
  dag.setOutputPortAttribute(parser.out, Context.PortContext.TUPLE_CLASS, ServerLog.class);

  // Separate writers for successfully parsed records and parse failures.
  FileOutputOperator parsedWriter = dag.addOperator("regexWriter", FileOutputOperator.class);
  FileOutputOperator errorWriter = dag.addOperator("regexErrorWriter", FileOutputOperator.class);

  dag.addStream("regexInput", generator.outputPort, parser.in);
  dag.addStream("regexOutput", parser.out, parsedWriter.input);
  dag.addStream("regexError", parser.err, errorWriter.input);
}