本文整理匯總了Java中org.apache.flink.runtime.operators.util.TaskConfig.setInputSerializer方法的典型用法代碼示例。如果您正苦於以下問題:Java TaskConfig.setInputSerializer方法的具體用法?Java TaskConfig.setInputSerializer怎麽用?Java TaskConfig.setInputSerializer使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.flink.runtime.operators.util.TaskConfig
的用法示例。
在下文中一共展示了TaskConfig.setInputSerializer方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: createOutput
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the file-output vertex that writes the final result to {@code resultPath}
 * as newline-terminated, space-separated CSV with two LongValue columns.
 */
private static OutputFormatVertex createOutput(JobGraph jobGraph, String resultPath, int numSubTasks,
		TypeSerializerFactory<?> serializer) {
	OutputFormatVertex output = JobGraphUtils.createFileOutput(jobGraph, "Final Output", numSubTasks);

	TaskConfig outputConfig = new TaskConfig(output.getConfiguration());
	outputConfig.addInputToGroup(0);
	outputConfig.setInputSerializer(serializer, 0);

	// the format is instantiated from its class; all settings travel via stub parameters
	outputConfig.setStubWrapper(new UserCodeClassWrapper<CsvOutputFormat>(CsvOutputFormat.class));
	outputConfig.setStubParameter(FileOutputFormat.FILE_PARAMETER_KEY, resultPath);

	Configuration csvParams = outputConfig.getStubParameters();
	csvParams.setString(CsvOutputFormat.RECORD_DELIMITER_PARAMETER, "\n");
	csvParams.setString(CsvOutputFormat.FIELD_DELIMITER_PARAMETER, " ");
	csvParams.setClass(CsvOutputFormat.FIELD_TYPE_PARAMETER_PREFIX + 0, LongValue.class);
	csvParams.setInteger(CsvOutputFormat.RECORD_POSITION_PARAMETER_PREFIX + 0, 0);
	csvParams.setClass(CsvOutputFormat.FIELD_TYPE_PARAMETER_PREFIX + 1, LongValue.class);
	csvParams.setInteger(CsvOutputFormat.RECORD_POSITION_PARAMETER_PREFIX + 1, 1);
	csvParams.setInteger(CsvOutputFormat.NUM_FIELDS_PARAMETER, 2);

	return output;
}
示例2: createMapper
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the mapper vertex running {@link DotProducts} as a collector map.
 * The point records arrive on the regular input; the models arrive via a
 * broadcast input named "models".
 */
private static AbstractJobVertex createMapper(JobGraph jobGraph, int numSubTasks, TypeSerializerFactory<?> serializer) {
	AbstractJobVertex mapper = JobGraphUtils.createTask(RegularPactTask.class, "Map[DotProducts]", jobGraph, numSubTasks);
	TaskConfig cfg = new TaskConfig(mapper.getConfiguration());

	// user function and driver strategy
	cfg.setStubWrapper(new UserCodeClassWrapper<DotProducts>(DotProducts.class));
	cfg.setDriver(CollectorMapDriver.class);
	cfg.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);

	// regular (point) input
	cfg.addInputToGroup(0);
	cfg.setInputLocalStrategy(0, LocalStrategy.NONE);
	cfg.setInputSerializer(serializer, 0);

	// broadcast (model) input
	cfg.setBroadcastInputName("models", 0);
	cfg.addBroadcastInputToGroup(0);
	cfg.setBroadcastInputSerializer(serializer, 0);

	// output is simply forwarded
	cfg.addOutputShipStrategy(ShipStrategyType.FORWARD);
	cfg.setOutputSerializer(serializer);

	return mapper;
}
示例3: createOutput
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the sink vertex writing three LongValue fields per record as
 * space-separated, newline-terminated CSV to {@code resultPath}.
 */
private static OutputFormatVertex createOutput(JobGraph jobGraph, String resultPath, int numSubTasks, TypeSerializerFactory<?> serializer) {
	OutputFormatVertex output = JobGraphUtils.createFileOutput(jobGraph, "Output", numSubTasks);

	// the format object is configured up front and shipped as a serialized object
	@SuppressWarnings("unchecked")
	CsvOutputFormat format = new CsvOutputFormat("\n", " ", LongValue.class, LongValue.class, LongValue.class);
	format.setOutputFilePath(new Path(resultPath));

	TaskConfig cfg = new TaskConfig(output.getConfiguration());
	cfg.addInputToGroup(0);
	cfg.setInputSerializer(serializer, 0);
	cfg.setStubWrapper(new UserCodeObjectWrapper<CsvOutputFormat>(format));

	return output;
}
示例4: createOutput
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the sink vertex writing the result with a {@link PointOutFormat}
 * to {@code resultPath}.
 */
private static OutputFormatVertex createOutput(JobGraph jobGraph, String resultPath, int numSubTasks, TypeSerializerFactory<?> serializer) {
	OutputFormatVertex output = JobGraphUtils.createFileOutput(jobGraph, "Output", numSubTasks);

	// the format object is configured up front and shipped as a serialized object
	PointOutFormat format = new PointOutFormat();
	format.setOutputFilePath(new Path(resultPath));

	TaskConfig cfg = new TaskConfig(output.getConfiguration());
	cfg.addInputToGroup(0);
	cfg.setInputSerializer(serializer, 0);
	cfg.setStubWrapper(new UserCodeObjectWrapper<PointOutFormat>(format));

	return output;
}
示例5: addInput
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Registers {@code input} as input group {@code groupId} on the mock environment,
 * using the record serializer. If {@code read} is set, the gate is immediately
 * signalled non-empty so the task under test starts consuming.
 *
 * @return the wrapping test input gate for the registered iterator
 */
public IteratorWrappingTestSingleInputGate<Record> addInput(MutableObjectIterator<Record> input, int groupId, boolean read) {
	final IteratorWrappingTestSingleInputGate<Record> inputGate = this.mockEnv.addInput(input);

	final TaskConfig taskConfig = new TaskConfig(this.mockEnv.getTaskConfiguration());
	taskConfig.addInputToGroup(groupId);
	taskConfig.setInputSerializer(RecordSerializerFactory.get(), groupId);

	if (read) {
		inputGate.notifyNonEmpty();
	}
	return inputGate;
}
示例6: createVerticesInput
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the vertices source: reads LongValue ids from {@code verticesPath} and
 * chains an {@link IdDuplicator} mapper that emits (id, id) pairs hash-partitioned
 * to two downstream consumers.
 */
private static InputFormatVertex createVerticesInput(JobGraph jobGraph, String verticesPath, int numSubTasks,
TypeSerializerFactory<?> serializer, TypeComparatorFactory<?> comparator)
{
@SuppressWarnings("unchecked")
CsvInputFormat verticesInFormat = new CsvInputFormat(' ', LongValue.class);
InputFormatVertex verticesInput = JobGraphUtils.createInput(verticesInFormat, verticesPath, "VerticesInput",
jobGraph, numSubTasks);
TaskConfig verticesInputConfig = new TaskConfig(verticesInput.getConfiguration());
{
// the source itself just forwards records into the chained mapper
verticesInputConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
verticesInputConfig.setOutputSerializer(serializer);
// chained mapper that duplicates the id
TaskConfig chainedMapperConfig = new TaskConfig(new Configuration());
chainedMapperConfig.setStubWrapper(new UserCodeClassWrapper<IdDuplicator>(IdDuplicator.class));
chainedMapperConfig.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);
chainedMapperConfig.setInputLocalStrategy(0, LocalStrategy.NONE);
chainedMapperConfig.setInputSerializer(serializer, 0);
chainedMapperConfig.setOutputSerializer(serializer);
// two outputs, each hash-partitioned, with a comparator per output channel
chainedMapperConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
chainedMapperConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
chainedMapperConfig.setOutputComparator(comparator, 0);
chainedMapperConfig.setOutputComparator(comparator, 1);
verticesInputConfig.addChainedTask(ChainedCollectorMapDriver.class, chainedMapperConfig, "ID Duplicator");
}
return verticesInput;
}
示例7: createIterationIntermediate
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the intermediate iteration vertex: a sorted group-reduce that selects
 * the minimum component id per group ({@link MinimumComponentIDReduce}).
 */
private static AbstractJobVertex createIterationIntermediate(JobGraph jobGraph, int numSubTasks,
TypeSerializerFactory<?> serializer, TypeComparatorFactory<?> comparator)
{
// --------------- the intermediate (reduce to min id) ---------------
AbstractJobVertex intermediate = JobGraphUtils.createTask(IterationIntermediatePactTask.class,
"Find Min Component-ID", jobGraph, numSubTasks);
TaskConfig intermediateConfig = new TaskConfig(intermediate.getConfiguration());
{
intermediateConfig.setIterationId(ITERATION_ID);
// input 0: sorted locally before the group reduce, spilling to disk when needed
intermediateConfig.addInputToGroup(0);
intermediateConfig.setInputSerializer(serializer, 0);
intermediateConfig.setInputComparator(comparator, 0);
intermediateConfig.setInputLocalStrategy(0, LocalStrategy.SORT);
intermediateConfig.setRelativeMemoryInput(0, MEM_FRAC_PER_CONSUMER);
intermediateConfig.setFilehandlesInput(0, 64);
intermediateConfig.setSpillingThresholdInput(0, 0.85f);
// output is forwarded to the tail
intermediateConfig.setOutputSerializer(serializer);
intermediateConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
// driver: sorted group reduce over the min-id function
intermediateConfig.setDriver(GroupReduceDriver.class);
intermediateConfig.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
intermediateConfig.setDriverComparator(comparator, 0);
intermediateConfig.setStubWrapper(
new UserCodeObjectWrapper<WrappingReduceFunction>(new WrappingClassReduceFunction(MinimumComponentIDReduce.class)));
}
return intermediate;
}
示例8: createIterationHead
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates a bulk-iteration head with a no-op driver: input 0 is the partial
 * solution, the in-iteration output broadcasts to the consumers, and a separate
 * forward channel carries the final result out of the iteration.
 */
private static AbstractJobVertex createIterationHead(JobGraph jobGraph, int numSubTasks, TypeSerializerFactory<?> serializer) {
AbstractJobVertex head = JobGraphUtils.createTask(IterationHeadPactTask.class, "Iteration Head", jobGraph, numSubTasks);
TaskConfig headConfig = new TaskConfig(head.getConfiguration());
headConfig.setIterationId(ITERATION_ID);
// initial input / partial solution
headConfig.addInputToGroup(0);
headConfig.setIterationHeadPartialSolutionOrWorksetInputIndex(0);
headConfig.setInputSerializer(serializer, 0);
// back channel / iterations
headConfig.setRelativeBackChannelMemory(MEMORY_FRACTION_PER_CONSUMER);
// output into iteration. broadcasting the centers
headConfig.setOutputSerializer(serializer);
headConfig.addOutputShipStrategy(ShipStrategyType.BROADCAST);
// final output
TaskConfig headFinalOutConfig = new TaskConfig(new Configuration());
headFinalOutConfig.setOutputSerializer(serializer);
headFinalOutConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
headConfig.setIterationHeadFinalOutputConfig(headFinalOutConfig);
// the sync
headConfig.setIterationHeadIndexOfSyncOutput(2);
// the driver
headConfig.setDriver(NoOpDriver.class);
headConfig.setDriverStrategy(DriverStrategy.UNARY_NO_OP);
return head;
}
示例9: createMapper
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the in-iteration mapper ("select nearest center"): a collector map
 * over the points input, with the centers fed in as the broadcast variable
 * "centers"; output is hash-partitioned by field 0.
 */
private static AbstractJobVertex createMapper(JobGraph jobGraph, int numSubTasks, TypeSerializerFactory<?> inputSerializer,
TypeSerializerFactory<?> broadcastVarSerializer, TypeSerializerFactory<?> outputSerializer,
TypeComparatorFactory<?> outputComparator)
{
AbstractJobVertex mapper = JobGraphUtils.createTask(IterationIntermediatePactTask.class,
"Map (Select nearest center)", jobGraph, numSubTasks);
TaskConfig intermediateConfig = new TaskConfig(mapper.getConfiguration());
intermediateConfig.setIterationId(ITERATION_ID);
// driver
intermediateConfig.setDriver(CollectorMapDriver.class);
intermediateConfig.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);
// regular (points) input
intermediateConfig.addInputToGroup(0);
intermediateConfig.setInputSerializer(inputSerializer, 0);
// hash-partitioned output toward the reducer
intermediateConfig.setOutputSerializer(outputSerializer);
intermediateConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
intermediateConfig.setOutputComparator(outputComparator, 0);
// broadcast (centers) input
intermediateConfig.setBroadcastInputName("centers", 0);
intermediateConfig.addBroadcastInputToGroup(0);
intermediateConfig.setBroadcastInputSerializer(broadcastVarSerializer, 0);
// the udf
intermediateConfig.setStubWrapper(new UserCodeObjectWrapper<SelectNearestCenter>(new SelectNearestCenter()));
return mapper;
}
示例10: createReducer
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the iteration-tail reduce vertex: sorts its input locally, runs a
 * sorted group-reduce ({@link RecomputeClusterCenter}), and feeds the result
 * back as the workset update.
 */
private static AbstractJobVertex createReducer(JobGraph jobGraph, int numSubTasks, TypeSerializerFactory<?> inputSerializer,
TypeComparatorFactory<?> inputComparator, TypeSerializerFactory<?> outputSerializer)
{
// ---------------- the tail (reduce) --------------------
AbstractJobVertex tail = JobGraphUtils.createTask(IterationTailPactTask.class, "Reduce / Iteration Tail", jobGraph,
numSubTasks);
TaskConfig tailConfig = new TaskConfig(tail.getConfiguration());
tailConfig.setIterationId(ITERATION_ID);
// the tail's output closes the loop by updating the workset
tailConfig.setIsWorksetUpdate();
// inputs and driver
tailConfig.setDriver(GroupReduceDriver.class);
tailConfig.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
tailConfig.addInputToGroup(0);
tailConfig.setInputSerializer(inputSerializer, 0);
tailConfig.setDriverComparator(inputComparator, 0);
// input is sorted locally, with spilling beyond the memory fraction
tailConfig.setInputLocalStrategy(0, LocalStrategy.SORT);
tailConfig.setInputComparator(inputComparator, 0);
tailConfig.setRelativeMemoryInput(0, MEMORY_FRACTION_PER_CONSUMER);
tailConfig.setFilehandlesInput(0, 128);
tailConfig.setSpillingThresholdInput(0, 0.9f);
// output
tailConfig.setOutputSerializer(outputSerializer);
// the udf
tailConfig.setStubWrapper(new UserCodeObjectWrapper<WrappingReduceFunction>(new WrappingReduceFunction(new RecomputeClusterCenter())));
return tail;
}
示例11: addLocalInfoFromChannelToConfig
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Copies a channel's receiver-side settings (serializer, local strategy, and
 * temp-mode materialization/caching) into the target task's {@link TaskConfig}.
 *
 * @param channel the channel whose local properties are transferred
 * @param config the task configuration of the channel's receiving task
 * @param inputNum index of the (broadcast) input on the receiving task
 * @param isBroadcastChannel true if the channel feeds a broadcast variable
 */
private void addLocalInfoFromChannelToConfig(Channel channel, TaskConfig config, int inputNum, boolean isBroadcastChannel) {
// serializer
if (isBroadcastChannel) {
config.setBroadcastInputSerializer(channel.getSerializer(), inputNum);
// broadcast inputs carry only a serializer; any strategy/temp mode is a compiler bug
if (channel.getLocalStrategy() != LocalStrategy.NONE || (channel.getTempMode() != null && channel.getTempMode() != TempMode.NONE)) {
throw new CompilerException("Found local strategy or temp mode on a broadcast variable channel.");
} else {
return;
}
} else {
config.setInputSerializer(channel.getSerializer(), inputNum);
}
// local strategy
if (channel.getLocalStrategy() != LocalStrategy.NONE) {
config.setInputLocalStrategy(inputNum, channel.getLocalStrategy());
if (channel.getLocalStrategyComparator() != null) {
config.setInputComparator(channel.getLocalStrategyComparator(), inputNum);
}
}
assignLocalStrategyResources(channel, config, inputNum);
// materialization / caching
if (channel.getTempMode() != null) {
final TempMode tm = channel.getTempMode();
boolean needsMemory = false;
// Don't add a pipeline breaker if the data exchange is already blocking, EXCEPT the channel is within an iteration.
if (tm.breaksPipeline() &&
(channel.isOnDynamicPath() || channel.getDataExchangeMode() != DataExchangeMode.BATCH) ) {
config.setInputAsynchronouslyMaterialized(inputNum, true);
needsMemory = true;
}
if (tm.isCached()) {
config.setInputCached(inputNum, true);
needsMemory = true;
}
if (needsMemory) {
// sanity check
if (tm == TempMode.NONE || channel.getRelativeTempMemory() <= 0) {
throw new CompilerException("Bug in compiler: Inconsistent description of input materialization.");
}
config.setRelativeInputMaterializationMemory(inputNum, channel.getRelativeTempMemory());
}
}
}
示例12: addLocalInfoFromChannelToConfig
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Copies a channel's receiver-side settings (serializer, local strategy, and
 * temp-mode materialization/caching) into the target task's {@link TaskConfig}.
 *
 * @param channel the channel whose local properties are transferred
 * @param config the task configuration of the channel's receiving task
 * @param inputNum index of the (broadcast) input on the receiving task
 * @param isBroadcastChannel true if the channel feeds a broadcast variable
 */
private void addLocalInfoFromChannelToConfig(Channel channel, TaskConfig config, int inputNum, boolean isBroadcastChannel) {
	// serializer
	if (isBroadcastChannel) {
		config.setBroadcastInputSerializer(channel.getSerializer(), inputNum);
		// broadcast inputs carry only a serializer; any strategy/temp mode is a compiler bug
		if (channel.getLocalStrategy() != LocalStrategy.NONE || (channel.getTempMode() != null && channel.getTempMode() != TempMode.NONE)) {
			throw new CompilerException("Found local strategy or temp mode on a broadcast variable channel.");
		} else {
			return;
		}
	} else {
		config.setInputSerializer(channel.getSerializer(), inputNum);
	}

	// local strategy
	if (channel.getLocalStrategy() != LocalStrategy.NONE) {
		config.setInputLocalStrategy(inputNum, channel.getLocalStrategy());
		if (channel.getLocalStrategyComparator() != null) {
			config.setInputComparator(channel.getLocalStrategyComparator(), inputNum);
		}
	}

	assignLocalStrategyResources(channel, config, inputNum);

	// materialization / caching
	if (channel.getTempMode() != null) {
		final TempMode tm = channel.getTempMode();

		boolean needsMemory = false;
		if (tm.breaksPipeline()) {
			config.setInputAsynchronouslyMaterialized(inputNum, true);
			needsMemory = true;
		}
		if (tm.isCached()) {
			config.setInputCached(inputNum, true);
			needsMemory = true;
		}

		if (needsMemory) {
			// sanity check: tm is known non-null inside this guarded branch, so the
			// former "tm == null" test was dead code (the sibling overload omits it too)
			if (tm == TempMode.NONE || channel.getRelativeTempMemory() <= 0) {
				throw new CompilerException("Bug in compiler: Inconsistent description of input materialization.");
			}
			config.setRelativeInputMaterializationMemory(inputNum, channel.getRelativeTempMemory());
		}
	}
}
示例13: createIterationHead
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Creates the workset-iteration head: a hybrid-hash join of the workset
 * (input 0) against the cached edges (input 1), with the initial solution set
 * arriving on input 2. Registers the workset-empty convergence aggregator.
 */
private static AbstractJobVertex createIterationHead(JobGraph jobGraph, int numSubTasks,
TypeSerializerFactory<?> serializer,
TypeComparatorFactory<?> comparator,
TypePairComparatorFactory<?, ?> pairComparator) {
AbstractJobVertex head = JobGraphUtils.createTask(IterationHeadPactTask.class, "Join With Edges (Iteration Head)", jobGraph, numSubTasks);
TaskConfig headConfig = new TaskConfig(head.getConfiguration());
{
headConfig.setIterationId(ITERATION_ID);
// initial input / workset
headConfig.addInputToGroup(0);
headConfig.setInputSerializer(serializer, 0);
headConfig.setInputComparator(comparator, 0);
headConfig.setInputLocalStrategy(0, LocalStrategy.NONE);
headConfig.setIterationHeadPartialSolutionOrWorksetInputIndex(0);
// regular plan input (second input to the join)
headConfig.addInputToGroup(1);
headConfig.setInputSerializer(serializer, 1);
headConfig.setInputComparator(comparator, 1);
headConfig.setInputLocalStrategy(1, LocalStrategy.NONE);
// edges stay constant across supersteps, so they are materialized once and cached
headConfig.setInputCached(1, true);
headConfig.setRelativeInputMaterializationMemory(1, MEM_FRAC_PER_CONSUMER);
// initial solution set input
headConfig.addInputToGroup(2);
headConfig.setInputSerializer(serializer, 2);
headConfig.setInputComparator(comparator, 2);
headConfig.setInputLocalStrategy(2, LocalStrategy.NONE);
headConfig.setIterationHeadSolutionSetInputIndex(2);
headConfig.setSolutionSetSerializer(serializer);
headConfig.setSolutionSetComparator(comparator);
// back channel / iterations
headConfig.setIsWorksetIteration();
headConfig.setRelativeBackChannelMemory(MEM_FRAC_PER_CONSUMER);
headConfig.setRelativeSolutionSetMemory(MEM_FRAC_PER_CONSUMER );
// output into iteration
headConfig.setOutputSerializer(serializer);
headConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
headConfig.setOutputComparator(comparator, 0);
// final output
TaskConfig headFinalOutConfig = new TaskConfig(new Configuration());
headFinalOutConfig.setOutputSerializer(serializer);
headFinalOutConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
headConfig.setIterationHeadFinalOutputConfig(headFinalOutConfig);
// the sync
headConfig.setIterationHeadIndexOfSyncOutput(2);
// the driver
headConfig.setDriver(BuildSecondCachedMatchDriver.class);
headConfig.setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
headConfig.setStubWrapper(
new UserCodeClassWrapper<NeighborWithComponentIDJoin>(NeighborWithComponentIDJoin.class));
headConfig.setDriverComparator(comparator, 0);
headConfig.setDriverComparator(comparator, 1);
headConfig.setDriverPairComparator(pairComparator);
headConfig.setRelativeMemoryDriver(MEM_FRAC_PER_CONSUMER);
// iteration converges when the workset becomes empty
headConfig.addIterationAggregator(
WorksetEmptyConvergenceCriterion.AGGREGATOR_NAME, new LongSumAggregator());
}
return head;
}
示例14: createJobGraphUnifiedTails
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Assembles the full Connected-Components job graph with a single unified tail
 * that performs the solution-set join and updates both workset and solution set.
 *
 * @param verticesPath input path of the vertex ids
 * @param edgesPath input path of the edges
 * @param resultPath output path for the component assignment
 * @param numSubTasks parallelism of every vertex
 * @param maxIterations superstep limit passed to the sync task
 */
public JobGraph createJobGraphUnifiedTails(
String verticesPath, String edgesPath, String resultPath, int numSubTasks, int maxIterations)
{
// -- init -------------------------------------------------------------------------------------------------
final TypeSerializerFactory<?> serializer = RecordSerializerFactory.get();
@SuppressWarnings("unchecked")
final TypeComparatorFactory<?> comparator =
new RecordComparatorFactory(new int[] { 0 }, new Class[] { LongValue.class }, new boolean[] { true });
final TypePairComparatorFactory<?, ?> pairComparator = RecordPairComparatorFactory.get();
JobGraph jobGraph = new JobGraph("Connected Components (Unified Tails)");
// -- invariant vertices -----------------------------------------------------------------------------------
InputFormatVertex vertices = createVerticesInput(jobGraph, verticesPath, numSubTasks, serializer, comparator);
InputFormatVertex edges = createEdgesInput(jobGraph, edgesPath, numSubTasks, serializer, comparator);
AbstractJobVertex head = createIterationHead(jobGraph, numSubTasks, serializer, comparator, pairComparator);
AbstractJobVertex intermediate = createIterationIntermediate(jobGraph, numSubTasks, serializer, comparator);
TaskConfig intermediateConfig = new TaskConfig(intermediate.getConfiguration());
OutputFormatVertex output = createOutput(jobGraph, resultPath, numSubTasks, serializer);
AbstractJobVertex sync = createSync(jobGraph, numSubTasks, maxIterations);
// --------------- the tail (solution set join) ---------------
AbstractJobVertex tail = JobGraphUtils.createTask(IterationTailPactTask.class, "IterationTail", jobGraph, numSubTasks);
TaskConfig tailConfig = new TaskConfig(tail.getConfiguration());
{
tailConfig.setIterationId(ITERATION_ID);
// this single tail updates the workset AND the solution set (without re-probing)
tailConfig.setIsWorksetIteration();
tailConfig.setIsWorksetUpdate();
tailConfig.setIsSolutionSetUpdate();
tailConfig.setIsSolutionSetUpdateWithoutReprobe();
// inputs and driver
tailConfig.addInputToGroup(0);
tailConfig.setInputSerializer(serializer, 0);
// output
tailConfig.setOutputSerializer(serializer);
// the driver
tailConfig.setDriver(JoinWithSolutionSetSecondDriver.class);
tailConfig.setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
tailConfig.setDriverComparator(comparator, 0);
tailConfig.setDriverPairComparator(pairComparator);
tailConfig.setStubWrapper(new UserCodeClassWrapper<UpdateComponentIdMatch>(UpdateComponentIdMatch.class));
}
// -- edges ------------------------------------------------------------------------------------------------
// NOTE: vertices is connected to the head twice on purpose — the head registers
// three input groups (0: initial workset, 1: edges, 2: initial solution set),
// and both the workset and the solution set are initialized from the vertices.
JobGraphUtils.connect(vertices, head, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
JobGraphUtils.connect(edges, head, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
JobGraphUtils.connect(vertices, head, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
JobGraphUtils.connect(head, intermediate, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
intermediateConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, numSubTasks);
JobGraphUtils.connect(intermediate, tail, ChannelType.IN_MEMORY, DistributionPattern.POINTWISE);
tailConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, 1);
JobGraphUtils.connect(head, output, ChannelType.IN_MEMORY, DistributionPattern.POINTWISE);
JobGraphUtils.connect(head, sync, ChannelType.NETWORK, DistributionPattern.POINTWISE);
// all vertices share one slot group; iteration parts are co-located with the head
SlotSharingGroup sharingGroup = new SlotSharingGroup();
vertices.setSlotSharingGroup(sharingGroup);
edges.setSlotSharingGroup(sharingGroup);
head.setSlotSharingGroup(sharingGroup);
intermediate.setSlotSharingGroup(sharingGroup);
tail.setSlotSharingGroup(sharingGroup);
output.setSlotSharingGroup(sharingGroup);
sync.setSlotSharingGroup(sharingGroup);
intermediate.setStrictlyCoLocatedWith(head);
tail.setStrictlyCoLocatedWith(head);
return jobGraph;
}
示例15: addInput
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
/**
 * Registers {@code input} as input group {@code groupId} on the mock
 * environment, deserialized with the record serializer.
 */
public void addInput(MutableObjectIterator<Record> input, int groupId) {
	this.mockEnv.addInput(input);

	final TaskConfig taskConfig = new TaskConfig(this.mockEnv.getTaskConfiguration());
	taskConfig.addInputToGroup(groupId);
	taskConfig.setInputSerializer(RecordSerializerFactory.get(), groupId);
}