本文整理匯總了Java中org.apache.flink.runtime.operators.util.TaskConfig.setDriverPairComparator方法的典型用法代碼示例。如果您正苦於以下問題:Java TaskConfig.setDriverPairComparator方法的具體用法?Java TaskConfig.setDriverPairComparator怎麽用?Java TaskConfig.setDriverPairComparator使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.flink.runtime.operators.util.TaskConfig
的用法示例。
在下文中一共展示了TaskConfig.setDriverPairComparator方法的4個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: createDualInputVertex
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
private JobVertex createDualInputVertex(DualInputPlanNode node) throws CompilerException {
	// Create the vertex for this two-input operator and wrap its configuration.
	final JobVertex vertex = new JobVertex(node.getNodeName());
	final TaskConfig taskConfig = new TaskConfig(vertex.getConfiguration());
	final DriverStrategy driverStrategy = node.getDriverStrategy();

	vertex.setResources(node.getMinResources(), node.getPreferredResources());

	// On an iteration's dynamic path the intermediate iteration task runs the driver;
	// outside of iterations the plain batch task is used.
	if (this.currentIteration != null && node.isOnDynamicPath()) {
		vertex.setInvokableClass(IterationIntermediateTask.class);
	} else {
		vertex.setInvokableClass(BatchTask.class);
	}

	// Attach the user code and its parameters.
	taskConfig.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
	taskConfig.setStubParameters(node.getProgramOperator().getParameters());

	// Configure the driver strategy.
	taskConfig.setDriver(driverStrategy.getDriverClass());
	taskConfig.setDriverStrategy(driverStrategy);

	// Register the (optional) per-input comparators and the cross-input pair comparator.
	if (node.getComparator1() != null) {
		taskConfig.setDriverComparator(node.getComparator1(), 0);
	}
	if (node.getComparator2() != null) {
		taskConfig.setDriverComparator(node.getComparator2(), 1);
	}
	if (node.getPairComparator() != null) {
		taskConfig.setDriverPairComparator(node.getPairComparator());
	}

	// assign memory, file-handles, etc.
	assignDriverResources(node, taskConfig);
	return vertex;
}
示例2: createDualInputVertex
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
private AbstractJobVertex createDualInputVertex(DualInputPlanNode node) throws CompilerException {
	// Create the vertex for this two-input operator and wrap its configuration.
	final AbstractJobVertex vertex = new AbstractJobVertex(node.getNodeName());
	final TaskConfig taskConfig = new TaskConfig(vertex.getConfiguration());
	final DriverStrategy driverStrategy = node.getDriverStrategy();

	// On an iteration's dynamic path the intermediate iteration task runs the driver;
	// outside of iterations the regular task is used.
	if (this.currentIteration != null && node.isOnDynamicPath()) {
		vertex.setInvokableClass(IterationIntermediatePactTask.class);
	} else {
		vertex.setInvokableClass(RegularPactTask.class);
	}

	// Attach the user code and its parameters.
	taskConfig.setStubWrapper(node.getPactContract().getUserCodeWrapper());
	taskConfig.setStubParameters(node.getPactContract().getParameters());

	// Configure the driver strategy.
	taskConfig.setDriver(driverStrategy.getDriverClass());
	taskConfig.setDriverStrategy(driverStrategy);

	// Register the (optional) per-input comparators and the cross-input pair comparator.
	if (node.getComparator1() != null) {
		taskConfig.setDriverComparator(node.getComparator1(), 0);
	}
	if (node.getComparator2() != null) {
		taskConfig.setDriverComparator(node.getComparator2(), 1);
	}
	if (node.getPairComparator() != null) {
		taskConfig.setDriverPairComparator(node.getPairComparator());
	}

	// assign memory, file-handles, etc.
	assignDriverResources(node, taskConfig);
	return vertex;
}
示例3: createIterationHead
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
// Builds the head vertex of a workset (delta) iteration.
// The head receives three inputs -- the initial workset (group 0), the cached edges
// input (group 1), and the initial solution set (group 2) -- and runs a hash join
// (build second, cached) as its driver, joining neighbors with component IDs.
// NOTE(review): ITERATION_ID and MEM_FRAC_PER_CONSUMER are constants declared
// elsewhere in the enclosing class -- not visible in this snippet.
private static AbstractJobVertex createIterationHead(JobGraph jobGraph, int numSubTasks,
TypeSerializerFactory<?> serializer,
TypeComparatorFactory<?> comparator,
TypePairComparatorFactory<?, ?> pairComparator) {
AbstractJobVertex head = JobGraphUtils.createTask(IterationHeadPactTask.class, "Join With Edges (Iteration Head)", jobGraph, numSubTasks);
TaskConfig headConfig = new TaskConfig(head.getConfiguration());
{
headConfig.setIterationId(ITERATION_ID);
// initial input / workset (input group 0)
headConfig.addInputToGroup(0);
headConfig.setInputSerializer(serializer, 0);
headConfig.setInputComparator(comparator, 0);
headConfig.setInputLocalStrategy(0, LocalStrategy.NONE);
headConfig.setIterationHeadPartialSolutionOrWorksetInputIndex(0);
// regular plan input (second input to the join); cached so it is materialized once
headConfig.addInputToGroup(1);
headConfig.setInputSerializer(serializer, 1);
headConfig.setInputComparator(comparator, 1);
headConfig.setInputLocalStrategy(1, LocalStrategy.NONE);
headConfig.setInputCached(1, true);
headConfig.setRelativeInputMaterializationMemory(1, MEM_FRAC_PER_CONSUMER);
// initial solution set input (input group 2)
headConfig.addInputToGroup(2);
headConfig.setInputSerializer(serializer, 2);
headConfig.setInputComparator(comparator, 2);
headConfig.setInputLocalStrategy(2, LocalStrategy.NONE);
headConfig.setIterationHeadSolutionSetInputIndex(2);
headConfig.setSolutionSetSerializer(serializer);
headConfig.setSolutionSetComparator(comparator);
// back channel / iterations: mark as workset iteration and reserve memory
headConfig.setIsWorksetIteration();
headConfig.setRelativeBackChannelMemory(MEM_FRAC_PER_CONSUMER);
headConfig.setRelativeSolutionSetMemory(MEM_FRAC_PER_CONSUMER );
// output into iteration: hash-partitioned on the key (comparator at position 0)
headConfig.setOutputSerializer(serializer);
headConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
headConfig.setOutputComparator(comparator, 0);
// final output: forwarded unchanged once the iteration terminates
TaskConfig headFinalOutConfig = new TaskConfig(new Configuration());
headFinalOutConfig.setOutputSerializer(serializer);
headFinalOutConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
headConfig.setIterationHeadFinalOutputConfig(headFinalOutConfig);
// the sync: output index 2 carries the synchronization channel
headConfig.setIterationHeadIndexOfSyncOutput(2);
// the driver: hybrid hash join building on the (cached) second input
headConfig.setDriver(BuildSecondCachedMatchDriver.class);
headConfig.setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
headConfig.setStubWrapper(
new UserCodeClassWrapper<NeighborWithComponentIDJoin>(NeighborWithComponentIDJoin.class));
headConfig.setDriverComparator(comparator, 0);
headConfig.setDriverComparator(comparator, 1);
// pair comparator matches keys between the two join sides
headConfig.setDriverPairComparator(pairComparator);
headConfig.setRelativeMemoryDriver(MEM_FRAC_PER_CONSUMER);
// aggregator used by the convergence check (empty workset terminates the iteration)
headConfig.addIterationAggregator(
WorksetEmptyConvergenceCriterion.AGGREGATOR_NAME, new LongSumAggregator());
}
return head;
}
示例4: createJobGraphUnifiedTails
import org.apache.flink.runtime.operators.util.TaskConfig; //導入方法依賴的package包/類
// Assembles the complete JobGraph for connected components with a single unified
// tail vertex that performs the solution-set join and applies both the workset
// and the solution-set update.
// verticesPath/edgesPath: input file paths; resultPath: output path;
// numSubTasks: parallelism; maxIterations: iteration bound enforced by the sync task.
public JobGraph createJobGraphUnifiedTails(
String verticesPath, String edgesPath, String resultPath, int numSubTasks, int maxIterations)
{
// -- init -------------------------------------------------------------------------------------------------
final TypeSerializerFactory<?> serializer = RecordSerializerFactory.get();
@SuppressWarnings("unchecked")
final TypeComparatorFactory<?> comparator =
new RecordComparatorFactory(new int[] { 0 }, new Class[] { LongValue.class }, new boolean[] { true });
final TypePairComparatorFactory<?, ?> pairComparator = RecordPairComparatorFactory.get();
JobGraph jobGraph = new JobGraph("Connected Components (Unified Tails)");
// -- invariant vertices -----------------------------------------------------------------------------------
InputFormatVertex vertices = createVerticesInput(jobGraph, verticesPath, numSubTasks, serializer, comparator);
InputFormatVertex edges = createEdgesInput(jobGraph, edgesPath, numSubTasks, serializer, comparator);
AbstractJobVertex head = createIterationHead(jobGraph, numSubTasks, serializer, comparator, pairComparator);
AbstractJobVertex intermediate = createIterationIntermediate(jobGraph, numSubTasks, serializer, comparator);
TaskConfig intermediateConfig = new TaskConfig(intermediate.getConfiguration());
OutputFormatVertex output = createOutput(jobGraph, resultPath, numSubTasks, serializer);
AbstractJobVertex sync = createSync(jobGraph, numSubTasks, maxIterations);
// --------------- the tail (solution set join) ---------------
AbstractJobVertex tail = JobGraphUtils.createTask(IterationTailPactTask.class, "IterationTail", jobGraph, numSubTasks);
TaskConfig tailConfig = new TaskConfig(tail.getConfiguration());
{
tailConfig.setIterationId(ITERATION_ID);
// the tail updates both the workset and the solution set in one task
tailConfig.setIsWorksetIteration();
tailConfig.setIsWorksetUpdate();
tailConfig.setIsSolutionSetUpdate();
tailConfig.setIsSolutionSetUpdateWithoutReprobe();
// inputs and driver
tailConfig.addInputToGroup(0);
tailConfig.setInputSerializer(serializer, 0);
// output
tailConfig.setOutputSerializer(serializer);
// the driver: hash join probing against the solution set (build second)
tailConfig.setDriver(JoinWithSolutionSetSecondDriver.class);
tailConfig.setDriverStrategy(DriverStrategy.HYBRIDHASH_BUILD_SECOND);
tailConfig.setDriverComparator(comparator, 0);
tailConfig.setDriverPairComparator(pairComparator);
tailConfig.setStubWrapper(new UserCodeClassWrapper<UpdateComponentIdMatch>(UpdateComponentIdMatch.class));
}
// -- edges ------------------------------------------------------------------------------------------------
// NOTE(review): vertices is connected to head twice -- the connection order matches
// the head's three input groups (0: initial workset, 1: edges, 2: initial solution
// set, see createIterationHead), so the duplicate is intentional; confirm ordering.
JobGraphUtils.connect(vertices, head, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
JobGraphUtils.connect(edges, head, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
JobGraphUtils.connect(vertices, head, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
JobGraphUtils.connect(head, intermediate, ChannelType.NETWORK, DistributionPattern.BIPARTITE);
intermediateConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, numSubTasks);
JobGraphUtils.connect(intermediate, tail, ChannelType.IN_MEMORY, DistributionPattern.POINTWISE);
tailConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, 1);
JobGraphUtils.connect(head, output, ChannelType.IN_MEMORY, DistributionPattern.POINTWISE);
JobGraphUtils.connect(head, sync, ChannelType.NETWORK, DistributionPattern.POINTWISE);
// all vertices share one slot group; intermediate and tail are pinned to the head's
// TaskManager so the in-memory back channel works
SlotSharingGroup sharingGroup = new SlotSharingGroup();
vertices.setSlotSharingGroup(sharingGroup);
edges.setSlotSharingGroup(sharingGroup);
head.setSlotSharingGroup(sharingGroup);
intermediate.setSlotSharingGroup(sharingGroup);
tail.setSlotSharingGroup(sharingGroup);
output.setSlotSharingGroup(sharingGroup);
sync.setSlotSharingGroup(sharingGroup);
intermediate.setStrictlyCoLocatedWith(head);
tail.setStrictlyCoLocatedWith(head);
return jobGraph;
}