This article compiles typical usage examples of the Java method org.apache.flink.api.common.typeutils.TypePairComparatorFactory.createComparator12. If you are wondering what TypePairComparatorFactory.createComparator12 does, how to call it, or what real-world usages look like, the curated examples below should help. You can also read further about the enclosing class, org.apache.flink.api.common.typeutils.TypePairComparatorFactory.
The following presents 14 code examples of TypePairComparatorFactory.createComparator12, ordered by popularity by default.
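Before diving into the collected examples, here is a minimal, self-contained sketch of the call pattern they all share: build a TypeComparator for each input, obtain a TypePairComparatorFactory, and ask it for a pair comparator via createComparator12 (first-input reference, second-input candidate). The sketch is illustrative only; it assumes two Tuple2 inputs keyed on their first field and uses Flink's RuntimePairComparatorFactory directly, whereas the drivers shown below obtain the factory from their TaskConfig.
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.api.common.typeutils.TypePairComparator;
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.api.java.typeutils.runtime.RuntimePairComparatorFactory;

public class CreateComparator12Sketch {
    public static void main(String[] args) {
        ExecutionConfig config = new ExecutionConfig();
        // Key both inputs on their first field (the Integer key).
        TupleTypeInfo<Tuple2<Integer, String>> type1 =
            TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, String.class);
        TupleTypeInfo<Tuple2<Integer, Double>> type2 =
            TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, Double.class);
        TypeComparator<Tuple2<Integer, String>> comparator1 =
            type1.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
        TypeComparator<Tuple2<Integer, Double>> comparator2 =
            type2.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
        // createComparator12 pairs the comparators in "first input vs. second input" order;
        // createComparator21 would produce the reversed pairing.
        TypePairComparatorFactory<Tuple2<Integer, String>, Tuple2<Integer, Double>> factory =
            new RuntimePairComparatorFactory<>();
        TypePairComparator<Tuple2<Integer, String>, Tuple2<Integer, Double>> pairComparator =
            factory.createComparator12(comparator1, comparator2);
        // Set a record of the first input as the reference, then test records of the second input.
        pairComparator.setReference(Tuple2.of(42, "left"));
        System.out.println(pairComparator.equalToReference(Tuple2.of(42, 3.14))); // expected: true
        System.out.println(pairComparator.equalToReference(Tuple2.of(7, 2.71)));  // expected: false
    }
}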
Example 1: prepare
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
public void prepare() throws Exception
{
final TaskConfig config = this.taskContext.getTaskConfig();
if (config.getDriverStrategy() != DriverStrategy.CO_GROUP) {
throw new Exception("Unrecognized driver strategy for CoGoup driver: " + config.getDriverStrategy().name());
}
final MutableObjectIterator<IT1> in1 = this.taskContext.getInput(0);
final MutableObjectIterator<IT2> in2 = this.taskContext.getInput(1);
// get the key positions and types
final TypeSerializer<IT1> serializer1 = this.taskContext.<IT1>getInputSerializer(0).getSerializer();
final TypeSerializer<IT2> serializer2 = this.taskContext.<IT2>getInputSerializer(1).getSerializer();
final TypeComparator<IT1> groupComparator1 = this.taskContext.getDriverComparator(0);
final TypeComparator<IT2> groupComparator2 = this.taskContext.getDriverComparator(1);
final TypePairComparatorFactory<IT1, IT2> pairComparatorFactory = config.getPairComparatorFactory(
this.taskContext.getUserCodeClassLoader());
if (pairComparatorFactory == null) {
throw new Exception("Missing pair comparator factory for CoGroup driver");
}
// create CoGroupTaskIterator according to the provided local strategy.
this.coGroupIterator = new SortMergeCoGroupIterator<IT1, IT2>(in1, in2,
serializer1, groupComparator1, serializer2, groupComparator2,
pairComparatorFactory.createComparator12(groupComparator1, groupComparator2));
// open CoGroupTaskIterator - this triggers the sorting and blocks until the iterator is ready
this.coGroupIterator.open();
if (LOG.isDebugEnabled()) {
LOG.debug(this.taskContext.formatLogString("CoGroup task iterator ready."));
}
}
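This prepare() method of a CoGroup driver first verifies the CO_GROUP strategy, then passes the pair comparator from createComparator12(groupComparator1, groupComparator2) to a SortMergeCoGroupIterator, which sorts both inputs and aligns matching key groups.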
Example 2: initialize
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
@SuppressWarnings("unchecked")
public void initialize() throws Exception {
final TypeComparator<IT2> solutionSetComparator;
// grab a handle to the hash table from the iteration broker
if (taskContext instanceof AbstractIterativeTask) {
AbstractIterativeTask<?, ?> iterativeTaskContext = (AbstractIterativeTask<?, ?>) taskContext;
String identifier = iterativeTaskContext.brokerKey();
Object table = SolutionSetBroker.instance().get(identifier);
if (table instanceof CompactingHashTable) {
this.hashTable = (CompactingHashTable<IT2>) table;
solutionSetSerializer = this.hashTable.getBuildSideSerializer();
solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
}
else if (table instanceof JoinHashMap) {
this.objectMap = (JoinHashMap<IT2>) table;
solutionSetSerializer = this.objectMap.getBuildSerializer();
solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
}
else {
throw new RuntimeException("Unrecognized solution set index: " + table);
}
}
else {
throw new Exception("The task context of this driver is no iterative task context.");
}
TaskConfig config = taskContext.getTaskConfig();
ClassLoader classLoader = taskContext.getUserCodeClassLoader();
TypeComparatorFactory<IT1> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
this.probeSideSerializer = taskContext.<IT1>getInputSerializer(0).getSerializer();
this.probeSideComparator = probeSideComparatorFactory.createComparator();
ExecutionConfig executionConfig = taskContext.getExecutionConfig();
objectReuseEnabled = executionConfig.isObjectReuseEnabled();
if (objectReuseEnabled) {
solutionSideRecord = solutionSetSerializer.createInstance();
}
TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
pairComparator = factory.createComparator12(this.probeSideComparator, solutionSetComparator);
}
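This initialize() method wires a join against an iteration's solution set: the build side comes from the CompactingHashTable or JoinHashMap registered with the SolutionSetBroker, and createComparator12 pairs the probe-side comparator (first input) with the solution-set comparator (second input). When object reuse is enabled, only a reusable solution-side record is created.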
Example 3: initialize
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
@SuppressWarnings("unchecked")
public void initialize() throws Exception {
final TypeSerializer<IT2> solutionSetSerializer;
final TypeComparator<IT2> solutionSetComparator;
// grab a handle to the hash table from the iteration broker
if (taskContext instanceof AbstractIterativeTask) {
AbstractIterativeTask<?, ?> iterativeTaskContext = (AbstractIterativeTask<?, ?>) taskContext;
String identifier = iterativeTaskContext.brokerKey();
Object table = SolutionSetBroker.instance().get(identifier);
if (table instanceof CompactingHashTable) {
this.hashTable = (CompactingHashTable<IT2>) table;
solutionSetSerializer = this.hashTable.getBuildSideSerializer();
solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
}
else if (table instanceof JoinHashMap) {
this.objectMap = (JoinHashMap<IT2>) table;
solutionSetSerializer = this.objectMap.getBuildSerializer();
solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
}
else {
throw new RuntimeException("Unrecognized solution set index: " + table);
}
}
else {
throw new Exception("The task context of this driver is no iterative task context.");
}
TaskConfig config = taskContext.getTaskConfig();
ClassLoader classLoader = taskContext.getUserCodeClassLoader();
TypeSerializer<IT1> probeSideSerializer = taskContext.<IT1>getInputSerializer(0).getSerializer();
TypeComparatorFactory<IT1> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
this.probeSideComparator = probeSideComparatorFactory.createComparator();
ExecutionConfig executionConfig = taskContext.getExecutionConfig();
objectReuseEnabled = executionConfig.isObjectReuseEnabled();
if (objectReuseEnabled) {
solutionSideRecord = solutionSetSerializer.createInstance();
probeSideRecord = probeSideSerializer.createInstance();
}
TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
pairComparator = factory.createComparator12(this.probeSideComparator, solutionSetComparator);
}
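A close variant of Example 2 that additionally creates a reusable probe-side record when object reuse is enabled; the pair comparator is again built with createComparator12(probeSideComparator, solutionSetComparator).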
Example 4: getReusingOuterJoinIterator
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
protected JoinTaskIterator<IT1, IT2, OT> getReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction
) throws Exception {
switch (driverStrategy) {
case LEFT_OUTER_MERGE:
int numPages = memoryManager.computeNumberOfPages(driverMemFraction);
return new ReusingMergeOuterJoinIterator<>(
OuterJoinType.LEFT,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
numPages,
super.taskContext.getContainingTask()
);
case LEFT_HYBRIDHASH_BUILD_FIRST:
return new ReusingBuildFirstHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
false,
true,
false);
case LEFT_HYBRIDHASH_BUILD_SECOND:
return new ReusingBuildSecondHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
false,
false);
default:
throw new Exception("Unsupported driver strategy for left outer join driver: " + driverStrategy.name());
}
}
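In this left outer join driver, the reusing merge and build-second hash strategies take the comparator from createComparator12, whereas LEFT_HYBRIDHASH_BUILD_FIRST needs the reversed pairing from createComparator21, since the first input becomes the hash build side.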
Example 5: getNonReusingOuterJoinIterator
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
protected JoinTaskIterator<IT1, IT2, OT> getNonReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction
) throws Exception {
switch (driverStrategy) {
case LEFT_OUTER_MERGE:
int numPages = memoryManager.computeNumberOfPages(driverMemFraction);
return new NonReusingMergeOuterJoinIterator<>(
OuterJoinType.LEFT,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
numPages,
super.taskContext.getContainingTask()
);
case LEFT_HYBRIDHASH_BUILD_FIRST:
return new NonReusingBuildFirstHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
false,
true,
false);
case LEFT_HYBRIDHASH_BUILD_SECOND:
return new NonReusingBuildSecondHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
false,
false);
default:
throw new Exception("Unsupported driver strategy for left outer join driver: " + driverStrategy.name());
}
}
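The non-reusing counterpart of Example 4: the choice between createComparator12 and createComparator21 per strategy is identical, only the iterator implementations change.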
Example 6: prepare
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
public void prepare() throws Exception
{
final TaskConfig config = this.taskContext.getTaskConfig();
if (config.getDriverStrategy() != DriverStrategy.CO_GROUP) {
throw new Exception("Unrecognized driver strategy for CoGoup driver: " + config.getDriverStrategy().name());
}
final Counter numRecordsIn = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
final MutableObjectIterator<IT1> in1 = new CountingMutableObjectIterator<>(this.taskContext.<IT1>getInput(0), numRecordsIn);
final MutableObjectIterator<IT2> in2 = new CountingMutableObjectIterator<>(this.taskContext.<IT2>getInput(1), numRecordsIn);
// get the key positions and types
final TypeSerializer<IT1> serializer1 = this.taskContext.<IT1>getInputSerializer(0).getSerializer();
final TypeSerializer<IT2> serializer2 = this.taskContext.<IT2>getInputSerializer(1).getSerializer();
final TypeComparator<IT1> groupComparator1 = this.taskContext.getDriverComparator(0);
final TypeComparator<IT2> groupComparator2 = this.taskContext.getDriverComparator(1);
final TypePairComparatorFactory<IT1, IT2> pairComparatorFactory = config.getPairComparatorFactory(
this.taskContext.getUserCodeClassLoader());
if (pairComparatorFactory == null) {
throw new Exception("Missing pair comparator factory for CoGroup driver");
}
ExecutionConfig executionConfig = taskContext.getExecutionConfig();
this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();
if (LOG.isDebugEnabled()) {
LOG.debug("CoGroupDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
}
if (objectReuseEnabled) {
// create CoGroupTaskIterator according to provided local strategy.
this.coGroupIterator = new ReusingSortMergeCoGroupIterator<IT1, IT2>(
in1, in2,
serializer1, groupComparator1,
serializer2, groupComparator2,
pairComparatorFactory.createComparator12(groupComparator1, groupComparator2));
} else {
// create CoGroupTaskIterator according to provided local strategy.
this.coGroupIterator = new NonReusingSortMergeCoGroupIterator<IT1, IT2>(
in1, in2,
serializer1, groupComparator1,
serializer2, groupComparator2,
pairComparatorFactory.createComparator12(groupComparator1, groupComparator2));
}
// open CoGroupTaskIterator - this triggers the sorting and blocks until the iterator is ready
this.coGroupIterator.open();
if (LOG.isDebugEnabled()) {
LOG.debug(this.taskContext.formatLogString("CoGroup task iterator ready."));
}
}
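This CoGroup prepare() method also wraps both inputs in counting iterators for the numRecordsIn metric and picks a reusing or non-reusing sort-merge iterator depending on the object-reuse setting; both branches receive the comparator from createComparator12(groupComparator1, groupComparator2).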
Example 7: getReusingOuterJoinIterator
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
protected JoinTaskIterator<IT1, IT2, OT> getReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction
) throws Exception {
switch (driverStrategy) {
case FULL_OUTER_MERGE:
int numPages = memoryManager.computeNumberOfPages(driverMemFraction);
return new ReusingMergeOuterJoinIterator<>(
OuterJoinType.FULL,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
numPages,
super.taskContext.getContainingTask()
);
case FULL_OUTER_HYBRIDHASH_BUILD_FIRST:
return new ReusingBuildFirstHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
true,
false);
case FULL_OUTER_HYBRIDHASH_BUILD_SECOND:
return new ReusingBuildSecondHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
true,
false);
default:
throw new Exception("Unsupported driver strategy for full outer join driver: " + driverStrategy.name());
}
}
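The full outer join driver follows the same pattern as the left outer join: createComparator12 for FULL_OUTER_MERGE and the build-second hash strategy, createComparator21 when the first input is the hash build side.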
Example 8: getNonReusingOuterJoinIterator
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
protected JoinTaskIterator<IT1, IT2, OT> getNonReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction
) throws Exception {
switch (driverStrategy) {
case FULL_OUTER_MERGE:
int numPages = memoryManager.computeNumberOfPages(driverMemFraction);
return new NonReusingMergeOuterJoinIterator<>(
OuterJoinType.FULL,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
numPages,
super.taskContext.getContainingTask()
);
case FULL_OUTER_HYBRIDHASH_BUILD_FIRST:
return new NonReusingBuildFirstHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
true,
false);
case FULL_OUTER_HYBRIDHASH_BUILD_SECOND:
return new NonReusingBuildSecondHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
true,
false);
default:
throw new Exception("Unsupported driver strategy for full outer join driver: " + driverStrategy.name());
}
}
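Non-reusing version of Example 7, with the same comparator pairing per strategy.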
Example 9: getReusingOuterJoinIterator
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
protected JoinTaskIterator<IT1, IT2, OT> getReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction
) throws Exception {
switch (driverStrategy) {
case RIGHT_OUTER_MERGE:
int numPages = memoryManager.computeNumberOfPages(driverMemFraction);
return new ReusingMergeOuterJoinIterator<>(
OuterJoinType.RIGHT,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
numPages,
super.taskContext.getContainingTask()
);
case RIGHT_HYBRIDHASH_BUILD_FIRST:
return new ReusingBuildFirstHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
false,
false);
case RIGHT_HYBRIDHASH_BUILD_SECOND:
return new ReusingBuildSecondHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
false,
true,
false);
default:
throw new Exception("Unsupported driver strategy for right outer join driver: " + driverStrategy.name());
}
}
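The right outer join driver uses createComparator12 for RIGHT_OUTER_MERGE and RIGHT_HYBRIDHASH_BUILD_SECOND, and createComparator21 for RIGHT_HYBRIDHASH_BUILD_FIRST.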
Example 10: getNonReusingOuterJoinIterator
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
protected JoinTaskIterator<IT1, IT2, OT> getNonReusingOuterJoinIterator(
DriverStrategy driverStrategy,
MutableObjectIterator<IT1> in1,
MutableObjectIterator<IT2> in2,
TypeSerializer<IT1> serializer1,
TypeComparator<IT1> comparator1,
TypeSerializer<IT2> serializer2,
TypeComparator<IT2> comparator2,
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory,
MemoryManager memoryManager,
IOManager ioManager,
double driverMemFraction
) throws Exception {
switch (driverStrategy) {
case RIGHT_OUTER_MERGE:
int numPages = memoryManager.computeNumberOfPages(driverMemFraction);
return new NonReusingMergeOuterJoinIterator<>(
OuterJoinType.RIGHT,
in1,
in2,
serializer1,
comparator1,
serializer2,
comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager,
ioManager,
numPages,
super.taskContext.getContainingTask()
);
case RIGHT_HYBRIDHASH_BUILD_FIRST:
return new NonReusingBuildFirstHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
true,
false,
false);
case RIGHT_HYBRIDHASH_BUILD_SECOND:
return new NonReusingBuildSecondHashJoinIterator<>(in1, in2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager, ioManager,
this.taskContext.getContainingTask(),
driverMemFraction,
false,
true,
false);
default:
throw new Exception("Unsupported driver strategy for right outer join driver: " + driverStrategy.name());
}
}
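Non-reusing version of Example 9, again distinguishing the build-first strategy (createComparator21) from the merge and build-second strategies (createComparator12).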
Example 11: initialize
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
@SuppressWarnings("unchecked")
public void initialize() throws Exception {
final TypeSerializer<IT2> solutionSetSerializer;
final TypeComparator<IT2> solutionSetComparator;
// grab a handle to the hash table from the iteration broker
if (taskContext instanceof AbstractIterativePactTask) {
AbstractIterativePactTask<?, ?> iterativeTaskContext = (AbstractIterativePactTask<?, ?>) taskContext;
String identifier = iterativeTaskContext.brokerKey();
Object table = SolutionSetBroker.instance().get(identifier);
if (table instanceof CompactingHashTable) {
this.hashTable = (CompactingHashTable<IT2>) table;
solutionSetSerializer = this.hashTable.getBuildSideSerializer();
solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
}
else if (table instanceof JoinHashMap) {
this.objectMap = (JoinHashMap<IT2>) table;
solutionSetSerializer = this.objectMap.getBuildSerializer();
solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
}
else {
throw new RuntimeException("Unrecognized solution set index: " + table);
}
}
else {
throw new Exception("The task context of this driver is no iterative task context.");
}
TaskConfig config = taskContext.getTaskConfig();
ClassLoader classLoader = taskContext.getUserCodeClassLoader();
TypeComparatorFactory<IT1> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
this.probeSideSerializer = taskContext.<IT1>getInputSerializer(0).getSerializer();
this.probeSideComparator = probeSideComparatorFactory.createComparator();
solutionSideRecord = solutionSetSerializer.createInstance();
TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
pairComparator = factory.createComparator12(this.probeSideComparator, solutionSetComparator);
}
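This solution-set join driver (apparently from an older Flink codebase, judging by the AbstractIterativePactTask naming) resolves the hash table or object map from the SolutionSetBroker and then builds the pair comparator with createComparator12(probeSideComparator, solutionSetComparator); a reusable solution-side record is always created.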
Example 12: initialize
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
@SuppressWarnings("unchecked")
public void initialize() throws Exception {
final TypeSerializer<IT2> solutionSetSerializer;
final TypeComparator<IT2> solutionSetComparator;
// grab a handle to the hash table from the iteration broker
if (taskContext instanceof AbstractIterativePactTask) {
AbstractIterativePactTask<?, ?> iterativeTaskContext = (AbstractIterativePactTask<?, ?>) taskContext;
String identifier = iterativeTaskContext.brokerKey();
Object table = SolutionSetBroker.instance().get(identifier);
if (table instanceof CompactingHashTable) {
this.hashTable = (CompactingHashTable<IT2>) table;
solutionSetSerializer = this.hashTable.getBuildSideSerializer();
solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
}
else if (table instanceof JoinHashMap) {
this.objectMap = (JoinHashMap<IT2>) table;
solutionSetSerializer = this.objectMap.getBuildSerializer();
solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
}
else {
throw new RuntimeException("Unrecognized solution set index: " + table);
}
}
else {
throw new Exception("The task context of this driver is no iterative task context.");
}
TaskConfig config = taskContext.getTaskConfig();
ClassLoader classLoader = taskContext.getUserCodeClassLoader();
TypeSerializer<IT1> probeSideSerializer = taskContext.<IT1>getInputSerializer(0).getSerializer();
TypeComparatorFactory<IT1> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
this.probeSideComparator = probeSideComparatorFactory.createComparator();
solutionSideRecord = solutionSetSerializer.createInstance();
probeSideRecord = probeSideSerializer.createInstance();
TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
pairComparator = factory.createComparator12(this.probeSideComparator, solutionSetComparator);
}
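Same as Example 11, but it also instantiates a reusable probe-side record before creating the pair comparator.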
Example 13: initialize
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
public void initialize() throws Exception {
TaskConfig config = this.taskContext.getTaskConfig();
TypeSerializer<IT1> serializer1 = this.taskContext.<IT1>getInputSerializer(0).getSerializer();
TypeSerializer<IT2> serializer2 = this.taskContext.<IT2>getInputSerializer(1).getSerializer();
TypeComparator<IT1> comparator1 = this.taskContext.getDriverComparator(0);
TypeComparator<IT2> comparator2 = this.taskContext.getDriverComparator(1);
MutableObjectIterator<IT1> input1 = this.taskContext.getInput(0);
MutableObjectIterator<IT2> input2 = this.taskContext.getInput(1);
TypePairComparatorFactory<IT1, IT2> pairComparatorFactory =
this.taskContext.getTaskConfig().getPairComparatorFactory(this.taskContext.getUserCodeClassLoader());
double availableMemory = config.getRelativeMemoryDriver();
if (buildSideIndex == 0 && probeSideIndex == 1) {
matchIterator =
new BuildFirstReOpenableHashMatchIterator<IT1, IT2, OT>(input1, input2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator21(comparator1, comparator2),
this.taskContext.getMemoryManager(),
this.taskContext.getIOManager(),
this.taskContext.getOwningNepheleTask(),
availableMemory
);
} else if (buildSideIndex == 1 && probeSideIndex == 0) {
matchIterator =
new BuildSecondReOpenableHashMatchIterator<IT1, IT2, OT>(input1, input2,
serializer1, comparator1,
serializer2, comparator2,
pairComparatorFactory.createComparator12(comparator1, comparator2),
this.taskContext.getMemoryManager(),
this.taskContext.getIOManager(),
this.taskContext.getOwningNepheleTask(),
availableMemory
);
} else {
throw new Exception("Error: Inconcistent setup for repeatable hash join driver.");
}
this.matchIterator.open();
}
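A re-openable hash join used for repeated probing inside iterations: when the first input is the build side the iterator takes createComparator21, and when the second input is the build side it takes createComparator12; any other index combination is rejected as an inconsistent setup.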
Example 14: prepare
import org.apache.flink.api.common.typeutils.TypePairComparatorFactory; // import the package/class this method depends on
@Override
public void prepare() throws Exception{
final TaskConfig config = this.taskContext.getTaskConfig();
// obtain task manager's memory manager and I/O manager
final MemoryManager memoryManager = this.taskContext.getMemoryManager();
final IOManager ioManager = this.taskContext.getIOManager();
// set up memory and I/O parameters
final double fractionAvailableMemory = config.getRelativeMemoryDriver();
final int numPages = memoryManager.computeNumberOfPages(fractionAvailableMemory);
// test minimum memory requirements
final DriverStrategy ls = config.getDriverStrategy();
final MutableObjectIterator<IT1> in1 = this.taskContext.getInput(0);
final MutableObjectIterator<IT2> in2 = this.taskContext.getInput(1);
// get the key positions and types
final TypeSerializer<IT1> serializer1 = this.taskContext.<IT1>getInputSerializer(0).getSerializer();
final TypeSerializer<IT2> serializer2 = this.taskContext.<IT2>getInputSerializer(1).getSerializer();
final TypeComparator<IT1> comparator1 = this.taskContext.getDriverComparator(0);
final TypeComparator<IT2> comparator2 = this.taskContext.getDriverComparator(1);
final TypePairComparatorFactory<IT1, IT2> pairComparatorFactory = config.getPairComparatorFactory(
this.taskContext.getUserCodeClassLoader());
if (pairComparatorFactory == null) {
throw new Exception("Missing pair comparator factory for Match driver");
}
// create and return MatchTaskIterator according to provided local strategy.
switch (ls) {
case MERGE:
this.matchIterator = new MergeMatchIterator<IT1, IT2, OT>(in1, in2, serializer1, comparator1,
serializer2, comparator2, pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager, ioManager, numPages, this.taskContext.getOwningNepheleTask());
break;
case HYBRIDHASH_BUILD_FIRST:
this.matchIterator = new BuildFirstHashMatchIterator<IT1, IT2, OT>(in1, in2, serializer1, comparator1,
serializer2, comparator2, pairComparatorFactory.createComparator21(comparator1, comparator2),
memoryManager, ioManager, this.taskContext.getOwningNepheleTask(), fractionAvailableMemory);
break;
case HYBRIDHASH_BUILD_SECOND:
this.matchIterator = new BuildSecondHashMatchIterator<IT1, IT2, OT>(in1, in2, serializer1, comparator1,
serializer2, comparator2, pairComparatorFactory.createComparator12(comparator1, comparator2),
memoryManager, ioManager, this.taskContext.getOwningNepheleTask(), fractionAvailableMemory);
break;
default:
throw new Exception("Unsupported driver strategy for Match driver: " + ls.name());
}
// open MatchTaskIterator - this triggers the sorting or hash-table building
// and blocks until the iterator is ready
this.matchIterator.open();
if (LOG.isDebugEnabled()) {
LOG.debug(this.taskContext.formatLogString("Match task iterator ready."));
}
}
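A match (join) driver that selects its iterator from the driver strategy: MERGE and HYBRIDHASH_BUILD_SECOND pass the comparator from createComparator12, while HYBRIDHASH_BUILD_FIRST passes the reversed comparator from createComparator21.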