This article collects typical usage examples of the Java class org.apache.flink.runtime.operators.hash.CompactingHashTable. If you are unsure what CompactingHashTable is for or how to use it, the curated code samples below should help.
The CompactingHashTable class belongs to the org.apache.flink.runtime.operators.hash package. 15 code examples of the class are shown below, sorted by popularity by default.
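For context: CompactingHashTable is Flink's in-memory hash table over managed memory segments, which supports in-place record updates and compacts its memory as entries are replaced. Its most prominent runtime use, and the theme of most examples below, is as the store for the solution set of delta (workset) iterations.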
Example 1: createSolutionSetUpdateOutputCollector
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
/**
* Creates a new solution set update output collector.
*
* <p>This collector is used by {@link IterationIntermediateTask} or {@link IterationTailTask} to update the
* solution set of workset iterations. Depending on the task configuration, either a fast (non-probing)
* {@link org.apache.flink.runtime.iterative.io.SolutionSetFastUpdateOutputCollector} or normal (re-probing)
* {@link SolutionSetUpdateOutputCollector} is created.
*
* <p>If a non-null delegate is given, the new {@link Collector} will write back to the solution set and also call
* collect(T) of the delegate.
*
* @param delegate null -OR- a delegate collector to be called by the newly created collector
* @return a new {@link org.apache.flink.runtime.iterative.io.SolutionSetFastUpdateOutputCollector} or
* {@link SolutionSetUpdateOutputCollector}
*/
protected Collector<OT> createSolutionSetUpdateOutputCollector(Collector<OT> delegate) {
    Broker<Object> solutionSetBroker = SolutionSetBroker.instance();
    Object ss = solutionSetBroker.get(brokerKey());

    if (ss instanceof CompactingHashTable) {
        @SuppressWarnings("unchecked")
        CompactingHashTable<OT> solutionSet = (CompactingHashTable<OT>) ss;
        return new SolutionSetUpdateOutputCollector<OT>(solutionSet, delegate);
    }
    else if (ss instanceof JoinHashMap) {
        @SuppressWarnings("unchecked")
        JoinHashMap<OT> map = (JoinHashMap<OT>) ss;
        return new SolutionSetObjectsUpdateOutputCollector<OT>(map, delegate);
    }
    else {
        throw new RuntimeException("Unrecognized solution set handle: " + ss);
    }
}
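For reference, the re-probing collector returned above updates the table on every emitted record. A minimal sketch of what its collect method does, assuming CompactingHashTable's insertOrReplaceRecord method and with simplified exception handling:

public void collect(OT record) {
    try {
        // re-probe the table and overwrite the existing entry with the same key
        solutionSet.insertOrReplaceRecord(record);
        if (delegate != null) {
            delegate.collect(record);
        }
    } catch (IOException e) {
        throw new RuntimeException("Could not update the solution set.", e);
    }
}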
Example 2: createSolutionSetUpdateOutputCollector
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
/**
 * Creates a new solution set update output collector.
 *
 * <p>This collector is used by {@link IterationIntermediatePactTask} or {@link IterationTailPactTask} to update the
 * solution set of workset iterations. Depending on the task configuration, either a fast (non-probing)
 * {@link org.apache.flink.runtime.iterative.io.SolutionSetFastUpdateOutputCollector} or normal (re-probing)
 * {@link SolutionSetUpdateOutputCollector} is created.
 *
 * <p>If a non-null delegate is given, the new {@link Collector} will write back to the solution set and also call
 * collect(T) of the delegate.
 *
 * @param delegate null -OR- a delegate collector to be called by the newly created collector
 * @return a new {@link org.apache.flink.runtime.iterative.io.SolutionSetFastUpdateOutputCollector} or
 *     {@link SolutionSetUpdateOutputCollector}
 */
protected Collector<OT> createSolutionSetUpdateOutputCollector(Collector<OT> delegate) {
    Broker<Object> solutionSetBroker = SolutionSetBroker.instance();
    Object ss = solutionSetBroker.get(brokerKey());

    if (ss instanceof CompactingHashTable) {
        @SuppressWarnings("unchecked")
        CompactingHashTable<OT> solutionSet = (CompactingHashTable<OT>) ss;
        TypeSerializer<OT> serializer = getOutputSerializer();
        return new SolutionSetUpdateOutputCollector<OT>(solutionSet, serializer, delegate);
    }
    else if (ss instanceof JoinHashMap) {
        @SuppressWarnings("unchecked")
        JoinHashMap<OT> map = (JoinHashMap<OT>) ss;
        return new SolutionSetObjectsUpdateOutputCollector<OT>(map, delegate);
    }
    else {
        throw new RuntimeException("Unrecognized solution set handle: " + ss);
    }
}
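This variant appears to stem from an older Flink release (note the IterationIntermediatePactTask/IterationTailPactTask names); the only functional difference from Example 1 is that it also fetches the output TypeSerializer and passes it to the SolutionSetUpdateOutputCollector.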
Example 3: streamSolutionSetToFinalOutput
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
private void streamSolutionSetToFinalOutput(CompactingHashTable<X> hashTable) throws IOException {
    final MutableObjectIterator<X> results = hashTable.getEntryIterator();
    final Collector<X> output = this.finalOutputCollector;
    X record = solutionTypeSerializer.getSerializer().createInstance();

    while ((record = results.next(record)) != null) {
        output.collect(record);
    }
}
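Note the object-reuse pattern here: results.next(record) refills and returns the same record instance on each call, and returns null once the table is exhausted, so draining the solution set allocates no per-record objects.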
Example 4: testDifferentProbers
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Test
public void testDifferentProbers() {
    final int NUM_MEM_PAGES = 32 * NUM_PAIRS / PAGE_SIZE;

    AbstractMutableHashTable<IntPair> table =
            new CompactingHashTable<IntPair>(serializer, comparator, getMemory(NUM_MEM_PAGES, PAGE_SIZE));

    AbstractHashTableProber<IntPair, IntPair> prober1 = table.getProber(comparator, pairComparator);
    AbstractHashTableProber<IntPair, IntPair> prober2 = table.getProber(comparator, pairComparator);

    assertFalse(prober1 == prober2);

    table.close();
    assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
}
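The test examples call a getMemory(numPages, pageSize) helper that is not part of these excerpts. A minimal sketch of what such a helper could look like, assuming Flink's MemorySegmentFactory is available; the helper itself is hypothetical:

import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for the tests' getMemory helper:
// allocates numPages unpooled segments of pageSize bytes each.
private static List<MemorySegment> getMemory(int numPages, int pageSize) {
    List<MemorySegment> segments = new ArrayList<MemorySegment>(numPages);
    for (int i = 0; i < numPages; i++) {
        segments.add(MemorySegmentFactory.allocateUnpooledSegment(pageSize));
    }
    return segments;
}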
Example 5: testBuildAndRetrieve
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Test
public void testBuildAndRetrieve() {
    try {
        final int NUM_MEM_PAGES = 32 * NUM_PAIRS / PAGE_SIZE;
        final IntPair[] pairs = getRandomizedIntPairs(NUM_PAIRS, rnd);

        AbstractMutableHashTable<IntPair> table =
                new CompactingHashTable<IntPair>(serializer, comparator, getMemory(NUM_MEM_PAGES, PAGE_SIZE));
        table.open();

        for (int i = 0; i < NUM_PAIRS; i++) {
            table.insert(pairs[i]);
        }

        AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(comparator, pairComparator);
        IntPair target = new IntPair();

        for (int i = 0; i < NUM_PAIRS; i++) {
            assertNotNull(prober.getMatchFor(pairs[i], target));
            assertEquals(pairs[i].getValue(), target.getValue());
        }

        table.close();
        assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example 6: testEntryIterator
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Test
public void testEntryIterator() {
    try {
        final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
        final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);

        AbstractMutableHashTable<IntList> table =
                new CompactingHashTable<IntList>(serializerV, comparatorV, getMemory(NUM_MEM_PAGES, PAGE_SIZE));
        table.open();

        int result = 0;
        for (int i = 0; i < NUM_LISTS; i++) {
            table.insert(lists[i]);
            result += lists[i].getKey();
        }

        MutableObjectIterator<IntList> iter = table.getEntryIterator();
        IntList target = new IntList();

        int sum = 0;
        while ((target = iter.next(target)) != null) {
            sum += target.getKey();
        }
        table.close();

        assertTrue(sum == result);
        assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example 7: testMultipleProbers
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Test
public void testMultipleProbers() {
    try {
        final int NUM_MEM_PAGES = SIZE * NUM_LISTS / PAGE_SIZE;
        final IntList[] lists = getRandomizedIntLists(NUM_LISTS, rnd);
        final IntPair[] pairs = getRandomizedIntPairs(NUM_LISTS, rnd);

        AbstractMutableHashTable<IntList> table =
                new CompactingHashTable<IntList>(serializerV, comparatorV, getMemory(NUM_MEM_PAGES, PAGE_SIZE));
        table.open();

        for (int i = 0; i < NUM_LISTS; i++) {
            table.insert(lists[i]);
        }

        AbstractHashTableProber<IntList, IntList> listProber = table.getProber(comparatorV, pairComparatorV);
        AbstractHashTableProber<IntPair, IntList> pairProber = table.getProber(comparator, pairComparatorPL);

        IntList target = new IntList();
        for (int i = 0; i < NUM_LISTS; i++) {
            assertNotNull(pairProber.getMatchFor(pairs[i], target));
            assertNotNull(listProber.getMatchFor(lists[i], target));
            assertArrayEquals(lists[i].getValue(), target.getValue());
        }

        table.close();
        assertEquals("Memory lost", NUM_MEM_PAGES, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
Example 8: testResize
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Test
public void testResize() {
    try {
        final int NUM_MEM_PAGES = 30 * NUM_PAIRS / PAGE_SIZE;
        final IntPair[] pairs = getRandomizedIntPairs(NUM_PAIRS, rnd);

        List<MemorySegment> memory = getMemory(NUM_MEM_PAGES, PAGE_SIZE);
        CompactingHashTable<IntPair> table = new CompactingHashTable<IntPair>(serializer, comparator, memory);
        table.open();

        for (int i = 0; i < NUM_PAIRS; i++) {
            table.insert(pairs[i]);
        }

        AbstractHashTableProber<IntPair, IntPair> prober = table.getProber(comparator, pairComparator);
        IntPair target = new IntPair();

        for (int i = 0; i < NUM_PAIRS; i++) {
            assertNotNull(prober.getMatchFor(pairs[i], target));
            assertEquals(pairs[i].getValue(), target.getValue());
        }

        // make sure there is enough memory for resize
        memory.addAll(getMemory(ADDITIONAL_MEM, PAGE_SIZE));
        Boolean b = Whitebox.<Boolean>invokeMethod(table, "resizeHashTable");
        assertTrue(b.booleanValue());

        for (int i = 0; i < NUM_PAIRS; i++) {
            assertNotNull(pairs[i].getKey() + " " + pairs[i].getValue(), prober.getMatchFor(pairs[i], target));
            assertEquals(pairs[i].getValue(), target.getValue());
        }

        table.close();
        assertEquals("Memory lost", NUM_MEM_PAGES + ADDITIONAL_MEM, table.getFreeMemory().size());
    } catch (Exception e) {
        e.printStackTrace();
        fail("Error: " + e.getMessage());
    }
}
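Whitebox.invokeMethod here is PowerMock's reflection utility (org.powermock.reflect.Whitebox); it is used because resizeHashTable() is a non-public method of CompactingHashTable and cannot be triggered through the public API.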
Example 9: readInitialSolutionSet
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
private void readInitialSolutionSet(CompactingHashTable<X> solutionSet, MutableObjectIterator<X> solutionSetInput) throws IOException {
    solutionSet.open();
    solutionSet.buildTableWithUniqueKey(solutionSetInput);
}
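Taken together with Example 3, this shows the solution set's lifecycle inside an iteration head task. A condensed sketch of the overall flow; the memory list, comparator, and input are hypothetical placeholders, while the method names come from the snippets:

// 'memory', 'comparator', and 'initialInput' are hypothetical placeholders.
CompactingHashTable<X> solutionSet =
        new CompactingHashTable<X>(solutionTypeSerializer.getSerializer(), comparator, memory);

readInitialSolutionSet(solutionSet, initialInput);   // open() + bulk build with unique keys
// ... run the superstep loop; update collectors mutate the table in place ...
streamSolutionSetToFinalOutput(solutionSet);         // drain the final state to the output
solutionSet.close();                                 // release the memory segments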
Example 10: SolutionSetFastUpdateOutputCollector
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
public SolutionSetFastUpdateOutputCollector(CompactingHashTable<T> solutionSet) {
    this(solutionSet, null);
}
Example 11: SolutionSetUpdateOutputCollector
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
public SolutionSetUpdateOutputCollector(CompactingHashTable<T> solutionSet) {
    this(solutionSet, null);
}
Example 12: initialize
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Override
@SuppressWarnings("unchecked")
public void initialize() throws Exception {
    final TypeComparator<IT2> solutionSetComparator;

    // grab a handle to the hash table from the iteration broker
    if (taskContext instanceof AbstractIterativeTask) {
        AbstractIterativeTask<?, ?> iterativeTaskContext = (AbstractIterativeTask<?, ?>) taskContext;
        String identifier = iterativeTaskContext.brokerKey();
        Object table = SolutionSetBroker.instance().get(identifier);

        if (table instanceof CompactingHashTable) {
            this.hashTable = (CompactingHashTable<IT2>) table;
            solutionSetSerializer = this.hashTable.getBuildSideSerializer();
            solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
        }
        else if (table instanceof JoinHashMap) {
            this.objectMap = (JoinHashMap<IT2>) table;
            solutionSetSerializer = this.objectMap.getBuildSerializer();
            solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
        }
        else {
            throw new RuntimeException("Unrecognized solution set index: " + table);
        }
    }
    else {
        throw new Exception("The task context of this driver is no iterative task context.");
    }

    TaskConfig config = taskContext.getTaskConfig();
    ClassLoader classLoader = taskContext.getUserCodeClassLoader();

    TypeComparatorFactory<IT1> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);

    this.probeSideSerializer = taskContext.<IT1>getInputSerializer(0).getSerializer();
    this.probeSideComparator = probeSideComparatorFactory.createComparator();

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (objectReuseEnabled) {
        solutionSideRecord = solutionSetSerializer.createInstance();
    }

    TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
    pairComparator = factory.createComparator12(this.probeSideComparator, solutionSetComparator);
}
Example 13: initialize
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Override
@SuppressWarnings("unchecked")
public void initialize() throws Exception {
    final TypeSerializer<IT2> solutionSetSerializer;
    final TypeComparator<IT2> solutionSetComparator;

    // grab a handle to the hash table from the iteration broker
    if (taskContext instanceof AbstractIterativeTask) {
        AbstractIterativeTask<?, ?> iterativeTaskContext = (AbstractIterativeTask<?, ?>) taskContext;
        String identifier = iterativeTaskContext.brokerKey();
        Object table = SolutionSetBroker.instance().get(identifier);

        if (table instanceof CompactingHashTable) {
            this.hashTable = (CompactingHashTable<IT2>) table;
            solutionSetSerializer = this.hashTable.getBuildSideSerializer();
            solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
        }
        else if (table instanceof JoinHashMap) {
            this.objectMap = (JoinHashMap<IT2>) table;
            solutionSetSerializer = this.objectMap.getBuildSerializer();
            solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
        }
        else {
            throw new RuntimeException("Unrecognized solution set index: " + table);
        }
    }
    else {
        throw new Exception("The task context of this driver is no iterative task context.");
    }

    TaskConfig config = taskContext.getTaskConfig();
    ClassLoader classLoader = taskContext.getUserCodeClassLoader();

    TypeSerializer<IT1> probeSideSerializer = taskContext.<IT1>getInputSerializer(0).getSerializer();
    TypeComparatorFactory<IT1> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
    this.probeSideComparator = probeSideComparatorFactory.createComparator();

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (objectReuseEnabled) {
        solutionSideRecord = solutionSetSerializer.createInstance();
        probeSideRecord = probeSideSerializer.createInstance();
    }

    TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
    pairComparator = factory.createComparator12(this.probeSideComparator, solutionSetComparator);
}
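Compared with Example 12, this variant additionally declares the solution-set serializer locally and, under object reuse, pre-creates a probe-side record as well, so records on both join sides are reused.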
Example 14: initialize
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Override
@SuppressWarnings("unchecked")
public void initialize() {
    final TypeComparator<IT1> solutionSetComparator;

    // grab a handle to the hash table from the iteration broker
    if (taskContext instanceof AbstractIterativeTask) {
        AbstractIterativeTask<?, ?> iterativeTaskContext = (AbstractIterativeTask<?, ?>) taskContext;
        String identifier = iterativeTaskContext.brokerKey();
        Object table = SolutionSetBroker.instance().get(identifier);

        if (table instanceof CompactingHashTable) {
            this.hashTable = (CompactingHashTable<IT1>) table;
            solutionSetSerializer = this.hashTable.getBuildSideSerializer();
            solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
        }
        else if (table instanceof JoinHashMap) {
            this.objectMap = (JoinHashMap<IT1>) table;
            solutionSetSerializer = this.objectMap.getBuildSerializer();
            solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
        }
        else {
            throw new RuntimeException("Unrecognized solution set index: " + table);
        }
    }
    else {
        throw new RuntimeException("The task context of this driver is no iterative task context.");
    }

    TaskConfig config = taskContext.getTaskConfig();
    ClassLoader classLoader = taskContext.getUserCodeClassLoader();

    TypeComparatorFactory<IT2> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);

    this.probeSideSerializer = taskContext.<IT2>getInputSerializer(0).getSerializer();
    this.probeSideComparator = probeSideComparatorFactory.createComparator();

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (objectReuseEnabled) {
        solutionSideRecord = solutionSetSerializer.createInstance();
    }

    TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
    pairComparator = factory.createComparator21(solutionSetComparator, this.probeSideComparator);
}
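Unlike Examples 12 and 13, which keep the solution set on the second input (IT2) and build the pair comparator with createComparator12, this driver has the solution set as the first input type (IT1) and therefore uses createComparator21.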
Example 15: initialize
import org.apache.flink.runtime.operators.hash.CompactingHashTable; // import the required package/class
@Override
@SuppressWarnings("unchecked")
public void initialize() {
    final TypeSerializer<IT1> solutionSetSerializer;
    final TypeComparator<IT1> solutionSetComparator;

    // grab a handle to the hash table from the iteration broker
    if (taskContext instanceof AbstractIterativeTask) {
        AbstractIterativeTask<?, ?> iterativeTaskContext = (AbstractIterativeTask<?, ?>) taskContext;
        String identifier = iterativeTaskContext.brokerKey();
        Object table = SolutionSetBroker.instance().get(identifier);

        if (table instanceof CompactingHashTable) {
            this.hashTable = (CompactingHashTable<IT1>) table;
            solutionSetSerializer = this.hashTable.getBuildSideSerializer();
            solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
        }
        else if (table instanceof JoinHashMap) {
            this.objectMap = (JoinHashMap<IT1>) table;
            solutionSetSerializer = this.objectMap.getBuildSerializer();
            solutionSetComparator = this.objectMap.getBuildComparator().duplicate();
        }
        else {
            throw new RuntimeException("Unrecognized solution set index: " + table);
        }
    }
    else {
        throw new RuntimeException("The task context of this driver is no iterative task context.");
    }

    TaskConfig config = taskContext.getTaskConfig();
    ClassLoader classLoader = taskContext.getUserCodeClassLoader();

    TypeSerializer<IT2> probeSideSerializer = taskContext.<IT2>getInputSerializer(0).getSerializer();
    TypeComparatorFactory<IT2> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
    this.probeSideComparator = probeSideComparatorFactory.createComparator();

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (objectReuseEnabled) {
        solutionSideRecord = solutionSetSerializer.createInstance();
        probeSideRecord = probeSideSerializer.createInstance();
    }

    TypePairComparatorFactory<IT1, IT2> factory = taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
    pairComparator = factory.createComparator21(solutionSetComparator, this.probeSideComparator);
}