This article collects typical usage examples of the Java class org.apache.flink.api.common.operators.base.GroupReduceOperatorBase. If you are unsure what the GroupReduceOperatorBase class is for, how to use it, or what real-world usage looks like, the curated code examples below should help.
The GroupReduceOperatorBase class belongs to the org.apache.flink.api.common.operators.base package. A total of 14 code examples of the class are shown below, sorted by popularity by default.
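Before the examples, here is a minimal, self-contained sketch of the pattern most of them rely on: building a DataSet program, translating it with createProgramPlan(), and pulling the resulting GroupReduceOperatorBase out of the plan. The class name GroupReducePlanSketch and the sample data are illustrative assumptions, not part of the original examples; the plan-inspection calls mirror the translation tests shown further down.

import java.util.Arrays;

import org.apache.flink.api.common.Plan;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

// Hypothetical sketch class, only for illustration.
public class GroupReducePlanSketch {

    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple2<String, Integer>> input = env.fromElements(
                new Tuple2<String, Integer>("a", 1),
                new Tuple2<String, Integer>("a", 2),
                new Tuple2<String, Integer>("b", 3));

        // groupBy + reduceGroup on the DataSet API is translated into a
        // GroupReduceOperatorBase in the common-operator plan
        input.groupBy(0)
            .reduceGroup(new GroupReduceFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {
                @Override
                public void reduce(Iterable<Tuple2<String, Integer>> values,
                        Collector<Tuple2<String, Integer>> out) {
                    String key = null;
                    int sum = 0;
                    for (Tuple2<String, Integer> t : values) {
                        key = t.f0;
                        sum += t.f1;
                    }
                    out.collect(new Tuple2<String, Integer>(key, sum));
                }
            })
            .output(new DiscardingOutputFormat<Tuple2<String, Integer>>());

        // translate the program and inspect the reducer, the same way
        // the translation tests below do
        Plan plan = env.createProgramPlan();
        GroupReduceOperatorBase<?, ?, ?> reducer =
                (GroupReduceOperatorBase<?, ?, ?>) plan.getDataSinks().iterator().next().getInput();

        System.out.println("key columns: " + Arrays.toString(reducer.getKeyColumns(0)));
        System.out.println("combinable:  " + reducer.isCombinable());
    }
}

A plain GroupReduceFunction like the one above is not combinable; the aggregate and distinct translations in the examples below produce combinable reducers, which is exactly what their isCombinable() assertions check.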
Example 1: getSingleInputNodeSchema
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
@Override
protected void getSingleInputNodeSchema(SingleInputPlanNode node, SparseKeySchema schema)
        throws CompilerPostPassException, ConflictingFieldTypeInfoException
{
    // check that we got the right types
    SingleInputOperator<?, ?, ?> contract = (SingleInputOperator<?, ?, ?>) node.getSingleInputNode().getPactContract();
    if (! (contract instanceof RecordOperator)) {
        throw new CompilerPostPassException("Error: Operator is not a Record based contract. Wrong compiler invokation.");
    }
    RecordOperator recContract = (RecordOperator) contract;

    // add the information to the schema
    int[] localPositions = contract.getKeyColumns(0);
    Class<? extends Key<?>>[] types = recContract.getKeyClasses();
    for (int i = 0; i < localPositions.length; i++) {
        schema.addType(localPositions[i], types[i]);
    }

    // this is a temporary fix, we should solve this more generic
    if (contract instanceof GroupReduceOperatorBase) {
        Ordering groupOrder = ((GroupReduceOperatorBase<?, ?, ?>) contract).getGroupOrder();
        if (groupOrder != null) {
            addOrderingToSchema(groupOrder, schema);
        }
    }
}
Example 2: testCombinable
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
@Test
public void testCombinable() {
    try {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        DataSet<String> input = env.fromElements("1", "2", "1", "3");

        DistinctOperator<String> op = input.distinct(new KeySelector<String, String>() {
            public String getKey(String value) { return value; }
        });
        op.print();

        Plan p = env.createProgramPlan();

        GroupReduceOperatorBase<?, ?, ?> reduceOp = (GroupReduceOperatorBase<?, ?, ?>) p.getDataSinks().iterator().next().getInput();
        Assert.assertTrue(reduceOp.isCombinable());
    }
    catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example 3: translateAggregate
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
@Test
public void translateAggregate() {
    try {
        final int parallelism = 8;
        ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment(parallelism);

        @SuppressWarnings("unchecked")
        DataSet<Tuple3<Double, StringValue, Long>> initialData =
                env.fromElements(new Tuple3<Double, StringValue, Long>(3.141592, new StringValue("foobar"), Long.valueOf(77)));

        initialData.groupBy(0).aggregate(Aggregations.MIN, 1).and(Aggregations.SUM, 2).output(new DiscardingOutputFormat<Tuple3<Double, StringValue, Long>>());

        Plan p = env.createProgramPlan();

        GenericDataSinkBase<?> sink = p.getDataSinks().iterator().next();
        GroupReduceOperatorBase<?, ?, ?> reducer = (GroupReduceOperatorBase<?, ?, ?>) sink.getInput();

        // check keys
        assertEquals(1, reducer.getKeyColumns(0).length);
        assertEquals(0, reducer.getKeyColumns(0)[0]);

        assertEquals(-1, reducer.getParallelism());
        assertTrue(reducer.isCombinable());

        assertTrue(reducer.getInput() instanceof GenericDataSourceBase<?, ?>);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Test caused an error: " + e.getMessage());
    }
}
Example 4: instantiate
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
    // create an input node for the combiner, with the same parallelism as the input node
    GroupReduceNode combinerNode = new GroupReduceNode((GroupReduceOperatorBase<?, ?, ?>) node.getOperator());
    combinerNode.setParallelism(in.getSource().getParallelism());

    SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode, "Combine ("+node.getOperator().getName()+")", in,
            DriverStrategy.SORTED_GROUP_COMBINE);
    // sorting key info
    combiner.setDriverKeyInfo(in.getLocalStrategyKeys(), in.getLocalStrategySortOrder(), 0);
    // set grouping comparator key info
    combiner.setDriverKeyInfo(this.keyList, 1);

    return combiner;
}
Example 5: GroupReduceNode
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
/**
 * Creates a new optimizer node for the given operator.
 *
 * @param operator The reduce operation.
 */
public GroupReduceNode(GroupReduceOperatorBase<?, ?, ?> operator) {
    super(operator);
    this.operatorName = "GroupReduce";

    if (this.keys == null) {
        // case of a key-less reducer. force a parallelism of 1
        setParallelism(1);
    }

    this.possibleProperties = initPossibleProperties(operator.getCustomPartitioner());
}
Example 6: initPossibleProperties
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
private List<OperatorDescriptorSingle> initPossibleProperties(Partitioner<?> customPartitioner) {
    // see if an internal hint dictates the strategy to use
    final Configuration conf = getOperator().getParameters();
    final String localStrategy = conf.getString(Optimizer.HINT_LOCAL_STRATEGY, null);

    final boolean useCombiner;
    if (localStrategy != null) {
        if (Optimizer.HINT_LOCAL_STRATEGY_SORT.equals(localStrategy)) {
            useCombiner = false;
        }
        else if (Optimizer.HINT_LOCAL_STRATEGY_COMBINING_SORT.equals(localStrategy)) {
            if (!isCombineable()) {
                Optimizer.LOG.warn("Strategy hint for GroupReduce '" + getOperator().getName() +
                        "' requires combinable reduce, but user function is not marked combinable.");
            }
            useCombiner = true;
        } else {
            throw new CompilerException("Invalid local strategy hint for match contract: " + localStrategy);
        }
    } else {
        useCombiner = isCombineable();
    }

    // check if we can work with a grouping (simple reducer), or if we need ordering because of a group order
    Ordering groupOrder = null;
    if (getOperator() instanceof GroupReduceOperatorBase) {
        groupOrder = getOperator().getGroupOrder();
        if (groupOrder != null && groupOrder.getNumberOfFields() == 0) {
            groupOrder = null;
        }
    }

    OperatorDescriptorSingle props = useCombiner ?
        (this.keys == null ? new AllGroupWithPartialPreGroupProperties() : new GroupReduceWithCombineProperties(this.keys, groupOrder, customPartitioner)) :
        (this.keys == null ? new AllGroupReduceProperties() : new GroupReduceProperties(this.keys, groupOrder, customPartitioner));

    return Collections.singletonList(props);
}
Example 7: testGetSemanticProperties
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
@Test
public void testGetSemanticProperties() {

    SingleInputSemanticProperties origProps = new SingleInputSemanticProperties();
    origProps.addForwardedField(0, 1);
    origProps.addForwardedField(2, 2);
    origProps.addForwardedField(3, 4);
    origProps.addForwardedField(6, 0);
    origProps.addReadFields(new FieldSet(0, 2, 4, 7));

    GroupReduceOperatorBase<?,?,?> op = mock(GroupReduceOperatorBase.class);
    when(op.getSemanticProperties()).thenReturn(origProps);
    when(op.getKeyColumns(0)).thenReturn(new int[]{3,2});
    when(op.getParameters()).thenReturn(new Configuration());

    GroupReduceNode node = new GroupReduceNode(op);

    SemanticProperties filteredProps = node.getSemanticPropertiesForLocalPropertyFiltering();

    assertTrue(filteredProps.getForwardingTargetFields(0, 0).size() == 0);
    assertTrue(filteredProps.getForwardingTargetFields(0, 2).size() == 1);
    assertTrue(filteredProps.getForwardingTargetFields(0, 2).contains(2));
    assertTrue(filteredProps.getForwardingTargetFields(0, 3).size() == 1);
    assertTrue(filteredProps.getForwardingTargetFields(0, 3).contains(4));
    assertTrue(filteredProps.getForwardingTargetFields(0, 6).size() == 0);
    assertTrue(filteredProps.getForwardingSourceField(0, 1) < 0);
    assertTrue(filteredProps.getForwardingSourceField(0, 2) == 2);
    assertTrue(filteredProps.getForwardingSourceField(0, 4) == 3);
    assertTrue(filteredProps.getForwardingSourceField(0, 0) < 0);
    assertTrue(filteredProps.getReadFields(0).size() == 4);
    assertTrue(filteredProps.getReadFields(0).contains(0));
    assertTrue(filteredProps.getReadFields(0).contains(2));
    assertTrue(filteredProps.getReadFields(0).contains(4));
    assertTrue(filteredProps.getReadFields(0).contains(7));
}
Example 8: instantiate
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
@Override
public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) {
    // create an input node for the combiner, with the same DOP as the input node
    GroupReduceNode combinerNode = new GroupReduceNode((GroupReduceOperatorBase<?, ?, ?>) node.getPactContract());
    combinerNode.setDegreeOfParallelism(in.getSource().getDegreeOfParallelism());

    SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode, "Combine("+node.getPactContract().getName()+")", in,
            DriverStrategy.SORTED_GROUP_COMBINE);
    // sorting key info
    combiner.setDriverKeyInfo(in.getLocalStrategyKeys(), in.getLocalStrategySortOrder(), 0);
    // set grouping comparator key info
    combiner.setDriverKeyInfo(this.keyList, 1);

    return combiner;
}
Example 9: GroupReduceNode
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
/**
 * Creates a new ReduceNode for the given contract.
 *
 * @param pactContract The reduce contract object.
 */
public GroupReduceNode(GroupReduceOperatorBase<?, ?, ?> pactContract) {
    super(pactContract);

    if (this.keys == null) {
        // case of a key-less reducer. force a parallelism of 1
        setDegreeOfParallelism(1);
    }
}
Example 10: getPossibleProperties
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
@Override
protected List<OperatorDescriptorSingle> getPossibleProperties() {
    // see if an internal hint dictates the strategy to use
    final Configuration conf = getPactContract().getParameters();
    final String localStrategy = conf.getString(PactCompiler.HINT_LOCAL_STRATEGY, null);

    final boolean useCombiner;
    if (localStrategy != null) {
        if (PactCompiler.HINT_LOCAL_STRATEGY_SORT.equals(localStrategy)) {
            useCombiner = false;
        } else if (PactCompiler.HINT_LOCAL_STRATEGY_COMBINING_SORT.equals(localStrategy)) {
            if (!isCombineable()) {
                PactCompiler.LOG.warn("Strategy hint for Reduce Pact '" + getPactContract().getName() +
                        "' desires combinable reduce, but user function is not marked combinable.");
            }
            useCombiner = true;
        } else {
            throw new CompilerException("Invalid local strategy hint for match contract: " + localStrategy);
        }
    } else {
        useCombiner = isCombineable();
    }

    // check if we can work with a grouping (simple reducer), or if we need ordering because of a group order
    Ordering groupOrder = null;
    if (getPactContract() instanceof GroupReduceOperatorBase) {
        groupOrder = ((GroupReduceOperatorBase<?, ?, ?>) getPactContract()).getGroupOrder();
        if (groupOrder != null && groupOrder.getNumberOfFields() == 0) {
            groupOrder = null;
        }
    }

    OperatorDescriptorSingle props = useCombiner ?
        (this.keys == null ? new AllGroupWithPartialPreGroupProperties() : new GroupReduceWithCombineProperties(this.keys, groupOrder)) :
        (this.keys == null ? new AllGroupReduceProperties() : new GroupReduceProperties(this.keys, groupOrder));

    return Collections.singletonList(props);
}
Example 11: translateAggregate
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
@Test
public void translateAggregate() {
    try {
        final int DOP = 8;
        ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment(DOP);

        @SuppressWarnings("unchecked")
        DataSet<Tuple3<Double, StringValue, Long>> initialData =
                env.fromElements(new Tuple3<Double, StringValue, Long>(3.141592, new StringValue("foobar"), new Long(77)));

        initialData.groupBy(0).aggregate(Aggregations.MIN, 1).and(Aggregations.SUM, 2).print();

        Plan p = env.createProgramPlan();

        GenericDataSinkBase<?> sink = p.getDataSinks().iterator().next();
        GroupReduceOperatorBase<?, ?, ?> reducer = (GroupReduceOperatorBase<?, ?, ?>) sink.getInput();

        // check keys
        assertEquals(1, reducer.getKeyColumns(0).length);
        assertEquals(0, reducer.getKeyColumns(0)[0]);

        assertEquals(-1, reducer.getDegreeOfParallelism());
        assertTrue(reducer.isCombinable());

        assertTrue(reducer.getInput() instanceof GenericDataSourceBase<?, ?>);
    }
    catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        fail("Test caused an error: " + e.getMessage());
    }
}
Example 12: getContractClassShouldReturnReduceForReduceStub
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
/**
 * Test {@link OperatorUtil#getContractClass(Class)}
 */
@Test
public void getContractClassShouldReturnReduceForReduceStub() {
    final Class<?> result = OperatorUtil.getContractClass(Reducer.class);
    assertEquals(GroupReduceOperatorBase.class, result);
}
Example 13: getOperator
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
/**
 * Gets the operator represented by this optimizer node.
 *
 * @return The operator represented by this optimizer node.
 */
@Override
public GroupReduceOperatorBase<?, ?, ?> getOperator() {
    return (GroupReduceOperatorBase<?, ?, ?>) super.getOperator();
}
Example 14: getPactContract
import org.apache.flink.api.common.operators.base.GroupReduceOperatorBase; // import the required package/class
/**
 * Gets the contract object for this reduce node.
 *
 * @return The contract.
 */
@Override
public GroupReduceOperatorBase<?, ?, ?> getPactContract() {
    return (GroupReduceOperatorBase<?, ?, ?>) super.getPactContract();
}