

Java Partitioner Class Code Examples

This article collects typical usage examples of the Java class org.apache.flink.api.common.functions.Partitioner. If you are wondering how the Partitioner class is used in practice, how to apply it, or what example usages look like, the curated code examples below may help.


The Partitioner class belongs to the org.apache.flink.api.common.functions package. A total of 15 code examples of the class are shown below, sorted by popularity by default.
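Before the examples, here is a minimal, self-contained sketch of how a custom Partitioner is typically wired into a DataSet program through partitionCustom. The class name, sample data, and partitioning logic below are illustrative assumptions and are not taken from the examples that follow.

import org.apache.flink.api.common.functions.Partitioner;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class CustomPartitionerSketch {
	public static void main(String[] args) throws Exception {
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		final DataSet<Tuple2<Integer, String>> ds = env.fromElements(
				Tuple2.of(1, "a"), Tuple2.of(2, "b"), Tuple2.of(3, "c"));

		// Route each record to a partition based on the Integer key in tuple field 0.
		ds.partitionCustom(new Partitioner<Integer>() {
			@Override
			public int partition(Integer key, int numPartitions) {
				return Math.floorMod(key, numPartitions);
			}
		}, 0)
			.print();
	}
}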

Example 1: PartitionOperator

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
private <P> PartitionOperator(DataSet<T> input, PartitionMethod pMethod, Keys<T> pKeys, Partitioner<P> customPartitioner,
		TypeInformation<P> partitionerTypeInfo, DataDistribution distribution, String partitionLocationName) {
	super(input, input.getType());

	Preconditions.checkNotNull(pMethod);
	Preconditions.checkArgument(pKeys != null || pMethod == PartitionMethod.REBALANCE, "Partitioning requires keys");
	Preconditions.checkArgument(pMethod != PartitionMethod.CUSTOM || customPartitioner != null, "Custom partitioning requires a partitioner.");
	Preconditions.checkArgument(distribution == null || pMethod == PartitionMethod.RANGE, "Customized data distribution is only necessary for range partition.");

	if (distribution != null) {
		Preconditions.checkArgument(pKeys.getNumberOfKeyFields() <= distribution.getNumberOfFields(), "The distribution must provide at least as many fields as flat key fields are specified.");
		Preconditions.checkArgument(Arrays.equals(pKeys.getKeyFieldTypes(), Arrays.copyOfRange(distribution.getKeyTypes(), 0, pKeys.getNumberOfKeyFields())),
				"The types of the flat key fields must be equal to the types of the fields of the distribution.");
	}

	if (customPartitioner != null) {
		pKeys.validateCustomPartitioner(customPartitioner, partitionerTypeInfo);
	}

	this.pMethod = pMethod;
	this.pKeys = pKeys;
	this.partitionLocationName = partitionLocationName;
	this.customPartitioner = customPartitioner;
	this.distribution = distribution;
}
 
Developer ID: axbaretto, Project: flink, Lines: 26, Source: PartitionOperator.java

Example 2: translateSelectorFunctionPartitioner

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
@SuppressWarnings("unchecked")
private static <T, K> org.apache.flink.api.common.operators.SingleInputOperator<?, T, ?> translateSelectorFunctionPartitioner(
	SelectorFunctionKeys<T, ?> rawKeys,
	PartitionMethod pMethod,
	String name,
	Operator<T> input,
	int partitionDop,
	Partitioner<?> customPartitioner,
	Order[] orders) {
	final SelectorFunctionKeys<T, K> keys = (SelectorFunctionKeys<T, K>) rawKeys;
	TypeInformation<Tuple2<K, T>> typeInfoWithKey = KeyFunctions.createTypeWithKey(keys);

	Operator<Tuple2<K, T>> keyedInput = KeyFunctions.appendKeyExtractor(input, keys);

	PartitionOperatorBase<Tuple2<K, T>> keyedPartitionedInput =
		new PartitionOperatorBase<>(new UnaryOperatorInformation<>(typeInfoWithKey, typeInfoWithKey), pMethod, new int[]{0}, name);
	keyedPartitionedInput.setInput(keyedInput);
	keyedPartitionedInput.setCustomPartitioner(customPartitioner);
	keyedPartitionedInput.setParallelism(partitionDop);
	keyedPartitionedInput.setOrdering(new Ordering(0, null, orders != null ? orders[0] : Order.ASCENDING));

	return KeyFunctions.appendKeyRemover(keyedPartitionedInput, keys);
}
 
Developer ID: axbaretto, Project: flink, Lines: 24, Source: PartitionOperator.java

Example 3: testRangePartitionCustomPartitionerByKeySelector

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
@Test
public void testRangePartitionCustomPartitionerByKeySelector() throws Exception {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	final DataSet<CustomPojo> ds = getPojoDataSet(env);
	ds.partitionCustom(new Partitioner<Integer>() {
		@Override
		public int partition(Integer key, int numPartitions) {
			return 1;
		}
	}, new KeySelector<CustomPojo, Integer>() {
		@Override
		public Integer getKey(CustomPojo value) throws Exception {
			return value.getNumber();
		}
	});
}
 
Developer ID: axbaretto, Project: flink, Lines: 18, Source: PartitionOperatorTest.java

Example 4: validateCustomPartitioner

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
@Override
public <E> void validateCustomPartitioner(Partitioner<E> partitioner, TypeInformation<E> typeInfo) {

	if (keyFields.size() != 1) {
		throw new InvalidProgramException("Custom partitioners can only be used with keys that have one key field.");
	}
	
	if (typeInfo == null) {
		// try to extract key type from partitioner
		try {
			typeInfo = TypeExtractor.getPartitionerTypes(partitioner);
		}
		catch (Throwable t) {
			// best effort check, so we ignore exceptions
		}
	}

	// only check if type is known and not a generic type
	if (typeInfo != null && !(typeInfo instanceof GenericTypeInfo)) {
		// check equality of key and partitioner type
		if (!keyType.equals(typeInfo)) {
			throw new InvalidProgramException("The partitioner is incompatible with the key type. "
				+ "Partitioner type: " + typeInfo + " , key type: " + keyType);
		}
	}
}
 
Developer ID: axbaretto, Project: flink, Lines: 27, Source: Keys.java
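To illustrate what the validation above rejects, the following hypothetical sketch (class name and sample data are assumptions) deliberately pairs a Partitioner<String> with an Integer key field. Following the logic shown in Example 1 and Example 4, Flink should refuse the program with an InvalidProgramException before anything executes.

import org.apache.flink.api.common.InvalidProgramException;
import org.apache.flink.api.common.functions.Partitioner;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class PartitionerTypeMismatchSketch {
	public static void main(String[] args) {
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		final DataSet<Tuple2<Integer, String>> ds = env.fromElements(
				Tuple2.of(1, "a"), Tuple2.of(2, "b"));

		try {
			// Key field 0 is an Integer, but this partitioner expects String keys,
			// so validateCustomPartitioner is expected to reject the program.
			ds.partitionCustom(new Partitioner<String>() {
				@Override
				public int partition(String key, int numPartitions) {
					return Math.floorMod(key.hashCode(), numPartitions);
				}
			}, 0);
		} catch (InvalidProgramException e) {
			System.out.println("Rejected as expected: " + e.getMessage());
		}
	}
}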

Example 5: getSelectorForOneKey

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
public static <X, K> KeySelector<X, K> getSelectorForOneKey(
		Keys<X> keys, Partitioner<K> partitioner, TypeInformation<X> typeInfo, ExecutionConfig executionConfig) {
	if (!(typeInfo instanceof CompositeType)) {
		throw new InvalidTypesException(
				"This key operation requires a composite type such as Tuples, POJOs, case classes, etc");
	}
	if (partitioner != null) {
		keys.validateCustomPartitioner(partitioner, null);
	}

	CompositeType<X> compositeType = (CompositeType<X>) typeInfo;
	int[] logicalKeyPositions = keys.computeLogicalKeyPositions();
	if (logicalKeyPositions.length != 1) {
		throw new IllegalArgumentException("There must be exactly 1 key specified");
	}

	TypeComparator<X> comparator = compositeType.createComparator(
			logicalKeyPositions, new boolean[] { true }, 0, executionConfig);
	return new OneKeySelector<>(comparator);
}
 
Developer ID: axbaretto, Project: flink, Lines: 21, Source: KeySelectorUtil.java

Example 6: GroupReduceWithCombineProperties

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
public GroupReduceWithCombineProperties(FieldSet groupKeys, Ordering additionalOrderKeys, Partitioner<?> customPartitioner) {
	super(groupKeys);
	
	// if we have an additional ordering, construct the ordering to have primarily the grouping fields
	if (additionalOrderKeys != null) {
		this.ordering = new Ordering();
		for (Integer key : this.keyList) {
			this.ordering.appendOrdering(key, null, Order.ANY);
		}
	
		// and next the additional order fields
		for (int i = 0; i < additionalOrderKeys.getNumberOfFields(); i++) {
			Integer field = additionalOrderKeys.getFieldNumber(i);
			Order order = additionalOrderKeys.getOrder(i);
			this.ordering.appendOrdering(field, additionalOrderKeys.getType(i), order);
		}
	} else {
		this.ordering = null;
	}
	
	this.customPartitioner = customPartitioner;
}
 
Developer ID: axbaretto, Project: flink, Lines: 23, Source: GroupReduceWithCombineProperties.java

Example 7: GroupReduceProperties

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
public GroupReduceProperties(FieldSet groupKeys, Ordering additionalOrderKeys, Partitioner<?> customPartitioner) {
	super(groupKeys);
	
	// if we have an additional ordering, construct the ordering to have primarily the grouping fields
	if (additionalOrderKeys != null) {
		this.ordering = new Ordering();
		for (Integer key : this.keyList) {
			this.ordering.appendOrdering(key, null, Order.ANY);
		}
	
		// and next the additional order fields
		for (int i = 0; i < additionalOrderKeys.getNumberOfFields(); i++) {
			Integer field = additionalOrderKeys.getFieldNumber(i);
			Order order = additionalOrderKeys.getOrder(i);
			this.ordering.appendOrdering(field, additionalOrderKeys.getType(i), order);
		}
	}
	else {
		this.ordering = null;
	}
	
	this.customPartitioner = customPartitioner;
}
 
Developer ID: axbaretto, Project: flink, Lines: 24, Source: GroupReduceProperties.java

Example 8: initializeDataProperties

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
private List<OperatorDescriptorDual> initializeDataProperties(Partitioner<?> customPartitioner) {
	Ordering groupOrder1 = null;
	Ordering groupOrder2 = null;
	
	CoGroupOperatorBase<?, ?, ?, ?> cgc = getOperator();
	groupOrder1 = cgc.getGroupOrderForInputOne();
	groupOrder2 = cgc.getGroupOrderForInputTwo();
		
	if (groupOrder1 != null && groupOrder1.getNumberOfFields() == 0) {
		groupOrder1 = null;
	}
	if (groupOrder2 != null && groupOrder2.getNumberOfFields() == 0) {
		groupOrder2 = null;
	}
	
	CoGroupDescriptor descr = new CoGroupDescriptor(this.keys1, this.keys2, groupOrder1, groupOrder2);
	if (customPartitioner != null) {
		descr.setCustomPartitioner(customPartitioner);
	}
	
	return Collections.<OperatorDescriptorDual>singletonList(descr);
}
 
Developer ID: axbaretto, Project: flink, Lines: 23, Source: CoGroupNode.java

Example 9: testCustomPartitioningPreserved1

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
@Test
public void testCustomPartitioningPreserved1() {

	SingleInputSemanticProperties sprops = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sprops, new String[]{"0;1;4"}, null, null, tupleInfo, tupleInfo);

	GlobalProperties gprops = new GlobalProperties();
	Partitioner<Tuple2<Long, Integer>> myP = new MockPartitioner();
	gprops.setCustomPartitioned(new FieldList(0, 4), myP);

	GlobalProperties result = gprops.filterBySemanticProperties(sprops, 0);

	assertEquals(PartitioningProperty.CUSTOM_PARTITIONING, result.getPartitioning());
	FieldList pFields = result.getPartitioningFields();
	assertEquals(2, pFields.size());
	assertTrue(pFields.contains(0));
	assertTrue(pFields.contains(4));
	assertEquals(myP, result.getCustomPartitioner());
}
 
Developer ID: axbaretto, Project: flink, Lines: 20, Source: GlobalPropertiesFilteringTest.java

Example 10: testCustomPartitioningPreserved2

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
@Test
public void testCustomPartitioningPreserved2() {

	SingleInputSemanticProperties sprops = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sprops, new String[]{"0->1; 1->2; 4->3"}, null, null, tupleInfo, tupleInfo);

	GlobalProperties gprops = new GlobalProperties();
	Partitioner<Tuple2<Long, Integer>> myP = new MockPartitioner();
	gprops.setCustomPartitioned(new FieldList(0, 4), myP);

	GlobalProperties result = gprops.filterBySemanticProperties(sprops, 0);

	assertEquals(PartitioningProperty.CUSTOM_PARTITIONING, result.getPartitioning());
	FieldList pFields = result.getPartitioningFields();
	assertEquals(2, pFields.size());
	assertTrue(pFields.contains(1));
	assertTrue(pFields.contains(3));
	assertEquals(myP, result.getCustomPartitioner());
}
 
Developer ID: axbaretto, Project: flink, Lines: 20, Source: GlobalPropertiesFilteringTest.java

Example 11: testCustomPartitioningErased

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
@Test
public void testCustomPartitioningErased() {

	SingleInputSemanticProperties sprops = new SingleInputSemanticProperties();
	SemanticPropUtil.getSemanticPropsSingleFromString(sprops, new String[]{"0;1"}, null, null, tupleInfo, tupleInfo);

	GlobalProperties gprops = new GlobalProperties();
	Partitioner<Tuple2<Long, Integer>> myP = new MockPartitioner();
	gprops.setCustomPartitioned(new FieldList(0, 4), myP);

	GlobalProperties result = gprops.filterBySemanticProperties(sprops, 0);

	assertEquals(PartitioningProperty.RANDOM_PARTITIONED, result.getPartitioning());
	assertNull(result.getPartitioningFields());
	assertNull(result.getCustomPartitioner());
}
 
Developer ID: axbaretto, Project: flink, Lines: 17, Source: GlobalPropertiesFilteringTest.java

Example 12: testPartitionerLambda

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
@Test
public void testPartitionerLambda() {
	Partitioner<Tuple2<Integer, String>> partitioner = (key, numPartitions) -> key.f1.length() % numPartitions;
	final TypeInformation<?> ti = TypeExtractor.getPartitionerTypes(partitioner);

	Assert.assertTrue(ti.isTupleType());
	Assert.assertEquals(2, ti.getArity());
	Assert.assertEquals(((TupleTypeInfo<?>) ti).getTypeAt(0), BasicTypeInfo.INT_TYPE_INFO);
	Assert.assertEquals(((TupleTypeInfo<?>) ti).getTypeAt(1), BasicTypeInfo.STRING_TYPE_INFO);

}
 
Developer ID: axbaretto, Project: flink, Lines: 12, Source: LambdaExtractionTest.java

Example 13: withPartitioner

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
/**
 * Uses a custom partitioner for the grouping.
 *
 * @param partitioner The custom partitioner.
 * @return The grouping object itself, to allow for method chaining.
 */
public UnsortedGrouping<T> withPartitioner(Partitioner<?> partitioner) {
	Preconditions.checkNotNull(partitioner);
	getKeys().validateCustomPartitioner(partitioner, null);

	this.customPartitioner = partitioner;
	return this;
}
 
Developer ID: axbaretto, Project: flink, Lines: 14, Source: UnsortedGrouping.java
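As a usage sketch for the method above (class name and sample data are illustrative assumptions), a grouping can carry a custom partitioner so that the groups are distributed by the user-supplied logic before the aggregation runs:

import org.apache.flink.api.common.functions.Partitioner;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;

public class GroupingWithPartitionerSketch {
	public static void main(String[] args) throws Exception {
		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		final DataSet<Tuple2<Integer, Integer>> ds = env.fromElements(
				Tuple2.of(1, 10), Tuple2.of(1, 20), Tuple2.of(2, 30));

		// groupBy(0) returns an UnsortedGrouping; withPartitioner controls how the
		// groups are spread across parallel instances before sum(1) is computed.
		ds.groupBy(0)
			.withPartitioner(new Partitioner<Integer>() {
				@Override
				public int partition(Integer key, int numPartitions) {
					return Math.floorMod(key, numPartitions);
				}
			})
			.sum(1)
			.print();
	}
}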

Example 14: withPartitioner

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
/**
 * Uses a custom partitioner for the grouping.
 *
 * @param partitioner The custom partitioner.
 * @return The grouping object itself, to allow for method chaining.
 */
public SortedGrouping<T> withPartitioner(Partitioner<?> partitioner) {
	Preconditions.checkNotNull(partitioner);

	getKeys().validateCustomPartitioner(partitioner, null);

	this.customPartitioner = partitioner;
	return this;
}
 
Developer ID: axbaretto, Project: flink, Lines: 15, Source: SortedGrouping.java

Example 15: testRangePartitionCustomPartitionerByFieldId

import org.apache.flink.api.common.functions.Partitioner; // import the required package/class
@Test
public void testRangePartitionCustomPartitionerByFieldId() throws Exception {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	final DataSet<Tuple2<Integer, String>> ds = getTupleDataSet(env);
	ds.partitionCustom(new Partitioner<Integer>() {
		@Override
		public int partition(Integer key, int numPartitions) {
			return 1;
		}
	}, 0);
}
 
Developer ID: axbaretto, Project: flink, Lines: 13, Source: PartitionOperatorTest.java


Note: The org.apache.flink.api.common.functions.Partitioner class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their original authors, and the copyright of the source code remains with those authors. Please consult the corresponding projects' licenses before distributing or using the code; do not reproduce this article without permission.