This article collects typical usage examples of the Java method org.apache.beam.sdk.transforms.Combine.CombineFn from the Apache Beam SDK. If you are unsure what Combine.CombineFn does or how to use it, the curated examples below should help; you can also explore the enclosing class org.apache.beam.sdk.transforms.Combine for more context.
The following presents 10 code examples of the Combine.CombineFn method, sorted by popularity by default.
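Before the examples, it helps to recall the shape of the class itself: a Combine.CombineFn<InputT, AccumT, OutputT> is defined by four operations: createAccumulator, addInput, mergeAccumulators, and extractOutput. Below is a minimal sketch modeled on the averaging example from the Beam programming guide; the class and field names are illustrative.

import java.io.Serializable;
import org.apache.beam.sdk.transforms.Combine;

// A minimal CombineFn that averages integers: the accumulator tracks a
// running sum and a count, and the output is their quotient.
public class AverageFn extends Combine.CombineFn<Integer, AverageFn.Accum, Double> {

  // Mutable accumulator; Serializable so a default coder can be inferred.
  public static class Accum implements Serializable {
    int sum = 0;
    int count = 0;
  }

  @Override
  public Accum createAccumulator() {
    return new Accum();
  }

  @Override
  public Accum addInput(Accum accum, Integer input) {
    accum.sum += input;
    accum.count++;
    return accum;
  }

  @Override
  public Accum mergeAccumulators(Iterable<Accum> accums) {
    Accum merged = createAccumulator();
    for (Accum accum : accums) {
      merged.sum += accum.sum;
      merged.count += accum.count;
    }
    return merged;
  }

  @Override
  public Double extractOutput(Accum accum) {
    return accum.count == 0 ? 0.0 : ((double) accum.sum) / accum.count;
  }
}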
Example 1: params
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
@Parameters(name = "{index}: {0}")
public static Iterable<Combine.CombineFn<Integer, ?, ?>> params() {
  BinaryCombineIntegerFn sum = Sum.ofIntegers();
  CombineFn<Integer, ?, Long> count = Count.combineFn();
  TestCombineFn test = new TestCombineFn();
  return ImmutableList.<CombineFn<Integer, ?, ?>>builder()
      .add(sum)
      .add(count)
      .add(test)
      .build();
}
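For context, params() is a factory for a JUnit Parameterized test: each CombineFn in the returned list becomes one test instance. A minimal sketch of the surrounding harness follows; the class name and test body are assumptions, not the original test.

import static org.junit.Assert.assertNotNull;
import java.util.Arrays;
import org.apache.beam.sdk.transforms.Combine;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;

@RunWith(Parameterized.class)
public class CombineFnParamsTest {

  // Injected by JUnit from the params() factory above.
  @Parameter
  public Combine.CombineFn<Integer, ?, ?> combineFn;

  @Test
  public void appliesToAFixedInput() {
    // CombineFn.apply(...) runs createAccumulator/addInput/mergeAccumulators/extractOutput.
    Object result = combineFn.apply(Arrays.asList(1, 2, 3));
    assertNotNull(result);
  }
}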
Example 2: FlinkCombiningState
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
FlinkCombiningState(
    KeyedStateBackend<ByteBuffer> flinkStateBackend,
    StateTag<CombiningState<InputT, AccumT, OutputT>> address,
    Combine.CombineFn<InputT, AccumT, OutputT> combineFn,
    StateNamespace namespace,
    Coder<AccumT> accumCoder) {
  this.namespace = namespace;
  this.address = address;
  this.combineFn = combineFn;
  this.flinkStateBackend = flinkStateBackend;
  flinkStateDescriptor = new ValueStateDescriptor<>(
      address.getId(), new CoderTypeSerializer<>(accumCoder));
}
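The constructor above only wires the state descriptor together; the CombineFn does its work when values arrive. A simplified sketch of how such a state cell's add() typically folds a new input into the stored accumulator follows (condensed from the Flink runner; the exception handling and exact lookup calls are approximations, and imports of Flink's ValueState and StringSerializer are assumed):

@Override
public void add(InputT value) {
  try {
    // Look up the Flink ValueState cell holding the accumulator for this namespace.
    ValueState<AccumT> state =
        flinkStateBackend.getPartitionedState(
            namespace.stringKey(), StringSerializer.INSTANCE, flinkStateDescriptor);

    AccumT current = state.value();
    if (current == null) {
      // First input for this key/namespace: start a fresh accumulator.
      current = combineFn.createAccumulator();
    }
    state.update(combineFn.addInput(current, value));
  } catch (Exception e) {
    throw new RuntimeException("Error adding to state.", e);
  }
}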
Example 3: FlinkCombiningState
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
FlinkCombiningState(
    OperatorStateBackend flinkStateBackend,
    StateTag<CombiningState<InputT, AccumT, OutputT>> address,
    Combine.CombineFn<InputT, AccumT, OutputT> combineFn,
    StateNamespace namespace,
    Coder<AccumT> accumCoder) {
  super(flinkStateBackend, address.getId(), namespace, accumCoder);
  this.namespace = namespace;
  this.address = address;
  this.combineFn = combineFn;
}
Example 4: FlinkKeyedCombiningState
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
FlinkKeyedCombiningState(
    OperatorStateBackend flinkStateBackend,
    StateTag<CombiningState<InputT, AccumT, OutputT>> address,
    Combine.CombineFn<InputT, AccumT, OutputT> combineFn,
    StateNamespace namespace,
    Coder<AccumT> accumCoder,
    FlinkBroadcastStateInternals<K> flinkStateInternals) {
  super(flinkStateBackend, address.getId(), namespace, accumCoder);
  this.namespace = namespace;
  this.address = address;
  this.combineFn = combineFn;
  this.flinkStateInternals = flinkStateInternals;
}
Example 5: bindCombining
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
<InputT, AccumT, OutputT> CombiningState<InputT, AccumT, OutputT> bindCombining(
    String id,
    StateSpec<CombiningState<InputT, AccumT, OutputT>> spec,
    Coder<AccumT> accumCoder,
    Combine.CombineFn<InputT, AccumT, OutputT> combineFn);
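A binder with this shape is what a runner calls when a pipeline's DoFn declares combining state. For illustration, the author-side declaration that eventually reaches bindCombining might look like the following sketch; the DoFn class, state id, and output are made up:

import org.apache.beam.sdk.state.CombiningState;
import org.apache.beam.sdk.state.StateSpec;
import org.apache.beam.sdk.state.StateSpecs;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.values.KV;

// Hypothetical DoFn keeping a per-key running sum in combining state.
class RunningSumFn extends DoFn<KV<String, Integer>, Integer> {

  // Sum.ofIntegers() is a CombineFn<Integer, int[], Integer>; the runner
  // binds this spec to concrete state via a binder like the one above.
  @StateId("runningSum")
  private final StateSpec<CombiningState<Integer, int[], Integer>> sumSpec =
      StateSpecs.combining(Sum.ofIntegers());

  @ProcessElement
  public void processElement(
      ProcessContext context,
      @StateId("runningSum") CombiningState<Integer, int[], Integer> sum) {
    sum.add(context.element().getValue());
    context.output(sum.read());
  }
}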
Example 6: dispatchCombining
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
@Override
public ResultT dispatchCombining(Combine.CombineFn<?, ?, ?> combineFn, Coder<?> accumCoder) {
  return dispatchDefault();
}
Example 7: fromProto
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
@VisibleForTesting
static StateSpec<?> fromProto(RunnerApi.StateSpec stateSpec, RehydratedComponents components)
    throws IOException {
  switch (stateSpec.getSpecCase()) {
    case VALUE_SPEC:
      return StateSpecs.value(components.getCoder(stateSpec.getValueSpec().getCoderId()));
    case BAG_SPEC:
      return StateSpecs.bag(components.getCoder(stateSpec.getBagSpec().getElementCoderId()));
    case COMBINING_SPEC:
      FunctionSpec combineFnSpec = stateSpec.getCombiningSpec().getCombineFn().getSpec();
      if (!combineFnSpec.getUrn().equals(CombineTranslation.JAVA_SERIALIZED_COMBINE_FN_URN)) {
        throw new UnsupportedOperationException(
            String.format(
                "Cannot create %s from non-Java %s: %s",
                StateSpec.class.getSimpleName(),
                Combine.CombineFn.class.getSimpleName(),
                combineFnSpec.getUrn()));
      }
      Combine.CombineFn<?, ?, ?> combineFn =
          (Combine.CombineFn<?, ?, ?>)
              SerializableUtils.deserializeFromByteArray(
                  combineFnSpec.getPayload().toByteArray(),
                  Combine.CombineFn.class.getSimpleName());
      // Rawtype coder cast because it is required to be a valid accumulator coder
      // for the CombineFn, by construction
      return StateSpecs.combining(
          (Coder) components.getCoder(stateSpec.getCombiningSpec().getAccumulatorCoderId()),
          combineFn);
    case MAP_SPEC:
      return StateSpecs.map(
          components.getCoder(stateSpec.getMapSpec().getKeyCoderId()),
          components.getCoder(stateSpec.getMapSpec().getValueCoderId()));
    case SET_SPEC:
      return StateSpecs.set(components.getCoder(stateSpec.getSetSpec().getElementCoderId()));
    case SPEC_NOT_SET:
    default:
      throw new IllegalArgumentException(
          String.format("Unknown %s: %s", RunnerApi.StateSpec.class.getName(), stateSpec));
  }
}
Example 8: translateNode
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
@Override
public void translateNode(
    PTransform<PCollection<KV<K, InputT>>, PCollection<KV<K, Iterable<InputT>>>> transform,
    FlinkBatchTranslationContext context) {

  // For now, this is copied from the Combine.PerKey translator. Once we have the new runner API
  // we can replace GroupByKey with a Combine.PerKey using the Concatenate CombineFn.
  DataSet<WindowedValue<KV<K, InputT>>> inputDataSet =
      context.getInputDataSet(context.getInput(transform));

  Combine.CombineFn<InputT, List<InputT>, List<InputT>> combineFn = new Concatenate<>();

  KvCoder<K, InputT> inputCoder =
      (KvCoder<K, InputT>) context.getInput(transform).getCoder();

  Coder<List<InputT>> accumulatorCoder;
  try {
    accumulatorCoder =
        combineFn.getAccumulatorCoder(
            context.getInput(transform).getPipeline().getCoderRegistry(),
            inputCoder.getValueCoder());
  } catch (CannotProvideCoderException e) {
    throw new RuntimeException(e);
  }

  WindowingStrategy<?, ?> windowingStrategy =
      context.getInput(transform).getWindowingStrategy();

  TypeInformation<WindowedValue<KV<K, List<InputT>>>> partialReduceTypeInfo =
      new CoderTypeInformation<>(
          WindowedValue.getFullCoder(
              KvCoder.of(inputCoder.getKeyCoder(), accumulatorCoder),
              windowingStrategy.getWindowFn().windowCoder()));

  Grouping<WindowedValue<KV<K, InputT>>> inputGrouping =
      inputDataSet.groupBy(new KvKeySelector<InputT, K>(inputCoder.getKeyCoder()));

  @SuppressWarnings("unchecked")
  WindowingStrategy<Object, BoundedWindow> boundedStrategy =
      (WindowingStrategy<Object, BoundedWindow>) windowingStrategy;

  FlinkPartialReduceFunction<K, InputT, List<InputT>, ?> partialReduceFunction =
      new FlinkPartialReduceFunction<>(
          combineFn, boundedStrategy,
          Collections.<PCollectionView<?>, WindowingStrategy<?, ?>>emptyMap(),
          context.getPipelineOptions());

  FlinkReduceFunction<K, List<InputT>, List<InputT>, ?> reduceFunction =
      new FlinkReduceFunction<>(
          combineFn, boundedStrategy,
          Collections.<PCollectionView<?>, WindowingStrategy<?, ?>>emptyMap(),
          context.getPipelineOptions());

  // Partially GroupReduce the values into the intermediate format AccumT (combine)
  GroupCombineOperator<
      WindowedValue<KV<K, InputT>>,
      WindowedValue<KV<K, List<InputT>>>> groupCombine =
      new GroupCombineOperator<>(
          inputGrouping,
          partialReduceTypeInfo,
          partialReduceFunction,
          "GroupCombine: " + transform.getName());

  Grouping<WindowedValue<KV<K, List<InputT>>>> intermediateGrouping =
      groupCombine.groupBy(new KvKeySelector<List<InputT>, K>(inputCoder.getKeyCoder()));

  // Fully reduce the values and create output format VO
  GroupReduceOperator<
      WindowedValue<KV<K, List<InputT>>>, WindowedValue<KV<K, List<InputT>>>> outputDataSet =
      new GroupReduceOperator<>(
          intermediateGrouping, partialReduceTypeInfo, reduceFunction, transform.getName());

  context.setOutputDataSet(context.getOutput(transform), outputDataSet);
}
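The translation above leans on a Concatenate CombineFn whose accumulator and output are both the collected list of inputs, which is what lets the Combine-based code path behave like a GroupByKey. A sketch modeled on the runner's helper of that name:

import java.util.ArrayList;
import java.util.List;
import org.apache.beam.sdk.transforms.Combine;

// CombineFn that merely collects its inputs: AccumT == OutputT == List<T>.
static class Concatenate<T> extends Combine.CombineFn<T, List<T>, List<T>> {

  @Override
  public List<T> createAccumulator() {
    return new ArrayList<>();
  }

  @Override
  public List<T> addInput(List<T> accumulator, T input) {
    accumulator.add(input);
    return accumulator;
  }

  @Override
  public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
    List<T> result = createAccumulator();
    for (List<T> accumulator : accumulators) {
      result.addAll(accumulator);
    }
    return result;
  }

  @Override
  public List<T> extractOutput(List<T> accumulator) {
    return accumulator;
  }
}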
Example 9: registerUdaf
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
/**
 * Register a UDAF function which can be used in a GROUP BY expression.
 * See {@link org.apache.beam.sdk.transforms.Combine.CombineFn} on how to implement a UDAF.
 */
public void registerUdaf(String functionName, Combine.CombineFn combineFn) {
  schema.add(functionName, new UdafImpl(combineFn));
}
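To make the registration contract concrete: any Combine.CombineFn can serve as the UDAF body. A hypothetical squaresum function and its use from SQL follow; the class name, the sqlEnv variable, and the query text are illustrative, not taken from the source:

import org.apache.beam.sdk.transforms.Combine;

// Hypothetical UDAF that sums the squares of its integer inputs.
public static class SquareSum extends Combine.CombineFn<Integer, Integer, Integer> {

  @Override
  public Integer createAccumulator() {
    return 0;
  }

  @Override
  public Integer addInput(Integer accumulator, Integer input) {
    return accumulator + input * input;
  }

  @Override
  public Integer mergeAccumulators(Iterable<Integer> accumulators) {
    int sum = 0;
    for (Integer partial : accumulators) {
      sum += partial;
    }
    return sum;
  }

  @Override
  public Integer extractOutput(Integer accumulator) {
    return accumulator;
  }
}

// Registration, then use in a GROUP BY query:
//   sqlEnv.registerUdaf("squaresum", new SquareSum());
//   SELECT f_key, squaresum(f_int) FROM PCOLLECTION GROUP BY f_key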
Example 10: getCombineFn
import org.apache.beam.sdk.transforms.Combine; // import the package/class the method depends on
Combine.CombineFn<InputT, InterT, OutputT> getCombineFn();