本文整理汇总了Java中org.apache.beam.runners.flink.FlinkPipelineOptions类的典型用法代码示例。如果您正苦于以下问题:Java FlinkPipelineOptions类的具体用法?Java FlinkPipelineOptions怎么用?Java FlinkPipelineOptions使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
FlinkPipelineOptions类属于org.apache.beam.runners.flink包,在下文中一共展示了FlinkPipelineOptions类的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: ReaderInvocationUtil
import org.apache.beam.runners.flink.FlinkPipelineOptions; //导入依赖的package包/类
public ReaderInvocationUtil(
String stepName,
PipelineOptions options,
FlinkMetricContainer container) {
FlinkPipelineOptions flinkPipelineOptions = options.as(FlinkPipelineOptions.class);
this.stepName = stepName;
enableMetrics = flinkPipelineOptions.getEnableMetrics();
this.container = container;
}
示例2: DoFnOperator
import org.apache.beam.runners.flink.FlinkPipelineOptions; //导入依赖的package包/类
/**
 * Creates the streaming operator that executes a {@link DoFn}.
 *
 * @param doFn                 the user function to run
 * @param stepName             name of the transform step (for metrics/logging)
 * @param inputCoder           coder for incoming windowed elements
 * @param mainOutputTag        tag identifying the main output
 * @param additionalOutputTags tags for any side outputs
 * @param outputManagerFactory factory producing the output manager
 * @param windowingStrategy    windowing strategy of the input collection
 * @param sideInputTagMapping  mapping from union-tag index to side-input view
 * @param sideInputs           the side-input views themselves
 * @param options              pipeline options, serialized for distribution
 * @param keyCoder             coder of the key for keyed state, or null if unkeyed
 */
public DoFnOperator(
    DoFn<InputT, OutputT> doFn,
    String stepName,
    Coder<WindowedValue<InputT>> inputCoder,
    TupleTag<OutputT> mainOutputTag,
    List<TupleTag<?>> additionalOutputTags,
    OutputManagerFactory<OutputT> outputManagerFactory,
    WindowingStrategy<?, ?> windowingStrategy,
    Map<Integer, PCollectionView<?>> sideInputTagMapping,
    Collection<PCollectionView<?>> sideInputs,
    PipelineOptions options,
    Coder<?> keyCoder) {
  this.doFn = doFn;
  this.stepName = stepName;
  this.inputCoder = inputCoder;
  this.mainOutputTag = mainOutputTag;
  this.additionalOutputTags = additionalOutputTags;
  this.outputManagerFactory = outputManagerFactory;
  this.windowingStrategy = windowingStrategy;
  this.sideInputTagMapping = sideInputTagMapping;
  this.sideInputs = sideInputs;
  // Options must survive serialization to the Flink task managers.
  this.serializedOptions = new SerializablePipelineOptions(options);
  this.keyCoder = keyCoder;
  // Timers are keyed by the window coder of the input's windowing strategy.
  this.timerCoder =
      TimerInternals.TimerDataCoder.of(windowingStrategy.getWindowFn().windowCoder());

  // Bundle limits come from the Flink-specific view of the options.
  FlinkPipelineOptions pipelineOptions = options.as(FlinkPipelineOptions.class);
  this.maxBundleSize = pipelineOptions.getMaxBundleSize();
  this.maxBundleTimeMills = pipelineOptions.getMaxBundleTimeMills();

  setChainingStrategy(ChainingStrategy.ALWAYS);
}
示例3: setup
import org.apache.beam.runners.flink.FlinkPipelineOptions; //导入依赖的package包/类
/**
 * Initializes the operator; ensures Beam's {@code FileSystems} registry is
 * configured with the pipeline options before any element is processed.
 */
@Override
public void setup(
    StreamTask<?, ?> containingTask,
    StreamConfig config,
    Output<StreamRecord<WindowedValue<OutputT>>> output) {
  // FileSystems must see the deserialized options on this task manager
  // before super.setup() wires up the operator.
  FileSystems.setDefaultPipelineOptions(
      serializedOptions.get().as(FlinkPipelineOptions.class));
  super.setup(containingTask, config, output);
}
示例4: mapPartition
import org.apache.beam.runners.flink.FlinkPipelineOptions; //导入依赖的package包/类
/**
 * Runs the wrapped {@link DoFn} over one input partition as a single bundle:
 * start bundle, process every element, finish bundle.
 *
 * @param values elements of this partition
 * @param out    collector receiving the main (and, via cast, side) outputs
 */
@Override
public void mapPartition(
    Iterable<WindowedValue<InputT>> values,
    Collector<WindowedValue<OutputT>> out) throws Exception {
  RuntimeContext runtimeContext = getRuntimeContext();

  // A single-entry output map means there are no side outputs; otherwise the
  // multi-output manager demultiplexes by tag.
  DoFnRunners.OutputManager manager =
      outputMap.size() == 1
          ? new FlinkDoFnFunction.DoFnOutputManager(out)
          : new FlinkDoFnFunction.MultiDoFnOutputManager((Collector) out, outputMap);

  List<TupleTag<?>> sideOutputTags = Lists.newArrayList(outputMap.keySet());
  DoFnRunner<InputT, OutputT> runner = DoFnRunners.simpleRunner(
      serializedOptions.get(), doFn,
      new FlinkSideInputReader(sideInputs, runtimeContext),
      manager,
      mainOutputTag,
      sideOutputTags,
      new FlinkNoOpStepContext(),
      windowingStrategy);

  // Optionally decorate the runner so metric updates reach Flink.
  if (serializedOptions.get().as(FlinkPipelineOptions.class).getEnableMetrics()) {
    runner = new DoFnRunnerWithMetricsUpdate<>(stepName, runner, getRuntimeContext());
  }

  runner.startBundle();
  for (WindowedValue<InputT> element : values) {
    runner.processElement(element);
  }
  runner.finishBundle();
}
示例5: testSingleOutput
import org.apache.beam.runners.flink.FlinkPipelineOptions; //导入依赖的package包/类
/**
 * Verifies that a DoFnOperator with a single (main) output forwards an
 * element unchanged through the Flink test harness.
 */
@Test
@SuppressWarnings("unchecked")
public void testSingleOutput() throws Exception {
  Coder<WindowedValue<String>> windowedCoder =
      WindowedValue.getValueOnlyCoder(StringUtf8Coder.of());
  TupleTag<String> mainTag = new TupleTag<>("main-output");

  DoFnOperator<String, String> operator = new DoFnOperator<>(
      new IdentityDoFn<String>(),
      "stepName",
      windowedCoder,
      mainTag,
      Collections.<TupleTag<?>>emptyList(),
      new DoFnOperator.MultiOutputOutputManagerFactory<>(mainTag, windowedCoder),
      WindowingStrategy.globalDefault(),
      new HashMap<Integer, PCollectionView<?>>(), /* side-input mapping */
      Collections.<PCollectionView<?>>emptyList(), /* side inputs */
      PipelineOptionsFactory.as(FlinkPipelineOptions.class),
      null);

  OneInputStreamOperatorTestHarness<WindowedValue<String>, WindowedValue<String>> harness =
      new OneInputStreamOperatorTestHarness<>(operator);
  harness.open();

  // The identity DoFn should emit exactly the element we push in.
  harness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("Hello")));
  assertThat(
      this.<String>stripStreamRecordFromWindowedValue(harness.getOutput()),
      contains(WindowedValue.valueInGlobalWindow("Hello")));

  harness.close();
}
示例6: reduce
import org.apache.beam.runners.flink.FlinkPipelineOptions; //导入依赖的package包/类
// Runs the DoFn over all values of one key as a single bundle. Because this is
// batch execution, all data for the key is available up front: state lives in
// an in-memory StateInternals and timers are driven manually by advancing an
// in-memory timer manager to the end of time after the last element.
@Override
public void reduce(
Iterable<WindowedValue<KV<K, V>>> values,
Collector<WindowedValue<OutputT>> out) throws Exception {
RuntimeContext runtimeContext = getRuntimeContext();
// Choose the output manager: a single entry in outputMap means main output
// only; otherwise demultiplex by tag into Flink side outputs.
DoFnRunners.OutputManager outputManager;
if (outputMap.size() == 1) {
outputManager = new FlinkDoFnFunction.DoFnOutputManager(out);
} else {
// it has some additional Outputs
outputManager =
new FlinkDoFnFunction.MultiDoFnOutputManager((Collector) out, outputMap);
}
final Iterator<WindowedValue<KV<K, V>>> iterator = values.iterator();
// get the first value, we need this for initializing the state internals with the key.
// we are guaranteed to have a first value, otherwise reduce() would not have been called.
WindowedValue<KV<K, V>> currentValue = iterator.next();
final K key = currentValue.getValue().getKey();
final InMemoryStateInternals<K> stateInternals = InMemoryStateInternals.forKey(key);
// Used with Batch, we know that all the data is available for this key. We can't use the
// timer manager from the context because it doesn't exist. So we create one and advance
// time to the end after processing all elements.
final InMemoryTimerInternals timerInternals = new InMemoryTimerInternals();
timerInternals.advanceProcessingTime(Instant.now());
timerInternals.advanceSynchronizedProcessingTime(Instant.now());
List<TupleTag<?>> additionalOutputTags = Lists.newArrayList(outputMap.keySet());
// The step context exposes the per-key state and the manual timer manager
// to the runner; everything else is a no-op in batch mode.
DoFnRunner<KV<K, V>, OutputT> doFnRunner = DoFnRunners.simpleRunner(
serializedOptions.get(), dofn,
new FlinkSideInputReader(sideInputs, runtimeContext),
outputManager,
mainOutputTag,
additionalOutputTags,
new FlinkNoOpStepContext() {
@Override
public StateInternals stateInternals() {
return stateInternals;
}
@Override
public TimerInternals timerInternals() {
return timerInternals;
}
},
windowingStrategy);
// Optionally wrap the runner so metric updates are forwarded to Flink.
if ((serializedOptions.get().as(FlinkPipelineOptions.class))
.getEnableMetrics()) {
doFnRunner = new DoFnRunnerWithMetricsUpdate<>(stepName, doFnRunner, getRuntimeContext());
}
// Process every element of this key within one bundle.
doFnRunner.startBundle();
doFnRunner.processElement(currentValue);
while (iterator.hasNext()) {
currentValue = iterator.next();
doFnRunner.processElement(currentValue);
}
// Finish any pending windows by advancing the input watermark to infinity.
timerInternals.advanceInputWatermark(BoundedWindow.TIMESTAMP_MAX_VALUE);
// Finally, advance the processing time to infinity to fire any timers.
timerInternals.advanceProcessingTime(BoundedWindow.TIMESTAMP_MAX_VALUE);
timerInternals.advanceSynchronizedProcessingTime(BoundedWindow.TIMESTAMP_MAX_VALUE);
// Deliver all timers that became eligible, then close the bundle.
fireEligibleTimers(timerInternals, doFnRunner);
doFnRunner.finishBundle();
}
示例7: testMultiOutputOutput
import org.apache.beam.runners.flink.FlinkPipelineOptions; //导入依赖的package包/类
// Verifies that a DoFnOperator with one main output and two side outputs
// routes elements to the correct Flink side-output streams: "one"/"two" go to
// their respective side outputs, "hello" goes everywhere.
@Test
@SuppressWarnings("unchecked")
public void testMultiOutputOutput() throws Exception {
WindowedValue.ValueOnlyWindowedValueCoder<String> coder =
WindowedValue.getValueOnlyCoder(StringUtf8Coder.of());
TupleTag<String> mainOutput = new TupleTag<>("main-output");
TupleTag<String> additionalOutput1 = new TupleTag<>("output-1");
TupleTag<String> additionalOutput2 = new TupleTag<>("output-2");
// Map each Beam side-output tag to a Flink OutputTag; the anonymous
// subclasses ({}) capture the generic type for Flink's type extraction.
ImmutableMap<TupleTag<?>, OutputTag<?>> tagsToOutputTags =
ImmutableMap.<TupleTag<?>, OutputTag<?>>builder()
.put(additionalOutput1, new OutputTag<String>(additionalOutput1.getId()){})
.put(additionalOutput2, new OutputTag<String>(additionalOutput2.getId()){})
.build();
// All outputs share the same string coder here; the raw cast is needed to
// fit the wildcard map value type.
ImmutableMap<TupleTag<?>, Coder<WindowedValue<?>>> tagsToCoders =
ImmutableMap.<TupleTag<?>, Coder<WindowedValue<?>>>builder()
.put(mainOutput, (Coder) coder)
.put(additionalOutput1, coder)
.put(additionalOutput2, coder)
.build();
// Stable integer ids per tag, as required by the output manager factory.
ImmutableMap<TupleTag<?>, Integer> tagsToIds =
ImmutableMap.<TupleTag<?>, Integer>builder()
.put(mainOutput, 0)
.put(additionalOutput1, 1)
.put(additionalOutput2, 2)
.build();
DoFnOperator<String, String> doFnOperator = new DoFnOperator<>(
new MultiOutputDoFn(additionalOutput1, additionalOutput2),
"stepName",
coder,
mainOutput,
ImmutableList.<TupleTag<?>>of(additionalOutput1, additionalOutput2),
new DoFnOperator.MultiOutputOutputManagerFactory(
mainOutput, tagsToOutputTags, tagsToCoders, tagsToIds),
WindowingStrategy.globalDefault(),
new HashMap<Integer, PCollectionView<?>>(), /* side-input mapping */
Collections.<PCollectionView<?>>emptyList(), /* side inputs */
PipelineOptionsFactory.as(FlinkPipelineOptions.class),
null);
OneInputStreamOperatorTestHarness<WindowedValue<String>, WindowedValue<String>> testHarness =
new OneInputStreamOperatorTestHarness<>(doFnOperator);
testHarness.open();
testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("one")));
testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("two")));
testHarness.processElement(new StreamRecord<>(WindowedValue.valueInGlobalWindow("hello")));
// Main output receives only the broadcast "hello" element.
assertThat(
this.stripStreamRecord(testHarness.getOutput()),
contains(
WindowedValue.valueInGlobalWindow("got: hello")));
// Side output 1: its own element plus the broadcast one.
assertThat(
this.stripStreamRecord(testHarness.getSideOutput(tagsToOutputTags.get(additionalOutput1))),
contains(
WindowedValue.valueInGlobalWindow("extra: one"),
WindowedValue.valueInGlobalWindow("got: hello")));
// Side output 2: its own element plus the broadcast one.
assertThat(
this.stripStreamRecord(testHarness.getSideOutput(tagsToOutputTags.get(additionalOutput2))),
contains(
WindowedValue.valueInGlobalWindow("extra: two"),
WindowedValue.valueInGlobalWindow("got: hello")));
testHarness.close();
}