This article collects typical usage examples of the Java class org.apache.beam.sdk.coders.VoidCoder. If you are unsure what VoidCoder is for or how to use it, the curated examples below should help.
The VoidCoder class belongs to the org.apache.beam.sdk.coders package. Fifteen code examples are shown below, ordered by popularity.
Example 1: insertDefaultValueIfEmpty
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
private PCollection<OutputT> insertDefaultValueIfEmpty(PCollection<OutputT> maybeEmpty) {
  final PCollectionView<Iterable<OutputT>> maybeEmptyView = maybeEmpty.apply(
      View.<OutputT>asIterable());
  final OutputT defaultValue = fn.defaultValue();
  PCollection<OutputT> defaultIfEmpty = maybeEmpty.getPipeline()
      .apply("CreateVoid", Create.of((Void) null).withCoder(VoidCoder.of()))
      .apply("ProduceDefault", ParDo.of(
          new DoFn<Void, OutputT>() {
            @ProcessElement
            public void processElement(ProcessContext c) {
              Iterator<OutputT> combined = c.sideInput(maybeEmptyView).iterator();
              if (!combined.hasNext()) {
                c.output(defaultValue);
              }
            }
          }).withSideInputs(maybeEmptyView))
      .setCoder(maybeEmpty.getCoder())
      .setWindowingStrategyInternal(maybeEmpty.getWindowingStrategy());
  return PCollectionList.of(maybeEmpty).and(defaultIfEmpty)
      .apply(Flatten.<OutputT>pCollections());
}
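For context, this is the mechanism that lets Combine.globally emit the CombineFn's default value when its input is empty. A minimal sketch of the observable behavior (assuming a TestPipeline p and the usual Beam imports; Sum.ofIntegers() declares a default value of 0):
PCollection<Integer> empty = p.apply(Create.empty(VarIntCoder.of()));
// The ProduceDefault branch above fires exactly once, so even an empty
// input yields a single global result: the combiner's default value.
PCollection<Integer> sum = empty.apply(Combine.globally(Sum.ofIntegers()));
PAssert.thatSingleton(sum).isEqualTo(0);
p.run();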
Example 2: displayDataForPrimitiveTransforms
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
/**
 * Traverse the specified {@link PTransform}, collecting {@link DisplayData} registered on the
 * inner primitive {@link PTransform PTransforms}.
 *
 * @param root The root {@link PTransform} to traverse
 * @param inputCoder The coder to set for the {@link PTransform} input, or null to infer the
 *     default coder.
 *
 * @return the set of {@link DisplayData} for primitive {@link PTransform PTransforms}.
 */
public <InputT> Set<DisplayData> displayDataForPrimitiveTransforms(
    final PTransform<? super PCollection<InputT>, ? extends POutput> root,
    Coder<InputT> inputCoder) {
  Create.Values<InputT> input;
  if (inputCoder != null) {
    input = Create.empty(inputCoder);
  } else {
    // These types don't actually work, but the pipeline will never be run.
    input = (Create.Values<InputT>) Create.empty(VoidCoder.of());
  }
  Pipeline pipeline = Pipeline.create(options);
  pipeline
      .apply("Input", input)
      .apply("Transform", root);
  return displayDataForPipeline(pipeline, root);
}
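A hedged usage sketch, assuming the enclosing class is Beam's DisplayDataEvaluator test utility and MyDoFn is a placeholder DoFn<String, String> whose display data you want to inspect:
DisplayDataEvaluator evaluator = DisplayDataEvaluator.create();
// Builds the throwaway pipeline above and walks its primitive transforms.
Set<DisplayData> displayData =
    evaluator.displayDataForPrimitiveTransforms(ParDo.of(new MyDoFn()), StringUtf8Coder.of());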
Example 3: testEmptyFlattenAsSideInput
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Test
@Category(ValidatesRunner.class)
public void testEmptyFlattenAsSideInput() {
  final PCollectionView<Iterable<String>> view =
      PCollectionList.<String>empty(p)
          .apply(Flatten.<String>pCollections()).setCoder(StringUtf8Coder.of())
          .apply(View.<String>asIterable());
  PCollection<String> output = p
      .apply(Create.of((Void) null).withCoder(VoidCoder.of()))
      .apply(ParDo.of(new DoFn<Void, String>() {
        @ProcessElement
        public void processElement(ProcessContext c) {
          for (String side : c.sideInput(view)) {
            c.output(side);
          }
        }
      }).withSideInputs(view));
  PAssert.that(output).empty();
  p.run();
}
Example 4: expand
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Override
public PCollection<Iterable<T>> expand(PCollection<T> input) {
  final PCollectionView<Iterable<T>> view = input.apply(View.<T>asIterable());
  return input
      .getPipeline()
      .apply(Create.of((Void) null).withCoder(VoidCoder.of()))
      .apply(
          ParDo.of(
                  new DoFn<Void, Iterable<T>>() {
                    @ProcessElement
                    public void processElement(ProcessContext c) {
                      c.output(c.sideInput(view));
                    }
                  })
              .withSideInputs(view));
}
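Examples 1, 3, 4, 8, and 11 all rely on the same idiom: Create.of((Void) null) builds a single-element PCollection whose only job is to fire a DoFn exactly once per window, while the real data arrives through a side input. A minimal sketch of the idiom in isolation, assuming an existing PCollection<String> names on pipeline p:
final PCollectionView<List<String>> namesView = names.apply(View.<String>asList());
PCollection<Integer> size = p
    .apply(Create.of((Void) null).withCoder(VoidCoder.of()))
    .apply(ParDo.of(new DoFn<Void, Integer>() {
      @ProcessElement
      public void processElement(ProcessContext c) {
        // Runs once; the void element carries no data of its own.
        c.output(c.sideInput(namesView).size());
      }
    }).withSideInputs(namesView));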
Example 5: nonAdditionalInputsWithMultipleNonAdditionalInputsSucceeds
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Test
public void nonAdditionalInputsWithMultipleNonAdditionalInputsSucceeds() {
  Map<TupleTag<?>, PValue> allInputs = new HashMap<>();
  PCollection<Integer> mainInts = pipeline.apply("MainInput", Create.of(12, 3));
  allInputs.put(new TupleTag<Integer>() {}, mainInts);
  PCollection<Void> voids = pipeline.apply("VoidInput", Create.empty(VoidCoder.of()));
  allInputs.put(new TupleTag<Void>() {}, voids);
  AppliedPTransform<PInput, POutput, TestTransform> transform =
      AppliedPTransform.of(
          "additional-free",
          allInputs,
          Collections.<TupleTag<?>, PValue>emptyMap(),
          new TestTransform(),
          pipeline);
  assertThat(
      TransformInputs.nonAdditionalInputs(transform),
      Matchers.<PValue>containsInAnyOrder(voids, mainInts));
}
Example 6: nonAdditionalInputsWithAdditionalInputsSucceeds
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Test
public void nonAdditionalInputsWithAdditionalInputsSucceeds() {
  Map<TupleTag<?>, PValue> additionalInputs = new HashMap<>();
  additionalInputs.put(new TupleTag<String>() {}, pipeline.apply(Create.of("1, 2", "3")));
  additionalInputs.put(new TupleTag<Long>() {}, pipeline.apply(GenerateSequence.from(3L)));
  Map<TupleTag<?>, PValue> allInputs = new HashMap<>();
  PCollection<Integer> mainInts = pipeline.apply("MainInput", Create.of(12, 3));
  allInputs.put(new TupleTag<Integer>() {}, mainInts);
  PCollection<Void> voids = pipeline.apply("VoidInput", Create.empty(VoidCoder.of()));
  allInputs.put(new TupleTag<Void>() {}, voids);
  allInputs.putAll(additionalInputs);
  AppliedPTransform<PInput, POutput, TestTransform> transform =
      AppliedPTransform.of(
          "additional",
          allInputs,
          Collections.<TupleTag<?>, PValue>emptyMap(),
          new TestTransform(additionalInputs),
          pipeline);
  assertThat(
      TransformInputs.nonAdditionalInputs(transform),
      Matchers.<PValue>containsInAnyOrder(mainInts, voids));
}
Example 7: testStreamingWriteOverride
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
private void testStreamingWriteOverride(PipelineOptions options, int expectedNumShards) {
  TestPipeline p = TestPipeline.fromOptions(options);
  StreamingShardedWriteFactory<Object, Void, Object> factory =
      new StreamingShardedWriteFactory<>(p.getOptions());
  WriteFiles<Object, Void, Object> original = WriteFiles.to(new TestSink(tmpFolder.toString()));
  PCollection<Object> objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));
  AppliedPTransform<PCollection<Object>, WriteFilesResult<Void>, WriteFiles<Object, Void, Object>>
      originalApplication =
          AppliedPTransform.of(
              "writefiles",
              objs.expand(),
              Collections.<TupleTag<?>, PValue>emptyMap(),
              original,
              p);
  WriteFiles<Object, Void, Object> replacement =
      (WriteFiles<Object, Void, Object>)
          factory.getReplacementTransform(originalApplication).getTransform();
  assertThat(replacement, not(equalTo((Object) original)));
  assertThat(replacement.getNumShardsProvider().get(), equalTo(expectedNumShards));
}
Example 8: expand
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Override
public PCollection<V> expand(PBegin input) {
  return input
      .apply(Create.of((Void) null).withCoder(VoidCoder.of()))
      .apply(Reify.<Void, V>viewAsValues(view, coder))
      .apply(Values.<V>create());
}
Example 9: inferCoderFromObject
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
/**
 * Attempt to infer the type for some very common Apache Beam parameterized types.
 *
 * <p>TODO: Instead, build a TypeDescriptor so that the {@link CoderRegistry} is invoked
 * for the type instead of hard coding the coders for common types.
 */
private static Coder<?> inferCoderFromObject(CoderRegistry registry, Object o)
    throws CannotProvideCoderException {
  if (o == null) {
    return VoidCoder.of();
  } else if (o instanceof TimestampedValue) {
    return TimestampedValueCoder.of(
        inferCoderFromObject(registry, ((TimestampedValue) o).getValue()));
  } else if (o instanceof List) {
    return ListCoder.of(inferCoderFromObjects(registry, ((Iterable) o)));
  } else if (o instanceof Set) {
    return SetCoder.of(inferCoderFromObjects(registry, ((Iterable) o)));
  } else if (o instanceof Collection) {
    return CollectionCoder.of(inferCoderFromObjects(registry, ((Iterable) o)));
  } else if (o instanceof Iterable) {
    return IterableCoder.of(inferCoderFromObjects(registry, ((Iterable) o)));
  } else if (o instanceof Map) {
    // The value coder must be inferred from the map's values, not its entries.
    return MapCoder.of(
        inferCoderFromObjects(registry, ((Map) o).keySet()),
        inferCoderFromObjects(registry, ((Map) o).values()));
  } else if (o instanceof KV) {
    return KvCoder.of(
        inferCoderFromObject(registry, ((KV) o).getKey()),
        inferCoderFromObject(registry, ((KV) o).getValue()));
  } else {
    return registry.getCoder(o.getClass());
  }
}
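A hedged illustration of what this inference produces under the default registry mappings (String to StringUtf8Coder, Integer to VarIntCoder, Long to VarLongCoder):
CoderRegistry registry = CoderRegistry.createDefault();
// inferCoderFromObject(registry, null)                -> VoidCoder.of()
// inferCoderFromObject(registry, KV.of("a", 1L))      -> KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of())
// inferCoderFromObject(registry, Arrays.asList(1, 2)) -> ListCoder.of(VarIntCoder.of())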
Example 10: expand
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Override
public PCollection<OutputT> expand(PCollection<InputT> input) {
  PCollection<KV<Void, InputT>> withKeys = input
      .apply(WithKeys.<Void, InputT>of((Void) null))
      .setCoder(KvCoder.of(VoidCoder.of(), input.getCoder()));
  Combine.PerKey<Void, InputT, OutputT> combine = Combine.fewKeys(fn, fnDisplayData);
  if (!sideInputs.isEmpty()) {
    combine = combine.withSideInputs(sideInputs);
  }
  PCollection<KV<Void, OutputT>> combined;
  if (fanout >= 2) {
    combined = withKeys.apply(combine.withHotKeyFanout(fanout));
  } else {
    combined = withKeys.apply(combine);
  }
  PCollection<OutputT> output = combined.apply(Values.<OutputT>create());
  if (insertDefault) {
    if (!output.getWindowingStrategy().getWindowFn().isCompatible(new GlobalWindows())) {
      throw new IllegalStateException(fn.getIncompatibleGlobalWindowErrorMessage());
    }
    return insertDefaultValueIfEmpty(output);
  } else {
    return output;
  }
}
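The fanout >= 2 branch above is what the public Combine.Globally#withFanout setting drives. A hedged usage sketch, assuming a PCollection<Integer> ints whose single global key runs hot:
// Pre-aggregates on four intermediate keys before the final merge,
// relieving pressure on the lone global key.
PCollection<Integer> total =
    ints.apply(Combine.globally(Sum.ofIntegers()).withFanout(4));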
Example 11: testSideInputWithNullDefault
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Test
@Category(ValidatesRunner.class)
public void testSideInputWithNullDefault() {
  final PCollectionView<Void> view =
      pipeline.apply("CreateSideInput", Create.of((Void) null).withCoder(VoidCoder.of()))
          .apply(Combine.globally(new SerializableFunction<Iterable<Void>, Void>() {
            @Override
            public Void apply(Iterable<Void> input) {
              return null;
            }
          }).asSingletonView());
  PCollection<String> output =
      pipeline.apply("CreateMainInput", Create.of(""))
          .apply(
              "OutputMainAndSideInputs",
              ParDo.of(new DoFn<String, String>() {
                @ProcessElement
                public void processElement(ProcessContext c) {
                  c.output(c.element() + c.sideInput(view));
                }
              }).withSideInputs(view));
  PAssert.that(output).containsInAnyOrder("null");
  pipeline.run();
}
Example 12: testCreateEmptyIterableWithCoder
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Test
@Category(NeedsRunner.class)
public void testCreateEmptyIterableWithCoder() {
  PCollection<Void> output =
      p.apply(Create.of(Collections.<Void>emptyList()).withCoder(VoidCoder.of()));
  assertEquals(VoidCoder.of(), output.getCoder());
  PAssert.that(output).empty();
  p.run();
}
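The same empty PCollection can be built more directly with Create.empty, as examples 5 through 7 do; the explicit coder stays mandatory either way, since there are no elements to infer it from:
// Equivalent sketch of the test's input using Create.empty.
PCollection<Void> output = p.apply(Create.empty(VoidCoder.of()));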
Example 13: testSourceSplitVoid
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Test
public void testSourceSplitVoid() throws Exception {
  CreateSource<Void> source =
      CreateSource.fromIterable(
          Lists.<Void>newArrayList(null, null, null, null, null), VoidCoder.of());
  PipelineOptions options = PipelineOptionsFactory.create();
  List<? extends BoundedSource<Void>> splitSources = source.split(3, options);
  SourceTestUtils.assertSourcesEqualReferenceSource(source, splitSources, options);
}
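Splitting a source of nulls works because VoidCoder serializes every element to zero bytes. A quick sketch of that property, assuming Beam's CoderUtils helper from org.apache.beam.sdk.util:
byte[] bytes = CoderUtils.encodeToByteArray(VoidCoder.of(), null);
// VoidCoder writes nothing, so the encoded form is empty.
assertEquals(0, bytes.length);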
Example 14: expand
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Override
public PCollection<T> expand(PCollection<T> input) {
  TupleTag<T> mainOutput = new TupleTag<>();
  TupleTag<Void> cleanupSignal = new TupleTag<>();
  PCollectionTuple outputs = input.apply(ParDo.of(new IdentityFn<T>())
      .withOutputTags(mainOutput, TupleTagList.of(cleanupSignal)));
  PCollectionView<Iterable<Void>> cleanupSignalView = outputs.get(cleanupSignal)
      .setCoder(VoidCoder.of())
      .apply(View.<Void>asIterable());
  input
      .getPipeline()
      .apply("Create(CleanupOperation)", Create.of(cleanupOperation))
      .apply(
          "Cleanup",
          ParDo.of(
                  new DoFn<CleanupOperation, Void>() {
                    @ProcessElement
                    public void processElement(ProcessContext c) throws Exception {
                      c.element().cleanup(new ContextContainer(c, jobIdSideInput));
                    }
                  })
              .withSideInputs(jobIdSideInput, cleanupSignalView));
  return outputs.get(mainOutput).setCoder(input.getCoder());
}
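Design note (as far as the snippet shows): IdentityFn never emits to cleanupSignal, so cleanupSignalView is an always-empty side input. Its value here is purely as a dependency edge: the view cannot become ready until the main ParDo has finished a window, which sequences the cleanup DoFn after all elements have passed through.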
Example 15: expand
import org.apache.beam.sdk.coders.VoidCoder; // import the required package/class
@Override
public PCollection<KV<TableDestination, String>> expand(
    PCollection<KV<ShardedKey<DestinationT>, List<String>>> input) {
  PCollectionTuple writeTablesOutputs = input.apply(ParDo.of(new WriteTablesDoFn())
      .withSideInputs(sideInputs)
      .withOutputTags(mainOutputTag, TupleTagList.of(temporaryFilesTag)));
  // Garbage-collect temporary files.
  // We mustn't start garbage collecting files until we are assured that the WriteTablesDoFn has
  // succeeded in loading those files and won't be retried. Otherwise, we might fail part of the
  // way through deleting temporary files, then retry WriteTablesDoFn, which would fail due
  // to missing files, causing either the entire workflow to fail or get stuck (depending on how
  // the runner handles persistent failures).
  writeTablesOutputs
      .get(temporaryFilesTag)
      .setCoder(StringUtf8Coder.of())
      .apply(WithKeys.<Void, String>of((Void) null))
      .setCoder(KvCoder.of(VoidCoder.of(), StringUtf8Coder.of()))
      .apply(Window.<KV<Void, String>>into(new GlobalWindows())
          .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1)))
          .discardingFiredPanes())
      .apply(GroupByKey.<Void, String>create())
      .apply(Values.<Iterable<String>>create())
      .apply(ParDo.of(new GarbageCollectTemporaryFiles()));
  return writeTablesOutputs.get(mainOutputTag);
}