本文整理汇总了Java中org.apache.beam.sdk.io.Read.from方法的典型用法代码示例。如果您正苦于以下问题:Java Read.from方法的具体用法?Java Read.from怎么用?Java Read.from使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.beam.sdk.io.Read
的用法示例。
在下文中一共展示了Read.from方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: doesNotConsumeAlreadyConsumedRead
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
@Test
public void doesNotConsumeAlreadyConsumedRead() {
  Unbounded<Long> unboundedRead = Read.from(CountingSource.unbounded());
  final PCollection<Long> readOutput = pipeline.apply(unboundedRead);
  // Attach a real consumer so the read is already consumed before the check runs.
  final Flatten.PCollections<Long> flatten = Flatten.<Long>pCollections();
  PCollectionList.of(readOutput).apply(flatten);
  UnconsumedReads.ensureAllReadsConsumed(pipeline);
  pipeline.traverseTopologically(
      new PipelineVisitor.Defaults() {
        @Override
        public void visitPrimitiveTransform(Node node) {
          // The read's output must still be consumed only by the flatten we
          // attached; ensureAllReadsConsumed must not add a second consumer.
          if (node.getInputs().values().contains(readOutput)) {
            assertThat(node.getTransform(), Matchers.<PTransform<?, ?>>is(flatten));
          }
        }
      });
}
示例2: streamEventsSource
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
/**
 * Returns a transform yielding a finite number of synthesized events,
 * generated on-the-fly in real time.
 */
public static PTransform<PBegin, PCollection<Event>> streamEventsSource(
    NexmarkConfiguration configuration) {
  // Build the unbounded event source from the standard generator configuration,
  // then wrap it in a Read so it can be applied to a PBegin.
  UnboundedEventSource source =
      new UnboundedEventSource(
          NexmarkUtils.standardGeneratorConfig(configuration),
          configuration.numEventGenerators,
          configuration.watermarkHoldbackSec,
          configuration.isRateLimited);
  return Read.from(source);
}
示例3: testToFromProtoBounded
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
@Test
public void testToFromProtoBounded() throws Exception {
  // TODO: Split into two tests.
  // Only meaningful when the parameterized source under test is bounded.
  assumeThat(source, instanceOf(BoundedSource.class));
  BoundedSource<?> bounded = (BoundedSource<?>) this.source;
  Read.Bounded<?> readTransform = Read.from(bounded);
  // Round-trip the read through its Runner API proto representation.
  ReadPayload proto = ReadTranslation.toProto(readTransform);
  assertThat(proto.getIsBounded(), equalTo(RunnerApi.IsBounded.Enum.BOUNDED));
  BoundedSource<?> roundTripped = ReadTranslation.boundedSourceFromProto(proto);
  assertThat(roundTripped, Matchers.<Source<?>>equalTo(source));
}
示例4: testToFromProtoUnbounded
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
@Test
public void testToFromProtoUnbounded() throws Exception {
  // Only meaningful when the parameterized source under test is unbounded.
  assumeThat(source, instanceOf(UnboundedSource.class));
  UnboundedSource<?, ?> unbounded = (UnboundedSource<?, ?>) this.source;
  Read.Unbounded<?> readTransform = Read.from(unbounded);
  // Round-trip the read through its Runner API proto representation.
  ReadPayload proto = ReadTranslation.toProto(readTransform);
  assertThat(proto.getIsBounded(), equalTo(RunnerApi.IsBounded.Enum.UNBOUNDED));
  UnboundedSource<?, ?> roundTripped = ReadTranslation.unboundedSourceFromProto(proto);
  assertThat(roundTripped, Matchers.<Source<?>>equalTo(source));
}
示例5: matcherProducesUnconsumedValueBoundedRead
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
@Test
public void matcherProducesUnconsumedValueBoundedRead() {
  // Apply a bounded read and deliberately leave its output unconsumed; the
  // locals previously bound here were never read, so the result is not captured.
  pipeline.apply(Read.from(CountingSource.upTo(20L)));
  UnconsumedReads.ensureAllReadsConsumed(pipeline);
  validateConsumed();
}
示例6: matcherProducesUnconsumedValueUnboundedRead
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
@Test
public void matcherProducesUnconsumedValueUnboundedRead() {
  // Apply an unbounded read and deliberately leave its output unconsumed; the
  // locals previously bound here were never read, so the result is not captured.
  pipeline.apply(Read.from(CountingSource.unbounded()));
  UnconsumedReads.ensureAllReadsConsumed(pipeline);
  validateConsumed();
}
示例7: batchEventsSource
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
/**
 * Returns a transform yielding a finite number of synthesized events,
 * generated as a batch.
 */
public static PTransform<PBegin, PCollection<Event>> batchEventsSource(
    NexmarkConfiguration configuration) {
  // Build the bounded event source, then wrap it in a Read rooted at PBegin.
  BoundedEventSource source =
      new BoundedEventSource(
          standardGeneratorConfig(configuration), configuration.numEventGenerators);
  return Read.from(source);
}
示例8: replaceSucceeds
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
@Test
public void replaceSucceeds() {
// Composite transform used only as an enclosing node; it produces no output itself.
PTransform<?, ?> enclosingPT =
new PTransform<PInput, POutput>() {
@Override
public POutput expand(PInput input) {
return PDone.in(input.getPipeline());
}
};
TransformHierarchy.Node enclosing =
hierarchy.pushNode("Enclosing", PBegin.in(pipeline), enclosingPT);
// Push the node that will later be replaced, nested inside "Enclosing".
Create.Values<Long> originalTransform = Create.of(1L);
TransformHierarchy.Node original =
hierarchy.pushNode("Create", PBegin.in(pipeline), originalTransform);
assertThat(hierarchy.getCurrent(), equalTo(original));
PCollection<Long> originalOutput = pipeline.apply(originalTransform);
hierarchy.setOutput(originalOutput);
hierarchy.popNode();
// Popping with an output set should finish specifying the node.
assertThat(original.finishedSpecifying, is(true));
// Close out the enclosing composite as well.
hierarchy.setOutput(PDone.in(pipeline));
hierarchy.popNode();
assertThat(hierarchy.getCurrent(), not(equalTo(enclosing)));
// Replace the Create node with an equivalent bounded Read producing one element.
Read.Bounded<Long> replacementTransform = Read.from(CountingSource.upTo(1L));
PCollection<Long> replacementOutput = pipeline.apply(replacementTransform);
Node replacement = hierarchy.replaceNode(original, PBegin.in(pipeline), replacementTransform);
assertThat(hierarchy.getCurrent(), equalTo(replacement));
hierarchy.setOutput(replacementOutput);
// Map the replacement's output back onto the original's output.
TaggedPValue taggedReplacement = TaggedPValue.ofExpandedValue(replacementOutput);
Map<PValue, ReplacementOutput> replacementOutputs =
Collections.<PValue, ReplacementOutput>singletonMap(
replacementOutput,
ReplacementOutput.of(
TaggedPValue.ofExpandedValue(originalOutput),
taggedReplacement));
hierarchy.replaceOutputs(replacementOutputs);
// The replacement node inherits the original's inputs and enclosing node.
assertThat(replacement.getInputs(), equalTo(original.getInputs()));
assertThat(replacement.getEnclosingNode(), equalTo(original.getEnclosingNode()));
assertThat(replacement.getEnclosingNode(), equalTo(enclosing));
assertThat(
replacement.getTransform(), Matchers.<PTransform<?, ?>>equalTo(replacementTransform));
// The tags of the replacement transform are matched to the appropriate PValues of the original
assertThat(
replacement.getOutputs().keySet(),
Matchers.<TupleTag<?>>contains(taggedReplacement.getTag()));
assertThat(replacement.getOutputs().values(), Matchers.<PValue>contains(originalOutput));
hierarchy.popNode();
}
示例9: read
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
// Builds an unbounded counting read and wraps it as an AppliedPTransform
// rooted at the pipeline's PBegin.
private static AppliedPTransform<?, ?, ?> read(Pipeline pipeline) {
  Read.Unbounded<Long> countingRead = Read.from(CountingSource.unbounded());
  PCollection<Long> counts = pipeline.apply(countingRead);
  return AppliedPTransform.<PBegin, PCollection<Long>, Read.Unbounded<Long>>of(
      "ReadTheCount", pipeline.begin().expand(), counts.expand(), countingRead, pipeline);
}
示例10: readFrom
import org.apache.beam.sdk.io.Read; //导入方法依赖的package包/类
/**
 * Builds a bounded {@code Read} transform over an {@code HDFSFileSource} for the
 * supplied file name or glob pattern, reading with the given Hadoop
 * {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat} and producing
 * key-value pairs of the requested key and value classes.
 */
public static <K, V, T extends FileInputFormat<K, V>> Read.Bounded<KV<K, V>> readFrom(
    String filepattern, Class<T> formatClass, Class<K> keyClass, Class<V> valueClass) {
  // Delegate source construction to the sibling from(...) factory, then wrap it.
  return Read.from(from(filepattern, formatClass, keyClass, valueClass));
}