当前位置: 首页>>代码示例>>Python>>正文


Python Pipeline.run方法代码示例

本文整理汇总了Python中apache_beam.pipeline.Pipeline.run方法的典型用法代码示例。如果您正苦于以下问题:Python Pipeline.run方法的具体用法?Python Pipeline.run怎么用?Python Pipeline.run使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在apache_beam.pipeline.Pipeline的用法示例。


在下文中一共展示了Pipeline.run方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。

示例1: test_full_completion

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
  def test_full_completion(self):
    """Run a trivial pipeline with --template_location and check the saved job.

    Verifies that the template written to template_location round-trips the
    'project' and 'job_name' pipeline options.
    """
    # Local imports: this fragment has no visible module import block to
    # extend, and both names are only needed for cleanup.
    import os
    import shutil

    # Create dummy file and close it.  Note that we need to do this because
    # Windows does not allow NamedTemporaryFiles to be reopened elsewhere
    # before the temporary file is closed.
    dummy_file = tempfile.NamedTemporaryFile(delete=False)
    dummy_file_name = dummy_file.name
    dummy_file.close()

    dummy_dir = tempfile.mkdtemp()

    try:
      remote_runner = DataflowRunner()
      pipeline = Pipeline(remote_runner,
                          options=PipelineOptions([
                              '--dataflow_endpoint=ignored',
                              '--sdk_location=' + dummy_file_name,
                              '--job_name=test-job',
                              '--project=test-project',
                              '--staging_location=' + dummy_dir,
                              '--temp_location=/dev/null',
                              '--template_location=' + dummy_file_name,
                              '--no_auth=True']))

      pipeline | beam.Create([1, 2, 3]) | beam.Map(lambda x: x)  # pylint: disable=expression-not-assigned
      pipeline.run().wait_until_finish()
      with open(dummy_file_name) as template_file:
        saved_job_dict = json.load(template_file)
        # Single lookup of the nested options dict instead of repeating the
        # full path for every assertion.
        saved_options = (
            saved_job_dict['environment']['sdkPipelineOptions']['options'])
        self.assertEqual(saved_options['project'], 'test-project')
        self.assertEqual(saved_options['job_name'], 'test-job')
    finally:
      # Previously both temporaries leaked; remove them even on failure.
      os.unlink(dummy_file_name)
      shutil.rmtree(dummy_dir, ignore_errors=True)
开发者ID:amarouni,项目名称:incubator-beam,代码行数:34,代码来源:template_runner_test.py

示例2: test_biqquery_read_streaming_fail

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
 def test_biqquery_read_streaming_fail(self):
   """A BigQuery batch source must be rejected in --streaming mode."""
   remote_runner = DataflowRunner()
   self.default_properties.append("--streaming")
   p = Pipeline(remote_runner, PipelineOptions(self.default_properties))
   _ = p | beam.io.Read(beam.io.BigQuerySource('some.table'))
   # assertRaisesRegexp is a deprecated alias of assertRaisesRegex
   # (deprecated since Python 3.2).
   with self.assertRaisesRegex(ValueError,
                               r'source is not currently available'):
     p.run()
开发者ID:charlesccychen,项目名称:incubator-beam,代码行数:10,代码来源:dataflow_runner_test.py

示例3: test_remote_runner_translation

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
  def test_remote_runner_translation(self):
    """A Create | FlatMap | GroupByKey pipeline translates and runs."""
    runner = DataflowRunner()
    pipeline = Pipeline(runner,
                        options=PipelineOptions(self.default_properties))

    # pylint: disable=expression-not-assigned
    (pipeline
     | ptransform.Create([1, 2, 3])
     | 'Do' >> ptransform.FlatMap(lambda x: [(x, x)])
     | ptransform.GroupByKey())
    pipeline.run()
开发者ID:aaltay,项目名称:incubator-beam,代码行数:11,代码来源:dataflow_runner_test.py

示例4: test_remote_runner_display_data

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
  def test_remote_runner_display_data(self):
    """Display data declared on a ParDo and its DoFn reaches the job steps."""
    runner = DataflowRunner()
    pipeline = Pipeline(runner,
                        options=PipelineOptions(self.default_properties))

    # TODO: Should not subclass ParDo. Switch to PTransform as soon as
    # composite transforms support display data.
    class SpecialParDo(beam.ParDo):
      def __init__(self, fn, now):
        super(SpecialParDo, self).__init__(fn)
        self.fn = fn
        self.now = now

      def display_data(self):
        return {'asubcomponent': self.fn,
                'a_class': SpecialParDo,
                'a_time': self.now}

    class SpecialDoFn(beam.DoFn):
      def display_data(self):
        return {'dofn_value': 42}

      def process(self):
        pass

    now = datetime.now()
    # pylint: disable=expression-not-assigned
    (pipeline | ptransform.Create([1, 2, 3, 4, 5])
     | 'Do' >> SpecialParDo(SpecialDoFn(), now))

    pipeline.run()
    job_dict = json.loads(str(runner.job))

    # Deterministic ordering so the dict lists compare positionally.
    def sort_key(item):
      return item['namespace'] + item['key']

    steps_with_data = [step for step in job_dict['steps']
                       if step['properties'].get('display_data', [])]
    actual = sorted(steps_with_data[1]['properties']['display_data'],
                    key=sort_key)
    ns = SpecialParDo.__module__ + '.'
    expected = sorted(
        [{'type': 'TIMESTAMP', 'namespace': ns + 'SpecialParDo',
          'value': DisplayDataItem._format_value(now, 'TIMESTAMP'),
          'key': 'a_time'},
         {'type': 'STRING', 'namespace': ns + 'SpecialParDo',
          'value': ns + 'SpecialParDo', 'key': 'a_class',
          'shortValue': 'SpecialParDo'},
         {'type': 'INTEGER', 'namespace': ns + 'SpecialDoFn',
          'value': 42, 'key': 'dofn_value'}],
        key=sort_key)
    self.assertEqual(len(actual), 3)
    self.assertEqual(actual, expected)

示例5: test_streaming_create_translation

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
  def test_streaming_create_translation(self):
    """In streaming mode a Create becomes a pubsub read plus a ParDo."""
    runner = DataflowRunner()
    self.default_properties.append("--streaming")
    pipeline = Pipeline(runner, PipelineOptions(self.default_properties))
    pipeline | ptransform.Create([1])  # pylint: disable=expression-not-assigned
    pipeline.run()
    job_dict = json.loads(str(runner.job))
    steps = job_dict[u'steps']
    self.assertEqual(len(steps), 2)

    self.assertEqual(steps[0][u'kind'], u'ParallelRead')
    self.assertEqual(steps[0][u'properties'][u'pubsub_subscription'],
                     '_starting_signal/')
    self.assertEqual(steps[1][u'kind'], u'ParallelDo')

示例6: test_direct_runner_metrics

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
  def test_direct_runner_metrics(self):
    """Counters, distributions and gauges are reported by the DirectRunner."""

    class MyDoFn(beam.DoFn):
      def start_bundle(self):
        Metrics.counter(self.__class__, 'bundles').inc()

      def finish_bundle(self):
        Metrics.counter(self.__class__, 'finished_bundles').inc()

      def process(self, element):
        Metrics.gauge(self.__class__, 'latest_element').set(element)
        Metrics.counter(self.__class__, 'elements').inc()
        Metrics.distribution(self.__class__, 'element_dist').update(element)
        return [element]

    pipeline = Pipeline(DirectRunner(),
                        options=PipelineOptions(self.default_properties))
    output = (pipeline
              | ptransform.Create([1, 2, 3, 4, 5])
              | 'Do' >> beam.ParDo(MyDoFn()))
    assert_that(output, equal_to([1, 2, 3, 4, 5]))
    result = pipeline.run()
    result.wait_until_finish()
    metrics = result.metrics().query()
    namespace = '{}.{}'.format(MyDoFn.__module__, MyDoFn.__name__)

    # One counter per metric name, scoped to the 'Do' step.
    hc.assert_that(
        metrics['counters'],
        hc.contains_inanyorder(
            MetricResult(
                MetricKey('Do', MetricName(namespace, 'elements')), 5, 5),
            MetricResult(
                MetricKey('Do', MetricName(namespace, 'bundles')), 1, 1),
            MetricResult(
                MetricKey('Do', MetricName(namespace, 'finished_bundles')),
                1, 1)))

    # sum=15, count=5, min=1, max=5 over the Create([1..5]) elements.
    expected_dist = DistributionResult(DistributionData(15, 5, 1, 5))
    hc.assert_that(
        metrics['distributions'],
        hc.contains_inanyorder(
            MetricResult(
                MetricKey('Do', MetricName(namespace, 'element_dist')),
                expected_dist, expected_dist)))

    gauge_result = metrics['gauges'][0]
    hc.assert_that(
        gauge_result.key,
        hc.equal_to(MetricKey('Do', MetricName(namespace, 'latest_element'))))
    hc.assert_that(gauge_result.committed.value, hc.equal_to(5))
    hc.assert_that(gauge_result.attempted.value, hc.equal_to(5))
开发者ID:JavierRoger,项目名称:beam,代码行数:61,代码来源:runner_test.py

示例7: test_bad_path

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
  def test_bad_path(self):
    """An unwritable --template_location makes the run raise IOError."""
    dummy_sdk_file = tempfile.NamedTemporaryFile()
    runner = DataflowRunner()
    pipeline = Pipeline(runner,
                        options=PipelineOptions([
                            '--dataflow_endpoint=ignored',
                            '--sdk_location=' + dummy_sdk_file.name,
                            '--job_name=test-job',
                            '--project=test-project',
                            '--staging_location=ignored',
                            '--temp_location=/dev/null',
                            '--template_location=/bad/path',
                            '--no_auth=True']))
    runner.job = apiclient.Job(pipeline._options)

    with self.assertRaises(IOError):
      pipeline.run().wait_until_finish()
开发者ID:amarouni,项目名称:incubator-beam,代码行数:19,代码来源:template_runner_test.py

示例8: run

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
 def run(self, transform, options=None):
   """Build a one-transform pipeline on this runner and execute it.

   Args:
     transform: the PTransform to apply at the root of the new pipeline.
     options: optional pipeline options for the run.

   Returns:
     The result of running the constructed pipeline.
   """
   # Imported here to avoid circular dependencies.
   # pylint: disable=wrong-import-order, wrong-import-position
   from apache_beam.pipeline import Pipeline
   pipeline = Pipeline(runner=self, options=options)
   pipeline | transform  # pylint: disable=expression-not-assigned
   return pipeline.run()
开发者ID:aaltay,项目名称:incubator-beam,代码行数:11,代码来源:runner.py

示例9: run

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
 def run(self, transform, options=None):
   """Run the given transform or callable with this runner.

   A PTransform is applied directly; any other callable is invoked with
   the pipeline's PBegin so it can build the graph itself.
   """
   # Imported here to avoid circular dependencies.
   # pylint: disable=wrong-import-order, wrong-import-position
   from apache_beam import PTransform
   from apache_beam.pvalue import PBegin
   from apache_beam.pipeline import Pipeline
   pipeline = Pipeline(runner=self, options=options)
   if isinstance(transform, PTransform):
     pipeline | transform  # pylint: disable=expression-not-assigned
   else:
     transform(PBegin(pipeline))
   return pipeline.run()
开发者ID:aljoscha,项目名称:incubator-beam,代码行数:16,代码来源:runner.py

示例10: run_async

# 需要导入模块: from apache_beam.pipeline import Pipeline [as 别名]
# 或者: from apache_beam.pipeline.Pipeline import run [as 别名]
  def run_async(self, transform, options=None):
    """Run the given transform or callable with this runner.

    May return immediately, executing the pipeline in the background.
    The returned result object can be queried for progress, and
    `wait_until_finish` may be called to block until completion.
    """
    # Imported here to avoid circular dependencies.
    # pylint: disable=wrong-import-order, wrong-import-position
    from apache_beam import PTransform
    from apache_beam.pvalue import PBegin
    from apache_beam.pipeline import Pipeline
    pipeline = Pipeline(runner=self, options=options)
    if isinstance(transform, PTransform):
      pipeline | transform  # pylint: disable=expression-not-assigned
    else:
      transform(PBegin(pipeline))
    return pipeline.run()
开发者ID:apsaltis,项目名称:incubator-beam,代码行数:20,代码来源:runner.py


注:本文中的apache_beam.pipeline.Pipeline.run方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。