

Python compiler.Compiler Method Code Examples

This article collects typical usage examples of the Python method kfp.compiler.Compiler. If you are wondering how compiler.Compiler is used in practice, or what working examples look like, the curated code examples below should help. You can also explore further usage examples from its containing module, kfp.compiler.


Fifteen code examples of the compiler.Compiler method are shown below, sorted by popularity by default.
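Before the individual examples, here is a minimal sketch of the pattern they all share, assuming the kfp v1 SDK (where dsl.ContainerOp is available). The operation name, pipeline name, and output file below are illustrative placeholders, not taken from the examples: define a pipeline function with the @dsl.pipeline decorator and pass it to compiler.Compiler().compile() to produce a workflow package.

from kfp import dsl, compiler

def echo_op():
    # A single-step container operation; busybox simply echoes a message.
    return dsl.ContainerOp(
        name='echo',
        image='busybox',
        command=['echo', 'hello world'],
    )

@dsl.pipeline(name='hello-pipeline', description='A minimal example pipeline.')
def hello_pipeline():
    echo_op()

# Compile the pipeline function into a workflow package; the examples below
# write .zip and .tar.gz packages the same way.
compiler.Compiler().compile(hello_pipeline, 'hello_pipeline.tar.gz')

The compiled package can then be uploaded through the Kubeflow Pipelines UI or submitted programmatically with kfp.Client, as Example 9 below does.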

Example 1: test_basic_workflow_without_decorator

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_basic_workflow_without_decorator(self):
    """Test compiling a workflow and appending pipeline params."""
    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    sys.path.append(test_data_dir)
    import basic_no_decorator
    tmpdir = tempfile.mkdtemp()
    try:
      compiled_workflow = compiler.Compiler().create_workflow(
          basic_no_decorator.save_most_frequent_word,
          'Save Most Frequent',
          'Get Most Frequent Word and Save to GCS',
          [
            basic_no_decorator.message_param,
            basic_no_decorator.output_path_param
          ])
      with open(os.path.join(test_data_dir, 'basic_no_decorator.yaml'), 'r') as f:
        golden = yaml.safe_load(f)

      for workflow in golden, compiled_workflow:
        del workflow['metadata']

      self.assertEqual(golden, compiled_workflow)
    finally:
      shutil.rmtree(tmpdir) 
Developer: kubeflow, Project: pipelines, Lines: 26, Source: compiler_tests.py

Example 2: test_set_display_name

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_set_display_name(self):
    """Test a pipeline with a customized task names."""

    import kfp
    op1 = kfp.components.load_component_from_text(
      '''
name: Component name
implementation:
  container:
    image: busybox
'''
    )

    @dsl.pipeline()
    def some_pipeline():
      op1().set_display_name('Custom name')

    workflow_dict = kfp.compiler.Compiler()._compile(some_pipeline)
    template = workflow_dict['spec']['templates'][0]
    self.assertEqual(template['metadata']['annotations']['pipelines.kubeflow.org/task_display_name'], 'Custom name') 
Developer: kubeflow, Project: pipelines, Lines: 22, Source: compiler_tests.py

Example 3: test_set_parallelism

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_set_parallelism(self):
    """Test a pipeline with parallelism limits."""
    def some_op():
        return dsl.ContainerOp(
            name='sleep',
            image='busybox',
            command=['sleep 1'],
        )

    @dsl.pipeline()
    def some_pipeline():
      some_op()
      some_op()
      some_op()
      dsl.get_pipeline_conf().set_parallelism(1)

    workflow_dict = kfp.compiler.Compiler()._compile(some_pipeline)
    self.assertEqual(workflow_dict['spec']['parallelism'], 1) 
Developer: kubeflow, Project: pipelines, Lines: 20, Source: compiler_tests.py

Example 4: test_op_transformers

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_op_transformers(self):
    def some_op():
      return dsl.ContainerOp(
          name='sleep',
          image='busybox',
          command=['sleep 1'],
      )

    @dsl.pipeline(name='some_pipeline')
    def some_pipeline():
      task1 = some_op()
      task2 = some_op()
      task3 = some_op()

      dsl.get_pipeline_conf().op_transformers.append(lambda op: op.set_retry(5))

    workflow_dict = compiler.Compiler()._compile(some_pipeline)
    for template in workflow_dict['spec']['templates']:
      container = template.get('container', None)
      if container:
        self.assertEqual(template['retryStrategy']['limit'], 5) 
Developer: kubeflow, Project: pipelines, Lines: 23, Source: compiler_tests.py

Example 5: test_image_pull_policy

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_image_pull_policy(self):
    def some_op():
      return dsl.ContainerOp(
          name='sleep',
          image='busybox',
          command=['sleep 1'],
      )

    @dsl.pipeline(name='some_pipeline')
    def some_pipeline():
      task1 = some_op()
      task2 = some_op()
      task3 = some_op()

      dsl.get_pipeline_conf().set_image_pull_policy(policy="Always")
    workflow_dict = compiler.Compiler()._compile(some_pipeline)
    for template in workflow_dict['spec']['templates']:
      container = template.get('container', None)
      if container:
        self.assertEqual(template['container']['imagePullPolicy'], "Always") 
Developer: kubeflow, Project: pipelines, Lines: 22, Source: compiler_tests.py

Example 6: test_image_pull_policy_invalid_setting

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_image_pull_policy_invalid_setting(self):
    def some_op():
      return dsl.ContainerOp(
          name='sleep',
          image='busybox',
          command=['sleep 1'],
      )

    with self.assertRaises(ValueError):
      @dsl.pipeline(name='some_pipeline')
      def some_pipeline():
        task1 = some_op()
        task2 = some_op()
        dsl.get_pipeline_conf().set_image_pull_policy(policy="Alwayss")

      workflow_dict = compiler.Compiler()._compile(some_pipeline) 
Developer: kubeflow, Project: pipelines, Lines: 18, Source: compiler_tests.py

Example 7: test_set_default_pod_node_selector

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_set_default_pod_node_selector(self):
    """Test a pipeline with node selector."""
    def some_op():
        return dsl.ContainerOp(
            name='sleep',
            image='busybox',
            command=['sleep 1'],
        )

    @dsl.pipeline()
    def some_pipeline():
      some_op()
      dsl.get_pipeline_conf().set_default_pod_node_selector(label_name="cloud.google.com/gke-accelerator", value="nvidia-tesla-p4")

    workflow_dict = kfp.compiler.Compiler()._compile(some_pipeline)
    self.assertEqual(workflow_dict['spec']['nodeSelector'], {"cloud.google.com/gke-accelerator":"nvidia-tesla-p4"}) 
Developer: kubeflow, Project: pipelines, Lines: 18, Source: compiler_tests.py

Example 8: test_init_container

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_init_container(self):
    echo = dsl.UserContainer(
      name='echo',
      image='alpine:latest',
      command=['echo', 'bye'])

    @dsl.pipeline(name='InitContainer', description='A pipeline with init container.')
    def init_container_pipeline():
      dsl.ContainerOp(
        name='hello',
        image='alpine:latest',
        command=['echo', 'hello'],
        init_containers=[echo])

    workflow_dict = compiler.Compiler()._compile(init_container_pipeline)
    for template in workflow_dict['spec']['templates']:
      init_containers = template.get('initContainers', None)
      if init_containers:
        self.assertEqual(len(init_containers),1)
        init_container = init_containers[0]
        self.assertEqual(init_container, {'image':'alpine:latest', 'command': ['echo', 'bye'], 'name': 'echo'}) 
Developer: kubeflow, Project: pipelines, Lines: 23, Source: compiler_tests.py

Example 9: hosted_kfp_test

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def hosted_kfp_test(data, context):
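  # Note: HOST, get_access_token, and sequential_pipeline are not shown in this
  # snippet; they are assumed to be defined elsewhere in the source file (main.py).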
  logging.info('Event ID: {}'.format(context.event_id))
  logging.info('Event type: {}'.format(context.event_type))
  logging.info('Data: {}'.format(data))
  logging.info('Bucket: {}'.format(data['bucket']))
  logging.info('File: {}'.format(data['name']))
  file_uri = 'gs://%s/%s' % (data['bucket'], data['name'])
  logging.info('Using file uri: %s', file_uri)
  
  logging.info('Metageneration: {}'.format(data['metageneration']))
  logging.info('Created: {}'.format(data['timeCreated']))
  logging.info('Updated: {}'.format(data['updated']))
  
  token = get_access_token() 
  logging.info('attempting to launch pipeline run.')
  ts = int(datetime.datetime.utcnow().timestamp() * 100000)
  client = kfp.Client(host=HOST, existing_token=token)
  compiler.Compiler().compile(sequential_pipeline, '/tmp/sequential.tar.gz')
  exp = client.create_experiment(name='gcstriggered')  # this is a 'get or create' op
  res = client.run_pipeline(exp.id, 'sequential_' + str(ts), '/tmp/sequential.tar.gz',
                              params={'filename': file_uri})
  logging.info(res) 
Developer: amygdala, Project: code-snippets, Lines: 24, Source: main.py

Example 10: test_basic_workflow

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_basic_workflow(self):
    """Test compiling a basic workflow."""

    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    sys.path.append(test_data_dir)
    import basic
    tmpdir = tempfile.mkdtemp()
    package_path = os.path.join(tmpdir, 'workflow.zip')
    try:
      compiler.Compiler().compile(basic.save_most_frequent_word, package_path)
      with open(os.path.join(test_data_dir, 'basic.yaml'), 'r') as f:
        golden = yaml.safe_load(f)
      compiled = self._get_yaml_from_zip(package_path)

      for workflow in golden, compiled:
        del workflow['metadata']
        for template in workflow['spec']['templates']:
          template.pop('metadata', None)

      self.maxDiff = None
      # Comment next line for generating golden yaml.
      self.assertEqual(golden, compiled)
    finally:
      # Replace next line with commented line for gathering golden yaml.
      shutil.rmtree(tmpdir)
      # print(tmpdir) 
Developer: kubeflow, Project: pipelines, Lines: 28, Source: compiler_tests.py

Example 11: test_composing_workflow

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_composing_workflow(self):
    """Test compiling a simple workflow, and a bigger one composed from the simple one."""

    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    sys.path.append(test_data_dir)
    import compose
    tmpdir = tempfile.mkdtemp()
    try:
      # First make sure the simple pipeline can be compiled.
      simple_package_path = os.path.join(tmpdir, 'simple.zip')
      compiler.Compiler().compile(compose.save_most_frequent_word, simple_package_path)

      # Then make sure the composed pipeline can be compiled and also compare with golden.
      compose_package_path = os.path.join(tmpdir, 'compose.zip')
      compiler.Compiler().compile(compose.download_save_most_frequent_word, compose_package_path)
      with open(os.path.join(test_data_dir, 'compose.yaml'), 'r') as f:
        golden = yaml.safe_load(f)
      compiled = self._get_yaml_from_zip(compose_package_path)

      for workflow in golden, compiled:
        del workflow['metadata']
        for template in workflow['spec']['templates']:
          template.pop('metadata', None)

      self.maxDiff = None
      # Comment next line for generating golden yaml.
      self.assertEqual(golden, compiled)
    finally:
      # Replace next line with commented line for gathering golden yaml.
      shutil.rmtree(tmpdir)
      # print(tmpdir) 
Developer: kubeflow, Project: pipelines, Lines: 33, Source: compiler_tests.py

Example 12: test_py_retry

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_py_retry(self):
    """Test retry functionality."""
    number_of_retries = 137
    def my_pipeline():
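      # some_op is not shown in this snippet; it is assumed to be a ContainerOp
      # factory like the ones defined in the earlier examples.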
      some_op().set_retry(number_of_retries)

    workflow = kfp.compiler.Compiler()._compile(my_pipeline)
    name_to_template = {template['name']: template for template in workflow['spec']['templates']}
    main_dag_tasks = name_to_template[workflow['spec']['entrypoint']]['dag']['tasks']
    template = name_to_template[main_dag_tasks[0]['template']]

    self.assertEqual(template['retryStrategy']['limit'], number_of_retries) 
Developer: kubeflow, Project: pipelines, Lines: 14, Source: compiler_tests.py

Example 13: test_affinity

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_affinity(self):
    """Test affinity functionality."""
    exp_affinity = {
      'affinity': {
        'nodeAffinity': {
          'requiredDuringSchedulingIgnoredDuringExecution': {
            'nodeSelectorTerms': [
              {'matchExpressions': [
                {
                  'key': 'beta.kubernetes.io/instance-type',
                  'operator': 'In',
                  'values': ['p2.xlarge']}
              ]
              }]
          }}
      }
    }
    def my_pipeline():
      affinity = V1Affinity(
        node_affinity=V1NodeAffinity(
          required_during_scheduling_ignored_during_execution=V1NodeSelector(
            node_selector_terms=[V1NodeSelectorTerm(
              match_expressions=[V1NodeSelectorRequirement(
                key='beta.kubernetes.io/instance-type', operator='In', values=['p2.xlarge'])])])))
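      # some_op is not shown in this snippet; it is assumed to be a ContainerOp
      # factory like the ones defined in the earlier examples.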
      some_op().add_affinity(affinity)

    workflow = kfp.compiler.Compiler()._compile(my_pipeline)

    self.assertEqual(workflow['spec']['templates'][1]['affinity'], exp_affinity['affinity']) 
Developer: kubeflow, Project: pipelines, Lines: 31, Source: compiler_tests.py

Example 14: test_type_checking_with_inconsistent_types

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_type_checking_with_inconsistent_types(self):
    """Test type check pipeline parameters against component metadata."""
    @component
    def a_op(field_m: {'GCSPath': {'path_type': 'file', 'file_type':'tsv'}}, field_o: Integer()):
      return ContainerOp(
          name = 'operator a',
          image = 'gcr.io/ml-pipeline/component-b',
          arguments = [
              '--field-l', field_m,
              '--field-o', field_o,
          ],
      )

    @pipeline(
        name='p1',
        description='description1'
    )
    def my_pipeline(a: {'GCSPath': {'path_type':'file', 'file_type': 'csv'}}='good', b: Integer()=12):
      a_op(field_m=a, field_o=b)

    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    sys.path.append(test_data_dir)
    tmpdir = tempfile.mkdtemp()
    try:
      simple_package_path = os.path.join(tmpdir, 'simple.tar.gz')
      with self.assertRaises(InconsistentTypeException):
        compiler.Compiler().compile(my_pipeline, simple_package_path, type_check=True)
      compiler.Compiler().compile(my_pipeline, simple_package_path, type_check=False)

    finally:
      shutil.rmtree(tmpdir) 
Developer: kubeflow, Project: pipelines, Lines: 33, Source: compiler_tests.py

Example 15: test_type_checking_with_json_schema

# Required import: from kfp import compiler [as alias]
# Or: from kfp.compiler import Compiler [as alias]
def test_type_checking_with_json_schema(self):
    """Test type check pipeline parameters against the json schema."""
    @component
    def a_op(field_m: {'GCRPath': {'openapi_schema_validator': {"type": "string", "pattern": "^.*gcr\\.io/.*$"}}}, field_o: 'Integer'):
      return ContainerOp(
          name = 'operator a',
          image = 'gcr.io/ml-pipeline/component-b',
          arguments = [
              '--field-l', field_m,
              '--field-o', field_o,
          ],
      )

    @pipeline(
        name='p1',
        description='description1'
    )
    def my_pipeline(a: {'GCRPath': {'openapi_schema_validator': {"type": "string", "pattern": "^.*gcr\\.io/.*$"}}}='good', b: 'Integer'=12):
      a_op(field_m=a, field_o=b)

    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    sys.path.append(test_data_dir)
    tmpdir = tempfile.mkdtemp()
    try:
      simple_package_path = os.path.join(tmpdir, 'simple.tar.gz')
      import jsonschema
      with self.assertRaises(jsonschema.exceptions.ValidationError):
        compiler.Compiler().compile(my_pipeline, simple_package_path, type_check=True)

    finally:
      shutil.rmtree(tmpdir) 
Developer: kubeflow, Project: pipelines, Lines: 33, Source: compiler_tests.py


Note: The kfp.compiler.Compiler examples in this article were collected by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their authors; copyright in the source code remains with the original authors, and distribution or use should follow the license of the corresponding project. Do not reproduce without permission.