本文整理汇总了Python中kfp.dsl.pipeline方法的典型用法代码示例。如果您正苦于以下问题:Python dsl.pipeline方法的具体用法?Python dsl.pipeline怎么用?Python dsl.pipeline使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类kfp.dsl
的用法示例。
在下文中一共展示了dsl.pipeline方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: container
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def container(self):
    """Return the `Container` object backing this op.

    The object mirrors the `container` field of
    `io.argoproj.workflow.v1alpha1.Template` and can be used to adjust
    container configuration fluently.

    Example:
      import kfp.dsl as dsl
      from kubernetes.client.models import V1EnvVar

      @dsl.pipeline(name='example_pipeline')
      def immediate_value_pipeline():
        op1 = (dsl.ContainerOp(name='example', image='nginx:alpine')
                 .container
                 .add_env_variable(V1EnvVar(name='HOST', value='foo.bar'))
                 .add_env_variable(V1EnvVar(name='PORT', value='80'))
                 .parent)  # return the parent `ContainerOp`
    """
    return self._container
示例2: parse_arguments
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def parse_arguments():
    """Parse command line arguments.

    Returns:
      argparse.Namespace with attributes py, function, namespace, output
      and disable_type_check.
    """
    # (flag, keyword-arguments) pairs, kept in the original registration
    # order so the generated --help output is unchanged.
    arg_specs = (
        ('--py',
         dict(type=str, help='local absolute path to a py file.')),
        ('--function',
         dict(type=str,
              help='The name of the function to compile if there are multiple.')),
        ('--namespace',
         dict(type=str, help='The namespace for the pipeline function')),
        ('--output',
         dict(type=str, required=True,
              help='local path to the output workflow yaml file.')),
        ('--disable-type-check',
         dict(action='store_true',
              help='disable the type check, default is enabled.')),
    )
    parser = argparse.ArgumentParser()
    for flag, kwargs in arg_specs:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args()
示例3: _compile_pipeline_function
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def _compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check):
    """Select one pipeline function from the candidates and compile it.

    Args:
      pipeline_funcs: list of @dsl.pipeline-decorated functions found in the file.
      function_name: optional name of the function to compile; required when
        more than one candidate exists.
      output_path: where the compiled workflow is written.
      type_check: whether the compiler performs DSL type checking.

    Raises:
      ValueError: if no candidate exists, if several exist and no name was
        given, or if the named function is not among the candidates.
    """
    if not pipeline_funcs:
        raise ValueError('A function with @dsl.pipeline decorator is required in the py file.')
    if len(pipeline_funcs) > 1 and not function_name:
        func_names = [x.__name__ for x in pipeline_funcs]
        raise ValueError('There are multiple pipelines: %s. Please specify --function.' % func_names)

    if function_name:
        matches = [f for f in pipeline_funcs if f.__name__ == function_name]
        if not matches:
            raise ValueError('The function "%s" does not exist. '
                             'Did you forget @dsl.pipeline decoration?' % function_name)
        pipeline_func = matches[0]
    else:
        pipeline_func = pipeline_funcs[0]

    kfp.compiler.Compiler().compile(pipeline_func, output_path, type_check)
示例4: test_decorator
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def test_decorator(self):
    """Test @pipeline decorator."""
    @pipeline(
        name='p1',
        description='description1'
    )
    def my_pipeline1():
        pass

    @pipeline(
        name='p2',
        description='description2'
    )
    def my_pipeline2():
        pass

    # The decorator should record both the human name and the description
    # on each decorated function.
    expectations = (
        (my_pipeline1, 'p1', 'description1'),
        (my_pipeline2, 'p2', 'description2'),
    )
    for func, _, _ in expectations:
        pass
    for func, expected_name, _ in expectations:
        self.assertEqual(func._component_human_name, expected_name)
    for func, _, expected_description in expectations:
        self.assertEqual(func._component_description, expected_description)
示例5: pipeline
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def pipeline(loopidy_doop: list = [3, 5, 7, 9]):
    """Fan out over the `loopidy_doop` pipeline parameter with ParallelFor.

    NOTE: the mutable default is intentional here — kfp records it as the
    pipeline parameter default and the function never mutates it.
    """
    # Upstream op that writes a JSON list to /tmp/out.json, surfaced as
    # the 'out' file output.
    producer = dsl.ContainerOp(
        name="my-out-cop0",
        image='python:alpine3.6',
        command=["sh", "-c"],
        arguments=[
            'python -c "import json; import sys; json.dump([i for i in range(20, 31)], open(\'/tmp/out.json\', \'w\'))"'],
        file_outputs={'out': '/tmp/out.json'},
    )

    # One parallel branch per entry of the loop argument; each branch runs
    # after the producer finishes.
    with dsl.ParallelFor(loopidy_doop) as item:
        dsl.ContainerOp(
            name="my-in-cop1",
            image="library/bash:4.4.23",
            command=["sh", "-c"],
            arguments=["echo no output global op1, item: %s" % item],
        ).after(producer)

    # Final op outside the loop, consuming the producer's output.
    dsl.ContainerOp(
        name="my-out-cop2",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo no output global op2, outp: %s" % producer.output],
    )
示例6: pipeline
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def pipeline(my_pipe_param: int = 10):
    """ParallelFor over a static list of dicts, with attribute access on items."""
    loop_args = [{'a': 1, 'b': 2}, {'a': 10, 'b': 20}]

    # Each loop iteration gets its own pair of ops; item.a / item.b access
    # the keys of the current dict.
    with dsl.ParallelFor(loop_args) as item:
        dsl.ContainerOp(
            name="my-in-coop1",
            image="library/bash:4.4.23",
            command=["sh", "-c"],
            arguments=["echo op1 %s %s" % (item.a, my_pipe_param)],
        )
        dsl.ContainerOp(
            name="my-in-coop2",
            image="library/bash:4.4.23",
            command=["sh", "-c"],
            arguments=["echo op2 %s" % item.b],
        )

    # Op outside the loop, echoing only the pipeline parameter.
    dsl.ContainerOp(
        name="my-out-cop",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo %s" % my_pipe_param],
    )
示例7: pipeline
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def pipeline():
    """ParallelFor over a dynamic list produced by an upstream op's output."""
    # Writes a JSON list of dicts to /tmp/out.json; that file output is the
    # loop source below.
    json_producer = dsl.ContainerOp(
        name="my-out-cop0",
        image='python:alpine3.6',
        command=["sh", "-c"],
        arguments=['python -c "import json; import sys; json.dump([{\'a\': 1, \'b\': 2}, {\'a\': 10, \'b\': 20}], open(\'/tmp/out.json\', \'w\'))"'],
        file_outputs={'out': '/tmp/out.json'},
    )

    # Iterate over the produced list; item.a refers to key 'a' of each dict.
    with dsl.ParallelFor(json_producer.output) as item:
        dsl.ContainerOp(
            name="my-in-cop1",
            image="library/bash:4.4.23",
            command=["sh", "-c"],
            arguments=["echo do output op1 item.a: %s" % item.a],
        )

    # Final op outside the loop, echoing the raw producer output.
    dsl.ContainerOp(
        name="my-out-cop2",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo do output op2, outp: %s" % json_producer.output],
    )
示例8: save_most_frequent_word
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def save_most_frequent_word(message: str, outputpath: str):
    """A pipeline function describing the orchestration of the workflow."""
    # The exit op runs when the ExitHandler scope finishes, regardless of
    # success or failure of the ops inside it.
    exit_op = ExitHandlerOp('exiting')
    with dsl.ExitHandler(exit_op):
        word_counter = GetFrequentWordOp(
            name='get-Frequent',
            message=message)
        word_counter.set_memory_request('200M')

        word_saver = SaveMessageOp(
            name='save',
            message=word_counter.output,
            output_path=outputpath)
        # Resource settings are applied in the same order as before; the
        # TPU annotation is layered on last via gcp.use_tpu.
        word_saver.set_cpu_limit('0.5')
        word_saver.set_gpu_limit('2')
        word_saver.add_node_selector_constraint('cloud.google.com/gke-accelerator', 'nvidia-tesla-k80')
        word_saver.apply(gcp.use_tpu(tpu_cores = 8, tpu_resource = 'v2', tf_version = '1.12'))
示例9: test_basic_workflow_without_decorator
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def test_basic_workflow_without_decorator(self):
    """Test compiling a workflow and appending pipeline params.

    Fix: the original created a temp directory with tempfile.mkdtemp()
    that was never used — only removed again in a finally block — so the
    temp-dir scaffolding has been dropped.
    """
    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    sys.path.append(test_data_dir)
    import basic_no_decorator
    compiled_workflow = compiler.Compiler().create_workflow(
        basic_no_decorator.save_most_frequent_word,
        'Save Most Frequent',
        'Get Most Frequent Word and Save to GCS',
        [
            basic_no_decorator.message_param,
            basic_no_decorator.output_path_param
        ])
    with open(os.path.join(test_data_dir, 'basic_no_decorator.yaml'), 'r') as f:
        golden = yaml.safe_load(f)
    # metadata carries run-specific fields, so drop it from both sides
    # before comparing.
    for workflow in golden, compiled_workflow:
        del workflow['metadata']
    self.assertEqual(golden, compiled_workflow)
示例10: _test_py_compile_yaml
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def _test_py_compile_yaml(self, file_base_name):
    """Compile testdata/<file_base_name>.py via dsl-compile and diff the
    result against the golden <file_base_name>.yaml.

    Fix: the original opened `os.path.join(test_data_dir, target_yaml)`;
    since target_yaml is already an absolute path, os.path.join discards
    test_data_dir entirely — the join was dead and misleading, so the
    compiled file is now opened directly.
    """
    test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')
    py_file = os.path.join(test_data_dir, file_base_name + '.py')
    tmpdir = tempfile.mkdtemp()
    try:
        target_yaml = os.path.join(tmpdir, file_base_name + '-pipeline.yaml')
        subprocess.check_call([
            'dsl-compile', '--py', py_file, '--output', target_yaml])
        with open(os.path.join(test_data_dir, file_base_name + '.yaml'), 'r') as f:
            golden = yaml.safe_load(f)
        with open(target_yaml, 'r') as f:
            compiled = yaml.safe_load(f)
        # Strip run-specific metadata from both documents before comparing.
        for workflow in golden, compiled:
            del workflow['metadata']
            for template in workflow['spec']['templates']:
                template.pop('metadata', None)
        self.maxDiff = None
        self.assertEqual(golden, compiled)
    finally:
        shutil.rmtree(tmpdir)
示例11: test_set_parallelism
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def test_set_parallelism(self):
    """Test a pipeline with parallelism limits."""
    def make_sleep_op():
        return dsl.ContainerOp(
            name='sleep',
            image='busybox',
            command=['sleep 1'],
        )

    @dsl.pipeline()
    def some_pipeline():
        # Three identical ops; the conf caps concurrent execution at one.
        for _ in range(3):
            make_sleep_op()
        dsl.get_pipeline_conf().set_parallelism(1)

    workflow_dict = kfp.compiler.Compiler()._compile(some_pipeline)
    self.assertEqual(workflow_dict['spec']['parallelism'], 1)
示例12: test_set_ttl_seconds_after_finished
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def test_set_ttl_seconds_after_finished(self):
    """Test a pipeline with ttl after finished."""
    one_day_in_seconds = 86400

    def make_sleep_op():
        return dsl.ContainerOp(
            name='sleep',
            image='busybox',
            command=['sleep 1'],
        )

    @dsl.pipeline()
    def some_pipeline():
        make_sleep_op()
        dsl.get_pipeline_conf().set_ttl_seconds_after_finished(one_day_in_seconds)

    workflow_dict = kfp.compiler.Compiler()._compile(some_pipeline)
    self.assertEqual(workflow_dict['spec']['ttlSecondsAfterFinished'], one_day_in_seconds)
示例13: test_op_transformers
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def test_op_transformers(self):
    """Every container template should pick up the retry transformer."""
    def make_sleep_op():
        return dsl.ContainerOp(
            name='sleep',
            image='busybox',
            command=['sleep 1'],
        )

    @dsl.pipeline(name='some_pipeline')
    def some_pipeline():
        # Three ops; the transformer applied via pipeline conf should
        # decorate each of them with a retry limit of 5.
        make_sleep_op()
        make_sleep_op()
        make_sleep_op()
        dsl.get_pipeline_conf().op_transformers.append(lambda op: op.set_retry(5))

    workflow_dict = compiler.Compiler()._compile(some_pipeline)
    for template in workflow_dict['spec']['templates']:
        if template.get('container'):
            self.assertEqual(template['retryStrategy']['limit'], 5)
示例14: test_image_pull_policy
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def test_image_pull_policy(self):
    """Every container template should carry the configured pull policy."""
    def make_sleep_op():
        return dsl.ContainerOp(
            name='sleep',
            image='busybox',
            command=['sleep 1'],
        )

    @dsl.pipeline(name='some_pipeline')
    def some_pipeline():
        # Three ops; the conf-level policy should propagate to all of them.
        make_sleep_op()
        make_sleep_op()
        make_sleep_op()
        dsl.get_pipeline_conf().set_image_pull_policy(policy="Always")

    workflow_dict = compiler.Compiler()._compile(some_pipeline)
    for template in workflow_dict['spec']['templates']:
        if template.get('container'):
            self.assertEqual(template['container']['imagePullPolicy'], "Always")
示例15: test_set_default_pod_node_selector
# 需要导入模块: from kfp import dsl [as 别名]
# 或者: from kfp.dsl import pipeline [as 别名]
def test_set_default_pod_node_selector(self):
    """Test a pipeline with node selector."""
    selector_label = "cloud.google.com/gke-accelerator"
    selector_value = "nvidia-tesla-p4"

    def make_sleep_op():
        return dsl.ContainerOp(
            name='sleep',
            image='busybox',
            command=['sleep 1'],
        )

    @dsl.pipeline()
    def some_pipeline():
        make_sleep_op()
        dsl.get_pipeline_conf().set_default_pod_node_selector(label_name=selector_label, value=selector_value)

    workflow_dict = kfp.compiler.Compiler()._compile(some_pipeline)
    self.assertEqual(workflow_dict['spec']['nodeSelector'], {selector_label: selector_value})