

Python bash_operator.BashOperator Code Examples

This article collects typical usage examples of airflow.operators.bash_operator.BashOperator in Python. If you have been wondering what bash_operator.BashOperator does, how to use it, or what real-world code that uses it looks like, the curated examples below should help. You can also explore other usage examples from the airflow.operators.bash_operator module.


The following 11 code examples show bash_operator.BashOperator in use, sorted by popularity by default. You can upvote the examples you find useful; your votes help the system recommend better Python code samples.
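
For orientation before the examples: a minimal, self-contained sketch of a DAG with a single BashOperator task. Every name here (dag_id, task_id, command) is an illustrative placeholder, not taken from the projects below.

from datetime import datetime

from airflow import DAG
from airflow.operators.bash_operator import BashOperator

# Minimal DAG with one BashOperator task; all names are placeholders.
dag = DAG(
    dag_id='bash_operator_demo',
    start_date=datetime(2020, 1, 1),
    schedule_interval=None,
)

hello = BashOperator(
    task_id='say_hello',
    bash_command='echo "hello from BashOperator"',
    dag=dag,
)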

Example 1: send_slack_alert

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def send_slack_alert(context=None):
    """Send slack alert on failure to alert the team"""
    payload_vars = {
        'url': 'your_slack_hook_url_here',
        'run_id': str(context['run_id']),
        'task': str(context['task']),
        'dag_name': str(context['dag'].dag_id)
    }

    error_message = "{dag_name} Failure! Task failed: {task} Check log at: {run_id}".format(**payload_vars)
    payload_vars['json'] = """payload={{"channel":"ChuckNorris","text":"{0}"}}""".format(error_message)

    slack_cmd = """curl -x proxy:port \
    -X POST \
    --data-urlencode '{json}' \
    {url}""".format(**payload_vars)

    slack_alert = BashOperator(
        task_id='slack_alert',
        dag=dag,
        bash_command=slack_cmd,
    )
    slack_alert.execute(context) 
Author: danielvdende | Project: data-testing-with-airflow | Lines: 25 | Source: airflowfile.py
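
A usage note on example 1: send_slack_alert receives the Airflow task context, which is the signature expected of a failure callback. A hedged sketch of how such a function is typically hooked up (the default_args dict and DAG below are illustrative assumptions, not part of the project above):

from datetime import datetime

from airflow import DAG

default_args = {
    'owner': 'airflow',
    # Airflow passes the failing task's context to this callable.
    'on_failure_callback': send_slack_alert,
}

dag = DAG(
    dag_id='monitored_dag',          # placeholder name
    default_args=default_args,
    start_date=datetime(2020, 1, 1),
)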

Example 2: get_upgrade_airflow

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def get_upgrade_airflow(self, task_id=dn.UPGRADE_AIRFLOW):
        """Generate the upgrade_airflow step

        Step responsible for upgrading airflow worker. Step will
        execute the upgrade script in the background and direct
        output to null so that 'nohup.out' will not be created.
        Note that this is done intentionally so that the upgrade
        of airflow worker will only start after the completion of
        the 'update_site' workflow. This will ensure availability
        of airflow worker during update/upgrade and prevent any
        disruption to the workflow. Note that dag_id and execution
        date are required for proper execution of the script.
        """
        return BashOperator(task_id=task_id,
                            bash_command=(
                                "nohup "
                                "/usr/local/airflow/upgrade_airflow_worker.sh "
                                "{{ ti.dag_id }} {{ ti.execution_date }} "
                                ">/dev/null 2>&1 &"),
                            dag=self.dag) 
Author: airshipit | Project: shipyard | Lines: 22 | Source: common_step_factory.py
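
The availability guarantee described in the docstring comes from DAG ordering, not from the shell command itself. A minimal runnable sketch of that ordering, using plain BashOperators as stand-ins for the real shipyard steps (all names and the update_site command are placeholders):

from datetime import datetime

from airflow import DAG
from airflow.operators.bash_operator import BashOperator

dag = DAG(dag_id='upgrade_demo', start_date=datetime(2020, 1, 1),
          schedule_interval=None)

# Stand-in for the step produced by the 'update_site' workflow.
update_site = BashOperator(task_id='update_site',
                           bash_command='echo "update_site workflow"', dag=dag)

# Same backgrounded command as the factory method above.
upgrade_airflow = BashOperator(
    task_id='upgrade_airflow',
    bash_command='nohup /usr/local/airflow/upgrade_airflow_worker.sh '
                 '{{ ti.dag_id }} {{ ti.execution_date }} >/dev/null 2>&1 &',
    dag=dag)

# This dependency is what keeps the worker available during update_site.
update_site >> upgrade_airflow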

Example 3: get_runner_operator

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
# Note: airflowHome is a module-level constant defined elsewhere in the source file.
def get_runner_operator(dag):
    return BashOperator(task_id="sync_commoncrawl_workflow",
                        bash_command=f"python {airflowHome}/dags/"
                        "commoncrawl_s3_syncer/SyncImageProviders.py",
                        dag=dag)
Author: creativecommons | Project: cccatalog | Lines: 7 | Source: sync_commoncrawl_workflow.py

Example 4: get_runner_operator

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def get_runner_operator(dag, source, script_location):
    return BashOperator(
        task_id=f'get_{source}_images',
        bash_command=f'python {script_location} --mode default',
        dag=dag
    ) 
Author: creativecommons | Project: cccatalog | Lines: 8 | Source: operator_util.py

Example 5: get_log_operator

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def get_log_operator(dag, source, status):
    return BashOperator(
        task_id=f'{source}_{status}',
        bash_command=f'echo {status} {source} workflow at $(date)',
        dag=dag
    ) 
Author: creativecommons | Project: cccatalog | Lines: 8 | Source: operator_util.py
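
Examples 3-5 come from the same cccatalog utility modules, and the helpers are deliberately small so DAG files can compose them. A hedged sketch of one plausible composition (the DAG, source name, and script path are placeholder assumptions, not code from the project):

from datetime import datetime

from airflow import DAG

dag = DAG(dag_id='provider_workflow_demo', start_date=datetime(2020, 1, 1),
          schedule_interval=None)

# Hypothetical start -> run -> end chain built from the helpers above
# (using the get_runner_operator signature from example 4).
start = get_log_operator(dag, source='someprovider', status='starting')
run = get_runner_operator(dag, source='someprovider',
                          script_location='/path/to/provider_script.py')
end = get_log_operator(dag, source='someprovider', status='finished')

start >> run >> end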

Example 6: get_dag

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def get_dag():
    dag = DAG(dag_id='dag', default_args=default_args, schedule_interval=None,)

    templated_command = '''
    echo '{{ ds }}'
    '''

    # pylint: disable=unused-variable
    t1 = BashOperator(
        task_id='templated', depends_on_past=False, bash_command=templated_command, dag=dag,
    )

    return dag 
Author: dagster-io | Project: dagster | Lines: 15 | Source: test_tags.py
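
The templated_command in example 6 works because BashOperator treats bash_command as a Jinja template, and Airflow renders macros such as {{ ds }} when the task runs. A short sketch with a few other standard template variables (the DAG and task below are illustrative, not from the dagster test):

from datetime import datetime

from airflow import DAG
from airflow.operators.bash_operator import BashOperator

dag = DAG(dag_id='macros_demo', start_date=datetime(2020, 1, 1),
          schedule_interval=None)

# Standard Airflow template variables, rendered at runtime:
# ds = execution date (YYYY-MM-DD), ds_nodash = YYYYMMDD,
# dag.dag_id = id of the owning DAG.
show_macros = BashOperator(
    task_id='show_macros',
    bash_command='echo "ds={{ ds }} ds_nodash={{ ds_nodash }} '
                 'dag_id={{ dag.dag_id }}"',
    dag=dag,
)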

Example 7: copy_artifacts_dev

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def copy_artifacts_dev(dag, project_id, artifact_bucket, storage_bucket):
    """Bootstrap a dataproc job for local testing.

    This job requires setting GOOGLE_APPLICATION_CREDENTIALS before starting the
    airflow container. It will copy the contents of the local jobs and
    dataproc_bootstrap folders to the artifacts bucket, and create a scratch
    storage bucket for dataproc.

    :dag DAG: The dag to register the job
    :project_id str: The project id, necessary for setting the default project
    :artifact_bucket str: The bucket for storing bootstrap artifacts
    :storage_bucket str: The scratch bucket for dataproc
    """
    return BashOperator(
        task_id="copy_to_dev_artifacts",
        bash_command="""
        gcloud auth activate-service-account --key-file ~/.credentials || cat ~/.credentials
        gcloud config set project ${PROJECT_ID}

        gsutil mb gs://${ARTIFACT_BUCKET}
        gsutil mb gs://${STORAGE_BUCKET}

        gsutil -m cp -r ~/dataproc_bootstrap gs://${ARTIFACT_BUCKET}
        gsutil -m cp -r ~/jobs gs://${ARTIFACT_BUCKET}

        echo "listing artifacts..."
        gsutil ls -r gs://${ARTIFACT_BUCKET}
        """,
        env={
            # https://github.com/GoogleCloudPlatform/gsutil/issues/236
            "CLOUDSDK_PYTHON": "python",
            "PROJECT_ID": project_id,
            "ARTIFACT_BUCKET": artifact_bucket,
            "STORAGE_BUCKET": storage_bucket,
        },
        dag=dag,
    )


# parameters that can be used to reconfigure a dataproc job for dev testing 
Author: mozilla | Project: telemetry-airflow | Lines: 42 | Source: dataproc.py
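
Invoking the helper looks roughly like the sketch below; the project id and bucket names are placeholder assumptions, and GOOGLE_APPLICATION_CREDENTIALS must already be set before the airflow container starts, as the docstring notes:

# Hypothetical invocation of copy_artifacts_dev; all values are placeholders.
copy_step = copy_artifacts_dev(
    dag,                                  # an existing DAG object
    project_id='my-dev-project',
    artifact_bucket='my-artifact-bucket',
    storage_bucket='my-scratch-bucket',
)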

Example 8: get_skip_upgrade_airflow

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def get_skip_upgrade_airflow(self, task_id=dn.SKIP_UPGRADE_AIRFLOW):
        """Generate the skip_upgrade_airflow step

        Step will print a message stating that we do not need to
        upgrade the airflow worker
        """
        return BashOperator(task_id=task_id,
                            bash_command=(
                                "echo 'Airflow Worker Upgrade Not Required'"),
                            dag=self.dag) 
Author: airshipit | Project: shipyard | Lines: 12 | Source: common_step_factory.py

Example 9: test_get_dag_params

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def test_get_dag_params():
    td = dagbuilder.DagBuilder("test_dag", DAG_CONFIG, DEFAULT_CONFIG)
    expected = {
        "dag_id": "test_dag",
        "default_args": {
            "owner": "custom_owner",
            "start_date": datetime.datetime(2018, 3, 1, 0, 0, tzinfo=UTC),
            "end_date": datetime.datetime(2018, 3, 5, 0, 0, tzinfo=UTC),
            "retries": 1,
            "retry_delay": datetime.timedelta(seconds=300),
        },
        "description": "this is an example dag",
        "schedule_interval": "0 3 * * *",
        "concurrency": 1,
        "max_active_runs": 1,
        "dagrun_timeout": datetime.timedelta(seconds=600),
        "tags": ["tag1", "tag2"],
        "tasks": {
            "task_1": {
                "operator": "airflow.operators.bash_operator.BashOperator",
                "bash_command": "echo 1",
                "execution_timeout_secs": 5
            },
            "task_2": {
                "operator": "airflow.operators.bash_operator.BashOperator",
                "bash_command": "echo 2",
                "dependencies": ["task_1"],
            },
            "task_3": {
                "operator": "airflow.operators.bash_operator.BashOperator",
                "bash_command": "echo 3",
                "dependencies": ["task_1"],
            },
        },
    }
    actual = td.get_dag_params()
    assert actual == expected 
Author: ajbosco | Project: dag-factory | Lines: 39 | Source: test_dagbuilder.py

Example 10: test_make_task_valid

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def test_make_task_valid():
    td = dagbuilder.DagBuilder("test_dag", DAG_CONFIG, DEFAULT_CONFIG)
    operator = "airflow.operators.bash_operator.BashOperator"
    task_params = {"task_id": "test_task", "bash_command": "echo 1", "execution_timeout_secs": 5}
    actual = td.make_task(operator, task_params)
    assert actual.task_id == "test_task"
    assert actual.bash_command == "echo 1"
    assert isinstance(actual, BashOperator) 
Author: ajbosco | Project: dag-factory | Lines: 10 | Source: test_dagbuilder.py

Example 11: test_make_task_missing_required_param

# Required import: from airflow.operators import bash_operator [as alias]
# Or: from airflow.operators.bash_operator import BashOperator [as alias]
def test_make_task_missing_required_param():
    td = dagbuilder.DagBuilder("test_dag", DAG_CONFIG, DEFAULT_CONFIG)
    operator = "airflow.operators.bash_operator.BashOperator"
    task_params = {"task_id": "test_task"}
    with pytest.raises(Exception):
        td.make_task(operator, task_params) 
Author: ajbosco | Project: dag-factory | Lines: 8 | Source: test_dagbuilder.py
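
Examples 9-11 all pass the operator as a dotted import string, which DagBuilder.make_task must resolve to a class before instantiating it. dag-factory's actual implementation is not shown here; the sketch below is a minimal version of that dynamic-import pattern:

import importlib

def resolve_operator(dotted_path):
    """Resolve a dotted path such as
    'airflow.operators.bash_operator.BashOperator' into the class object."""
    module_path, class_name = dotted_path.rsplit('.', 1)
    module = importlib.import_module(module_path)
    return getattr(module, class_name)

# Usage: build the same task the tests above construct.
operator_class = resolve_operator('airflow.operators.bash_operator.BashOperator')
task = operator_class(task_id='test_task', bash_command='echo 1')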


Note: The airflow.operators.bash_operator.BashOperator examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their original authors, who retain copyright; consult the corresponding project's License before distributing or reusing the code, and do not republish without permission.