

Python models.DagRun Class Code Examples

This article collects typical usage examples of the airflow.models.DagRun class in Python. If you are wondering how the DagRun class is used, how to call it, or what working examples look like, the curated class code examples below may help.


The following presents 11 code examples of the DagRun class, sorted by popularity by default.
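As a quick orientation before the examples, here is a minimal sketch of the pattern most of them rely on: querying dag runs with the DagRun.find classmethod. It assumes an Airflow 1.x environment and a hypothetical DAG id "example_dag" that already exists in the metadata database.

# Minimal sketch (assumptions: Airflow 1.x, a DAG named "example_dag"
# already parsed into the metadata database).
from airflow.models import DagRun
from airflow.utils.state import State

# DagRun.find returns a list of DagRun rows matching the given filters.
running_runs = DagRun.find(dag_id="example_dag", state=State.RUNNING)
for dr in running_runs:
    print(dr.run_id, dr.execution_date, dr.state)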

Example 1: test_overwrite_params_with_dag_run_conf

    def test_overwrite_params_with_dag_run_conf(self):
        task = DummyOperator(task_id='op')
        ti = TI(task=task, execution_date=datetime.datetime.now())
        dag_run = DagRun()
        dag_run.conf = {"override": True}
        params = {"override": False}

        ti.overwrite_params_with_dag_run_conf(params, dag_run)

        self.assertEqual(True, params["override"])
Developer ID: alrolorojas, Project: airflow, Lines of code: 10, Source file: test_taskinstance.py

Example 2: set_dag_run_state

def set_dag_run_state(dag, execution_date, state=State.SUCCESS, commit=False):
    """
    Set the state of a dag run and all task instances associated with the dag
    run for a specific execution date.
    :param dag: the DAG of which to alter state
    :param execution_date: the execution date from which to start looking
    :param state: the state to which the DAG need to be set
    :param commit: commit DAG and tasks to be altered to the database
    :return: list of tasks that have been created and updated
    :raises: AssertionError if dag or execution_date is invalid
    """
    res = []

    if not dag or not execution_date:
        return res

    # Mark all task instances in the dag run
    for task in dag.tasks:
        task.dag = dag
        new_state = set_state(task=task, execution_date=execution_date,
                              state=state, commit=commit)
        res.extend(new_state)

    # Mark the dag run
    if commit:
        drs = DagRun.find(dag.dag_id, execution_date=execution_date)
        for dr in drs:
            dr.dag = dag
            dr.update_state()

    return res
Developer ID: ataki, Project: incubator-airflow, Lines of code: 31, Source file: mark_tasks.py
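A hedged invocation sketch for set_dag_run_state as defined above; the DAG id and execution date are assumptions, and commit=True is required for anything to be persisted, as the code shows.

# Hypothetical call to set_dag_run_state (assumed DAG id and date).
from airflow.models import DagBag
from airflow.utils import timezone
from airflow.utils.state import State

dag = DagBag().get_dag("example_dag")
altered = set_dag_run_state(
    dag=dag,
    execution_date=timezone.datetime(2018, 1, 1),  # timezone-aware date
    state=State.FAILED,
    commit=True,  # without commit=True nothing is written to the database
)
print("{} task instances altered".format(len(altered)))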

Example 3: trigger_dag

def trigger_dag(args):
    dag = get_dag(args)

    if not dag:
        logging.error("Cannot find dag {}".format(args.dag_id))
        sys.exit(1)

    execution_date = datetime.now()
    run_id = args.run_id or "manual__{0}".format(execution_date.isoformat())

    dr = DagRun.find(dag_id=args.dag_id, run_id=run_id)
    if dr:
        logging.error("This run_id {} already exists".format(run_id))
        raise AirflowException()

    run_conf = {}
    if args.conf:
        run_conf = json.loads(args.conf)

    trigger = dag.create_dagrun(
        run_id=run_id,
        execution_date=execution_date,
        state=State.RUNNING,
        conf=run_conf,
        external_trigger=True
    )
    logging.info("Created {}".format(trigger))
Developer ID: chrix2, Project: incubator-airflow, Lines of code: 27, Source file: cli.py

Example 4: trigger_dag

def trigger_dag(dag_id, run_id=None, conf=None, execution_date=None):
    dagbag = DagBag()

    if dag_id not in dagbag.dags:
        raise AirflowException("Dag id {} not found".format(dag_id))

    dag = dagbag.get_dag(dag_id)

    if not execution_date:
        execution_date = datetime.now()

    if not run_id:
        run_id = "manual__{0}".format(execution_date.isoformat())

    dr = DagRun.find(dag_id=dag_id, run_id=run_id)
    if dr:
        raise AirflowException("Run id {} already exists for dag id {}".format(
            run_id,
            dag_id
        ))

    run_conf = None
    if conf:
        run_conf = json.loads(conf)

    trigger = dag.create_dagrun(
        run_id=run_id,
        execution_date=execution_date,
        state=State.RUNNING,
        conf=run_conf,
        external_trigger=True
    )

    return trigger
Developer ID: owlabs, Project: incubator-airflow, Lines of code: 34, Source file: trigger_dag.py
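A hedged sketch of calling this helper directly; the DAG id and conf payload are assumptions, and the import path assumes the module lives at airflow.api.common.experimental.trigger_dag, as it does in Airflow 1.x. Note that conf must be a JSON string, because the function passes it through json.loads.

# Hypothetical call to the experimental trigger_dag helper shown above.
from airflow.api.common.experimental.trigger_dag import trigger_dag

dr = trigger_dag(
    dag_id="example_dag",        # assumed DAG id
    run_id=None,                 # defaults to "manual__<execution date>"
    conf='{"override": true}',   # JSON string, decoded with json.loads
)
print(dr)  # the newly created, externally triggered DagRun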

Example 5: _get_dep_statuses

    def _get_dep_statuses(self, ti, session, dep_context):
        dag = ti.task.dag
        dagrun = ti.get_dagrun(session)
        if not dagrun:
            # The import is needed here to avoid a circular dependency
            from airflow.models import DagRun
            running_dagruns = DagRun.find(
                dag_id=dag.dag_id,
                state=State.RUNNING,
                external_trigger=False,
                session=session
            )

            if len(running_dagruns) >= dag.max_active_runs:
                reason = ("The maximum number of active dag runs ({0}) for this task "
                          "instance's DAG '{1}' has been reached.".format(
                              dag.max_active_runs,
                              ti.dag_id))
            else:
                reason = "Unknown reason"
            yield self._failing_status(
                reason="Task instance's dagrun did not exist: {0}.".format(reason))
        else:
            if dagrun.state != State.RUNNING:
                yield self._failing_status(
                    reason="Task instance's dagrun was not in the 'running' state but in "
                           "the state '{}'.".format(dagrun.state))
Developer ID: ataki, Project: incubator-airflow, Lines of code: 27, Source file: dagrun_exists_dep.py

Example 6: get_dag_runs

def get_dag_runs(dag_id, state=None):
    """
    Returns a list of Dag Runs for a specific DAG ID.
    :param dag_id: String identifier of a DAG
    :param state: queued|running|success...
    :return: List of DAG runs of a DAG with requested state,
    or all runs if the state is not specified
    """
    dagbag = DagBag()

    # Check DAG exists.
    if dag_id not in dagbag.dags:
        error_message = "Dag id {} not found".format(dag_id)
        raise AirflowException(error_message)

    dag_runs = list()
    state = state.lower() if state else None
    for run in DagRun.find(dag_id=dag_id, state=state):
        dag_runs.append({
            'id': run.id,
            'run_id': run.run_id,
            'state': run.state,
            'dag_id': run.dag_id,
            'execution_date': run.execution_date.isoformat(),
            'start_date': ((run.start_date or '') and
                           run.start_date.isoformat()),
            'dag_run_url': url_for('Airflow.graph', dag_id=run.dag_id,
                                   execution_date=run.execution_date)
        })

    return dag_runs
Developer ID: AdamUnger, Project: incubator-airflow, Lines of code: 31, Source file: get_dag_runs.py
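A hedged usage sketch for get_dag_runs; the DAG id is an assumption. Because the function builds links with url_for, it is meant to run inside the Airflow webserver, where a Flask application/request context is active.

# Hypothetical call from inside a Flask request context (e.g. a www endpoint).
# Outside the webserver, url_for() would fail for lack of an application context.
runs = get_dag_runs("example_dag", state="success")
for run in runs:
    print(run["run_id"], run["state"], run["dag_run_url"])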

Example 7: dag_state

def dag_state(args):
    """
    Returns the state of a DagRun at the command line.

    >>> airflow dag_state tutorial 2015-01-01T00:00:00.000000
    running
    """
    dag = get_dag(args)
    dr = DagRun.find(dag.dag_id, execution_date=args.execution_date)
    print(dr[0].state if len(dr) > 0 else None)
Developer ID: cjquinon, Project: incubator-airflow, Lines of code: 10, Source file: cli.py

Example 8: evaluate_dagrun

    def evaluate_dagrun(
            self,
            dag_id,
            expected_task_states,  # dict of task_id: state
            dagrun_state,
            run_kwargs=None,
            advance_execution_date=False,
            session=None):
        """
        Helper for testing DagRun states with simple two-task DAGS.
        This is hackish: a dag run is created but its tasks are
        run by a backfill.
        """
        if run_kwargs is None:
            run_kwargs = {}

        scheduler = SchedulerJob(**self.default_scheduler_args)
        dag = self.dagbag.get_dag(dag_id)
        dag.clear()
        dr = scheduler.create_dag_run(dag)

        if advance_execution_date:
            # run a second time to schedule a dagrun after the start_date
            dr = scheduler.create_dag_run(dag)
        ex_date = dr.execution_date

        try:
            dag.run(start_date=ex_date, end_date=ex_date, **run_kwargs)
        except AirflowException:
            pass

        # test tasks
        for task_id, expected_state in expected_task_states.items():
            task = dag.get_task(task_id)
            ti = TI(task, ex_date)
            ti.refresh_from_db()
            self.assertEqual(ti.state, expected_state)

        # load dagrun
        dr = DagRun.find(dag_id=dag_id, execution_date=ex_date)
        dr = dr[0]
        dr.dag = dag

        # dagrun is running
        self.assertEqual(dr.state, State.RUNNING)

        dr.update_state()

        # dagrun failed
        self.assertEqual(dr.state, dagrun_state)
Developer ID: ESML, Project: incubator-airflow, Lines of code: 50, Source file: jobs.py

Example 9: latest_dag_runs

def latest_dag_runs():
    """Returns the latest DagRun for each DAG formatted for the UI. """
    from airflow.models import DagRun
    dagruns = DagRun.get_latest_runs()
    payload = []
    for dagrun in dagruns:
        if dagrun.execution_date:
            payload.append({
                'dag_id': dagrun.dag_id,
                'execution_date': dagrun.execution_date.strftime("%Y-%m-%d %H:%M"),
                'start_date': ((dagrun.start_date or '') and
                               dagrun.start_date.strftime("%Y-%m-%d %H:%M")),
                'dag_run_url': url_for('airflow.graph', dag_id=dagrun.dag_id,
                                       execution_date=dagrun.execution_date)
            })
    return jsonify(items=payload)  # old flask versions don't support jsonifying arrays
Developer ID: Nextdoor, Project: airflow, Lines of code: 16, Source file: endpoints.py
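For reference, the classmethod this endpoint wraps can also be queried directly; a minimal sketch, assuming an Airflow installation with at least one dag run recorded.

# Minimal sketch: DagRun.get_latest_runs returns the most recent DagRun per DAG.
from airflow.models import DagRun

for dagrun in DagRun.get_latest_runs():
    print(dagrun.dag_id, dagrun.execution_date)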

Example 10: _create_dagruns

def _create_dagruns(dag, execution_dates, state, run_id_template):
    """
    Infers from the dates which dag runs need to be created and does so.
    :param dag: the dag to create dag runs for
    :param execution_dates: list of execution dates to evaluate
    :param state: the state to set the dag run to
    :param run_id_template: the template for the run id, to be formatted with the execution date
    :return: newly created and existing dag runs for the execution dates supplied
    """
    # find out if we need to create any dag runs
    drs = DagRun.find(dag_id=dag.dag_id, execution_date=execution_dates)
    dates_to_create = list(set(execution_dates) - set([dr.execution_date for dr in drs]))

    for date in dates_to_create:
        dr = dag.create_dagrun(
            run_id=run_id_template.format(date.isoformat()),
            execution_date=date,
            start_date=timezone.utcnow(),
            external_trigger=False,
            state=state,
        )
        drs.append(dr)

    return drs
Developer ID: ataki, Project: incubator-airflow, Lines of code: 24, Source file: mark_tasks.py

Example 11: set_state

def set_state(task, execution_date, upstream=False, downstream=False,
              future=False, past=False, state=State.SUCCESS, commit=False):
    """
    Set the state of a task instance and if needed its relatives. Can set state
    for future tasks (calculated from execution_date) and retroactively
    for past tasks. Will verify integrity of past dag runs in order to create
    tasks that did not exist. It will not create dag runs that are missing
    on the schedule (but it will do so for subdag dag runs if needed).
    :param task: the task from which to work. task.task.dag needs to be set
    :param execution_date: the execution date from which to start looking
    :param upstream: Mark all parents (upstream tasks)
    :param downstream: Mark all siblings (downstream tasks) of task_id, including SubDags
    :param future: Mark all future tasks on the interval of the dag up until
        last execution date.
    :param past: Retroactively mark all tasks starting from start_date of the DAG
    :param state: State to which the tasks need to be set
    :param commit: Commit tasks to be altered to the database
    :return: list of tasks that have been created and updated
    """
    assert timezone.is_localized(execution_date)

    # microseconds are supported by the database, but are not handled
    # correctly by airflow on e.g. the filesystem and in other places
    execution_date = execution_date.replace(microsecond=0)

    assert task.dag is not None
    dag = task.dag

    latest_execution_date = dag.latest_execution_date
    assert latest_execution_date is not None

    # determine date range of dag runs and tasks to consider
    end_date = latest_execution_date if future else execution_date

    if 'start_date' in dag.default_args:
        start_date = dag.default_args['start_date']
    elif dag.start_date:
        start_date = dag.start_date
    else:
        start_date = execution_date

    start_date = execution_date if not past else start_date

    if dag.schedule_interval == '@once':
        dates = [start_date]
    else:
        dates = dag.date_range(start_date=start_date, end_date=end_date)

    # find relatives (siblings = downstream, parents = upstream) if needed
    task_ids = [task.task_id]
    if downstream:
        relatives = task.get_flat_relatives(upstream=False)
        task_ids += [t.task_id for t in relatives]
    if upstream:
        relatives = task.get_flat_relatives(upstream=True)
        task_ids += [t.task_id for t in relatives]

    # verify the integrity of the dag runs in case a task was added or removed
    # set the confirmed execution dates as they might be different
    # from what was provided
    confirmed_dates = []
    drs = DagRun.find(dag_id=dag.dag_id, execution_date=dates)
    for dr in drs:
        dr.dag = dag
        dr.verify_integrity()
        confirmed_dates.append(dr.execution_date)

    # go through subdagoperators and create dag runs. We will only work
    # within the scope of the subdag. We won't propagate to the parent dag,
    # but we will propagate from parent to subdag.
    session = Session()
    dags = [dag]
    sub_dag_ids = []
    while len(dags) > 0:
        current_dag = dags.pop()
        for task_id in task_ids:
            if not current_dag.has_task(task_id):
                continue

            current_task = current_dag.get_task(task_id)
            if isinstance(current_task, SubDagOperator):
                # this works as a kind of integrity check
                # it creates missing dag runs for subdagoperators,
                # maybe this should be moved to dagrun.verify_integrity
                drs = _create_dagruns(current_task.subdag,
                                      execution_dates=confirmed_dates,
                                      state=State.RUNNING,
                                      run_id_template=BackfillJob.ID_FORMAT_PREFIX)

                for dr in drs:
                    dr.dag = current_task.subdag
                    dr.verify_integrity()
                    if commit:
                        dr.state = state
                        session.merge(dr)

                dags.append(current_task.subdag)
                sub_dag_ids.append(current_task.subdag.dag_id)

    # now look for the task instances that are affected
#......... remaining code omitted .........
Developer ID: ataki, Project: incubator-airflow, Lines of code: 101, Source file: mark_tasks.py
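Finally, a hedged invocation sketch for set_state; the DAG id, task id, and date below are assumptions, and the execution date must be timezone-aware, as the assert at the top of the function requires.

# Hypothetical call to set_state as defined above: mark a task and all of its
# downstream tasks as SUCCESS for one execution date.
from airflow.models import DagBag
from airflow.utils import timezone
from airflow.utils.state import State

dag = DagBag().get_dag("example_dag")            # assumed DAG id
task = dag.get_task("example_task")              # assumed task id; task.dag is set
altered = set_state(
    task=task,
    execution_date=timezone.datetime(2018, 1, 1),  # timezone-aware, per the assert
    downstream=True,
    state=State.SUCCESS,
    commit=True,  # persist the changes to the database
)
print("{} task instances altered".format(len(altered)))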


Note: The airflow.models.DagRun class examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For redistribution and use, refer to the corresponding project's license. Do not reproduce without permission.