This page collects typical usage examples of the Python method airflow.models.DagRun.execution_date. If you are unsure what DagRun.execution_date does or how to use it, the curated code samples below may help. You can also explore further usages of the containing class, airflow.models.DagRun.
The following presents 12 code examples of the DagRun.execution_date method, sorted by popularity by default.
Example 1: get_xcom_entry
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def get_xcom_entry(
    dag_id: str,
    task_id: str,
    dag_run_id: str,
    xcom_key: str,
    session: Session
) -> XComCollectionItemSchema:
    """
    Get an XCom entry
    """
    query = session.query(XCom)
    query = query.filter(and_(XCom.dag_id == dag_id,
                              XCom.task_id == task_id,
                              XCom.key == xcom_key))
    query = query.join(DR, and_(XCom.dag_id == DR.dag_id, XCom.execution_date == DR.execution_date))
    query = query.filter(DR.run_id == dag_run_id)

    query_object = query.one_or_none()
    if not query_object:
        raise NotFound("XCom entry not found")
    return xcom_collection_item_schema.dump(query_object)
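The names XCom, DR, and_, Session, NotFound, and the schema helpers are assumed to already be in scope in this snippet (and in Example 3). As a rough sketch, in a recent Airflow codebase they would typically come from imports along these lines; treat the exact module paths as assumptions, since they can differ across Airflow versions:
# Imports the snippet above appears to assume (paths are an assumption, not confirmed by the source)
from typing import Optional
from sqlalchemy import and_, func
from sqlalchemy.orm import Session
from airflow.models import DagRun as DR, XCom
from airflow.api_connexion.exceptions import NotFound
from airflow.api_connexion.schemas.xcom_schema import (
    XComCollection,
    XComCollectionItemSchema,
    XComCollectionSchema,
    xcom_collection_item_schema,
    xcom_collection_schema,
)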
Example 2: get_dag_runs
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def get_dag_runs():
    dag_runs = []

    session = settings.Session()

    query = session.query(DagRun)
    if request.args.get('state') is not None:
        query = query.filter(DagRun.state == request.args.get('state'))
    if request.args.get('external_trigger') is not None:
        # Query parameters arrive as strings, so compare against 'true'/'True' rather than the boolean True.
        query = query.filter(DagRun.external_trigger == (request.args.get('external_trigger') in ['true', 'True']))
    if request.args.get('prefix') is not None:
        query = query.filter(DagRun.run_id.ilike('{}%'.format(request.args.get('prefix'))))

    runs = query.order_by(DagRun.execution_date).all()
    for run in runs:
        dag_runs.append(format_dag_run(run))  # format_dag_run is shown in Example 8

    session.close()
    return ApiResponse.success({'dag_runs': dag_runs})
Example 3: get_xcom_entries
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def get_xcom_entries(
    dag_id: str,
    dag_run_id: str,
    task_id: str,
    session: Session,
    limit: Optional[int],
    offset: Optional[int] = None
) -> XComCollectionSchema:
    """
    Get all XCom values
    """
    query = session.query(XCom)
    if dag_id != '~':
        query = query.filter(XCom.dag_id == dag_id)
        # Note: the join result must be assigned back to query for it to take effect.
        query = query.join(DR, and_(XCom.dag_id == DR.dag_id, XCom.execution_date == DR.execution_date))
    else:
        query = query.join(DR, XCom.execution_date == DR.execution_date)

    if task_id != '~':
        query = query.filter(XCom.task_id == task_id)
    if dag_run_id != '~':
        query = query.filter(DR.run_id == dag_run_id)

    query = query.order_by(
        XCom.execution_date, XCom.task_id, XCom.dag_id, XCom.key
    )
    total_entries = session.query(func.count(XCom.key)).scalar()
    query = query.offset(offset).limit(limit)
    return xcom_collection_schema.dump(XComCollection(xcom_entries=query.all(), total_entries=total_entries))
Example 4: get_dag_runs
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def get_dag_runs(session, dag_id, start_date_gte=None, start_date_lte=None,
                 execution_date_gte=None, execution_date_lte=None,
                 end_date_gte=None, end_date_lte=None, offset=None, limit=None):
    """
    Get all DAG Runs.
    """
    query = session.query(DagRun)

    # This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs.
    if dag_id != '~':
        query = query.filter(DagRun.dag_id == dag_id)

    # filter start date
    if start_date_gte:
        query = query.filter(DagRun.start_date >= start_date_gte)
    if start_date_lte:
        query = query.filter(DagRun.start_date <= start_date_lte)

    # filter execution date
    if execution_date_gte:
        query = query.filter(DagRun.execution_date >= execution_date_gte)
    if execution_date_lte:
        query = query.filter(DagRun.execution_date <= execution_date_lte)

    # filter end date
    if end_date_gte:
        query = query.filter(DagRun.end_date >= end_date_gte)
    if end_date_lte:
        query = query.filter(DagRun.end_date <= end_date_lte)

    # apply offset and limit
    dag_run = query.order_by(DagRun.id).offset(offset).limit(limit).all()
    total_entries = session.query(func.count(DagRun.id)).scalar()

    return dagrun_collection_schema.dump(DAGRunCollection(dag_runs=dag_run,
                                                          total_entries=total_entries))
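As a rough usage sketch of Example 4's helper, filtering a window of runs by DagRun.execution_date might look like the following; the dag_id, date bounds, and session handling are illustrative assumptions rather than part of the original snippet:
# Hypothetical call of Example 4's get_dag_runs; names and values are illustrative.
from datetime import datetime
from airflow import settings

session = settings.Session()
try:
    response = get_dag_runs(
        session,
        dag_id='daily_processing',
        execution_date_gte=datetime(2020, 1, 1),
        execution_date_lte=datetime(2020, 1, 31),
        limit=10,
    )
finally:
    session.close()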
Example 5: get_dag_runs
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def get_dag_runs(dag_id):
    """
    .. http:get:: /trigger/<dag_id>

        Get the run_ids for a dag_id, ordered by execution date

        **Example request**:

        .. sourcecode:: http

            GET /trigger/make_fit
            Host: localhost:7357

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
              "dag_id": "daily_processing",
              "run_ids": ["my_special_run", "normal_run_17"]
            }
    """
    session = settings.Session()

    error_response = check_dag_exists(session, dag_id)
    if error_response:
        return error_response

    dag_runs = session.query(DagRun).filter(DagRun.dag_id == dag_id).order_by(DagRun.execution_date).all()
    run_ids = [dag_run.run_id for dag_run in dag_runs]

    return jsonify(dag_id=dag_id, run_ids=run_ids)
Example 6: create_airflow_url
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def create_airflow_url(dag_id, start_date, end_date):
    """
    Creates the airflow URL to redirect to. Gets the host_server from the webserver base_url, which may be a
    fabio URL or host:port. Then queries the database for an execution date within the start/end date range.
    There can be multiple matches, so only the earliest result is returned. If no result is found, the start
    date is used instead, which simply takes you to the most recent dagrun for that dag in the UI.

    :param dag_id: Dag id name. String.
    :param start_date: Start date. String of form %Y-%m-%d %H:%M:%S.
    :param end_date: End date. String of form %Y-%m-%d %H:%M:%S.
    :return: Airflow URL to redirect to. String.
    """
    start_date = datetime.strptime(start_date, '%Y-%m-%d %H:%M:%S')
    end_date = datetime.strptime(end_date, '%Y-%m-%d %H:%M:%S')
    host_server = conf.get('webserver', 'base_url')

    session = Session()
    try:
        dagrun_query_result = session.query(DagRun) \
            .filter(DagRun.dag_id == dag_id) \
            .filter(DagRun.execution_date >= start_date) \
            .filter(DagRun.execution_date < end_date) \
            .order_by(DagRun.execution_date.asc()) \
            .first()
        execution_date = dagrun_query_result.execution_date.isoformat()
    except Exception:
        # Covers the "no matching DagRun" case (dagrun_query_result is None) as well as query errors.
        session.rollback()
        execution_date = start_date.isoformat()
    finally:
        session.close()

    url = '{0}/admin/airflow/graph?dag_id={1}&execution_date={2}'.format(host_server, dag_id, execution_date)
    return url
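A hedged usage sketch follows; the dag_id and dates are illustrative, and the exact URL depends on the configured base_url and on which DagRun, if any, falls inside the window:
# Hypothetical call of create_airflow_url; values are illustrative.
url = create_airflow_url('daily_processing', '2016-06-27 00:00:00', '2016-06-28 00:00:00')
# With a base_url of http://localhost:8080, the result is shaped like:
# http://localhost:8080/admin/airflow/graph?dag_id=daily_processing&execution_date=2016-06-27T15:32:57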
Example 7: clear_dag_runs
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def clear_dag_runs(dag_id, start_date, end_date):
    """
    Clears all the DagRuns and corrects the DagStats for an interval passed to the clear command, because the
    clear command itself only clears the TaskInstances.

    :param dag_id: Dag id name. String.
    :param start_date: Start date. String of form %Y-%m-%d %H:%M:%S.
    :param end_date: End date. String of form %Y-%m-%d %H:%M:%S.
    :return: None.
    """
    start_date = datetime.strptime(start_date, '%Y-%m-%d %H:%M:%S')
    end_date = datetime.strptime(end_date, '%Y-%m-%d %H:%M:%S')

    session = Session()
    try:
        dagrun_query = session.query(DagRun) \
            .filter(DagRun.dag_id == dag_id) \
            .filter(DagRun.execution_date >= start_date) \
            .filter(DagRun.execution_date < end_date)
        dagrun_query_result = dagrun_query.all()

        # remove dagruns with this state for clear command
        for result in dagrun_query_result:
            session.delete(result)

        # fix DagStats
        for state in State.dag_states:
            removed_state_counts = dagrun_query.filter(DagRun.state == state).count()
            dagstat_query = session.query(DagStat) \
                .filter(DagStat.dag_id == dag_id) \
                .filter(DagStat.state == state)
            dagstat_query_result = dagstat_query.first()  # only one row every time
            dagstat_query_result.count = max(dagstat_query_result.count - removed_state_counts, 0)

        session.commit()
    except Exception:
        session.rollback()
    finally:
        session.close()
Example 8: format_dag_run
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def format_dag_run(dag_run):
    return {
        'run_id': dag_run.run_id,
        'dag_id': dag_run.dag_id,
        'state': dag_run.get_state(),
        'start_date': (None if not dag_run.start_date else str(dag_run.start_date)),
        'end_date': (None if not dag_run.end_date else str(dag_run.end_date)),
        'external_trigger': dag_run.external_trigger,
        'execution_date': str(dag_run.execution_date)
    }
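For a hypothetical externally triggered run that is still in progress, the dict returned by format_dag_run would be shaped roughly like this; all values are illustrative:
# Illustrative output of format_dag_run for a made-up DagRun
{
    'run_id': 'my_special_run',
    'dag_id': 'daily_processing',
    'state': 'running',
    'start_date': '2016-06-27 15:32:57',
    'end_date': None,
    'external_trigger': True,
    'execution_date': '2016-06-27 15:32:57'
}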
Example 9: find_dag_runs
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def find_dag_runs(session, dag_id, dag_run_id, execution_date):
    qry = session.query(DagRun)
    qry = qry.filter(DagRun.dag_id == dag_id)
    qry = qry.filter(or_(DagRun.run_id == dag_run_id, DagRun.execution_date == execution_date))

    return qry.order_by(DagRun.execution_date).all()
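A minimal usage sketch, assuming an already open SQLAlchemy session; the helper matches a run by either run_id or execution_date, and the identifiers below are illustrative:
# Hypothetical call of find_dag_runs; dag_id, run_id and timestamp are illustrative.
from datetime import datetime

runs = find_dag_runs(
    session,
    dag_id='daily_processing',
    dag_run_id='my_special_run',
    execution_date=datetime(2016, 6, 27, 15, 32, 57),
)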
Example 10: check_py
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def check_py(session=None, **kwargs):
    key = '__SYSTEM__RATE_LIMIT_EXCEEDED__'
    obj = (session
           .query(Variable)
           .filter(Variable.key.ilike('{}%'.format(key)))
           .all())
    if obj is None:
        raise KeyError('Variable {} does not exist'.format(key))
    else:
        for _ in obj:
            _ = json.loads(_.val)

            # Clear the rate limit operator task in the specified Dag Run.
            (session
             .query(TaskInstance)
             .filter(and_(TaskInstance.task_id == _['task_id'],
                          TaskInstance.dag_id == _['dag_id'],
                          TaskInstance.execution_date == datetime.strptime(_['ts'],
                                                                           "%Y-%m-%dT%H:%M:%S")))
             .delete())

            # Clear downstream tasks in the specified Dag Run.
            for task in _['downstream_tasks']:
                (session
                 .query(TaskInstance)
                 .filter(and_(TaskInstance.task_id == task,
                              TaskInstance.dag_id == _['dag_id'],
                              TaskInstance.execution_date == datetime.strptime(_['ts'],
                                                                               "%Y-%m-%dT%H:%M:%S")))
                 .delete())

            # Set the Dag Run state to "running"
            dag_run = (session
                       .query(DagRun)
                       .filter(and_(DagRun.dag_id == _['dag_id'],
                                    DagRun.execution_date == datetime.strptime(_['ts'],
                                                                               "%Y-%m-%dT%H:%M:%S")))
                       .first())
            dag_run.set_state('running')

            # Clear the rate limit exceeded variable.
            variable_identifier = '_'.join([_['dag_id'],
                                            _['task_id'],
                                            _['ts']])
            variable_name = ''.join([key, variable_identifier])
            (session
             .query(Variable)
             .filter(Variable.key == variable_name)
             .delete())
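check_py expects each matching Variable's value to be a JSON document naming the rate-limited task to reset. A hypothetical payload is sketched below; the keys are taken from the code above, while the values are purely illustrative:
# Illustrative value of a __SYSTEM__RATE_LIMIT_EXCEEDED__* Variable, as consumed by check_py
{
    "dag_id": "daily_processing",
    "task_id": "rate_limited_task",
    "ts": "2016-06-27T15:32:57",
    "downstream_tasks": ["load_results", "notify"]
}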
Example 11: trigger_dag
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def trigger_dag(dag_id):
    """
    .. http:post:: /trigger/<dag_id>/

        Triggers a defined DAG. The data must be sent in JSON format with
        a key "run_id" and a string of your choice as the value. Passing the data
        is optional. If no data is passed, the run_id will automatically be
        generated with a timestamp and look like
        "external_trigger_2016-01-19T02:01:49.703365".

        **Example request**:

        .. sourcecode:: http

            POST /trigger/make_fit
            Host: localhost:7357
            Content-Type: application/json

            {
              "run_id": "my_special_run"
            }

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Vary: Accept
            Content-Type: application/json

            {
              "dag_id": "daily_processing",
              "run_id": "my_special_run"
            }
    """
    session = settings.Session()

    error_response = check_dag_exists(session, dag_id)
    if error_response:
        return error_response

    execution_date = datetime.now()

    run_id = None
    json_params = request.get_json()
    if json_params and 'run_id' in json_params:
        run_id = json_params['run_id']
    if not run_id:
        run_id = 'external_trigger_' + execution_date.isoformat()

    trigger = DagRun(
        dag_id=dag_id,
        run_id=run_id,
        state=State.RUNNING,
        execution_date=execution_date,
        external_trigger=True)

    session.add(trigger)
    session.commit()

    return jsonify(dag_id=dag_id, run_id=run_id)
Example 12: dag_run_status
# Required import: from airflow.models import DagRun [as alias]
# Or: from airflow.models.DagRun import execution_date [as alias]
def dag_run_status(dag_id, run_id):
    """
    .. http:get:: /trigger/<dag_id>/<run_id>

        Gets the status of a dag run.
        Possible states are: running, success, failed

        **Example request**:

        .. sourcecode:: http

            GET /trigger/make_fit/my_special_run
            Host: localhost:7357

        **Example response**:

        .. sourcecode:: http

            HTTP/1.1 200 OK
            Content-Type: application/json

            {
              "dag_id": "daily_processing",
              "run_id": "my_special_run",
              "state": "running",
              "execution_date": "2016-06-27T15:32:57"
            }
    """
    session = settings.Session()

    error_response = check_dag_exists(session, dag_id)
    if error_response:
        return error_response

    try:
        dag_run = session.query(DagRun).filter(and_(DagRun.dag_id == dag_id, DagRun.run_id == run_id)).one()
    except NoResultFound:
        return Response('RunId {} does not exist for Dag {}'.format(run_id, dag_id), httplib.BAD_REQUEST)

    time_format = "%Y-%m-%dT%H:%M:%S"
    return jsonify(
        dag_id=dag_id,
        run_id=run_id,
        state=dag_run.state,
        execution_date=dag_run.execution_date.strftime(time_format)
    )