This article collects typical usage examples of airflow.models.BaseOperator in Python. If you are unsure how models.BaseOperator is used in practice, the curated code examples below may help; you can also explore further usage of the airflow.models module, where the class is defined.
Five code examples involving models.BaseOperator are shown below, ordered by popularity by default.
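Before the examples, a minimal sketch of the most common pattern, defining a custom operator by subclassing BaseOperator, may help for orientation. The HelloOperator class and its greeting parameter are illustrative only and do not appear in the examples below; the apply_defaults decorator is optional (and deprecated) on Airflow 2.x.

from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults


class HelloOperator(BaseOperator):
    """Toy operator that logs a greeting when the task runs (illustrative only)."""

    @apply_defaults
    def __init__(self, greeting: str = "Hello", *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.greeting = greeting

    def execute(self, context):
        # execute() is what the worker calls when the task instance runs.
        self.log.info("%s from task %s", self.greeting, self.task_id)
        return self.greeting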
Example 1: __init__

# Required import: from airflow import models [as alias]
# Or: from airflow.models import BaseOperator [as alias]
def __init__(self,
             endpoint: str,
             method: str = 'POST',
             data: Any = None,
             headers: Optional[Dict[str, str]] = None,
             response_check: Optional[Callable] = None,
             extra_options: Optional[Dict[str, Any]] = None,
             http_conn_id: str = 'http_default',
             log_response: bool = False,
             *args, **kwargs) -> None:
    super().__init__(*args, **kwargs)
    self.http_conn_id = http_conn_id
    self.method = method
    self.endpoint = endpoint
    self.headers = headers or {}
    self.data = data or {}
    self.response_check = response_check
    self.extra_options = extra_options or {}
    self.log_response = log_response
    if kwargs.get('xcom_push') is not None:
        raise AirflowException("'xcom_push' was deprecated, use 'BaseOperator.do_xcom_push' instead")
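This constructor matches Airflow's HTTP operator (SimpleHttpOperator). A hedged usage sketch, assuming that class and placeholder connection/endpoint values, could look like the following; on Airflow 2.x the import path is airflow.providers.http.operators.http instead.

from datetime import datetime

from airflow import DAG
from airflow.operators.http_operator import SimpleHttpOperator  # Airflow 1.10-style import

with DAG("http_example", start_date=datetime(2020, 1, 1), schedule_interval=None) as dag:
    call_api = SimpleHttpOperator(
        task_id="call_api",
        http_conn_id="http_default",   # Airflow connection pointing at the target host
        endpoint="api/v1/ping",        # placeholder endpoint
        method="GET",
        response_check=lambda response: response.status_code == 200,
        log_response=True,
    )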
Example 2: __init__

# Required import: from airflow import models [as alias]
# Or: from airflow.models import BaseOperator [as alias]
def __init__(self,
             bucket: str,
             file: str,
             namespace: Optional[str] = None,
             entity_filter: Optional[dict] = None,
             labels: Optional[dict] = None,
             datastore_conn_id: str = 'google_cloud_default',
             delegate_to: Optional[str] = None,
             polling_interval_in_seconds: float = 10,
             project_id: Optional[str] = None,
             *args,
             **kwargs) -> None:
    super().__init__(*args, **kwargs)
    self.datastore_conn_id = datastore_conn_id
    self.delegate_to = delegate_to
    self.bucket = bucket
    self.file = file
    self.namespace = namespace
    self.entity_filter = entity_filter
    self.labels = labels
    self.polling_interval_in_seconds = polling_interval_in_seconds
    self.project_id = project_id
    if kwargs.get('xcom_push') is not None:
        raise AirflowException("'xcom_push' was deprecated, use 'BaseOperator.do_xcom_push' instead")
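The signature above (bucket, file, entity_filter, polling_interval_in_seconds, ...) resembles Airflow's Cloud Datastore import operator. A hedged instantiation sketch, assuming CloudDatastoreImportEntitiesOperator from the Google provider package and placeholder bucket/project values:

from airflow.providers.google.cloud.operators.datastore import CloudDatastoreImportEntitiesOperator

import_entities = CloudDatastoreImportEntitiesOperator(
    task_id="import_datastore_backup",
    bucket="my-backup-bucket",                  # placeholder GCS bucket holding the export
    file="exports/2020-01-01/export_metadata",  # placeholder path to the export metadata file
    project_id="my-gcp-project",                # placeholder GCP project
    polling_interval_in_seconds=10,
    dag=dag,                                    # assumes an existing DAG object
)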
Example 3: rename_deprecated_modules

# Required import: from airflow import models [as alias]
# Or: from airflow.models import BaseOperator [as alias]
def rename_deprecated_modules(self) -> None:
    """
    Renames imports back to the deprecated module paths. Example diff generated:

    .. code-block:: diff

        --- ./airflow/providers/dingding/operators/dingding.py
        +++ ./airflow/providers/dingding/operators/dingding.py
        @@ -16,7 +16,7 @@
         # specific language governing permissions and limitations
         # under the License.
        -from airflow.operators.bash import BaseOperator
        +from airflow.operators.bash_operator import BaseOperator
         from airflow.providers.dingding.hooks.dingding import DingdingHook
         from airflow.utils.decorators import apply_defaults
    """
    changes = [
        ("airflow.operators.bash", "airflow.operators.bash_operator"),
        ("airflow.operators.python", "airflow.operators.python_operator"),
        ("airflow.utils.session", "airflow.utils.db"),
        (
            "airflow.providers.cncf.kubernetes.operators.kubernetes_pod",
            "airflow.contrib.operators.kubernetes_pod_operator"
        ),
    ]
    for new, old in changes:
        self.qry.select_module(new).rename(old)
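The qry attribute here is a fluent refactoring query (in Airflow's backport-packages build scripts it is a Bowler Query). A minimal standalone sketch of the same select_module/rename pattern, assuming the bowler library is installed and using a placeholder target directory:

from bowler import Query

(
    Query("./dags")                               # placeholder directory to refactor
    .select_module("airflow.operators.bash")
    .rename("airflow.operators.bash_operator")    # rewrite imports back to the deprecated path
    .execute(write=True, interactive=False)       # apply the changes in place
)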
Example 4: make_task

# Required import: from airflow import models [as alias]
# Or: from airflow.models import BaseOperator [as alias]
def make_task(operator: str, task_params: Dict[str, Any]) -> BaseOperator:
    """
    Takes an operator and params and creates an instance of that operator.

    :returns: instance of operator object
    """
    try:
        # class is a Callable https://stackoverflow.com/a/34578836/3679900
        operator_obj: Callable[..., BaseOperator] = import_string(operator)
    except Exception as err:
        raise Exception(f"Failed to import operator: {operator}. err: {err}")
    try:
        if operator_obj == PythonOperator:
            if not task_params.get("python_callable_name") and not task_params.get(
                "python_callable_file"
            ):
                raise Exception(
                    "Failed to create task. PythonOperator requires "
                    "`python_callable_name` and `python_callable_file` parameters."
                )
            task_params["python_callable"]: Callable = utils.get_python_callable(
                task_params["python_callable_name"],
                task_params["python_callable_file"],
            )
        if utils.check_dict_key(task_params, "execution_timeout_secs"):
            task_params["execution_timeout"]: timedelta = timedelta(
                seconds=task_params["execution_timeout_secs"]
            )
            del task_params["execution_timeout_secs"]
        task: BaseOperator = operator_obj(**task_params)
    except Exception as err:
        raise Exception(f"Failed to create {operator_obj} task. err: {err}")
    return task
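A hedged usage sketch of make_task: the operator is given as a dotted import path string, and task_params carries the task_id plus any operator keyword arguments. The operator path and values below are placeholders.

task_params = {
    "task_id": "say_hello",
    "bash_command": "echo hello",
    "execution_timeout_secs": 300,   # make_task converts this to an execution_timeout timedelta
}
hello_task = make_task(
    operator="airflow.operators.bash_operator.BashOperator",
    task_params=task_params,
)
hello_task.dag = dag  # assumes an existing DAG object to attach the task to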
Example 5: refactor_amazon_package

# Required import: from airflow import models [as alias]
# Or: from airflow.models import BaseOperator [as alias]
def refactor_amazon_package(self):
    """
    Fixes to "amazon" providers package.

    Copies some of the classes used from core Airflow to the "common.utils" package of
    the provider and renames imports to use them from there.

    We copy typing_compat.py and change the import as in this example diff:

    .. code-block:: diff

        --- ./airflow/providers/amazon/aws/operators/ecs.py
        +++ ./airflow/providers/amazon/aws/operators/ecs.py
        @@ -24,7 +24,7 @@
         from airflow.models import BaseOperator
         from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
         from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
        -from airflow.typing_compat import Protocol, runtime_checkable
        +from airflow.providers.amazon.common.utils.typing_compat import Protocol, runtime_checkable
         from airflow.utils.decorators import apply_defaults
    """
    # noinspection PyUnusedLocal
    def amazon_package_filter(node: LN, capture: Capture, filename: Filename) -> bool:
        return filename.startswith("./airflow/providers/amazon/")

    os.makedirs(
        os.path.join(get_target_providers_package_folder("amazon"), "common", "utils"),
        exist_ok=True
    )
    copyfile(
        os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"),
        os.path.join(get_target_providers_package_folder("amazon"), "common", "__init__.py")
    )
    copyfile(
        os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"),
        os.path.join(get_target_providers_package_folder("amazon"), "common", "utils", "__init__.py")
    )
    copyfile(
        os.path.join(get_source_airflow_folder(), "airflow", "typing_compat.py"),
        os.path.join(get_target_providers_package_folder("amazon"), "common", "utils", "typing_compat.py")
    )
    (
        self.qry.
        select_module("airflow.typing_compat").
        filter(callback=amazon_package_filter).
        rename("airflow.providers.amazon.common.utils.typing_compat")
    )
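The final parenthesized chain is the same fluent query style as in Example 3: select_module picks up imports of airflow.typing_compat, the filter callback restricts the rewrite to files under ./airflow/providers/amazon/, and rename points those imports at the copied common.utils.typing_compat module.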