本文整理汇总了Python中airflow.operators.BashOperator方法的典型用法代码示例。如果您正苦于以下问题:Python operators.BashOperator方法的具体用法?Python operators.BashOperator怎么用?Python operators.BashOperator使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类airflow.operators
的用法示例。
在下文中一共展示了operators.BashOperator方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: dbt_dag
# 需要导入模块: from airflow import operators [as 别名]
# 或者: from airflow.operators import BashOperator [as 别名]
def dbt_dag(start_date, schedule_interval, default_args):
    """Build a sub-DAG whose task graph mirrors the dbt model dependency graph.

    The dependency graph is loaded from a pickled networkx graph; one
    BashOperator running ``dbt run`` is created per model node, and graph
    edges are translated into Airflow upstream/downstream relations.
    """
    sub_dag = DAG(
        'gospel_.dbt_sub_dag',
        start_date=start_date,
        schedule_interval=schedule_interval,
        default_args=default_args,
    )
    graph = nx.read_gpickle('/home/airflowuser/project/graph.gpickle')

    def make_dbt_task(model_name):
        # dbt selects models by their short name (last dotted component).
        short_name = model_name.split('.')[-1]
        return BashOperator(
            task_id=model_name,
            bash_command=(
                'cd ~/gospel && dbt run --profile=warehouse --target=prod '
                '--non-destructive --models {simple_model_name}'
            ).format(simple_model_name=short_name),
            dag=sub_dag,
        )

    tasks = {node: make_dbt_task(node) for node in set(graph.nodes())}
    # Wire dependencies: an edge (a, b) means model b runs after model a.
    for upstream, downstream in graph.edges():
        tasks[upstream].set_downstream(tasks[downstream])
    return sub_dag
示例2: execute
# 需要导入模块: from airflow import operators [as 别名]
# 或者: from airflow.operators import BashOperator [as 别名]
def execute(self, context):
    """Generate GDAL overviews for each input raster via ``gdaladdo``.

    Input paths are pulled from the XCom of ``self.get_inputs_from``.
    Returns the list of processed paths (gdaladdo works in place, so
    outputs equal inputs), or None when there is nothing to do.
    """
    input_paths = context["task_instance"].xcom_pull(
        self.get_inputs_from, key=XCOM_RETURN_KEY)
    if input_paths is None:
        log.info("Nothing to process")
        return None
    processed = []
    for path in input_paths:
        overview_levels = get_overview_levels(self.max_overview_level)
        log.info("Generating overviews for {!r}...".format(path))
        addo_command = get_gdaladdo_command(
            path,
            overview_levels=overview_levels,
            resampling_method=self.resampling_method,
            compress_overview=self.compress_overview,
        )
        # gdaladdo modifies the file in place; the output path is the input.
        processed.append(path)
        BashOperator(
            task_id='bash_operator_addo_{}'.format(os.path.basename(path)),
            bash_command=addo_command,
        ).execute(context)
    return processed
示例3: execute
# 需要导入模块: from airflow import operators [as 别名]
# 或者: from airflow.operators import BashOperator [as 别名]
def execute(self, context):
    """Upload the input files to a remote host with rsync over ssh.

    File paths come from the default XCom of ``self.get_inputs_from``
    (either a single string or an iterable of strings).  Returns the list
    of absolute remote filenames, or None when there is nothing to do.
    """
    log.info(context)
    log.info("###########")
    log.info("## RSYNC ##")
    log.info('Host: %s', self.host)
    log.info('User: %s', self.remote_usr)
    log.info('Remote dir: %s', self.remote_dir)
    log.info('SSH Key: %s', self.ssh_key_file)
    # check default XCOM key in task_id 'get_inputs_from'
    files = context['task_instance'].xcom_pull(
        task_ids=self.get_inputs_from, key=XCOM_RETURN_KEY)
    # stop processing if there are no products
    if files is None:
        log.info("Nothing to process.")
        return
    if isinstance(files, six.string_types):
        files_str = files
    else:
        # Each entry gets a leading space (matches the original
        # concatenation loop, so the shell command is byte-identical).
        files_str = "".join(" " + f for f in files)
    log.info("Retrieving input from task_id '{}'' and key '{}'".format(self.get_inputs_from, XCOM_RETURN_KEY))
    bash_command = (
        'rsync -avHPze "ssh -i ' + self.ssh_key_file
        + ' -o StrictHostKeyChecking=no" ' + files_str
        + ' ' + self.remote_usr + '@' + self.host + ':' + self.remote_dir
    )
    BashOperator(task_id='bash_operator_rsync_', bash_command=bash_command).execute(context)
    # construct list of filenames uploaded to remote host
    uploaded = files_str.split()
    remote_paths = list(
        os.path.join(self.remote_dir, os.path.basename(path)) for path in uploaded)
    log.info("Uploaded files: {}".format(pprint.pformat(uploaded)))
    return remote_paths
示例4: execute
# 需要导入模块: from airflow import operators [as 别名]
# 或者: from airflow.operators import BashOperator [as 别名]
def execute(self, context):
    """Extract a per-granule preview thumbnail from Sentinel-2 products.

    Product paths are taken, in order of precedence, from
    ``self.input_product``, the XCom of ``self.get_inputs_from``, or the
    'downloaded_products' XCom of the 'dhus_download_task' task.  For each
    product the first granule's preview image (PVI) is scaled and written
    as a JPEG; the last thumbnail path and the product ids are pushed to
    XCom.  Returns the list of thumbnail paths.
    """

    def _strip_zip(path):
        # Remove a trailing '.zip' suffix.  The original used
        # str.strip('.zip'), which strips a *character set* from both
        # ends and mangles names that begin or end with '.', 'z', 'i'
        # or 'p'; this removes only the literal extension.
        return path[:-4] if path.endswith(".zip") else path

    products = []
    ids = []
    if self.input_product is not None:
        log.info("Processing single product: " + self.input_product)
        products.append(self.input_product)
    elif self.get_inputs_from is not None:
        log.info("Getting inputs from: " + self.get_inputs_from)
        inputs = context['task_instance'].xcom_pull(
            task_ids=self.get_inputs_from, key=XCOM_RETURN_KEY)
        # 'input' shadowed the builtin in the original; extend directly.
        products.extend(inputs)
    else:
        self.downloaded_products = context['task_instance'].xcom_pull(
            'dhus_download_task', key='downloaded_products')
        if self.downloaded_products:
            # list() keeps this working on Python 3, where dict views
            # cannot be indexed (the original did .keys()[0]).
            products = list(self.downloaded_products.keys())
            log.info(self.downloaded_products)
            for name in self.downloaded_products:
                ids.append(self.downloaded_products[name]["id"])
            # Python-3 print function (the original Py2 print statement
            # is a syntax error on Python 3).
            print("downloaded products keys :", products[0])
    if not products:
        log.info("Nothing to process.")
        return
    thumbnail_paths = []
    for product in products:
        log.info("Processing {}".format(product))
        with s2reader.open(product) as safe_product:
            for granule in safe_product.granules:
                try:
                    # Close the archive deterministically; the original
                    # leaked the ZipFile handle.
                    with zipfile.ZipFile(product, 'r') as zipf:
                        # The original passed 'r' as a second argument,
                        # which ZipFile.read() treats as a decryption
                        # password, not a mode — dropped here.
                        imgdata = zipf.read(granule.pvi_path)
                    img = Image(Blob(imgdata))
                    img.scale(self.thumb_size_x + 'x' + self.thumb_size_y)
                    img.quality(80)
                    product_dir = _strip_zip(product)
                    product_base = _strip_zip(product.split("/")[-1])
                    thumbnail_name = product_dir + "/thumbnail.jpg"
                    if os.path.isdir(product_dir):
                        BashOperator(
                            task_id="product_rmdir_{}".format(product_base),
                            bash_command="rm -r {} ".format(product_dir),
                        ).execute(context)
                    BashOperator(
                        task_id="product_mkdir_{}".format(product_base),
                        bash_command="mkdir {} ".format(product_dir),
                    ).execute(context)
                    if self.output_dir is not None:
                        thumbnail_name = os.path.join(self.output_dir, "thumbnail.jpeg")
                        log.info("Writing thumbnail to {}".format(thumbnail_name))
                        img.write(thumbnail_name)
                    else:
                        img.write(str(thumbnail_name))
                    thumbnail_paths.append(thumbnail_name)
                    # XCOM expects a single file so we push it here:
                    context['task_instance'].xcom_push(key='thumbnail_jpeg_abs_path', value=str(thumbnail_name))
                    context['task_instance'].xcom_push(key='ids', value=ids)
                    break
                except Exception as e:
                    # Narrowed from BaseException so KeyboardInterrupt and
                    # SystemExit propagate instead of being logged away.
                    log.error("Unable to extract thumbnail from {}: {}".format(product, e))
    return thumbnail_paths