本文整理汇总了Python中notebook.connectors.base.Notebook.execute方法的典型用法代码示例。如果您正苦于以下问题:Python Notebook.execute方法的具体用法?Python Notebook.execute怎么用?Python Notebook.execute使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类notebook.connectors.base.Notebook
的用法示例。
在下文中一共展示了Notebook.execute方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: extract_archive_in_hdfs
# 需要导入模块: from notebook.connectors.base import Notebook [as 别名]
# 或者: from notebook.connectors.base.Notebook import execute [as 别名]
def extract_archive_in_hdfs(request, upload_path, file_name):
    """Launch a batch shell job that extracts an uploaded archive in HDFS.

    The helper extraction script is first pushed to HDFS, then a managed
    shell Notebook is assembled around it and submitted as a batch job.
    Returns the Notebook execution response.
    """
    _upload_extract_archive_script_to_hdfs(request.fs)

    # Extract next to the archive, into a directory named after the file
    # with its extension stripped.
    dest_dir = upload_path + '/' + file_name.split('.')[0]
    started_at = json.loads(request.POST.get('start_time', '-1'))

    notebook = Notebook(
        name=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
        isManaged=True,
        onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': dest_dir})
    )
    notebook.add_shell_snippet(
        shell_command='extract_archive_in_hdfs.sh',
        arguments=[
            {'value': '-u=' + upload_path},
            {'value': '-f=' + file_name},
            {'value': '-o=' + dest_dir},
        ],
        archives=[],
        # The script itself plus the archive to extract; the file name is
        # URL-quoted because it is passed as an HDFS path value.
        files=[
            {'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'},
            {'value': upload_path + '/' + urllib.quote(file_name)},
        ],
        env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
        last_executed=started_at
    )

    return notebook.execute(request, batch=True)
示例2: compress_files_in_hdfs
# 需要导入模块: from notebook.connectors.base import Notebook [as 别名]
# 或者: from notebook.connectors.base.Notebook import execute [as 别名]
def compress_files_in_hdfs(request, file_names, upload_path, archive_name):
    """Launch a batch shell job that zips a set of HDFS files in place.

    The helper compression script is first pushed to HDFS, then a managed
    shell Notebook is assembled around it and submitted as a batch job.
    Returns the Notebook execution response.
    """
    _upload_compress_files_script_to_hdfs(request.fs)

    # Job inputs: every file selected for compression, plus the script itself.
    job_files = [{"value": upload_path + '/' + name} for name in file_names]
    job_files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})

    started_at = json.loads(request.POST.get('start_time', '-1'))

    notebook = Notebook(
        name=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
        isManaged=True,
        onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': upload_path})
    )
    notebook.add_shell_snippet(
        shell_command='compress_files_in_hdfs.sh',
        arguments=[
            {'value': '-u=' + upload_path},
            {'value': '-f=' + ','.join(file_names)},
            {'value': '-n=' + archive_name},
        ],
        archives=[],
        files=job_files,
        env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
        last_executed=started_at
    )

    return notebook.execute(request, batch=True)