This article collects typical usage examples of the HDFileSystem.mv method from the Python hdfs3 package. If you are unsure what HDFileSystem.mv does, how to call it, or would like to see it in real code, the curated example below may help. You can also read more about the enclosing class, hdfs3.HDFileSystem.
The section below shows 1 code example of HDFileSystem.mv; examples are ordered by popularity by default. You can upvote the examples you like or find useful, and your feedback helps the site recommend better Python code examples.
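Before turning to the example, here is a minimal sketch of calling HDFileSystem.mv directly against a cluster; the NameNode host/port and the paths are placeholder assumptions, not values taken from the example below.

from hdfs3 import HDFileSystem

# Placeholder connection settings; point these at your cluster's NameNode.
hdfs = HDFileSystem(host='localhost', port=8020)

# mv() moves/renames a path within HDFS; as in the example below, its return
# value is checked because a falsy result indicates failure.
if not hdfs.mv('/tmp/example/source.txt', '/tmp/example/destination.txt'):
    raise IOError('rename failed')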
Example 1: HadoopFileSystem
# Required module import: from hdfs3 import HDFileSystem [as alias]
# Or: from hdfs3.HDFileSystem import mv [as alias]
#......... part of the code omitted here .........
    Will overwrite files and directories in destination_file_names.

    Raises ``BeamIOError`` if any error occurred.

    Args:
      source_file_names: iterable of URLs.
      destination_file_names: iterable of URLs.
    """
    if len(source_file_names) != len(destination_file_names):
      raise BeamIOError(
          'source_file_names and destination_file_names should '
          'be equal in length: %d != %d' % (
              len(source_file_names), len(destination_file_names)))

    def _copy_file(source, destination):
      with self._open(source) as f1:
        with self._create(destination) as f2:
          while True:
            buf = f1.read(_COPY_BUFFER_SIZE)
            if not buf:
              break
            f2.write(buf)
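
    # _copy_file above streams the source in _COPY_BUFFER_SIZE chunks, so even
    # large files are copied without being read fully into memory.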

    def _copy_path(source, destination):
      """Recursively copy the file tree from the source to the destination."""
      if not self._hdfs_client.isdir(source):
        _copy_file(source, destination)
        return
      for path, dirs, files in self._hdfs_client.walk(source):
        for dir in dirs:
          new_dir = self._join(destination, dir)
          if not self._exists(new_dir):
            self._mkdirs(new_dir)
        rel_path = posixpath.relpath(path, source)
        if rel_path == '.':
          rel_path = ''
        for file in files:
          _copy_file(self._join(path, file),
                     self._join(destination, rel_path, file))
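
    # _copy_path above mirrors the source tree: walk() visits each directory,
    # missing destination directories are created, then files are copied one by one.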

    exceptions = {}
    for source, destination in zip(source_file_names, destination_file_names):
      try:
        rel_source = self._parse_url(source)
        rel_destination = self._parse_url(destination)
        _copy_path(rel_source, rel_destination)
      except Exception as e:  # pylint: disable=broad-except
        exceptions[(source, destination)] = e

    if exceptions:
      raise BeamIOError('Copy operation failed', exceptions)
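
  # Note: copy() records one exception per failed (source, destination) pair
  # and raises a single BeamIOError at the end instead of stopping early.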

  def rename(self, source_file_names, destination_file_names):
    exceptions = {}
    for source, destination in zip(source_file_names, destination_file_names):
      try:
        rel_source = self._parse_url(source)
        rel_destination = self._parse_url(destination)
        if not self._hdfs_client.mv(rel_source, rel_destination):
          raise BeamIOError(
              'libhdfs error in renaming %s to %s' % (source, destination))
      except Exception as e:  # pylint: disable=broad-except
        exceptions[(source, destination)] = e

    if exceptions:
      raise BeamIOError('Rename operation failed', exceptions)
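
  # Note: the underlying hdfs3 HDFileSystem.mv() signals failure through its
  # return value, so a falsy result is wrapped in an explicit BeamIOError above.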

  def exists(self, url):
    """Checks existence of url in HDFS.

    Args:
      url: String in the form hdfs://...

    Returns:
      True if url exists as a file or directory in HDFS.
    """
    path = self._parse_url(url)
    return self._exists(path)

  def _exists(self, path):
    """Returns True if path exists as a file or directory in HDFS.

    Args:
      path: String in the form /...
    """
    return self._hdfs_client.exists(path)
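
  # Note: exists() accepts a full hdfs:// URL and parses it first, while the
  # internal _exists() works on the already-parsed path.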

  def delete(self, urls):
    exceptions = {}
    for url in urls:
      try:
        path = self._parse_url(url)
        self._hdfs_client.rm(path, recursive=True)
      except Exception as e:  # pylint: disable=broad-except
        exceptions[url] = e

    if exceptions:
      raise BeamIOError("Delete operation failed", exceptions)
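
As a rough standalone illustration (not part of the example above), the rename pattern can be exercised against a plain hdfs3 client in much the same way; the connection settings and paths below are assumptions.

from hdfs3 import HDFileSystem

client = HDFileSystem(host='localhost', port=8020)  # placeholder NameNode

pairs = [
    ('/data/in/part-00000', '/data/out/part-00000'),
    ('/data/in/part-00001', '/data/out/part-00001'),
]

# Mirror the rename() method above: collect failures per pair instead of
# stopping at the first one, then report them all at once.
failures = {}
for source, destination in pairs:
    if not client.mv(source, destination):
        failures[(source, destination)] = 'mv returned a falsy value'

if failures:
    raise RuntimeError('Rename operation failed: %r' % failures)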