This article collects typical usage examples of the Python method hdfs3.HDFileSystem.open. If you have been wondering what exactly HDFileSystem.open does, or how to use it in practice, the curated code samples below should help. You can also explore further examples for the class hdfs3.HDFileSystem that the method belongs to.
Five code examples of HDFileSystem.open are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code samples.
Example 1: open_file_write_direct
# Required import: from hdfs3 import HDFileSystem [as alias]
# Or: from hdfs3.HDFileSystem import open [as alias]
def open_file_write_direct(path, hdfs=None, **kwargs):
    # Reuse an existing connection if one is passed in; otherwise connect
    # to the NameNode described by the host/port keyword arguments.
    if hdfs is None:
        hdfs = HDFileSystem(kwargs.get('host'), kwargs.get('port'))
    return hdfs.open(path, 'wb')
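A minimal usage sketch for the helper above; the host name, port, and payload are placeholder values, not part of the original example:
# Hypothetical usage; 'namenode' and 8020 are placeholder connection details.
with open_file_write_direct('/tmp/out.bin', host='namenode', port=8020) as f:
    f.write(b'hello hdfs')  # HDFile supports the usual binary file API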
Example 2: hdfs_open_file
# Required import: from hdfs3 import HDFileSystem [as alias]
# Or: from hdfs3.HDFileSystem import open [as alias]
def hdfs_open_file(path, auth):
    # 'auth' carries HDFileSystem constructor keywords such as host and port.
    hdfs = HDFileSystem(**auth)
    return hdfs.open(path, mode='rb')
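A hedged example of calling hdfs_open_file; the keys of the auth dict mirror HDFileSystem constructor parameters, and the values below are placeholders:
# Illustrative call; connection details are placeholders.
auth = {'host': 'namenode', 'port': 9000}
f = hdfs_open_file('/data/input.csv', auth)
header = f.readline()  # HDFile behaves like a binary file object
f.close()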
Example 3: HadoopFileSystem
# Required import: from hdfs3 import HDFileSystem [as alias]
# Or: from hdfs3.HDFileSystem import open [as alias]
#......... some of the class body is omitted here .........
    def _mkdirs(self, path):
        self._hdfs_client.makedirs(path)

    def match(self, url_patterns, limits=None):
        if limits is None:
            limits = [None] * len(url_patterns)

        if len(url_patterns) != len(limits):
            raise BeamIOError(
                'Patterns and limits should be equal in length: %d != %d' % (
                    len(url_patterns), len(limits)))

        # TODO(udim): Update client to allow batched results.
        def _match(path_pattern, limit):
            """Find all matching paths to the pattern provided."""
            file_infos = self._hdfs_client.ls(path_pattern, detail=True)[:limit]
            metadata_list = [FileMetadata(file_info['name'], file_info['size'])
                             for file_info in file_infos]
            return MatchResult(path_pattern, metadata_list)

        exceptions = {}
        result = []
        for url_pattern, limit in zip(url_patterns, limits):
            try:
                path_pattern = self._parse_url(url_pattern)
                result.append(_match(path_pattern, limit))
            except Exception as e:  # pylint: disable=broad-except
                exceptions[url_pattern] = e

        if exceptions:
            raise BeamIOError('Match operation failed', exceptions)
        return result

    def _open_hdfs(self, path, mode, mime_type, compression_type):
        if mime_type != 'application/octet-stream':
            logging.warning('Mime types are not supported. Got non-default mime_type:'
                            ' %s', mime_type)
        if compression_type == CompressionTypes.AUTO:
            compression_type = CompressionTypes.detect_compression_type(path)
        res = self._hdfs_client.open(path, mode)
        if compression_type != CompressionTypes.UNCOMPRESSED:
            res = CompressedFile(res)
        return res

    def create(self, url, mime_type='application/octet-stream',
               compression_type=CompressionTypes.AUTO):
        """
        Returns:
            *hdfs3.core.HDFile*: A Python file-like object.
        """
        path = self._parse_url(url)
        return self._create(path, mime_type, compression_type)

    def _create(self, path, mime_type='application/octet-stream',
                compression_type=CompressionTypes.AUTO):
        return self._open_hdfs(path, 'wb', mime_type, compression_type)

    def open(self, url, mime_type='application/octet-stream',
             compression_type=CompressionTypes.AUTO):
        """
        Returns:
            *hdfs3.core.HDFile*: A Python file-like object.
        """
        path = self._parse_url(url)
        return self._open(path, mime_type, compression_type)
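The _open_hdfs helper above auto-detects compression from the file extension when CompressionTypes.AUTO is passed. A small illustration of that detection step, assuming apache_beam is installed (the paths are placeholders):
# Illustrative: how CompressionTypes.AUTO resolves from a file extension.
from apache_beam.io.filesystem import CompressionTypes

print(CompressionTypes.detect_compression_type('/data/events.gz'))   # CompressionTypes.GZIP
print(CompressionTypes.detect_compression_type('/data/events.txt'))  # CompressionTypes.UNCOMPRESSED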
Example 4: hdfs_open_file
# Required import: from hdfs3 import HDFileSystem [as alias]
# Or: from hdfs3.HDFileSystem import open [as alias]
def hdfs_open_file(path, auth):
    # Import inside the function body so hdfs3 is only needed at call time.
    from hdfs3 import HDFileSystem
    hdfs = HDFileSystem(**auth)
    return hdfs.open(path, mode='rb')
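This variant differs from Example 2 only in the function-local import, so merely defining hdfs_open_file does not require hdfs3 to be installed; calling it does. A hedged sketch of that behavior (placeholder paths and connection details):
# Illustrative: the lazy import defers the hdfs3 dependency to call time.
def read_first_line(path, auth):
    f = hdfs_open_file(path, auth)
    try:
        return f.readline()
    finally:
        f.close()

line = read_first_line('/data/users.csv', {'host': 'namenode', 'port': 8020})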
Example 5: open
# Required import: from hdfs3 import HDFileSystem [as alias]
# Or: from hdfs3.HDFileSystem import open [as alias]
def open(self, path, mode='rb', **kwargs):
    # Strip a leading 'hdfs://' scheme before delegating to the parent class.
    if path.startswith('hdfs://'):
        path = path[len('hdfs://'):]
    return HDFileSystem.open(self, path, mode, **kwargs)
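The method above overrides open on a subclass of HDFileSystem (note the self parameter). A minimal sketch of such a subclass; the class name and connection details are hypothetical:
# Hypothetical wrapper class; the open method is the one shown above.
from hdfs3 import HDFileSystem

class SchemeAwareHDFileSystem(HDFileSystem):
    def open(self, path, mode='rb', **kwargs):
        if path.startswith('hdfs://'):
            path = path[len('hdfs://'):]
        return HDFileSystem.open(self, path, mode, **kwargs)

fs = SchemeAwareHDFileSystem(host='namenode', port=8020)
f = fs.open('hdfs:///data/a.txt')  # same file as fs.open('/data/a.txt')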