本文整理汇总了Python中galaxy.datatypes.binary.Binary.is_sniffable_binary方法的典型用法代码示例。如果您正苦于以下问题:Python Binary.is_sniffable_binary方法的具体用法?Python Binary.is_sniffable_binary怎么用?Python Binary.is_sniffable_binary使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 galaxy.datatypes.binary.Binary 的用法示例。
在下文中一共展示了Binary.is_sniffable_binary方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: add_file
# 需要导入模块: from galaxy.datatypes.binary import Binary [as 别名]
# 或者: from galaxy.datatypes.binary.Binary import is_sniffable_binary [as 别名]
def add_file(dataset, registry, json_file, output_path):
data_type = None
line_count = None
converted_path = None
stdout = None
link_data_only = dataset.get('link_data_only', 'copy_files')
run_as_real_user = in_place = dataset.get('in_place', True)
purge_source = dataset.get('purge_source', True)
# in_place is True if there is no external chmod in place,
# however there are other instances where modifications should not occur in_place:
# when a file is added from a directory on the local file system (ftp import folder or any other path).
if dataset.type in ('server_dir', 'path_paste', 'ftp_import'):
in_place = False
check_content = dataset.get('check_content' , True)
auto_decompress = dataset.get('auto_decompress', True)
try:
ext = dataset.file_type
except AttributeError:
file_err('Unable to process uploaded file, missing file_type parameter.', dataset, json_file)
return
if dataset.type == 'url':
try:
page = urlopen(dataset.path) # page will be .close()ed by sniff methods
temp_name, dataset.is_multi_byte = sniff.stream_to_file(page, prefix='url_paste', source_encoding=util.get_charset_from_http_headers(page.headers))
except Exception as e:
file_err('Unable to fetch %s\n%s' % (dataset.path, str(e)), dataset, json_file)
return
dataset.path = temp_name
# See if we have an empty file
if not os.path.exists(dataset.path):
file_err('Uploaded temporary file (%s) does not exist.' % dataset.path, dataset, json_file)
return
if not os.path.getsize(dataset.path) > 0:
file_err('The uploaded file is empty', dataset, json_file)
return
if not dataset.type == 'url':
# Already set is_multi_byte above if type == 'url'
try:
dataset.is_multi_byte = multi_byte.is_multi_byte(codecs.open(dataset.path, 'r', 'utf-8').read(100))
except UnicodeDecodeError as e:
dataset.is_multi_byte = False
# Is dataset an image?
i_ext = get_image_ext(dataset.path)
if i_ext:
ext = i_ext
data_type = ext
# Is dataset content multi-byte?
elif dataset.is_multi_byte:
data_type = 'multi-byte char'
ext = sniff.guess_ext(dataset.path, registry.sniff_order, is_multi_byte=True)
# Is dataset content supported sniffable binary?
else:
# FIXME: This ignores the declared sniff order in datatype_conf.xml
# resulting in improper behavior
type_info = Binary.is_sniffable_binary(dataset.path)
if type_info:
data_type = type_info[0]
ext = type_info[1]
if not data_type:
root_datatype = registry.get_datatype_by_extension(dataset.file_type)
if getattr(root_datatype, 'compressed', False):
data_type = 'compressed archive'
ext = dataset.file_type
else:
# See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
is_gzipped, is_valid = check_gzip(dataset.path, check_content=check_content)
if is_gzipped and not is_valid:
file_err('The gzipped uploaded file contains inappropriate content', dataset, json_file)
return
elif is_gzipped and is_valid and auto_decompress:
if link_data_only == 'copy_files':
# We need to uncompress the temp_name file, but BAM files must remain compressed in the BGZF format
CHUNK_SIZE = 2 ** 20 # 1Mb
fd, uncompressed = tempfile.mkstemp(prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname(output_path), text=False)
gzipped_file = gzip.GzipFile(dataset.path, 'rb')
while 1:
try:
chunk = gzipped_file.read(CHUNK_SIZE)
except IOError:
os.close(fd)
os.remove(uncompressed)
file_err('Problem decompressing gzipped data', dataset, json_file)
return
if not chunk:
break
os.write(fd, chunk)
os.close(fd)
gzipped_file.close()
# Replace the gzipped file with the decompressed file if it's safe to do so
if not in_place:
dataset.path = uncompressed
else:
shutil.move(uncompressed, dataset.path)
os.chmod(dataset.path, 0o644)
dataset.name = dataset.name.rstrip('.gz')
data_type = 'gzip'
if not data_type:
# See if we have a bz2 file, much like gzip
is_bzipped, is_valid = check_bz2(dataset.path, check_content)
#.........这里部分代码省略.........
示例2: check_image
# 需要导入模块: from galaxy.datatypes.binary import Binary [as 别名]
# 或者: from galaxy.datatypes.binary.Binary import is_sniffable_binary [as 别名]
image = check_image( dataset.path )
if image:
if not PIL:
image = None
# get_image_ext() returns None if not a supported image type
ext = get_image_ext( dataset.path, image )
data_type = ext
# Is dataset content multi-byte?
elif dataset.is_multi_byte:
data_type = 'multi-byte char'
ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
# Is dataset content supported sniffable binary?
else:
# FIXME: This ignores the declared sniff order in datatype_conf.xml
# resulting in improper behavior
type_info = Binary.is_sniffable_binary( dataset.path )
if type_info:
data_type = type_info[0]
ext = type_info[1]
if not data_type:
root_datatype = registry.get_datatype_by_extension( dataset.file_type )
if getattr( root_datatype, 'compressed', False ):
data_type = 'compressed archive'
ext = dataset.file_type
else:
# See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
is_gzipped, is_valid = check_gzip( dataset.path )
if is_gzipped and not is_valid:
file_err( 'The gzipped uploaded file contains inappropriate content', dataset, json_file )
return
elif is_gzipped and is_valid: