本文整理匯總了Python中zipfile.read方法的典型用法代碼示例。如果您正苦於以下問題:Python zipfile.read方法的具體用法?Python zipfile.read怎麽用?Python zipfile.read使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類zipfile
的用法示例。
在下文中一共展示了zipfile.read方法的11個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: get_releases
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def get_releases():
    """
    get a list of all MOPS releases on github.
    :return: a list of releases (version numbers).
    """
    releases_url = MOPS_URL + "/releases"
    request = urllib2.Request(releases_url)
    # NOTE(review): certificate verification is deliberately disabled here,
    # matching the other release-server calls in this module.
    context = ssl._create_unverified_context()
    with contextlib.closing(urllib2.urlopen(request, context=context)) as response:
        data = response.read()
    if data == "":
        raise ValueError("No response from release server: {}".format(MOPS_URL + "/releases"))
    j_data = json.loads(data.decode('utf-8'))
    try:
        # GitHub returns a list of release objects; a dict here means the
        # API answered with an error payload (e.g. rate limiting).
        return [release["tag_name"] for release in j_data]
    except TypeError:
        raise ValueError("Rate limit reached. Please try again later.")
示例2: download_url
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def download_url(source):
    """
    Download the url source to the local temp folder.

    :param source: the source URL
    :return: the local file path of the downloaded .zip
    :raises ValueError: if the download fails for any reason
    """
    filename = 'MOPS_' + os.path.basename(source) + '.zip'
    local_path = os.path.join(HOU_TEMP_PATH, filename)
    # Make sure the destination directory exists before writing.
    if not os.path.exists(os.path.dirname(local_path)):
        os.makedirs(os.path.dirname(local_path))
    try:
        # NOTE(review): certificate verification is deliberately disabled,
        # matching the other release-server calls in this module.
        zipfile = urllib2.urlopen(source, context=ssl._create_unverified_context())
        with open(local_path, 'wb') as output:
            output.write(zipfile.read())
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; the traceback is preserved in the message.
        raise ValueError("Unable to download release from server: {}".format(traceback.format_exc()))
    # BUG FIX: a stray bare `return` used to sit here, making the
    # `return local_path` below unreachable so callers always got None.
    return local_path
示例3: get_original_crash_test_case_of_zipfile
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def get_original_crash_test_case_of_zipfile(crash_test_case, original_test_case):
    """Return the archive member of `original_test_case` (a zip file as
    bytes) most similar to `crash_test_case`; when no member scores above
    zero, the archive bytes themselves are returned unchanged."""
    import zipfile
    archive = zipfile.ZipFile(io.BytesIO(original_test_case))
    encoded_crash = base64.b64encode(crash_test_case)
    best_ratio = 0
    for member_name in archive.namelist():
        candidate = archive.read(member_name)
        # Similarity is computed on the base64 encodings so arbitrary
        # binary content can be compared safely.
        ratio = SequenceMatcher(None, base64.b64encode(candidate), encoded_crash).ratio()
        if ratio > best_ratio:
            best_ratio = ratio
            original_test_case = candidate
    return original_test_case
示例4: parsed_wheel_info
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def parsed_wheel_info(self):
    """Parse wheel metadata (the .data/WHEEL file)"""
    raw_metadata = self.zipfile.read(self.wheelinfo_name)
    return read_pkg_info_bytes(raw_metadata)
示例5: _parse_workspaces
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def _parse_workspaces(fn_manifest):
"Generate a schema function that is based on parsing state point manifest files."
def _parse_workspace(path):
try:
with open(os.path.join(path, fn_manifest), 'rb') as file:
return json.loads(file.read().decode())
except (IOError, OSError) as error:
if error.errno != errno.ENOENT:
raise error
return _parse_workspace
示例6: __call__
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def __call__(self, copytree=None):
    """Copy every recorded archive member into the job's workspace and
    return the workspace path. The `copytree` argument is unsupported and
    must be None."""
    assert copytree is None
    for member in self.names:
        destination = self.job.fn(os.path.relpath(member, self.root))
        # Create intermediate directories on demand before writing.
        _mkdir_p(os.path.dirname(destination))
        with open(destination, 'wb') as out_file:
            out_file.write(self.zipfile.read(member))
    return self.job.workspace()
示例7: get_download_path
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def get_download_path(release):
    """
    get the .ZIP download URL given a version and branch.

    :param release: the release version (tag name)
    :return: the download URL (``zipball_url``), or None if absent
    :raises ValueError: if the release server returns an empty response
    """
    url = MOPS_URL + "/releases/tags/" + release
    # NOTE(review): certificate verification is deliberately disabled,
    # matching the other release-server calls in this module.
    with contextlib.closing(urllib2.urlopen(urllib2.Request(url), context=ssl._create_unverified_context())) as response:
        data = response.read()
    if data == "":
        # BUG FIX: the message previously dropped the "/" before the tag
        # ("/releases/tags" + release); it now reports the queried URL.
        raise ValueError("No response from release server: {}".format(url))
    # (Removed an unused `jdata = dict()` local.)
    j_data = json.loads(data.decode('utf-8'))
    return j_data.get('zipball_url')
示例8: verify
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def verify(self, zipfile=None):
    """Configure the VerifyingZipFile `zipfile` by verifying its signature
    and setting expected hashes for every hash in RECORD.
    Caller must complete the verification process by completely reading
    every file in the archive (e.g. with extractall)."""
    sig = None
    if zipfile is None:
        zipfile = self.zipfile
    zipfile.strict = True
    record_name = '/'.join((self.distinfo_name, 'RECORD'))
    sig_name = '/'.join((self.distinfo_name, 'RECORD.jws'))
    # tolerate s/mime signatures:
    smime_sig_name = '/'.join((self.distinfo_name, 'RECORD.p7s'))
    # RECORD and the signature files have no hash entries of their own,
    # so exempt them from hash checking up front.
    zipfile.set_expected_hash(record_name, None)
    zipfile.set_expected_hash(sig_name, None)
    zipfile.set_expected_hash(smime_sig_name, None)
    record = zipfile.read(record_name)
    # Digest of the RECORD file, urlsafe-base64 encoded for comparison
    # against the signature payload below.
    record_digest = urlsafe_b64encode(hashlib.sha256(record).digest())
    try:
        sig = from_json(native(zipfile.read(sig_name)))
    except KeyError:  # no signature
        pass
    if sig:
        # Verify the JWS signature and check that its claimed RECORD hash
        # matches the digest we just computed.
        headers, payload = signatures.verify(sig)
        if payload['hash'] != "sha256=" + native(record_digest):
            msg = "RECORD.sig claimed RECORD hash {0} != computed hash {1}."
            raise BadWheelFile(msg.format(payload['hash'],
                                          native(record_digest)))
    # RECORD rows are CSV: filename, "algo=hash", size. Register each
    # file's expected hash so later reads are verified by the zipfile.
    reader = csv.reader((native(r) for r in record.splitlines()))
    for row in reader:
        filename = row[0]
        hash = row[1]
        if not hash:
            # Only RECORD and its signature may legitimately lack a hash.
            if filename not in (record_name, sig_name):
                sys.stderr.write("%s has no hash!\n" % filename)
            continue
        algo, data = row[1].split('=', 1)
        assert algo == "sha256", "Unsupported hash algorithm"
        zipfile.set_expected_hash(filename, urlsafe_b64decode(binary(data)))
示例9: _analyze_zipfile_for_import
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def _analyze_zipfile_for_import(zipfile, project, schema):
    """Yield (path, executor) pairs for each job directory detected inside
    the zip archive, using `schema` (or state point manifest files) to
    recover state points."""
    names = zipfile.namelist()

    def read_sp_manifest_file(path):
        # Must use forward slashes, not os.path.sep.
        fn_manifest = path + '/' + project.Job.FN_MANIFEST
        if fn_manifest in names:
            return json.loads(zipfile.read(fn_manifest).decode())

    # Resolve how state points are extracted from a directory path.
    if schema is None:
        schema_function = read_sp_manifest_file
    elif callable(schema):
        schema_function = _with_consistency_check(schema, read_sp_manifest_file)
    elif isinstance(schema, str):
        schema_function = _with_consistency_check(
            _make_path_based_schema_function(schema), read_sp_manifest_file)
    else:
        raise TypeError("The schema variable must be None, callable, or a string.")

    mappings = dict()
    skip_subdirs = set()

    directory_names = {os.path.dirname(name) for name in names}
    for dirname in sorted(directory_names):
        # Anything under an already-identified job directory is skipped.
        if any(dirname.startswith(skip) for skip in skip_subdirs):
            continue
        statepoint = schema_function(dirname)
        if statepoint is not None:
            job = project.open_job(statepoint)
            if os.path.exists(job.workspace()):
                raise DestinationExistsError(job)
            mappings[dirname] = job
            skip_subdirs.add(dirname)

    # Check uniqueness
    if len(set(mappings.values())) != len(mappings):
        raise RuntimeError("The jobs identified with the given schema function are not unique!")

    for path, job in mappings.items():
        member_names = [name for name in names if name.startswith(path)]
        yield path, _CopyFromZipFileExecutor(zipfile, path, job, member_names)
示例10: _analyze_tarfile_for_import
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def _analyze_tarfile_for_import(tarfile, project, schema, tmpdir):
    """Yield (src, executor) pairs for each job directory detected inside
    the tar archive; the archive is extracted into `tmpdir` first."""
    def read_sp_manifest_file(path):
        # Must use forward slashes, not os.path.sep.
        fn_manifest = _tarfile_path_join(path, project.Job.FN_MANIFEST)
        try:
            with closing(tarfile.extractfile(fn_manifest)) as file:
                if sys.version_info >= (3, 6):
                    return json.loads(file.read())
                # json.loads only accepts str before Python 3.6.
                return json.loads(file.read().decode())
        except KeyError:
            # Member not present in the archive -> no state point here.
            pass

    # Resolve how state points are extracted from a directory path.
    if schema is None:
        schema_function = read_sp_manifest_file
    elif callable(schema):
        schema_function = _with_consistency_check(schema, read_sp_manifest_file)
    elif isinstance(schema, str):
        schema_function = _with_consistency_check(
            _make_path_based_schema_function(schema), read_sp_manifest_file)
    else:
        raise TypeError("The schema variable must be None, callable, or a string.")

    mappings = dict()
    skip_subdirs = set()

    directory_names = [member.name for member in tarfile.getmembers() if member.isdir()]
    for dirname in sorted(directory_names):
        if os.path.dirname(dirname) in skip_subdirs:  # skip all sub-dirs of identified dirs
            skip_subdirs.add(dirname)
            continue
        statepoint = schema_function(dirname)
        if statepoint is not None:
            job = project.open_job(statepoint)
            if os.path.exists(job.workspace()):
                raise DestinationExistsError(job)
            mappings[dirname] = job
            skip_subdirs.add(dirname)

    # Check uniqueness
    if len(set(mappings.values())) != len(mappings):
        raise StatepointParsingError(
            "The jobs identified with the given schema function are not unique!")

    tarfile.extractall(path=tmpdir)
    for path, job in mappings.items():
        src = os.path.join(tmpdir, path)
        assert os.path.isdir(tmpdir)
        assert os.path.isdir(src)
        yield src, _CopyFromTarFileExecutor(src, job)
示例11: verify
# 需要導入模塊: import zipfile [as 別名]
# 或者: from zipfile import read [as 別名]
def verify(self, zipfile=None):
    """Configure the VerifyingZipFile `zipfile` by verifying its signature
    and setting expected hashes for every hash in RECORD.
    Caller must complete the verification process by completely reading
    every file in the archive (e.g. with extractall)."""
    sig = None
    if zipfile is None:
        zipfile = self.zipfile
    zipfile.strict = True
    record_name = '/'.join((self.distinfo_name, 'RECORD'))
    sig_name = '/'.join((self.distinfo_name, 'RECORD.jws'))
    # tolerate s/mime signatures:
    smime_sig_name = '/'.join((self.distinfo_name, 'RECORD.p7s'))
    # RECORD and the signature files have no hash entries of their own,
    # so exempt them from hash checking up front.
    zipfile.set_expected_hash(record_name, None)
    zipfile.set_expected_hash(sig_name, None)
    zipfile.set_expected_hash(smime_sig_name, None)
    record = zipfile.read(record_name)
    # Digest of the RECORD file, urlsafe-base64 encoded for comparison
    # against the signature payload below.
    record_digest = urlsafe_b64encode(hashlib.sha256(record).digest())
    try:
        sig = from_json(native(zipfile.read(sig_name)))
    except KeyError:  # no signature
        pass
    if sig:
        # Verify the JWS signature and check that its claimed RECORD hash
        # matches the digest we just computed.
        headers, payload = signatures.verify(sig)
        if payload['hash'] != "sha256=" + native(record_digest):
            msg = "RECORD.sig claimed RECORD hash {0} != computed hash {1}."
            raise BadWheelFile(msg.format(payload['hash'],
                                          native(record_digest)))
    # RECORD rows are CSV: filename, "algo=hash", size. Register each
    # file's expected hash so later reads are verified by the zipfile.
    reader = csv.reader((native(r) for r in record.splitlines()))
    for row in reader:
        filename = row[0]
        hash = row[1]
        if not hash:
            # Only RECORD and its signature may legitimately lack a hash.
            if filename not in (record_name, sig_name):
                sys.stderr.write("%s has no hash!\n" % filename)
            continue
        algo, data = row[1].split('=', 1)
        assert algo == "sha256", "Unsupported hash algorithm"
        zipfile.set_expected_hash(filename, urlsafe_b64decode(binary(data)))