This article collects typical usage examples of the Python method common.Common.hashfile. If you have been wondering what exactly Common.hashfile does and how to use it, the curated code examples below may help. You can also explore further usage examples of the class common.Common, to which this method belongs.
Three code examples of Common.hashfile are shown below, sorted by popularity by default. You can vote up the examples you like or find useful; your ratings help the system recommend better Python code examples.
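Common.hashfile is a project-level helper rather than a standard-library function, so its exact signature varies between the projects sampled below: the first two examples pass it an open file object together with a hashlib hasher, while the third passes a bare path. As a rough orientation only — a minimal sketch, not the verbatim implementation from any of these projects — a helper with the two-argument signature typically digests the file in fixed-size chunks:

import hashlib

def hashfile(afile, hasher, blocksize=65536):
    # Feed the file to the hasher in chunks so that large files
    # never need to be held in memory all at once.
    buf = afile.read(blocksize)
    while len(buf) > 0:
        hasher.update(buf)
        buf = afile.read(blocksize)
    return hasher.hexdigest()

# Usage:
# with open("example.bin", "rb") as fh:
#     print(hashfile(fh, hashlib.md5()))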
Example 1: verify
# Required import: from common import Common [as alias]
# Or: from common.Common import hashfile [as alias]
# This example also uses: hashlib, os
def verify(self):
    self.project.log("transaction", "Verifying all downloaded files...", "highlight", True)
    verification_file = os.path.join(self.project.working_dir, Common.timely_filename("verification", ".csv"))
    errors = 0
    pct = 0
    tot_hashes = 0
    with open(verification_file, 'w') as f:
        f.write("TIME_PROCESSED,REMOTE_FILE,LOCAL_FILE,REMOTE_HASH,LOCAL_HASH,MATCH\n")
        for item in self.verification:
            rh = ""
            match = ""
            # Hash the local copy; a with-block closes the handle instead of leaking it.
            with open(item['local_file'], 'rb') as local_fh:
                lh = Common.hashfile(local_fh, hashlib.md5())
            lf = item['local_file']
            rf = item['remote_file']
            if 'remote_hash' in item:
                tot_hashes += 1
                rh = item['remote_hash']
                if lh == item['remote_hash']:
                    match = "YES"
                else:
                    match = "NO"
                    errors += 1
                    self.project.log("exception", "Verification failed for remote file {} and local file {}".format(rf, lf), "critical", True)
            else:
                rh = "NONE PROVIDED"
                match = "N/A"
            f.write('"{date}","{rf}","{lf}","{rh}","{lh}","{m}"\n'.format(
                date=Common.utc_get_datetime_as_string(), rf=rf, lf=lf, rh=rh, lh=lh, m=match))
    # Avoid a ZeroDivisionError when no remote hashes were provided.
    pct = ((tot_hashes - errors) / tot_hashes) * 100 if tot_hashes else 100.0
    self.project.log("transaction", "Verification of {} items completed with {} errors. ({:.2f}% Success rate)".format(tot_hashes, errors, pct), "highlight", True)
Example 2: sync
# Required import: from common import Common [as alias]
# Or: from common.Common import hashfile [as alias]
# This example also uses: hashlib, json, os, datetime.datetime, and the project's Downloader and IO modules
def sync(self):
    d1 = datetime.now()
    d = Downloader.Downloader
    if self.project.args.mode == "full":
        self.project.log("transaction", "Full acquisition initiated", "info", True)
        d = Downloader.Downloader(self.project, self.oauth_provider.http_intercept, self._save_file,
                                  self.oauth_provider.get_auth_header, self.project.threads)
    else:
        self.project.log("transaction", "Metadata acquisition initiated", "info", True)
    self.initialize_items()
    cnt = len(self.files)
    self.project.log("transaction", "Total items queued for acquisition: " + str(cnt), "info", True)
    self.metadata()
    trash_folder = os.path.join(self.project.acquisition_dir, "trash")
    trash_metadata_folder = os.path.join(self.project.acquisition_dir, "trash_metadata")
    for file in self.files:
        self.project.log("transaction", "Calculating " + file['title'], "info", True)
        download_uri = self._get_download_url(file)
        parentmap = self._get_parent_mapping(file, self.files)
        filetitle = self._get_file_name(file)
        if filetitle != file['title']:
            self.project.log("exception", "Normalized '" + file['title'] + "' to '" + filetitle + "'", "warning", True)
        # Trashed items are kept apart from the regular data/metadata folders.
        if file['labels']['trashed']:
            save_download_path = os.path.join(trash_folder, parentmap)
            save_metadata_path = os.path.join(trash_metadata_folder, parentmap)
            save_download_path = os.path.normpath(os.path.join(save_download_path, filetitle))
            save_metadata_path = os.path.normpath(os.path.join(save_metadata_path, filetitle + '.json'))
        else:
            save_download_path = os.path.normpath(os.path.join(self.project.project_folders["data"], parentmap, filetitle))
            save_metadata_path = os.path.normpath(os.path.join(self.project.project_folders["metadata"], parentmap, filetitle + ".json"))
        save_download_path = Common.assert_path(save_download_path, self.project)
        save_metadata_path = Common.assert_path(save_metadata_path, self.project)
        if self.project.args.mode == "full":
            if save_download_path:
                v = {"remote_file": os.path.join(parentmap, file['title']),
                     "local_file": save_download_path}
                download_file = True
                if 'md5Checksum' in file:
                    v['remote_hash'] = file['md5Checksum']
                if os.path.isfile(save_download_path):
                    if 'md5Checksum' in file:
                        # Skip the download when the local copy already matches the remote hash.
                        with open(save_download_path, 'rb') as fh:
                            file_hash = Common.hashfile(fh, hashlib.md5())
                        if file_hash == file['md5Checksum']:
                            download_file = False
                            self.project.log("exception", "Local and remote hash matches for " + file['title'] + " ... Skipping download", "warning", True)
                        else:
                            self.project.log("exception", "Local and remote hash differs for " + file['title'] + " ... Queuing for download", "critical", True)
                    else:
                        self.project.log("exception", "No hash information for file '" + file['title'] + "'", "warning", True)
                if download_file and download_uri:
                    self.project.log("transaction", "Queueing " + file['title'] + " for download...", "info", True)
                    d.put(Downloader.DownloadSlip(download_uri, file, save_download_path, 'title'))
                    if 'fileSize' in file:
                        self.file_size_bytes += int(file['fileSize'])
                # If it's a downloadable file, add it to the verification list
                if download_uri:
                    self.verification.append(v)
        if save_metadata_path:
            self._save_file(json.dumps(file, sort_keys=True, indent=4),
                            Downloader.DownloadSlip(download_uri, file, save_metadata_path, 'title'), False)
    self.project.log("transaction", "Total size of files to be acquired is {}".format(
        Common.sizeof_fmt(self.file_size_bytes, "B")), "highlight", True)
    if self.project.args.prompt:
        IO.get("Press ENTER to begin acquisition...")
    if self.project.args.mode == "full":
        # In metadata mode no Downloader instance was created, so there is nothing to start.
        d.start()
        d.wait_for_complete()
    d2 = datetime.now()
    delt = d2 - d1
    self.verify()
    self.project.log("transaction", "Acquisition completed in {}".format(str(delt)), "highlight", True)
Example 3: run
# Required import: from common import Common [as alias]
# Or: from common.Common import hashfile [as alias]
# This example also uses: os, shutil, mimetypes, difflib
#......... some code omitted here .........
                    for path in paths:
                        if os.path.exists(current_output_path + '/' + os.path.basename(path)):
                            Common.message(padding + " WARNING: file naming collision when unfolding: " + os.path.basename(path))
                            continue
                        if os.path.isdir(path):
                            shutil.copytree(path, current_output_path + '/' + os.path.basename(path))
                        else:
                            shutil.copy2(path, current_output_path + '/' + os.path.basename(path))
                else:
                    Common.message(padding + " ERROR: could not process step type: " + step_type)
                current_output_path = current_input_path
                position += 1
            else:
                if step['name'] == test or test == '*' or step['name'] == '*':
                    if 'context' in step and not step['context'] == pipeline_context_name:
                        continue
                    success = True
                    Common.message(padding + '-- Evaluating test: ' + step['name'])
                    if 'expect' in step:
                        tests_run += 1
                        expected_files = {}
                        actual_files = {}
                        actual_path = current_output_path if current_output_path else input_path
                        expected_path = os.path.dirname(self.path) + "/tests/" + step['expect']
                        # Hash every file in the actual output (directory tree or single file)...
                        if os.path.isdir(actual_path):
                            for root, subdirs, files in os.walk(actual_path):
                                for file in files:
                                    actual_files[os.path.relpath(root, actual_path) + '/' + file] = {
                                        'hash': Common.hashfile(root + '/' + file),
                                        'fullpath': root + '/' + file}
                        else:
                            actual_files['./' + os.path.basename(actual_path)] = {
                                'hash': Common.hashfile(actual_path),
                                'fullpath': actual_path}
                        # ...and in the expected output, so the two trees can be compared by digest.
                        if os.path.isdir(expected_path):
                            for root, subdirs, files in os.walk(expected_path):
                                for file in files:
                                    expected_files[os.path.relpath(root, expected_path) + '/' + file] = {
                                        'hash': Common.hashfile(root + '/' + file),
                                        'fullpath': root + '/' + file}
                        else:
                            expected_files['./' + os.path.basename(expected_path)] = {
                                'hash': Common.hashfile(expected_path),
                                'fullpath': expected_path}
                        for file in actual_files:
                            if file not in expected_files:
                                Common.message(padding + " FAILED: file should not be present in output: " + file)
                                success = False
                            elif expected_files[file]['hash'] != actual_files[file]['hash']:
                                Common.message(padding + " FAILED: content in actual and expected files differ: " + file)
                                success = False
                                mime = mimetypes.guess_type(file)
                                # Only show a unified diff for reasonably small text files;
                                # guess_type may return None, so guard before calling startswith.
                                if mime[0] and mime[0].startswith('text/') \
                                        and os.stat(actual_files[file]['fullpath']).st_size < 1000000 \
                                        and os.stat(expected_files[file]['fullpath']).st_size < 1000000:
                                    with open(expected_files[file]['fullpath'], 'r') as expected_file:
                                        with open(actual_files[file]['fullpath'], 'r') as actual_file:
                                            expected_lines = expected_file.readlines()
                                            actual_lines = actual_file.readlines()
                                    max_lines = 30
                                    for diffline in difflib.unified_diff(expected_lines, actual_lines, fromfile=file, tofile=file):
                                        Common.message(padding + diffline.strip())
                                        max_lines -= 1
                                        if max_lines <= 0:
                                            break