This page collects typical usage examples of the Python function spacewalk.common.checksum.getFileChecksum. If you are wondering what getFileChecksum does, how to call it, or simply want to see it used in real code, the hand-picked examples below should help.
The sections below show 15 code examples of getFileChecksum, ordered roughly by popularity.
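Before the examples, here is a minimal sketch of the calling conventions they rely on. The signature is inferred only from the usage shown on this page, not from the module's documentation, and the paths are placeholders:

from spacewalk.common.checksum import getFileChecksum

# by file name, keyword or positional (see examples 1, 3, 10, 11 and 15)
digest = getFileChecksum('sha256', filename='/path/to/repomd.xml')
digest = getFileChecksum('md5', '/path/to/RHN-ORG-TRUSTED-SSL-CERT')

# by an already-open file object; some call sites pass file_obj=, others file=
# (see examples 2, 5, 7, 8, 9, 13 and 14)
with open('/path/to/package.rpm', 'rb') as stream:
    digest = getFileChecksum('sha256', file_obj=stream)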
Example 1: repomd_up_to_date
def repomd_up_to_date(self):
    repomd_old_path = os.path.join(self.repo.basecachedir, self.name, "repomd.xml")
    # No cached repomd?
    if not os.path.isfile(repomd_old_path):
        return False
    repomd_new_path = os.path.join(self.repo.basecachedir, self.name, "repomd.xml.new")
    # Newer file not available? Don't do anything. It should be downloaded before this.
    if not os.path.isfile(repomd_new_path):
        return True
    return (checksum.getFileChecksum('sha256', filename=repomd_old_path) ==
            checksum.getFileChecksum('sha256', filename=repomd_new_path))
Example 2: __is_file_done
def __is_file_done(local_path=None, file_obj=None, checksum_type=None, checksum=None):
    if checksum_type and checksum:
        if local_path and os.path.isfile(local_path):
            return getFileChecksum(checksum_type, filename=local_path) == checksum
        elif file_obj:
            return getFileChecksum(checksum_type, file_obj=file_obj) == checksum
    if local_path and os.path.isfile(local_path):
        return True
    elif file_obj:
        return True
    return False
Example 3: copyFiles
def copyFiles(options):
    """ copies SSL cert and GPG key to --pub-tree if not in there already
        existence check should have already been done.
    """
    pubDir = cleanupAbsPath(options.pub_tree or DEFAULT_APACHE_PUB_DIRECTORY)

    def copyFile(file0, file1):
        if not os.path.exists(os.path.dirname(file1)):
            sys.stderr.write("ERROR: directory does not exist:\n %s\n"
                             % os.path.dirname(file1))
            sys.exit(errnoBadPath)
        if not os.path.exists(file0):
            sys.stderr.write("ERROR: file does not exist:\n %s\n"
                             % file0)
            sys.exit(errnoCANotFound)
        sys.stderr.write("""\
Copying file into public directory tree:
    %s to
    %s
""" % (file0, file1))
        shutil.copy(file0, file1)

    # CA SSL cert
    if not options.no_ssl and options.ssl_cert:
        writeYN = 1
        dest = os.path.join(pubDir, os.path.basename(options.ssl_cert))
        if os.path.dirname(options.ssl_cert) != pubDir:
            if os.path.isfile(dest) \
               and getFileChecksum('md5', options.ssl_cert) != getFileChecksum('md5', dest):
                rotateFile(dest, options.verbose)
            elif os.path.isfile(dest):
                writeYN = 0
            if writeYN:
                copyFile(options.ssl_cert, dest)

    # corp GPG keys
    if not options.no_gpg and options.gpg_key:
        for gpg_key in options.gpg_key.split(","):
            writeYN = 1
            dest = os.path.join(pubDir, os.path.basename(gpg_key))
            if os.path.dirname(gpg_key) != pubDir:
                if os.path.isfile(dest) \
                   and getFileChecksum('md5', gpg_key) != getFileChecksum('md5', dest):
                    rotateFile(dest, options.verbose)
                elif os.path.isfile(dest):
                    writeYN = 0
                if writeYN:
                    copyFile(gpg_key, dest)
Example 4: copy_package
def copy_package(fd, basedir, relpath, checksum_type, checksum, force=None):
    """
    Copies the information from the file descriptor to a file
    Checks the file's checksum, raising FileConflictError if it's different
    The force flag prevents the exception from being raised, and copies the
    file even if the checksum has changed
    """
    packagePath = basedir + "/" + relpath
    # Is the file there already?
    if os.path.isfile(packagePath) and not force:
        # Get its checksum
        localsum = getFileChecksum(checksum_type, packagePath)
        if checksum == localsum:
            # Same file, so get outa here
            return
        raise FileConflictError(os.path.basename(packagePath))

    dir = os.path.dirname(packagePath)
    # Create the directory where the file will reside
    if not os.path.exists(dir):
        createPath(dir)

    pkgfd = os.open(packagePath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    os.lseek(fd, 0, 0)
    while 1:
        buffer = os.read(fd, 65536)
        if not buffer:
            break
        n = os.write(pkgfd, buffer)
        if n != len(buffer):
            # Error writing to the file
            raise IOError, "Wrote %s out of %s bytes in file %s" % (
                n, len(buffer), packagePath)
    os.close(pkgfd)

    # set the path perms readable by all users
    setPermsPath(packagePath, chmod=0644)
Example 5: _uploadPackage
def _uploadPackage(self, channels, org_id, force, info):
    """ Write the bits to a temporary file """
    packageBits = info['package']

    package_stream = tempfile.TemporaryFile()
    package_stream.write(packageBits)
    package_stream.seek(0, 0)
    del packageBits

    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(package_stream)
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(header.checksum_type(), file=payload_stream)
    relative_path = rhnPackageUpload.relative_path_from_header(
        header, org_id=org_id, checksum=checksum, checksum_type=checksum_type)
    package_dict, diff_level = rhnPackageUpload.push_package(
        header, payload_stream, checksum_type, checksum, org_id=org_id, force=force,
        header_start=header_start, header_end=header_end,
        relative_path=relative_path)
    if diff_level:
        return package_dict, diff_level
    return 0
Example 6: move_package
def move_package(filename, basedir, relpath, checksum_type, checksum, force=None):
    """
    Moves (or copies) the given package file to its final location under basedir
    Checks the file's checksum, raising FileConflictError if it's different
    The force flag prevents the exception from being raised, and replaces the
    file even if the checksum has changed
    """
    packagePath = basedir + "/" + relpath
    # Is the file there already?
    if os.path.isfile(packagePath):
        if force:
            os.unlink(packagePath)
        else:
            # Get its checksum
            localsum = getFileChecksum(checksum_type, packagePath)
            if checksum == localsum:
                # Same file, so get outa here
                return
            raise FileConflictError(os.path.basename(packagePath))

    dir = os.path.dirname(packagePath)
    # Create the directory where the file will reside
    if not os.path.exists(dir):
        createPath(dir)

    # Check if the RPM has been downloaded from a remote repository
    # If so, it is stored in CFG.MOUNT_POINT and we have to move it
    # If not, the repository is local to the server, so the rpm should be copied
    if filename.startswith(CFG.MOUNT_POINT):
        shutil.move(filename, packagePath)
    else:
        shutil.copy(filename, packagePath)

    # set the path perms readable by all users
    os.chmod(packagePath, int('0644', 8))
Example 7: upload_package
def upload_package(self, package, path):
    temp_file = open(path, 'rb')
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(temp_file)
    package.checksum_type = header.checksum_type()
    package.checksum = getFileChecksum(package.checksum_type, file=temp_file)
    rel_package_path = rhnPackageUpload.relative_path_from_header(
        header, self.channel['org_id'],
        package.checksum_type, package.checksum)
    package_dict, diff_level = rhnPackageUpload.push_package(header,
        payload_stream, package.checksum_type, package.checksum,
        force=False,
        header_start=header_start, header_end=header_end,
        relative_path=rel_package_path,
        org_id=self.channel['org_id'])
    temp_file.close()
Example 8: handler
def handler(self, req):
    ret = basePackageUpload.BasePackageUpload.handler(self, req)
    if ret != apache.OK:
        return ret

    temp_stream = rhnPackageUpload.write_temp_file(req, 16384, self.packaging)
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(temp_stream)
    # Sanity check - removed, the package path can no longer be determined
    # without the header
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(checksum_type, file=payload_stream)
    self.rel_package_path = rhnPackageUpload.relative_path_from_header(
        header, org_id=self.org_id, checksum_type=checksum_type, checksum=checksum)
    self.package_path = os.path.join(CFG.MOUNT_POINT,
                                     self.rel_package_path)

    # Verify the checksum of the bytes we downloaded against the checksum
    # presented by rhnpush in the HTTP headers
    if not (checksum_type == self.file_checksum_type
            and checksum == self.file_checksum):
        log_debug(1, "Mismatching checksums: expected",
                  self.file_checksum_type, self.file_checksum,
                  "; got:", checksum_type, checksum)
        raise rhnFault(104, "Mismatching information")

    package_dict, diff_level = rhnPackageUpload.push_package(header,
        payload_stream, checksum_type, checksum, force=self.force,
        header_start=header_start, header_end=header_end,
        relative_path=self.rel_package_path, org_id=self.org_id)

    if diff_level:
        return self._send_package_diff(req, diff_level, package_dict)

    # Everything went fine
    rhnSQL.commit()
    reply = "All OK"
    req.headers_out['Content-Length'] = str(len(reply))
    req.send_http_header()
    req.write(reply)
    log_debug(2, "Returning with OK")
    return apache.OK
Example 9: _populateFromFile
def _populateFromFile(self, f_path, relpath=None, org_id=None, channels=[],
                      source=None):
    f_obj = file(f_path)
    import server.rhnPackageUpload as rhnPackageUpload
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(f_obj)
    if (source and not header.is_source) or (not source and header.is_source):
        raise ValueError("Unexpected RPM package type")
    # Get the size
    size = os.path.getsize(f_path)
    path = None
    if relpath:
        # Strip trailing slashes
        path = "%s/%s" % (sanitizePath(relpath), os.path.basename(f_path))
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(header.checksum_type(), file=payload_stream)
    self.populate(header, size, checksum_type, checksum, path, org_id,
                  header_start, header_end, channels)
Example 10: match_package_checksum
def match_package_checksum(abspath, checksum_type, checksum):
    if (os.path.exists(abspath) and
            getFileChecksum(checksum_type, filename=abspath) == checksum):
        return 1
    return 0
Example 11: rotateFile
def rotateFile(filepath, depth=5, suffix='.', verbosity=0):
    """ backup/rotate a file
        depth (-1==no limit) refers to num. of backups (rotations) to keep.

        Behavior:
          (1)
            x.txt (current)
            x.txt.1 (old)
            x.txt.2 (older)
            x.txt.3 (oldest)
          (2)
            all file stats preserved. Doesn't blow away original file.
          (3)
            if x.txt and x.txt.1 are identical (size and checksum), None is
            returned
    """
    # check argument sanity (should really be done outside of this function)
    if not filepath or not isinstance(filepath, type('')):
        raise ValueError("filepath '%s' is not a valid argument" % filepath)
    if not isinstance(depth, type(0)) or depth < -1 \
            or depth > MaxInt - 1 or depth == 0:
        raise ValueError("depth must fall within range "
                         "[-1, 1...%s]" % (MaxInt - 1))

    # force verbosity to be a numeric value
    verbosity = verbosity or 0
    if not isinstance(verbosity, type(0)) or verbosity < -1 \
            or verbosity > MaxInt - 1:
        raise ValueError('invalid verbosity value: %s' % (verbosity))

    filepath = cleanupAbsPath(filepath)
    if not os.path.isfile(filepath):
        raise ValueError("filepath '%s' does not lead to a file" % filepath)

    pathNSuffix = filepath + suffix
    pathNSuffix1 = pathNSuffix + '1'

    if verbosity > 1:
        sys.stderr.write("Working dir: %s\n"
                         % os.path.dirname(pathNSuffix))

    # is there anything to do? (existence, then size, then checksum)
    checksum_type = 'sha1'
    if os.path.exists(pathNSuffix1) and os.path.isfile(pathNSuffix1) \
            and os.stat(filepath)[6] == os.stat(pathNSuffix1)[6] \
            and getFileChecksum(checksum_type, filepath) == \
            getFileChecksum(checksum_type, pathNSuffix1):
        # nothing to do
        if verbosity:
            sys.stderr.write("File '%s' is identical to its rotation. "
                             "Nothing to do.\n" % os.path.basename(filepath))
        return None

    # find last in series (of rotations):
    last = 0
    while os.path.exists('%s%d' % (pathNSuffix, last + 1)):
        last = last + 1

    # percolate renames:
    for i in range(last, 0, -1):
        os.rename('%s%d' % (pathNSuffix, i), '%s%d' % (pathNSuffix, i + 1))
        if verbosity > 1:
            filename = os.path.basename(pathNSuffix)
            sys.stderr.write("Moving file: %s%d --> %s%d\n" % (filename, i,
                                                               filename, i + 1))

    # blow away excess rotations:
    if depth != -1:
        last = last + 1
        for i in range(depth + 1, last + 1):
            path = '%s%d' % (pathNSuffix, i)
            os.unlink(path)
            if verbosity:
                sys.stderr.write("Rotated out: '%s'\n" % (
                    os.path.basename(path)))

    # do the actual rotation
    shutil.copy2(filepath, pathNSuffix1)
    if os.path.exists(pathNSuffix1) and verbosity:
        sys.stderr.write("Backup made: '%s' --> '%s'\n"
                         % (os.path.basename(filepath),
                            os.path.basename(pathNSuffix1)))

    # return the full filepath of the backed up file
    return pathNSuffix1
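As a quick, hypothetical illustration of the behavior described in the docstring above (this call is not part of the original listing): with the default suffix '.', rotating a file copies it to <name>.1 and shifts any older backups down one slot, and the function returns None when the file is identical to its most recent rotation.

# hypothetical usage; the path is a placeholder
backup = rotateFile('/etc/foo.conf', depth=5, verbosity=1)
if backup is None:
    # the file matched its most recent backup, so nothing was rotated
    pass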
Example 12: import_kickstart
#......... part of the code omitted here .........
    treeinfo_parser = None
    for path in treeinfo_path:
        log(1, "Trying " + path)
        treeinfo = plug.get_file(path, os.path.join(CFG.MOUNT_POINT, ks_path))
        if treeinfo:
            try:
                treeinfo_parser = TreeInfoParser(treeinfo)
                break
            except TreeInfoError:
                pass

    if not treeinfo_parser:
        log(0, "Kickstartable tree not detected (no valid treeinfo file)")
        return

    # Make sure images are included
    to_download = []
    for repo_path in treeinfo_parser.get_images():
        local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
        # TODO: better check
        if not os.path.exists(local_path):
            to_download.append(repo_path)

    if row:
        log(0, "Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
        ks_id = row['id']
    else:
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
        """)
        ks_id = row['id']
        rhnSQL.execute("""
            insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id, kstree_type,
                                              install_type, last_modified, created, modified)
            values (:id, :org_id, :label, :base_path, :channel_id,
                    ( select id from rhnKSTreeType where label = :ks_tree_type),
                    ( select id from rhnKSInstallType where label = :ks_install_type),
                    current_timestamp, current_timestamp, current_timestamp)""", id=ks_id,
                       org_id=self.channel['org_id'], label=ks_tree_label, base_path=db_path,
                       channel_id=self.channel['id'], ks_tree_type=self.ks_tree_type,
                       ks_install_type=self.ks_install_type)
        log(0, "Added new kickstartable tree %s. Downloading content..." % ks_tree_label)

    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size, last_modified, created,
        modified) values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
        epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
    """)
    delete_h = rhnSQL.prepare("""
        delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
    """)

    # Downloading/Updating content of KS Tree
    # start from root dir
    dirs_queue = ['']
    log(0, "Gathering all files in kickstart repository...")
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue

        parser = KSDirParser(cur_dir_html)
        for ks_file in parser.get_content():
            repo_path = cur_dir_name + ks_file['name']
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(repo_path)
                continue
            if repo_path not in to_download:
                to_download.append(repo_path)

    if to_download:
        log(0, "Downloading %d files." % len(to_download))
        for item in to_download:
            for retry in range(3):
                try:
                    log(1, "Retrieving %s" % item)
                    plug.get_file(item, os.path.join(CFG.MOUNT_POINT, ks_path))
                    st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                    break
                except OSError:  # os.stat if the file wasn't downloaded
                    if retry < 3:
                        log(2, "Retry download %s: attempt #%d" % (item, retry + 1))
                    else:
                        raise
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=item)
            insert_h.execute(id=ks_id, path=item,
                             checksum=getFileChecksum('sha256', os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                             st_size=st.st_size, st_time=st.st_mtime)
    else:
        log(0, "Nothing to download.")

    rhnSQL.commit()
Example 13: md5sum_for_stream
def md5sum_for_stream(data_stream):
    """Calculate the md5sum for a data stream and return it in a utf8-friendly
    format"""
    return checksum.getFileChecksum("md5", file_obj=data_stream)
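A minimal, hypothetical call (not from the original code): any readable file-like object should work, since it is handed straight to getFileChecksum as file_obj.

import io

# hypothetical in-memory stream used purely for illustration
digest = md5sum_for_stream(io.BytesIO(b"example payload"))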
Example 14: process_sha256_packages
def process_sha256_packages():
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_sha256_packages_sql = rhnSQL.prepare(_get_sha256_packages_query)
    _get_sha256_packages_sql.execute()
    packages = _get_sha256_packages_sql.fetchall_dict()

    if not packages:
        print("No SHA256 capable packages to process.")
        if debug:
            log.writeMessage("No SHA256 capable packages to process.")
        return

    if verbose:
        print("Processing %s SHA256 capable packages" % len(packages))

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(packages), finalBarLength=40, stream=sys.stdout)
    pb.printAll(1)

    _update_sha256_package_sql = rhnSQL.prepare(_update_sha256_package)
    _update_package_files_sql = rhnSQL.prepare(_update_package_files)

    for package in packages:
        pb.addTo(1)
        pb.printIncrement()

        old_abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if debug and verbose:
            log.writeMessage("Processing package: %s" % old_abs_path)
        temp_file = open(old_abs_path, 'rb')
        header, _payload_stream, _header_start, _header_end = \
            rhnPackageUpload.load_package(temp_file)
        checksum_type = header.checksum_type()
        checksum = getFileChecksum(checksum_type, file_obj=temp_file)

        old_path = package['path'].split('/')
        nevra = parseRPMFilename(old_path[-1])
        org_id = old_path[1]
        new_path = get_package_path(nevra, org_id, prepend=old_path[0], checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)

        # Filer content relocation
        try:
            if old_abs_path != new_abs_path:
                if debug:
                    log.writeMessage("Relocating %s to %s on filer" % (old_abs_path, new_abs_path))

                new_abs_dir = os.path.dirname(new_abs_path)
                if not os.path.isdir(new_abs_dir):
                    os.makedirs(new_abs_dir)

                # link() the old path to the new path
                if not os.path.exists(new_abs_path):
                    os.link(old_abs_path, new_abs_path)
                elif debug:
                    log.writeMessage("File %s already exists" % new_abs_path)

                # Make the new path readable
                os.chmod(new_abs_path, int('0644', 8))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when relocating %s to %s on filer: %s" % \
                (old_abs_path, new_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

        # Update package checksum in the database
        _update_sha256_package_sql.execute(ctype=checksum_type, csum=checksum,
                                           path=new_path, id=package['id'])

        _select_checksum_type_id_sql = rhnSQL.prepare(_select_checksum_type_id)
        _select_checksum_type_id_sql.execute(ctype=checksum_type)
        checksum_type_id = _select_checksum_type_id_sql.fetchone()[0]

        # Update checksum of every single file in a package
        for i, f in enumerate(header['filenames']):
            csum = header['filemd5s'][i]
            # Do not update checksums for directories & links
            if not csum:
                continue
            _update_package_files_sql.execute(ctype_id=checksum_type_id, csum=csum,
                                              pid=package['id'], filename=f)

        rhnSQL.commit()

        try:
            if os.path.exists(old_abs_path):
                os.unlink(old_abs_path)
            if os.path.exists(os.path.dirname(old_abs_path)):
                os.removedirs(os.path.dirname(old_abs_path))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when removing %s: %s" % (old_abs_path, str(e))
#......... part of the code omitted here .........
Example 15: get_package
def get_package(self, pack):
    url = self.base_url + "/" + pack["path"]
    file_path = self._download(url)
    if getFileChecksum(pack["checksum_type"], filename=file_path) != pack["checksum"]:
        raise IOError("Package file does not match intended download.")
    return file_path