This article collects typical usage examples of hashlib.algorithms_available in Python. If you are unsure what hashlib.algorithms_available is for, how to call it, or what it looks like in practice, the curated code examples below may help; you can also look further into the usage of the hashlib module it belongs to.
A total of 15 code examples of hashlib.algorithms_available are shown below, sorted by popularity by default.
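Before the project examples, here is a minimal, self-contained sketch (not taken from any project below) of what hashlib.algorithms_available contains and how it is typically paired with hashlib.new():

import hashlib

# Names accepted by hashlib.new() in this interpreter / OpenSSL build.
print(sorted(hashlib.algorithms_available))
# The smaller subset that is guaranteed to exist on every platform.
print(sorted(hashlib.algorithms_guaranteed))

algo = "sha256"
if algo in hashlib.algorithms_available:
    h = hashlib.new(algo)
    h.update(b"example payload")
    print(algo, h.hexdigest())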
Example 1: validate_signature
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def validate_signature(request):
    """Validate that the signature in the header matches the payload."""
    if CONFIG["SECRET"] is None:
        return

    try:
        signature = request.headers["X-Hub-Signature"]
        hashname, hashval = signature.split("=")
    except (KeyError, ValueError):
        raise BadRequestError()

    if (hashname in CONFIG["HASHLIB_BLACKLIST"]) or (
            hashname not in hashlib.algorithms_available
    ):
        raise BadRequestError("X-Hub-Signature hash algorithm unavailable")

    digest = hmac.new(
        CONFIG["SECRET"].encode(), request.raw_body.encode(), hashname
    ).hexdigest()
    if not hmac.compare_digest(digest.encode(), hashval.encode("utf-8")):
        raise UnauthorizedError("X-Hub-Signature mismatch")
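For context, a hedged sketch of the sending side of this exchange: the client computes the same HMAC over the raw body and places it in the X-Hub-Signature header. The header name and the "algo=hexdigest" format follow the GitHub-webhook convention this validator appears to target; the secret and body below are made up.

import hashlib
import hmac

def make_signature_header(secret: str, body: bytes, hashname: str = "sha1") -> str:
    # Build an "X-Hub-Signature"-style value such as "sha1=deadbeef...".
    digest = hmac.new(secret.encode(), body, hashname).hexdigest()
    return "{}={}".format(hashname, digest)

print(make_signature_header("not-a-real-secret", b'{"action": "opened"}'))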
Example 2: process
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def process(self, internal_data, attribute, **kwargs):
    # The salt and the hash algorithm come from separate config keys.
    salt = kwargs.get(CONFIG_KEY_SALT, CONFIG_DEFAULT_SALT)
    hash_algo = kwargs.get(CONFIG_KEY_HASHALGO, CONFIG_DEFAULT_HASHALGO)
    if hash_algo not in hashlib.algorithms_available:
        raise AttributeProcessorError(
            "Hash algorithm not supported: {}".format(hash_algo))

    attributes = internal_data.attributes
    value = attributes.get(attribute, [None])[0]
    if value is None:
        raise AttributeProcessorError(
            "No value for attribute: {}".format(attribute))

    hasher = hashlib.new(hash_algo)
    hasher.update(value.encode('utf-8'))
    hasher.update(salt.encode('utf-8'))
    value_hashed = hasher.hexdigest()
    attributes[attribute][0] = value_hashed
Example 3: list
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def list(self, ctx, *, group=None):
    """Lists available choices for other commands"""
    choices = {
        "documentations": self.documented,
        "hashing": sorted([h for h in algorithms if h.islower()]),
        "references": self.referred,
        "wrapped argument": self.wrapping,
    }

    if group == 'languages':
        emb = discord.Embed(title=f"Available for {group}: {len(self.bot.languages)}",
                            description=f'View them on [tio.run](https://tio.run/#), or in [JSON format](https://tio.run/languages.json)')
        return await ctx.send(embed=emb)

    if group not in choices:
        emb = discord.Embed(title="Available listed commands", description=f"`languages`, `{'`, `'.join(choices)}`")
        return await ctx.send(embed=emb)

    availables = choices[group]
    description = f"`{'`, `'.join([*availables])}`"
    emb = discord.Embed(title=f"Available for {group}: {len(availables)}", description=description)
    await ctx.send(embed=emb)
Example 4: _set_algorithm
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def _set_algorithm(self, value):
    """Setter for algorithm property.

    Args:
        value (str): The hashing algorithm to use. Defaults to sha1.
            See hashlib.algorithms_available for a list of options.
    """
    if value not in hashlib.algorithms_available:
        raise ValueError("Invalid hashing algorithm: {!r}".format(value))

    if value != self._algorithm:
        # reset cache when changing algorithm
        self._cache = {}

    self._algorithm = value
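A minimal sketch of how a validating setter like this is usually wired up with property(); the class and attribute names here are hypothetical and not taken from the project above.

import hashlib

class HashCache:
    def __init__(self, algorithm="sha1"):
        self._algorithm = None
        self._cache = {}
        self._set_algorithm(algorithm)

    def _get_algorithm(self):
        return self._algorithm

    def _set_algorithm(self, value):
        if value not in hashlib.algorithms_available:
            raise ValueError("Invalid hashing algorithm: {!r}".format(value))
        if value != self._algorithm:
            self._cache = {}  # reset cache when the algorithm changes
        self._algorithm = value

    algorithm = property(_get_algorithm, _set_algorithm)

cache = HashCache()
cache.algorithm = "sha256"  # goes through the validating setter
print(cache.algorithm)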
Example 5: process_object
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def process_object(self, file_object):
    '''
    This function must be implemented by the plugin.
    The analysis result must be a dict stored in file_object.processed_analysis[self.NAME].
    If you want to propagate results to parent objects, store a list of strings in the
    'summary' entry of your result dict.
    '''
    file_object.processed_analysis[self.NAME] = {}
    for h in self.hashes_to_create:
        if h in algorithms_available:
            file_object.processed_analysis[self.NAME][h] = get_hash(h, file_object.binary)
        else:
            logging.debug('algorithm {} not available'.format(h))
    file_object.processed_analysis[self.NAME]['ssdeep'] = get_ssdeep(file_object.binary)
    file_object.processed_analysis[self.NAME]['imphash'] = get_imphash(file_object)
    tlsh_hash = get_tlsh(file_object.binary)
    if tlsh_hash:
        file_object.processed_analysis[self.NAME]['tlsh'] = tlsh_hash
    return file_object
Example 6: hasher
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def hasher(data, algos=ALGOS):
    """Create the hash(es) of a given string or file."""
    try:
        data = data.encode()
    except Exception:
        pass
    result = {}
    for algo in sorted(hashlib.algorithms_available):
        if algo in algos:
            h = hashlib.new(algo)
            h.update(data)
            result[algo] = h.hexdigest()
    return result
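The ALGOS default above is defined elsewhere in that project. As a hedged, self-contained variant of the same pattern with a stand-in constant:

import hashlib

ALGOS = ("md5", "sha256")  # stand-in; the real project defines its own list

def hash_all(data, algos=ALGOS):
    # Same idea as hasher() above: one hex digest per requested algorithm.
    if isinstance(data, str):
        data = data.encode()
    result = {}
    for algo in sorted(hashlib.algorithms_available):
        if algo in algos:
            h = hashlib.new(algo)
            h.update(data)
            result[algo] = h.hexdigest()
    return result

print(hash_all("hello"))
# {'md5': '5d41402abc4b2a76b9719d911017c592',
#  'sha256': '2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824'}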
Example 7: compute_file_checksum
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def compute_file_checksum(path, read_chunksize=65536, algorithm='sha256'):
    """Compute checksum of a file's contents.

    :param path: Path to the file
    :param read_chunksize: Maximum number of bytes to be read from the file
        at once. Default is 65536 bytes or 64 KB
    :param algorithm: The hash algorithm name to use. For example, 'md5',
        'sha256', 'sha512' and so on. Default is 'sha256'. Refer to
        hashlib.algorithms_available for available algorithms
    :return: Hex digest string of the checksum

    .. versionadded:: 3.31.0
    """
    checksum = hashlib.new(algorithm)  # Raises appropriate exceptions.
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(read_chunksize), b''):
            checksum.update(chunk)
            # Release greenthread, if greenthreads are not used it is a noop.
            time.sleep(0)
    return checksum.hexdigest()
Example 8: _chromium_hashes_generator
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def _chromium_hashes_generator(hashes_path):
    with hashes_path.open(encoding=ENCODING) as hashes_file:
        hash_lines = hashes_file.read().splitlines()
    for hash_name, hash_hex, _ in map(lambda x: x.lower().split(' '), hash_lines):
        if hash_name in hashlib.algorithms_available:
            yield hash_name, hash_hex
        else:
            get_logger().warning('Skipping unknown hash algorithm: %s', hash_name)
Example 9: test_algorithms_available
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def test_algorithms_available(self):
    self.assertTrue(set(hashlib.algorithms_guaranteed).
                    issubset(hashlib.algorithms_available))
Example 10: validate_hash
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def validate_hash(hash: str, cracked: str, algo: Algo) -> bool:
    # NOTE: I'm all for trusting the third parties added here and not double checking,
    # hence removing the cross-check with multiple services
    res = False
    if algo.name in hashlib.algorithms_available:
        h = hashlib.new(algo.name)
        h.update(cracked.encode("utf-8"))
        if h.hexdigest().lower() == hash:
            res = True
    elif algo in [Algo.LDAP_MD5, Algo.LDAP_SHA1]:
        # NOTE: this is ugly; almost tempted to let the user do the pre-work.
        # Leaving it for now for compatibility purposes.
        alg = algo.name.split('_')[1]
        # base64.decodestring() was removed in Python 3.9; b64decode is the replacement.
        ahash = base64.b64decode(hash.split('}')[1])
        h = hashlib.new(alg)
        h.update(cracked.encode("utf-8"))
        if h.digest() == ahash:
            res = True
    elif algo == Algo.NTLM or (algo == Algo.LM and ':' in hash):
        candidate = hashlib.new('md4', cracked.split()[-1].encode('utf-16le')).hexdigest()
        # It's a LM:NTLM combination or a single NTLM hash
        if (':' in hash and candidate == hash.split(':')[1]) or (':' not in hash and candidate == hash):
            res = True
    else:
        # Can't and won't validate the hash, assuming it's correct
        res = True
    return res
Example 11: store_add_to_backend_with_multihash
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def store_add_to_backend_with_multihash(
        image_id, data, size, hashing_algo, store,
        context=None, verifier=None):
    """
    A wrapper around a call to each store's add() method that requires
    a hashing_algo identifier and returns a 5-tuple including the
    "multihash" computed using the specified hashing_algo. (This
    is an enhanced version of store_add_to_backend(), which is left
    as-is for backward compatibility.)

    :param image_id: The image to which the data is added
    :param data: The data to be stored
    :param size: The length of the data in bytes
    :param store: The store to which the data is being added
    :param hashing_algo: A hashlib algorithm identifier (string)
    :param context: The request context
    :param verifier: An object used to verify signatures for images
    :return: The url location of the file,
             the size amount of data,
             the checksum of the data,
             the multihash of the data,
             the storage system's metadata dictionary for the location
    :raises: ``glance_store.exceptions.BackendException``
             ``glance_store.exceptions.UnknownHashingAlgo``
    """
    if hashing_algo not in hashlib.algorithms_available:
        raise exceptions.UnknownHashingAlgo(algo=hashing_algo)

    (location, size, checksum, multihash, metadata) = store.add(
        image_id, data, size, hashing_algo, context=context, verifier=verifier)

    if metadata is not None:
        _check_metadata(store, metadata)

    return (location, size, checksum, multihash, metadata)
Example 12: __init__
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def __init__(self, file, mode='r'):
    basename = os.path.basename(file)
    self.parsed_filename = WHEEL_INFO_RE.match(basename)
    if not basename.endswith('.whl') or self.parsed_filename is None:
        raise WheelError("Bad wheel filename {!r}".format(basename))

    super(WheelFile, self).__init__(file, mode, compression=ZIP_DEFLATED, allowZip64=True)

    self.dist_info_path = '{}.dist-info'.format(self.parsed_filename.group('namever'))
    self.record_path = self.dist_info_path + '/RECORD'
    self._file_hashes = OrderedDict()
    self._file_sizes = {}
    if mode == 'r':
        # Ignore RECORD and any embedded wheel signatures
        self._file_hashes[self.record_path] = None, None
        self._file_hashes[self.record_path + '.jws'] = None, None
        self._file_hashes[self.record_path + '.p7s'] = None, None

        # Fill in the expected hashes by reading them from RECORD
        try:
            record = self.open(self.record_path)
        except KeyError:
            raise WheelError('Missing {} file'.format(self.record_path))

        with record:
            for line in record:
                line = line.decode('utf-8')
                path, hash_sum, size = line.rsplit(u',', 2)
                if hash_sum:
                    algorithm, hash_sum = hash_sum.split(u'=')
                    if algorithm not in hashlib.algorithms_available:
                        raise WheelError('Unsupported hash algorithm: {}'.format(algorithm))
                    elif algorithm.lower() in {'md5', 'sha1'}:
                        raise WheelError(
                            'Weak hash algorithm ({}) is not permitted by PEP 427'
                            .format(algorithm))

                    self._file_hashes[path] = (
                        algorithm, urlsafe_b64decode(hash_sum.encode('ascii')))
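For reference, a hedged sketch of what one RECORD line looks like and how its digest field round-trips; the path and payload are made up, but the "algorithm=urlsafe-base64-without-padding" format matches what the loop above parses (PEP 376/427). Note that the standard-library urlsafe_b64decode needs the '=' padding restored first; the code above presumably relies on a padding-tolerant helper.

import hashlib
from base64 import urlsafe_b64encode, urlsafe_b64decode

payload = b"print('hello')\n"                      # made-up file contents
digest = hashlib.sha256(payload).digest()
encoded = urlsafe_b64encode(digest).rstrip(b"=")   # PEP 427 strips the '=' padding
record_line = "pkg/__init__.py,sha256={},{}".format(encoded.decode("ascii"), len(payload))
print(record_line)

# Parsing it back, the same way the loop above does:
path, hash_sum, size = record_line.rsplit(",", 2)
algorithm, hash_sum = hash_sum.split("=", 1)
padded = hash_sum + "=" * (-len(hash_sum) % 4)     # restore padding for the stdlib decoder
assert urlsafe_b64decode(padded.encode("ascii")) == digest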
Example 13: file_hash
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def file_hash(fname, alg="sha256"):
    """
    Calculate the hash of a given file.

    Useful for checking if a file has changed or been corrupted.

    Parameters
    ----------
    fname : str
        The name of the file.
    alg : str
        The type of the hashing algorithm

    Returns
    -------
    hash : str
        The hash of the file.

    Examples
    --------
    >>> fname = "test-file-for-hash.txt"
    >>> with open(fname, "w") as f:
    ...     __ = f.write("content of the file")
    >>> print(file_hash(fname))
    0fc74468e6a9a829f103d069aeb2bb4f8646bad58bf146bb0e3379b759ec4a00
    >>> import os
    >>> os.remove(fname)

    """
    if alg not in hashlib.algorithms_available:
        raise ValueError("Algorithm '{}' not available in hashlib".format(alg))
    # Calculate the hash in chunks to avoid overloading the memory
    chunksize = 65536
    hasher = hashlib.new(alg)
    with open(fname, "rb") as fin:
        buff = fin.read(chunksize)
        while buff:
            hasher.update(buff)
            buff = fin.read(chunksize)
    return hasher.hexdigest()
Example 14: makehash
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
async def makehash(_cmd, pld):
    """
    :param _cmd: The command object referenced in the command.
    :type _cmd: sigma.core.mechanics.command.SigmaCommand
    :param pld: The payload with execution data and details.
    :type pld: sigma.core.mechanics.payload.CommandPayload
    """
    if pld.args:
        if len(pld.args) >= 2:
            hash_name = pld.args[0]
            hashes = hashlib.algorithms_available
            if hash_name in hashes:
                qry = ' '.join(pld.args[1:])
                crypt = hashlib.new(hash_name)
                crypt.update(qry.encode('utf-8'))
                final = crypt.hexdigest()
                response = discord.Embed(color=0x66cc66)
                response.add_field(name=f'✅ Hashing With {hash_name.upper()} Done', value=f'```\n{final}\n```')
            else:
                response = discord.Embed(color=0xBE1931)
                response.add_field(name='❗ Unknown Hashing Method', value=f'Available:\n```\n{", ".join(hashes)}\n```')
        else:
            response = error('Not enough arguments.')
    else:
        response = error('Nothing inputted.')
    await pld.msg.channel.send(embed=response)
Example 15: make_digesters
# Required import: import hashlib [as alias]
# Or: from hashlib import algorithms_available [as alias]
def make_digesters(fpath, families, include_CRCs=False):
    """
    Create and return a dictionary of all our active hash algorithms.

    Each digester is a 2-tuple ``( digester.update_func(bytes), digest_func(digester) -> int)``.
    """
    ## TODO: simplify digester-tuple API, ie: (digester, update_func(d), digest_func(d))
    families = set(f.upper() for f in families)
    digesters = OrderedDict()
    digesters['LENGTH'] = (LenDigester(), LenDigester.digest)

    # Default Algos
    for algo in sorted(hashlib.algorithms_available):
        # algorithms_available can have duplicates
        aname = algo.upper()
        if aname not in digesters and is_algo_in_families(aname, families):
            digesters[aname] = (hashlib.new(algo), lambda d: d.hexdigest())

    # CRC
    if include_CRCs:
        for name in sorted(crcmod._crc_definitions_by_name):
            crc_name = crcmod._crc_definitions_by_name[name]['name']
            aname = crc_name.upper()
            if is_algo_in_families(aname, families):
                digesters[aname] = (crcmod.PredefinedCrc(crc_name),
                                    lambda d: hex(d.crcValue))

    add_git_digesters(digesters, fpath)

    ## Append plugin digesters.
    #
    digesters.update(known_digesters)
    for digester in list(digesters.keys()):
        if not is_algo_in_families(digester.upper(), families):
            digesters.pop(digester, None)

    return digesters