本文整理汇总了Python中checksumdir.dirhash方法的典型用法代码示例。如果您正苦于以下问题:Python checksumdir.dirhash方法的具体用法?Python checksumdir.dirhash怎么用?Python checksumdir.dirhash使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类checksumdir
的用法示例。
在下文中一共展示了checksumdir.dirhash方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_KerasBackend_Train_twice
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def test_KerasBackend_Train_twice(clgen_cache_dir, abc_keras_model_config):
    """Test that the Keras checkpoint does not change after training twice.

    NOTE(review): the original docstring said "TensorFlow checkpoint", but this
    test exercises the Keras backend config — presumably a copy/paste slip from
    the sibling TensorFlow test; confirm against the fixture definitions.
    """
    # The fixture presumably redirects the model cache to a temp dir; its value
    # itself is not needed here — TODO confirm.
    del clgen_cache_dir
    abc_keras_model_config.training.num_epochs = 1
    m = models.Model(abc_keras_model_config)
    m.Train()
    # Snapshot the checkpoint directory hash and metadata checksum after the
    # first full training run.
    f1a = checksumdir.dirhash(m.cache.path / "checkpoints")
    f1b = crypto.md5_file(m.cache.path / "META.pbtxt")
    m.Train()
    # Training an already-trained model should be a no-op: neither the
    # checkpoints nor the metadata file may change.
    f2a = checksumdir.dirhash(m.cache.path / "checkpoints")
    f2b = crypto.md5_file(m.cache.path / "META.pbtxt")
    assert f1a == f2a
    assert f1b == f2b
# TODO(cec): Add tests on incrementally trained model predictions and losses.
# KerasBackend.Sample() tests.
示例2: GetHashOfArchiveContents
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def GetHashOfArchiveContents(archive: pathlib.Path) -> str:
    """Compute the checksum of the contents of an archive.

    The archive is unpacked with ``tar`` into a temporary directory and the
    sha1 dirhash of the extracted tree is returned, so the result depends only
    on the archive's contents, not on its compression or timestamps.

    Args:
      archive: Path of the archive.

    Returns:
      Checksum of the archive contents.

    Raises:
      UserError: If the requested archive does not exist, or cannot be unpacked.
    """
    if not archive.is_file():
        raise errors.UserError(f"Archive not found: '{archive}'")
    with tempfile.TemporaryDirectory(prefix="clgen_corpus_") as d:
        # -C extracts into the temp dir; tar auto-detects the compression.
        cmd = ["tar", "-xf", str(archive), "-C", d]
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            # Chain the subprocess failure so the tar exit status is preserved
            # in the traceback instead of being silently discarded.
            raise errors.UserError(f"Archive unpack failed: '{archive}'") from e
        return checksumdir.dirhash(d, "sha1")
示例3: update_addon
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def update_addon(self, url, update, force):
    """Update a single installed addon identified by *url*.

    Returns a 4-tuple ``(name, new_version, old_version, modified)``; for an
    addon that is not installed it returns ``(url, False, False, False)``.
    NOTE(review): exact semantics of ``modified`` depend on check_checksum —
    presumably truthy when local files were edited by the user; confirm.
    """
    old = self.check_if_installed(url)
    if old:
        new = self.parse_url(old['URL'])
        oldversion = old['Version']
        # Prefer the cached checksum result to avoid re-hashing directories.
        if old['URL'] in self.checksumCache:
            modified = self.checksumCache[old['URL']]
        else:
            modified = self.check_checksum(old, False)
        # Reinstall when forced, or when a newer version exists, updating is
        # enabled, and the local files were not hand-modified.
        if force or (new.currentVersion != old['Version'] and update and not modified):
            new.get_addon()
            # Remove the old directories before installing the new payload.
            self.cleanup(old['Directories'])
            new.install(self.path)
            checksums = {}
            for directory in new.directories:
                checksums[directory] = dirhash(self.path / directory)
            old['Name'] = new.name
            old['Version'] = new.currentVersion
            old['Directories'] = new.directories
            old['Checksums'] = checksums
        self.save_config()
        # A forced reinstall reports modified as False regardless of checksums.
        return new.name, new.currentVersion, oldversion, modified if not force else False
    return url, False, False, False
示例4: get_dir_hash
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def get_dir_hash(dir):
    """Return the sha256 dirhash of *dir*; reject an empty folder."""
    entries = os.listdir(dir)
    if not entries:
        raise Exception(f"Cannot compute hash of folder {dir}: folder is empty.")
    return dirhash(dir, 'sha256')
示例5: test_training_should_be_reproducible
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def test_training_should_be_reproducible(self):
    """Two parsers fitted with the same seed must persist identical artifacts."""
    # Given
    seed = 42
    dataset_stream = io.StringIO("""
---
type: intent
name: MakeTea
utterances:
- make me a [beverage_temperature:Temperature](hot) cup of tea
- make me [number_of_cups:snips/number](five) tea cups
---
type: intent
name: MakeCoffee
utterances:
- make me [number_of_cups:snips/number](one) cup of coffee please
- brew [number_of_cups] cups of coffee""")
    dataset = Dataset.from_yaml_files("en", [dataset_stream]).json

    # When: fit two identically-seeded parsers on the same dataset
    first = DeterministicIntentParser(random_state=seed)
    first.fit(dataset)
    second = DeterministicIntentParser(random_state=seed)
    second.fit(dataset)

    # Then: the persisted directories hash to the same digest
    with temp_dir() as tmp_dir:
        first_dir = tmp_dir / "parser1"
        second_dir = tmp_dir / "parser2"
        first.persist(first_dir)
        second.persist(second_dir)
        self.assertEqual(
            dirhash(str(first_dir), 'sha256'),
            dirhash(str(second_dir), 'sha256'))
示例6: test_training_should_be_reproducible
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def test_training_should_be_reproducible(self):
    """Two engines fitted with the same seed must persist identical artifacts."""
    # Given
    seed = 42
    dataset_stream = io.StringIO("""
---
type: intent
name: MakeTea
utterances:
- make me a hot cup of tea
- make me five tea cups
---
type: intent
name: MakeCoffee
utterances:
- make me one cup of coffee please
- brew two cups of coffee""")
    dataset = Dataset.from_yaml_files("en", [dataset_stream]).json

    # When: create and fit each engine in turn (preserves create/fit order)
    engines = []
    for _ in range(2):
        engine = SnipsNLUEngine(random_state=seed)
        engine.fit(dataset)
        engines.append(engine)

    # Then: the serialized engine directories hash identically
    with temp_dir() as tmp_dir:
        digests = []
        for name, engine in zip(("engine1", "engine2"), engines):
            target = tmp_dir / name
            engine.persist(target)
            digests.append(dirhash(str(target), 'sha256'))
        self.assertEqual(digests[0], digests[1])
示例7: test_training_should_be_reproducible
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def test_training_should_be_reproducible(self):
    """Two classifiers fitted with the same seed must persist identical artifacts."""
    # Given
    seed = 40
    dataset_stream = io.StringIO("""
---
type: intent
name: MakeTea
utterances:
- make me a [beverage_temperature:Temperature](hot) cup of tea
- make me [number_of_cups:snips/number](five) tea cups
---
type: intent
name: MakeCoffee
utterances:
- make me [number_of_cups:snips/number](one) cup of coffee please
- brew [number_of_cups] cups of coffee""")
    dataset = Dataset.from_yaml_files("en", [dataset_stream]).json

    # When: create and fit each classifier in turn (preserves create/fit order)
    classifiers = []
    for _ in range(2):
        clf = LogRegIntentClassifier(random_state=seed)
        clf.fit(dataset)
        classifiers.append(clf)

    # Then: the serialized classifier directories hash identically
    with temp_dir() as tmp_dir:
        digests = []
        for name, clf in zip(("classifier1", "classifier2"), classifiers):
            target = tmp_dir / name
            clf.persist(target)
            digests.append(dirhash(str(target), 'sha256'))
        self.assertEqual(digests[0], digests[1])
示例8: test_training_should_be_reproducible
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def test_training_should_be_reproducible(self):
    """Two featurizers fitted with the same shared seed must persist identically."""
    # Given
    dataset_stream = io.StringIO("""
---
type: intent
name: MakeTea
utterances:
- make me a [beverage_temperature:Temperature](hot) cup of tea
- make me [number_of_cups:snips/number](five) tea cups
---
type: intent
name: MakeCoffee
utterances:
- make me [number_of_cups:snips/number](one) cup of coffee please
- brew [number_of_cups] cups of coffee""")
    dataset = Dataset.from_yaml_files("en", [dataset_stream]).json
    utterances = [
        text_to_utterance("please make me two hots cups of tea"),
        text_to_utterance("i want a cup of coffee"),
    ]
    classes = np.array([0, 1])
    none_class = max(classes)
    shared = self.get_shared_data(dataset)
    shared["random_state"] = 42

    # When: fit two featurizers built from the same shared config
    first = Featurizer(**shared)
    first.fit(dataset, utterances, classes, none_class)
    second = Featurizer(**shared)
    second.fit(dataset, utterances, classes, none_class)

    # Then: the persisted featurizer directories hash identically
    with temp_dir() as tmp_dir:
        first_dir = tmp_dir / "featurizer1"
        second_dir = tmp_dir / "featurizer2"
        first.persist(first_dir)
        second.persist(second_dir)
        self.assertEqual(
            dirhash(str(first_dir), 'sha256'),
            dirhash(str(second_dir), 'sha256'))
示例9: _get_hashtag
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def _get_hashtag(file_path):
    '''Get sha256 of given directory or file'''
    # Files get a plain content hash; directories get a recursive dirhash.
    if not os.path.isdir(file_path):
        return _get_file_sha256(file_path)
    return dirhash(file_path, 'sha256')
示例10: __init__
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def __init__(self, tokenizer):
    """Wrap *tokenizer* and derive a content-based setting hash.

    The tokenizer is serialized to a throwaway directory and the sha256
    dirhash of that serialization keys the setting hash, so two tokenizers
    with identical on-disk state produce the same hash.
    """
    self.tokenizer = tokenizer
    self._tokenizer_class_name = type(tokenizer).__name__
    with tempfile.TemporaryDirectory() as scratch_dir:
        tokenizer.save_pretrained(str(scratch_dir))
        serialized_hash = dirhash(str(scratch_dir), "sha256")
    self._setting_hash = hashlib.sha256(dumps(["pretrained", serialized_hash])).hexdigest()
示例11: _get_dirhash
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def _get_dirhash(absolute_dirpath):
    """Return the checksum of the directory tree at *absolute_dirpath*.

    Uses checksumdir's default hashing algorithm.
    """
    digest = checksumdir.dirhash(absolute_dirpath)
    return digest
示例12: get_dirhash
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def get_dirhash(absolute_dirpath):
    """Return the checksum of the directory tree at *absolute_dirpath*.

    Uses checksumdir's default hashing algorithm.
    """
    result = checksumdir.dirhash(absolute_dirpath)
    return result
示例13: calculate_custom_check_bins_hash
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def calculate_custom_check_bins_hash(custom_check_bins_provided, custom_check_bins_dir):
    """Return the sha1 dirhash of the check-bins dir, or '' when not provided.

    *custom_check_bins_provided* is the literal string 'true' when bins exist;
    any other value yields the empty string.
    """
    if custom_check_bins_provided != 'true':
        return ''
    return checksumdir.dirhash(custom_check_bins_dir, 'sha1')
示例14: _HashDirectory
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def _HashDirectory(self, absolute_path: pathlib.Path) -> str:
    """Return the checksum of the directory tree at *absolute_path*.

    The hash is computed through ``_InMemoryWrapper``, which presumably caches
    results keyed by a last-modified timestamp — TODO confirm.
    """
    if fs.directory_is_empty(absolute_path):
        # For an empty directory, report "now" as the modification time.
        # NOTE(review): this makes every lookup appear stale, apparently so an
        # empty directory is always re-hashed rather than served from cache —
        # confirm against _InMemoryWrapper semantics.
        last_modified_fn = lambda path: int(time.time())
    else:
        last_modified_fn = lambda path: GetDirectoryMTime(path)
    return self._InMemoryWrapper(
        absolute_path,
        last_modified_fn,
        # Hash with the instance-configured algorithm name.
        lambda x: checksumdir.dirhash(x, self.hash_fn_name),
    )
示例15: test_TensorFlowBackend_Train_twice
# 需要导入模块: import checksumdir [as 别名]
# 或者: from checksumdir import dirhash [as 别名]
def test_TensorFlowBackend_Train_twice(
    clgen_cache_dir, abc_tensorflow_model_config
):
    """Test that TensorFlow checkpoint does not change after training twice."""
    del clgen_cache_dir
    abc_tensorflow_model_config.training.num_epochs = 1
    model = models.Model(abc_tensorflow_model_config)

    def snapshot():
        # Capture both the checkpoint-dir hash and the metadata checksum.
        return (
            checksumdir.dirhash(model.cache.path / "checkpoints"),
            crypto.md5_file(model.cache.path / "META.pbtxt"),
        )

    model.Train()
    first = snapshot()
    model.Train()
    second = snapshot()
    # Re-training a fully trained model must leave all artifacts unchanged.
    assert first[0] == second[0]
    assert first[1] == second[1]