This article collects typical usage examples of the create_dir function from Python's tensorflow.python.lib.io.file_io module. If you are wondering what exactly create_dir does and how to use it, the curated code examples below should help.
The 15 create_dir code examples shown below are sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
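Before working through the examples, here is a minimal standalone sketch (not part of the curated examples; the temporary paths are illustrative only) of how file_io.create_dir behaves: it creates a single directory level, while file_io.recursive_create_dir also creates any missing parents.

import os
import tempfile

from tensorflow.python.lib.io import file_io

# create_dir makes exactly one directory level.
base_dir = os.path.join(tempfile.mkdtemp(), "demo_dir")
file_io.create_dir(base_dir)
print(file_io.is_directory(base_dir))  # True

# Missing parents are not created by create_dir; use recursive_create_dir.
nested_dir = os.path.join(base_dir, "a", "b", "c")
file_io.recursive_create_dir(nested_dir)
print(file_io.is_directory(nested_dir))  # True

# Clean up everything under base_dir.
file_io.delete_recursively(base_dir)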
Example 1: save

def save(self, as_text=False):
  """Writes a `SavedModel` protocol buffer to disk.

  The function writes the SavedModel protocol buffer to the export directory
  in serialized format.

  Args:
    as_text: Writes the SavedModel protocol buffer in text format to disk.

  Returns:
    The path to which the SavedModel protocol buffer was written.
  """
  if not file_io.file_exists(self._export_dir):
    file_io.create_dir(self._export_dir)

  if as_text:
    path = os.path.join(
        compat.as_bytes(self._export_dir),
        compat.as_bytes(constants.SAVED_MODEL_FILENAME_PBTXT))
    file_io.write_string_to_file(path, str(self._saved_model))
  else:
    path = os.path.join(
        compat.as_bytes(self._export_dir),
        compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))
    file_io.write_string_to_file(path, self._saved_model.SerializeToString())
  tf_logging.info("SavedModel written to: %s", path)

  return path
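Example 1's save method belongs to TensorFlow's SavedModelBuilder. As a rough usage sketch (assuming a TF 1.x graph/session workflow; the variable and export path below are illustrative only), it is typically invoked like this:

import os
import tempfile

import tensorflow as tf

# SavedModelBuilder expects the export directory to not already contain a model.
export_dir = os.path.join(tempfile.mkdtemp(), "saved_model_demo")
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)

with tf.Session(graph=tf.Graph()) as sess:
  # A trivial variable so there is something for the builder to save.
  v = tf.Variable(42.0, name="v")
  sess.run(tf.global_variables_initializer())
  builder.add_meta_graph_and_variables(sess, [tf.saved_model.tag_constants.SERVING])

# save() creates export_dir via file_io.create_dir if needed and writes
# saved_model.pb (or saved_model.pbtxt when as_text=True).
path = builder.save()
print(path)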
Example 2: _save_and_write_assets

def _save_and_write_assets(self, assets_collection_to_add=None):
  """Saves assets to the meta graph and writes asset files to disk.

  Args:
    assets_collection_to_add: The collection where the asset paths are set up.
  """
  asset_source_filepath_list = self._save_assets(assets_collection_to_add)

  # Return if there are no assets to write.
  if not asset_source_filepath_list:
    tf_logging.info("No assets to write.")
    return

  assets_destination_dir = os.path.join(
      compat.as_bytes(self._export_dir),
      compat.as_bytes(constants.ASSETS_DIRECTORY))

  if not file_io.file_exists(assets_destination_dir):
    file_io.create_dir(assets_destination_dir)

  # Copy each asset from source path to destination path.
  for asset_source_filepath in asset_source_filepath_list:
    asset_source_filename = os.path.basename(asset_source_filepath)
    asset_destination_filepath = os.path.join(
        compat.as_bytes(assets_destination_dir),
        compat.as_bytes(asset_source_filename))
    file_io.copy(
        asset_source_filepath, asset_destination_filepath, overwrite=True)

  tf_logging.info("Assets written to: %s", assets_destination_dir)
Example 3: create_dir_test

def create_dir_test():
  """Verifies file_io directory handling methods."""
  starttime = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime)
  print("Creating dir %s" % dir_name)
  file_io.create_dir(dir_name)
  elapsed = int(round(time.time() * 1000)) - starttime
  print("Created directory in: %d milliseconds" % elapsed)

  # Check that the directory exists.
  dir_exists = file_io.is_directory(dir_name)
  print("%s directory exists: %s" % (dir_name, dir_exists))

  # List contents of the just-created directory.
  print("Listing directory %s." % dir_name)
  starttime = int(round(time.time() * 1000))
  print(file_io.list_directory(dir_name))
  elapsed = int(round(time.time() * 1000)) - starttime
  print("Listed directory %s in %s milliseconds" % (dir_name, elapsed))

  # Delete the directory.
  print("Deleting directory %s." % dir_name)
  starttime = int(round(time.time() * 1000))
  file_io.delete_recursively(dir_name)
  elapsed = int(round(time.time() * 1000)) - starttime
  print("Deleted directory %s in %s milliseconds" % (dir_name, elapsed))
Example 4: create_object_test

def create_object_test():
  """Verifies file_io's object manipulation methods."""
  starttime = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime)
  print("Creating dir %s." % dir_name)
  file_io.create_dir(dir_name)

  # Create a file in this directory.
  file_name = "%s/test_file.txt" % dir_name
  print("Creating file %s." % file_name)
  file_io.write_string_to_file(file_name, "test file creation.")

  list_files_pattern = "%s/test_file*.txt" % dir_name
  print("Getting files matching pattern %s." % list_files_pattern)
  files_list = file_io.get_matching_files(list_files_pattern)
  print(files_list)

  assert len(files_list) == 1
  assert files_list[0] == file_name

  # Clean up test files.
  print("Deleting file %s." % file_name)
  file_io.delete_file(file_name)

  # Delete the directory.
  print("Deleting directory %s." % dir_name)
  file_io.delete_recursively(dir_name)
Example 5: setUpClass

def setUpClass(cls):
  # Set up dirs.
  cls.working_dir = tempfile.mkdtemp()
  cls.source_dir = os.path.join(cls.working_dir, 'source')
  cls.analysis_dir = os.path.join(cls.working_dir, 'analysis')
  cls.output_dir = os.path.join(cls.working_dir, 'output')
  file_io.create_dir(cls.source_dir)

  # Make test image files.
  img1_file = os.path.join(cls.source_dir, 'img1.jpg')
  image1 = Image.new('RGB', size=(300, 300), color=(155, 0, 0))
  image1.save(img1_file)

  img2_file = os.path.join(cls.source_dir, 'img2.jpg')
  image2 = Image.new('RGB', size=(50, 50), color=(125, 240, 0))
  image2.save(img2_file)

  img3_file = os.path.join(cls.source_dir, 'img3.jpg')
  image3 = Image.new('RGB', size=(800, 600), color=(33, 55, 77))
  image3.save(img3_file)

  # Download the Inception checkpoint. Note that a gs:// URL doesn't work here
  # because gcloud may not be signed in when running the test.
  url = ('https://storage.googleapis.com/cloud-ml-data/img/' +
         'flower_photos/inception_v3_2016_08_28.ckpt')
  checkpoint_path = os.path.join(cls.working_dir, "checkpoint")
  response = urlopen(url)
  with open(checkpoint_path, 'wb') as f:
    f.write(response.read())

  # Make the csv input file.
  cls.csv_input_filepath = os.path.join(cls.source_dir, 'input.csv')
  file_io.write_string_to_file(
      cls.csv_input_filepath,
      '1,Monday,23.0,red blue,%s\n' % img1_file +
      '0,Friday,18.0,green,%s\n' % img2_file +
      '0,Sunday,12.0,green red blue green,%s\n' % img3_file)

  # Call analyze.py to create analysis results.
  schema = [{'name': 'target_col', 'type': 'FLOAT'},
            {'name': 'cat_col', 'type': 'STRING'},
            {'name': 'num_col', 'type': 'FLOAT'},
            {'name': 'text_col', 'type': 'STRING'},
            {'name': 'img_col', 'type': 'STRING'}]
  schema_file = os.path.join(cls.source_dir, 'schema.json')
  file_io.write_string_to_file(schema_file, json.dumps(schema))

  features = {'target_col': {'transform': 'target'},
              'cat_col': {'transform': 'one_hot'},
              'num_col': {'transform': 'identity'},
              'text_col': {'transform': 'multi_hot'},
              'img_col': {'transform': 'image_to_vec',
                          'checkpoint': checkpoint_path}}
  features_file = os.path.join(cls.source_dir, 'features.json')
  file_io.write_string_to_file(features_file, json.dumps(features))

  cmd = ['python ' + os.path.join(CODE_PATH, 'analyze.py'),
         '--output=' + cls.analysis_dir,
         '--csv=' + cls.csv_input_filepath,
         '--schema=' + schema_file,
         '--features=' + features_file]
  subprocess.check_call(' '.join(cmd), shell=True)
Example 6: testGetMatchingFiles

def testGetMatchingFiles(self):
  dir_path = os.path.join(self._base_dir, "temp_dir")
  file_io.create_dir(dir_path)
  files = ["file1.txt", "file2.txt", "file3.txt"]
  for name in files:
    file_path = os.path.join(dir_path, name)
    file_io.FileIO(file_path, mode="w").write("testing")
  expected_match = [os.path.join(dir_path, name) for name in files]
  self.assertItemsEqual(
      file_io.get_matching_files(os.path.join(dir_path, "file*.txt")),
      expected_match)
  file_io.delete_recursively(dir_path)
  self.assertFalse(file_io.file_exists(os.path.join(dir_path, "file3.txt")))
Example 7: testIsDirectory

def testIsDirectory(self):
  dir_path = os.path.join(self._base_dir, "test_dir")
  # Failure for a non-existing dir.
  with self.assertRaises(errors.NotFoundError):
    file_io.is_directory(dir_path)
  file_io.create_dir(dir_path)
  self.assertTrue(file_io.is_directory(dir_path))
  file_path = os.path.join(dir_path, "test_file")
  file_io.FileIO(file_path, mode="w").write("test")
  # False for a file.
  self.assertFalse(file_io.is_directory(file_path))
Example 8: end

def end(self, session=None):
  super(ExportLastModelMonitor, self).end(session)

  file_io.recursive_create_dir(self._dest)
  _recursive_copy(self.last_export_dir, self._dest)

  if self._additional_assets:
    # TODO(rhaertel): use the actual assets directory. For now, metadata.yaml
    # must be a sibling of the export.meta file.
    assets_dir = self._dest
    file_io.create_dir(assets_dir)
    _copy_all(self._additional_assets, assets_dir)
Example 9: setUpClass

def setUpClass(cls):
  # Set up dirs.
  cls.working_dir = tempfile.mkdtemp()
  cls.source_dir = os.path.join(cls.working_dir, 'source')
  cls.analysis_dir = os.path.join(cls.working_dir, 'analysis')
  cls.output_dir = os.path.join(cls.working_dir, 'output')
  file_io.create_dir(cls.source_dir)

  # Make test image files. Use RGB mode because JPEG does not support an
  # alpha channel.
  img1_file = os.path.join(cls.source_dir, 'img1.jpg')
  image1 = Image.new('RGB', size=(300, 300), color=(155, 0, 0))
  image1.save(img1_file)

  img2_file = os.path.join(cls.source_dir, 'img2.jpg')
  image2 = Image.new('RGB', size=(50, 50), color=(125, 240, 0))
  image2.save(img2_file)

  img3_file = os.path.join(cls.source_dir, 'img3.jpg')
  image3 = Image.new('RGB', size=(800, 600), color=(33, 55, 77))
  image3.save(img3_file)

  # Make the csv input file.
  cls.csv_input_filepath = os.path.join(cls.source_dir, 'input.csv')
  file_io.write_string_to_file(
      cls.csv_input_filepath,
      '1,1,Monday,23.0,%s\n' % img1_file +
      '2,0,Friday,18.0,%s\n' % img2_file +
      '3,0,Sunday,12.0,%s\n' % img3_file)

  # Call analyze.py to create analysis results.
  schema = [{'name': 'key_col', 'type': 'INTEGER'},
            {'name': 'target_col', 'type': 'FLOAT'},
            {'name': 'cat_col', 'type': 'STRING'},
            {'name': 'num_col', 'type': 'FLOAT'},
            {'name': 'img_col', 'type': 'STRING'}]
  schema_file = os.path.join(cls.source_dir, 'schema.json')
  file_io.write_string_to_file(schema_file, json.dumps(schema))

  features = {'key_col': {'transform': 'key'},
              'target_col': {'transform': 'target'},
              'cat_col': {'transform': 'one_hot'},
              'num_col': {'transform': 'identity'},
              'img_col': {'transform': 'image_to_vec'}}
  features_file = os.path.join(cls.source_dir, 'features.json')
  file_io.write_string_to_file(features_file, json.dumps(features))

  cmd = ['python ' + os.path.join(CODE_PATH, 'analyze.py'),
         '--output=' + cls.analysis_dir,
         '--csv=' + cls.csv_input_filepath,
         '--schema=' + schema_file,
         '--features=' + features_file]
  subprocess.check_call(' '.join(cmd), shell=True)

  # Set up a temp GCS bucket.
  cls.bucket_root = 'gs://temp_mltoolbox_test_%s' % uuid.uuid4().hex
  subprocess.check_call('gsutil mb %s' % cls.bucket_root, shell=True)
Example 10: end

def end(self, session=None):
  super(ExportLastModelMonitor, self).end(session)

  # Recursively copy the last export dir from the exporter into the main
  # export location.
  file_io.recursive_create_dir(self._final_model_location)
  _recursive_copy(self.last_export_dir, self._final_model_location)

  if self._additional_assets:
    # TODO(rhaertel): use the actual assets directory. For now, metadata.json
    # must be a sibling of the export.meta file.
    assets_dir = self._final_model_location
    file_io.create_dir(assets_dir)
    _copy_all(self._additional_assets, assets_dir)
Example 11: testListDirectory

def testListDirectory(self):
  dir_path = os.path.join(self._base_dir, "test_dir")
  file_io.create_dir(dir_path)
  files = [b"file1.txt", b"file2.txt", b"file3.txt"]
  for name in files:
    file_path = os.path.join(dir_path, compat.as_str_any(name))
    file_io.write_string_to_file(file_path, "testing")
  subdir_path = os.path.join(dir_path, "sub_dir")
  file_io.create_dir(subdir_path)
  subdir_file_path = os.path.join(subdir_path, "file4.txt")
  file_io.write_string_to_file(subdir_file_path, "testing")
  dir_list = file_io.list_directory(dir_path)
  self.assertItemsEqual(files + [b"sub_dir"], dir_list)
Example 12: testIsDirectory

def testIsDirectory(self):
  dir_path = os.path.join(self._base_dir, "test_dir")
  # False for a non-existing dir.
  self.assertFalse(file_io.is_directory(dir_path))
  file_io.create_dir(dir_path)
  self.assertTrue(file_io.is_directory(dir_path))
  file_path = os.path.join(dir_path, "test_file")
  file_io.FileIO(file_path, mode="w").write("test")
  # False for a file.
  self.assertFalse(file_io.is_directory(file_path))
  # Test that the value returned from `stat()` has `is_directory` set.
  file_statistics = file_io.stat(dir_path)
  self.assertTrue(file_statistics.is_directory)
Example 13: testListDirectory

def testListDirectory(self):
  dir_path = os.path.join(self._base_dir, "test_dir")
  file_io.create_dir(dir_path)
  files = ["file1.txt", "file2.txt", "file3.txt"]
  for name in files:
    file_path = os.path.join(dir_path, name)
    file_io.FileIO(file_path, mode="w").write("testing")
  subdir_path = os.path.join(dir_path, "sub_dir")
  file_io.create_dir(subdir_path)
  subdir_file_path = os.path.join(subdir_path, "file4.txt")
  file_io.FileIO(subdir_file_path, mode="w").write("testing")
  dir_list = file_io.list_directory(dir_path)
  self.assertItemsEqual(files + ["sub_dir"], dir_list)
Example 14: testGetMatchingFiles

def testGetMatchingFiles(self):
  dir_path = os.path.join(self.get_temp_dir(), "temp_dir")
  file_io.create_dir(dir_path)
  files = ["file1.txt", "file2.txt", "file3.txt"]
  for name in files:
    file_path = os.path.join(dir_path, name)
    file_io.write_string_to_file(file_path, "testing")
  expected_match = [os.path.join(dir_path, name) for name in files]
  self.assertItemsEqual(
      file_io.get_matching_files(os.path.join(dir_path, "file*.txt")),
      expected_match)
  for name in files:
    file_path = os.path.join(dir_path, name)
    file_io.delete_file(file_path)
Example 15: _recursive_copy

def _recursive_copy(src_dir, dest_dir):
  """Copies the contents of src_dir into the folder dest_dir.

  When called, dest_dir should already exist.
  """
  for dir_name, sub_dirs, leaf_files in file_io.walk(src_dir):
    # Copy all the files over.
    for leaf_file in leaf_files:
      leaf_file_path = os.path.join(dir_name, leaf_file)
      _copy_all([leaf_file_path], dest_dir)

    # Now make all the folders.
    for sub_dir in sub_dirs:
      file_io.create_dir(os.path.join(dest_dir, sub_dir))
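Example 15 depends on a _copy_all helper that is not shown on this page. A minimal hypothetical sketch of such a helper (the name and behavior here are assumptions based on how it is called above, not the original implementation) could be built on the same file_io API:

import os

from tensorflow.python.lib.io import file_io


def _copy_all(src_files, dest_dir):
  # Hypothetical helper: copy each source file into dest_dir, keeping only
  # the file's base name.
  for src_file in src_files:
    file_name = os.path.basename(src_file)
    new_file_location = os.path.join(dest_dir, file_name)
    file_io.copy(src_file, new_file_location, overwrite=True)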