This article collects typical usage examples of the Python method tensorflow.python.lib.io.file_io.recursive_create_dir. If you are unsure what file_io.recursive_create_dir does or how to call it, the curated examples below should help. You can also explore other members of the containing module, tensorflow.python.lib.io.file_io.
The following shows 15 code examples of file_io.recursive_create_dir, ordered by popularity by default.
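Before the examples, here is a minimal sketch of the method itself. recursive_create_dir creates a directory together with any missing parent directories, and accepts both local paths and gs:// paths. The output path below is an illustrative placeholder, not taken from any of the examples.

# Minimal sketch (placeholder path; assumes TensorFlow is installed).
import os
from tensorflow.python.lib.io import file_io

output_dir = '/tmp/demo_output/nested/dirs'  # hypothetical local path; a gs:// path also works
if not file_io.file_exists(output_dir):
  file_io.recursive_create_dir(output_dir)  # also creates missing parent directories
file_io.write_string_to_file(os.path.join(output_dir, 'hello.txt'), 'hello')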
Example 1: save_pipeline_config
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def save_pipeline_config(pipeline_config, directory):
  """Saves a pipeline config text file to disk.

  Args:
    pipeline_config: A pipeline_pb2.TrainEvalPipelineConfig.
    directory: The model directory into which the pipeline config file will be
      saved.
  """
  if not file_io.file_exists(directory):
    file_io.recursive_create_dir(directory)
  pipeline_config_path = os.path.join(directory, "pipeline.config")
  config_text = text_format.MessageToString(pipeline_config)
  with tf.gfile.Open(pipeline_config_path, "wb") as f:
    tf.logging.info("Writing pipeline config file to %s",
                    pipeline_config_path)
    f.write(config_text)
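A hypothetical call of the helper above, assuming the TF Object Detection API's pipeline_pb2 proto is available; the directory is a placeholder:

# Hypothetical usage; pipeline_pb2 comes from the TF Object Detection API and
# the directory path is a placeholder.
from object_detection.protos import pipeline_pb2

config = pipeline_pb2.TrainEvalPipelineConfig()
save_pipeline_config(config, '/tmp/model_dir')  # writes /tmp/model_dir/pipeline.config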
Example 2: __init__
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def __init__(self, export_dir):
  self._saved_model = saved_model_pb2.SavedModel()
  self._saved_model.saved_model_schema_version = (
      constants.SAVED_MODEL_SCHEMA_VERSION)

  self._export_dir = export_dir
  if file_io.file_exists(export_dir):
    raise AssertionError(
        "Export directory already exists. Please specify a different export "
        "directory: %s" % export_dir)

  file_io.recursive_create_dir(self._export_dir)

  # Boolean to track whether variables and assets corresponding to the
  # SavedModel have been saved. Specifically, the first meta graph to be added
  # MUST use the add_meta_graph_and_variables() API. Subsequent add operations
  # on the SavedModel MUST use the add_meta_graph() API, which does not save
  # weights.
  self._has_saved_variables = False
Example 3: start
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def start(self):
  """Performs startup logic, including building graphs."""
  if self._config.master:
    # Save out job information for later reference alongside all other outputs.
    job_args = ' '.join(self._model_builder.args._args).replace(' --', '\n--').split('\n')
    job_info = {
        'config': self._config._env,
        'args': job_args
    }
    job_spec = yaml.safe_dump(job_info, default_flow_style=False)
    job_file = os.path.join(self._output, 'job.yaml')

    tfio.recursive_create_dir(self._output)
    tfio.write_string_to_file(job_file, job_spec)

    # Create a checkpoints directory. This is needed to ensure checkpoint
    # restoration logic can look up an existing directory.
    tfio.recursive_create_dir(self.checkpoints_path)

  # Build the graphs that will be used during the course of the job.
  self._training, self._evaluation, self._prediction = \
      self._model_builder.build_graph_interfaces(self._inputs, self._config)
Example 4: _recursive_copy
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def _recursive_copy(src_dir, dest_dir):
  """Copy the contents of src_dir into the folder dest_dir.

  Args:
    src_dir: GCS or local path.
    dest_dir: GCS or local path.

  When called, dest_dir should exist.
  """
  src_dir = python_portable_string(src_dir)
  dest_dir = python_portable_string(dest_dir)

  file_io.recursive_create_dir(dest_dir)
  for file_name in file_io.list_directory(src_dir):
    old_path = os.path.join(src_dir, file_name)
    new_path = os.path.join(dest_dir, file_name)

    if file_io.is_directory(old_path):
      _recursive_copy(old_path, new_path)
    else:
      file_io.copy(old_path, new_path, overwrite=True)
Example 5: recursive_copy
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def recursive_copy(src_dir, dest_dir):
  """Copy the contents of src_dir into the folder dest_dir.

  Args:
    src_dir: GCS or local path.
    dest_dir: GCS or local path.
  """
  file_io.recursive_create_dir(dest_dir)
  for file_name in file_io.list_directory(src_dir):
    old_path = os.path.join(src_dir, file_name)
    new_path = os.path.join(dest_dir, file_name)

    if file_io.is_directory(old_path):
      recursive_copy(old_path, new_path)
    else:
      file_io.copy(old_path, new_path, overwrite=True)
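A hedged usage sketch of recursive_copy: both paths are placeholders (a local source directory and a hypothetical GCS bucket), and the small source tree is created only for illustration.

# Illustrative usage with placeholder paths (local source, hypothetical GCS destination).
src = '/tmp/exported_model'
file_io.recursive_create_dir(src)
file_io.write_string_to_file(os.path.join(src, 'saved_model.pb'), '...')  # dummy content
recursive_copy(src, 'gs://my-bucket/models/exported_model')  # bucket name is a placeholder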
Example 6: local_analysis
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def local_analysis(args):
  if args.analysis:
    # Already analyzed.
    return

  if not args.schema or not args.features:
    raise ValueError('Either --analysis, or both --schema and --features, must be provided.')

  tf_config = json.loads(os.environ.get('TF_CONFIG', '{}'))
  cluster_spec = tf_config.get('cluster', {})
  if len(cluster_spec.get('worker', [])) > 0:
    raise ValueError('If "schema" and "features" are provided, local analysis will run and '
                     'only the BASIC scale tier (no worker nodes) is supported.')

  if cluster_spec and not (args.schema.startswith('gs://') and args.features.startswith('gs://')):
    raise ValueError('Cloud trainer requires GCS paths for --schema and --features.')

  print('Running analysis.')
  schema = json.loads(file_io.read_file_to_string(args.schema).decode())
  features = json.loads(file_io.read_file_to_string(args.features).decode())

  args.analysis = os.path.join(args.job_dir, 'analysis')
  args.transform = True
  file_io.recursive_create_dir(args.analysis)
  feature_analysis.run_local_analysis(args.analysis, args.train, schema, features)
  print('Analysis done.')
Example 7: setUp
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def setUp(self):
  self._test_dir = tempfile.mkdtemp()

  self._analysis_output = os.path.join(self._test_dir, 'analysis_output')
  self._transform_output = os.path.join(self._test_dir, 'transform_output')
  self._train_output = os.path.join(self._test_dir, 'train_output')

  file_io.recursive_create_dir(self._analysis_output)
  file_io.recursive_create_dir(self._transform_output)
  file_io.recursive_create_dir(self._train_output)

  self._csv_train_filename = os.path.join(self._test_dir, 'train_csv_data.csv')
  self._csv_eval_filename = os.path.join(self._test_dir, 'eval_csv_data.csv')
  self._csv_predict_filename = os.path.join(self._test_dir, 'predict_csv_data.csv')
  self._schema_filename = os.path.join(self._test_dir, 'schema_file.json')
  self._features_filename = os.path.join(self._test_dir, 'features_file.json')
Example 8: setUp
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def setUp(self):
  random.seed(12321)
  self._local_dir = tempfile.mkdtemp()  # Local folder for temp files.
  self._gs_dir = 'gs://temp_pydatalab_test_%s' % uuid.uuid4().hex
  subprocess.check_call('gsutil mb %s' % self._gs_dir, shell=True)

  self._input_files = os.path.join(self._gs_dir, 'input_files')

  self._analysis_output = os.path.join(self._gs_dir, 'analysis_output')
  self._transform_output = os.path.join(self._gs_dir, 'transform_output')
  self._train_output = os.path.join(self._gs_dir, 'train_output')
  self._prediction_output = os.path.join(self._gs_dir, 'prediction_output')

  file_io.recursive_create_dir(self._input_files)

  self._csv_train_filename = os.path.join(self._input_files, 'train_csv_data.csv')
  self._csv_eval_filename = os.path.join(self._input_files, 'eval_csv_data.csv')
  self._csv_predict_filename = os.path.join(self._input_files, 'predict_csv_data.csv')
  self._schema_filename = os.path.join(self._input_files, 'schema_file.json')
  self._features_filename = os.path.join(self._input_files, 'features_file.json')

  self._image_files = None
Example 9: _write_assets
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def _write_assets(assets_directory, assets_filename):
  """Writes asset files to be used with SavedModel for half plus two.

  Args:
    assets_directory: The directory to which the assets should be written.
    assets_filename: Name of the file to which the asset contents should be
      written.

  Returns:
    The path to which the assets file was written.
  """
  if not file_io.file_exists(assets_directory):
    file_io.recursive_create_dir(assets_directory)

  path = os.path.join(
      compat.as_bytes(assets_directory), compat.as_bytes(assets_filename))
  file_io.write_string_to_file(path, "asset-file-contents")
  return path
Example 10: __init__
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def __init__(self, export_dir):
  self._saved_model = saved_model_pb2.SavedModel()
  self._saved_model.saved_model_schema_version = (
      constants.SAVED_MODEL_SCHEMA_VERSION)

  self._export_dir = export_dir
  if file_io.file_exists(export_dir):
    raise AssertionError(
        "Export directory already exists. Please specify a different export "
        "directory.")

  file_io.recursive_create_dir(self._export_dir)

  # Boolean to track whether variables and assets corresponding to the
  # SavedModel have been saved. Specifically, the first meta graph to be added
  # MUST use the add_meta_graph_and_variables() API. Subsequent add operations
  # on the SavedModel MUST use the add_meta_graph() API, which does not save
  # weights.
  self._has_saved_variables = False
Example 11: _write_assets
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def _write_assets(assets_directory, assets_filename):
  """Writes asset files to be used with SavedModel for half plus two.

  Args:
    assets_directory: The directory to which the assets should be written.
    assets_filename: Name of the file to which the asset contents should be
      written.

  Returns:
    The path to which the assets file was written.
  """
  if not file_io.file_exists(assets_directory):
    file_io.recursive_create_dir(assets_directory)

  path = os.path.join(
      tf.compat.as_bytes(assets_directory), tf.compat.as_bytes(assets_filename))
  file_io.write_string_to_file(path, "asset-file-contents")
  return path
Example 12: _save_and_write_assets
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def _save_and_write_assets(self, assets_collection_to_add=None):
  """Saves assets to the meta graph and writes asset files to disk.

  Args:
    assets_collection_to_add: The collection where the asset paths are set up.
  """
  asset_source_filepath_list = _maybe_save_assets(assets_collection_to_add)

  # Return if there are no assets to write.
  if len(asset_source_filepath_list) == 0:
    tf_logging.info("No assets to write.")
    return

  assets_destination_dir = os.path.join(
      compat.as_bytes(self._export_dir),
      compat.as_bytes(constants.ASSETS_DIRECTORY))

  if not file_io.file_exists(assets_destination_dir):
    file_io.recursive_create_dir(assets_destination_dir)

  # Copy each asset from source path to destination path.
  for asset_source_filepath in asset_source_filepath_list:
    asset_source_filename = os.path.basename(asset_source_filepath)

    asset_destination_filepath = os.path.join(
        compat.as_bytes(assets_destination_dir),
        compat.as_bytes(asset_source_filename))

    # Only copy the asset file to the destination if it does not already
    # exist. This is to ensure that an asset with the same name defined as
    # part of multiple graphs is only copied the first time.
    if not file_io.file_exists(asset_destination_filepath):
      file_io.copy(asset_source_filepath, asset_destination_filepath)

  tf_logging.info("Assets written to: %s", assets_destination_dir)
Example 13: save
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def save(self, as_text=False):
  """Writes a `SavedModel` protocol buffer to disk.

  The function writes the SavedModel protocol buffer to the export directory
  in serialized format.

  Args:
    as_text: Writes the SavedModel protocol buffer in text format to disk.

  Returns:
    The path to which the SavedModel protocol buffer was written.
  """
  if not file_io.file_exists(self._export_dir):
    file_io.recursive_create_dir(self._export_dir)

  if as_text:
    path = os.path.join(
        compat.as_bytes(self._export_dir),
        compat.as_bytes(constants.SAVED_MODEL_FILENAME_PBTXT))
    file_io.write_string_to_file(path, str(self._saved_model))
  else:
    path = os.path.join(
        compat.as_bytes(self._export_dir),
        compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))
    file_io.write_string_to_file(path, self._saved_model.SerializeToString())
  tf_logging.info("SavedModel written to: %s", path)

  return path
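For context, the `save` method above is the final step of the TF 1.x SavedModelBuilder workflow. A minimal sketch of that surrounding workflow, assuming the TF 1.x session API; the export path and the tiny graph are illustrative only:

# Sketch of the builder workflow around save() (TF 1.x API); export_dir is a placeholder.
import tensorflow as tf

export_dir = '/tmp/saved_model_demo'
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
with tf.Session(graph=tf.Graph()) as sess:
  x = tf.placeholder(tf.float32, name='x')
  y = tf.identity(x * 2.0, name='y')
  sess.run(tf.global_variables_initializer())
  # The first meta graph must be added together with the variables.
  builder.add_meta_graph_and_variables(sess, [tf.saved_model.tag_constants.SERVING])
builder.save()  # writes saved_model.pb (or .pbtxt with as_text=True) under export_dir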
Example 14: _save_and_write_assets
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def _save_and_write_assets(self, assets_collection_to_add=None):
  """Saves assets to the meta graph and writes asset files to disk.

  Args:
    assets_collection_to_add: The collection where the asset paths are set up.
  """
  asset_source_filepath_list = self._maybe_save_assets(
      assets_collection_to_add)

  # Return if there are no assets to write.
  if len(asset_source_filepath_list) == 0:
    tf_logging.info("No assets to write.")
    return

  assets_destination_dir = os.path.join(
      compat.as_bytes(self._export_dir),
      compat.as_bytes(constants.ASSETS_DIRECTORY))

  if not file_io.file_exists(assets_destination_dir):
    file_io.recursive_create_dir(assets_destination_dir)

  # Copy each asset from source path to destination path.
  for asset_source_filepath in asset_source_filepath_list:
    asset_source_filename = os.path.basename(asset_source_filepath)

    asset_destination_filepath = os.path.join(
        compat.as_bytes(assets_destination_dir),
        compat.as_bytes(asset_source_filename))

    # Only copy the asset file to the destination if it does not already
    # exist. This is to ensure that an asset with the same name defined as
    # part of multiple graphs is only copied the first time.
    if not file_io.file_exists(asset_destination_filepath):
      file_io.copy(asset_source_filepath, asset_destination_filepath)

  tf_logging.info("Assets written to: %s", assets_destination_dir)
Example 15: parse_arguments
# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import recursive_create_dir [as alias]
def parse_arguments(argv):
  """Parses command line arguments.

  Args:
    argv: list of command line arguments, including the program name.

  Returns:
    An argparse Namespace object.
  """
  parser = argparse.ArgumentParser(
      description='Runs preprocessing on structured CSV data.')
  parser.add_argument('--input-file-pattern',
                      type=str,
                      required=True,
                      help='Input CSV file names. May contain a file pattern.')
  parser.add_argument('--output-dir',
                      type=str,
                      required=True,
                      help='Google Cloud Storage or local directory in which to place outputs.')
  parser.add_argument('--schema-file',
                      type=str,
                      required=True,
                      help='BigQuery JSON schema file.')
  args = parser.parse_args(args=argv[1:])

  # Make sure the output folder exists if it is a local folder.
  file_io.recursive_create_dir(args.output_dir)

  return args
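A minimal invocation sketch of the parser above. The program name and flag values are placeholders; note that with a gs:// output directory, the trailing recursive_create_dir call goes through the GCS filesystem and therefore needs valid credentials.

# Illustrative invocation; all values are placeholders.
args = parse_arguments(['preprocess.py',
                        '--input-file-pattern', 'gs://my-bucket/data/*.csv',
                        '--output-dir', 'gs://my-bucket/preprocess_output',
                        '--schema-file', 'gs://my-bucket/schema.json'])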