

Python file_io.create_dir Method Code Examples

This article collects typical usage examples of the Python method tensorflow.python.lib.io.file_io.create_dir. If you are unsure what file_io.create_dir does or how to call it, the curated examples below should help. You can also explore other usage examples from the tensorflow.python.lib.io.file_io module.


Four code examples of the file_io.create_dir method are shown below, sorted by popularity by default.
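Before the examples, here is a minimal sketch (not taken from any of the projects below) showing the basic call pattern against a local temporary directory. The same file_io calls also accept GCS paths such as gs://bucket/path when TensorFlow's GCS filesystem support is available.

import os
import tempfile

from tensorflow.python.lib.io import file_io

base_dir = tempfile.mkdtemp()
new_dir = os.path.join(base_dir, "demo_dir")

file_io.create_dir(new_dir)             # create a single directory level
print(file_io.is_directory(new_dir))    # True
print(file_io.list_directory(new_dir))  # [] -- nothing has been written yet
file_io.delete_recursively(base_dir)    # clean up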

Example 1: create_dir_test

# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import create_dir [as alias]
def create_dir_test():
  """Verifies file_io directory handling methods ."""

  starttime = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime)
  print("Creating dir %s" % dir_name)
  file_io.create_dir(dir_name)
  elapsed = int(round(time.time() * 1000)) - starttime
  print("Created directory in: %d milliseconds" % elapsed)
  # Check that the directory exists.
  dir_exists = file_io.is_directory(dir_name)
  print("%s directory exists: %s" % (dir_name, dir_exists))

  # List contents of just created directory.
  print("Listing directory %s." % dir_name)
  starttime = int(round(time.time() * 1000))
  print(file_io.list_directory(dir_name))
  elapsed = int(round(time.time() * 1000)) - starttime
  print("Listed directory %s in %s milliseconds" % (dir_name, elapsed))

  # Delete directory.
  print("Deleting directory %s." % dir_name)
  starttime = int(round(time.time() * 1000))
  file_io.delete_recursively(dir_name)
  elapsed = int(round(time.time() * 1000)) - starttime
  print("Deleted directory %s in %s milliseconds" % (dir_name, elapsed)) 
Author ID: tobegit3hub, Project: deep_image_model, Lines of code: 28, Source file: gcs_smoke.py
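Example 1 and Example 2 rely on imports and a FLAGS object defined elsewhere in gcs_smoke.py, which this page does not show. The following is only a hedged approximation of that preamble; the flag name gcs_bucket_url is taken from the examples above, but its definition and help text here are assumptions.

import time

import tensorflow as tf
from tensorflow.python.lib.io import file_io

# Assumed TF 1.x-style flag setup; the real script may define this differently.
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string(
    "gcs_bucket_url", "",
    "URL of the GCS bucket (e.g. gs://my-bucket) used by the smoke test.")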

Example 2: create_object_test

# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import create_dir [as alias]
def create_object_test():
  """Verifies file_io's object manipulation methods ."""
  starttime = int(round(time.time() * 1000))
  dir_name = "%s/tf_gcs_test_%s" % (FLAGS.gcs_bucket_url, starttime)
  print("Creating dir %s." % dir_name)
  file_io.create_dir(dir_name)

  # Create a file in this directory.
  file_name = "%s/test_file.txt" % dir_name
  print("Creating file %s." % file_name)
  file_io.write_string_to_file(file_name, "test file creation.")

  list_files_pattern = "%s/test_file*.txt" % dir_name
  print("Getting files matching pattern %s." % list_files_pattern)
  files_list = file_io.get_matching_files(list_files_pattern)
  print(files_list)

  assert len(files_list) == 1
  assert files_list[0] == file_name

  # Cleanup test files.
  print("Deleting file %s." % file_name)
  file_io.delete_file(file_name)

  # Delete directory.
  print("Deleting directory %s." % dir_name)
  file_io.delete_recursively(dir_name) 
Author ID: tobegit3hub, Project: deep_image_model, Lines of code: 29, Source file: gcs_smoke.py
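The glob matching used in create_object_test can also be exercised locally without a GCS bucket. A small illustrative sketch (not part of gcs_smoke.py):

import os
import tempfile

from tensorflow.python.lib.io import file_io

tmp_dir = tempfile.mkdtemp()
for name in ("test_file.txt", "test_file_2.txt", "other.txt"):
  file_io.write_string_to_file(os.path.join(tmp_dir, name), "contents")

# Only the two test_file*.txt paths should match the pattern.
print(file_io.get_matching_files(os.path.join(tmp_dir, "test_file*.txt")))
file_io.delete_recursively(tmp_dir)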

Example 3: setUpClass

# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import create_dir [as alias]
def setUpClass(cls):

    # Set up dirs.
    cls.working_dir = tempfile.mkdtemp()
    cls.source_dir = os.path.join(cls.working_dir, 'source')
    cls.analysis_dir = os.path.join(cls.working_dir, 'analysis')
    cls.output_dir = os.path.join(cls.working_dir, 'output')
    file_io.create_dir(cls.source_dir)

    # Make test image files.
    img1_file = os.path.join(cls.source_dir, 'img1.jpg')
    image1 = Image.new('RGB', size=(300, 300), color=(155, 0, 0))
    image1.save(img1_file)
    img2_file = os.path.join(cls.source_dir, 'img2.jpg')
    image2 = Image.new('RGB', size=(50, 50), color=(125, 240, 0))
    image2.save(img2_file)
    img3_file = os.path.join(cls.source_dir, 'img3.jpg')
    image3 = Image.new('RGB', size=(800, 600), color=(33, 55, 77))
    image3.save(img3_file)

    # Download the Inception checkpoint. Note that a gs:// URL does not work
    # here because gcloud may not be signed in when the test runs.
    url = ('https://storage.googleapis.com/cloud-ml-data/img/' +
           'flower_photos/inception_v3_2016_08_28.ckpt')
    checkpoint_path = os.path.join(cls.working_dir, "checkpoint")
    response = urlopen(url)
    with open(checkpoint_path, 'wb') as f:
      f.write(response.read())

    # Make csv input file
    cls.csv_input_filepath = os.path.join(cls.source_dir, 'input.csv')
    file_io.write_string_to_file(
        cls.csv_input_filepath,
        '1,Monday,23.0,red blue,%s\n' % img1_file +
        '0,Friday,18.0,green,%s\n' % img2_file +
        '0,Sunday,12.0,green red blue green,%s\n' % img3_file)

    # Call analyze.py to create analysis results.
    schema = [{'name': 'target_col', 'type': 'FLOAT'},
              {'name': 'cat_col', 'type': 'STRING'},
              {'name': 'num_col', 'type': 'FLOAT'},
              {'name': 'text_col', 'type': 'STRING'},
              {'name': 'img_col', 'type': 'STRING'}]
    schema_file = os.path.join(cls.source_dir, 'schema.json')
    file_io.write_string_to_file(schema_file, json.dumps(schema))
    features = {'target_col': {'transform': 'target'},
                'cat_col': {'transform': 'one_hot'},
                'num_col': {'transform': 'identity'},
                'text_col': {'transform': 'multi_hot'},
                'img_col': {'transform': 'image_to_vec', 'checkpoint': checkpoint_path}}
    features_file = os.path.join(cls.source_dir, 'features.json')
    file_io.write_string_to_file(features_file, json.dumps(features))
    cmd = ['python ' + os.path.join(CODE_PATH, 'analyze.py'),
           '--output=' + cls.analysis_dir,
           '--csv=' + cls.csv_input_filepath,
           '--schema=' + schema_file,
           '--features=' + features_file]
    subprocess.check_call(' '.join(cmd), shell=True) 
Author ID: googledatalab, Project: pydatalab, Lines of code: 60, Source file: test_transform.py
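Example 3 (and Example 4 below) assume imports and a CODE_PATH constant defined near the top of pydatalab's test_transform.py, which this page does not reproduce. The following is only a hedged sketch of that context; in particular, how CODE_PATH is computed here and the urlopen import are assumptions.

import json
import os
import subprocess
import tempfile

from PIL import Image
from six.moves.urllib.request import urlopen  # assumed py2/py3-compatible import
from tensorflow.python.lib.io import file_io

# Assumed: the directory containing analyze.py, resolved relative to this test file.
CODE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))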

Example 4: setUpClass

# Required import: from tensorflow.python.lib.io import file_io [as alias]
# Or: from tensorflow.python.lib.io.file_io import create_dir [as alias]
def setUpClass(cls):

    # Set up dirs.
    cls.working_dir = tempfile.mkdtemp()
    cls.source_dir = os.path.join(cls.working_dir, 'source')
    cls.analysis_dir = os.path.join(cls.working_dir, 'analysis')
    cls.output_dir = os.path.join(cls.working_dir, 'output')
    file_io.create_dir(cls.source_dir)

    # Make test image files.
    img1_file = os.path.join(cls.source_dir, 'img1.jpg')
    image1 = Image.new('RGB', size=(300, 300), color=(155, 0, 0))
    image1.save(img1_file)
    img2_file = os.path.join(cls.source_dir, 'img2.jpg')
    image2 = Image.new('RGB', size=(50, 50), color=(125, 240, 0))
    image2.save(img2_file)
    img3_file = os.path.join(cls.source_dir, 'img3.jpg')
    image3 = Image.new('RGB', size=(800, 600), color=(33, 55, 77))
    image3.save(img3_file)

    # Download the Inception checkpoint. Note that a gs:// URL does not work
    # here because gcloud may not be signed in when the test runs.
    url = ('https://storage.googleapis.com/cloud-ml-data/img/' +
           'flower_photos/inception_v3_2016_08_28.ckpt')
    checkpoint_path = os.path.join(cls.working_dir, "checkpoint")
    response = urlopen(url)
    with open(checkpoint_path, 'wb') as f:
      f.write(response.read())

    # Make csv input file
    cls.csv_input_filepath = os.path.join(cls.source_dir, 'input.csv')
    file_io.write_string_to_file(
        cls.csv_input_filepath,
        '1,1,Monday,23.0,%s\n' % img1_file +
        '2,0,Friday,18.0,%s\n' % img2_file +
        '3,0,Sunday,12.0,%s\n' % img3_file)

    # Call analyze.py to create analysis results.
    schema = [{'name': 'key_col', 'type': 'INTEGER'},
              {'name': 'target_col', 'type': 'FLOAT'},
              {'name': 'cat_col', 'type': 'STRING'},
              {'name': 'num_col', 'type': 'FLOAT'},
              {'name': 'img_col', 'type': 'STRING'}]
    schema_file = os.path.join(cls.source_dir, 'schema.json')
    file_io.write_string_to_file(schema_file, json.dumps(schema))
    features = {'key_col': {'transform': 'key'},
                'target_col': {'transform': 'target'},
                'cat_col': {'transform': 'one_hot'},
                'num_col': {'transform': 'identity'},
                'img_col': {'transform': 'image_to_vec', 'checkpoint': checkpoint_path}}
    features_file = os.path.join(cls.source_dir, 'features.json')
    file_io.write_string_to_file(features_file, json.dumps(features))
    cmd = ['python ' + os.path.join(CODE_PATH, 'analyze.py'),
           '--output=' + cls.analysis_dir,
           '--csv=' + cls.csv_input_filepath,
           '--schema=' + schema_file,
           '--features=' + features_file]
    subprocess.check_call(' '.join(cmd), shell=True) 
Author ID: googledatalab, Project: pydatalab, Lines of code: 60, Source file: test_transform.py


Note: The tensorflow.python.lib.io.file_io.create_dir examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are selected from open-source projects contributed by their developers, and the copyright of the source code belongs to the original authors. Please follow the corresponding project's license when distributing or using the code; do not reproduce this article without permission.