

Python plurality_checkable_iterator.PluralityCheckableIterator Class Code Examples

This article collects typical usage examples of the Python class gslib.plurality_checkable_iterator.PluralityCheckableIterator. If you are wondering what the PluralityCheckableIterator class does, how to use it, or where to find usage examples, the curated class code examples below may help.


The sections below present 12 code examples of the PluralityCheckableIterator class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
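Before the examples, here is a minimal usage sketch (written for this article, not taken from gsutil) showing the pattern most of the examples below follow: wrap any iterator in PluralityCheckableIterator, check emptiness or plurality up front, then iterate as usual. The is_empty()/has_plurality() method names match the older gsutil versions quoted below; newer versions spell them IsEmpty() and HasPlurality() (see Example 8).

from gslib.plurality_checkable_iterator import PluralityCheckableIterator

# Wrap an arbitrary iterator; the wrapper buffers any elements it reads
# ahead, so checking emptiness/plurality does not lose them.
matches = PluralityCheckableIterator(iter(['gs://bucket/obj1', 'gs://bucket/obj2']))

if matches.is_empty():
  raise Exception('No URIs matched')   # gsutil commands raise CommandException here
if matches.has_plurality():
  print('request matched more than one URI')

for match in matches:                  # iteration still yields every element
  print(match)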

Example 1: _RecursePrint

  def _RecursePrint(self, blr):
    """
    Expands a bucket listing reference and recurses to its children, calling
    _PrintInfoAboutBucketListingRef for each expanded object found.

    Args:
      blr: An instance of BucketListingRef.

    Returns:
      Tuple containing (number of objects, total number of bytes)
    """
    num_bytes = 0
    num_objs = 0

    if blr.HasKey():
      blr_iterator = iter([blr])
    elif blr.HasPrefix():
      blr_iterator = self.WildcardIterator(
          '%s/*' % blr.GetRStrippedUriString(), all_versions=self.all_versions)
    elif blr.NamesBucket():
      blr_iterator = self.WildcardIterator(
          '%s*' % blr.GetUriString(), all_versions=self.all_versions)
    else:
      # This BLR didn't come from a bucket listing. This case happens for
      # BLR's instantiated from a user-provided URI.
      blr_iterator = PluralityCheckableIterator(
          UriOnlyBlrExpansionIterator(
              self, blr, all_versions=self.all_versions))
      if blr_iterator.is_empty() and not ContainsWildcard(blr.GetUriString()):
        raise CommandException('No such object %s' % blr.GetUriString())

    for cur_blr in blr_iterator:
      if self.exclude_patterns:
        tomatch = cur_blr.GetUriString()
        skip = False
        for pattern in self.exclude_patterns:
          if fnmatch.fnmatch(tomatch, pattern):
            skip = True
            break
        if skip:
          continue
      if cur_blr.HasKey():
        # Object listing.
        no, nb = self._PrintInfoAboutBucketListingRef(cur_blr)
      else:
        # Subdir listing.
        if cur_blr.GetUriString().endswith('//'):
          # Expand gs://bucket// into gs://bucket//* so we don't infinite
          # loop. This case happens when user has uploaded an object whose
          # name begins with a /.
          cur_blr = BucketListingRef(self.suri_builder.StorageUri(
              '%s*' % cur_blr.GetUriString()), None, None, cur_blr.headers)
        no, nb = self._RecursePrint(cur_blr)
      num_bytes += nb
      num_objs += no

    if blr.HasPrefix() and not self.summary_only:
      self._PrintSummaryLine(num_bytes, blr.GetUriString().encode('utf-8'))

    return num_objs, num_bytes
Developer: Hex29A, Project: gsutil, Lines: 60, Source: du.py

Example 2: testPluralityCheckableIteratorWith3Elems

  def testPluralityCheckableIteratorWith3Elems(self):
    """Tests PluralityCheckableIterator with 3 elements."""
    input_list = range(3)
    it = iter(input_list)
    pcit = PluralityCheckableIterator(it)
    self.assertFalse(pcit.is_empty())
    self.assertTrue(pcit.has_plurality())
    output_list = list(pcit)
    self.assertEqual(input_list, output_list)
Developer: FYJen, Project: GoogleCloud, Lines: 9, Source: test_plurality_checkable_iterator.py
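As a complement to the three-element test above, a hypothetical companion test (an illustration for this article, not part of the gsutil test suite) would cover the empty and single-element cases, where the emptiness and plurality checks do not consume the buffered element:

  def testPluralityCheckableIteratorWith0And1Elems(self):
    """Hypothetical test sketch for the empty and single-element cases."""
    # Empty iterator: is_empty() is True and has_plurality() is False.
    pcit = PluralityCheckableIterator(iter([]))
    self.assertTrue(pcit.is_empty())
    self.assertFalse(pcit.has_plurality())
    self.assertEqual([], list(pcit))
    # Single element: not empty, but no plurality; the element read ahead
    # by the checks is still yielded when iterating.
    pcit = PluralityCheckableIterator(iter([1]))
    self.assertFalse(pcit.is_empty())
    self.assertFalse(pcit.has_plurality())
    self.assertEqual([1], list(pcit))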

Example 3: __init__

    def __init__(self, command_obj, base_src_url, base_dst_url):
        self.command_obj = command_obj
        self.compute_file_checksums = command_obj.compute_file_checksums
        self.delete_extras = command_obj.delete_extras
        self.recursion_requested = command_obj.recursion_requested
        self.logger = self.command_obj.logger
        self.base_src_url = base_src_url
        self.base_dst_url = base_dst_url
        self.logger.info("Building synchronization state...")

        (src_fh, self.sorted_list_src_file_name) = tempfile.mkstemp(prefix="gsutil-rsync-src-")
        _tmp_files.append(self.sorted_list_src_file_name)
        (dst_fh, self.sorted_list_dst_file_name) = tempfile.mkstemp(prefix="gsutil-rsync-dst-")
        _tmp_files.append(self.sorted_list_dst_file_name)
        # Close the file handles; the file will be opened in write mode by
        # _ListUrlRootFunc.
        os.close(src_fh)
        os.close(dst_fh)

        # Build sorted lists of src and dst URLs in parallel. To do this, pass args
        # to _ListUrlRootFunc as tuple (base_url_str, out_filename, desc)
        # where base_url_str is the starting URL string for listing.
        args_iter = iter(
            [
                (self.base_src_url.url_string, self.sorted_list_src_file_name, "source"),
                (self.base_dst_url.url_string, self.sorted_list_dst_file_name, "destination"),
            ]
        )

        # Contains error message from non-retryable listing failure.
        command_obj.non_retryable_listing_failures = 0
        shared_attrs = ["non_retryable_listing_failures"]
        command_obj.Apply(
            _ListUrlRootFunc,
            args_iter,
            _RootListingExceptionHandler,
            shared_attrs,
            arg_checker=DummyArgChecker,
            parallel_operations_override=True,
            fail_on_error=True,
        )

        if command_obj.non_retryable_listing_failures:
            raise CommandException("Caught non-retryable exception - aborting rsync")

        self.sorted_list_src_file = open(self.sorted_list_src_file_name, "r")
        self.sorted_list_dst_file = open(self.sorted_list_dst_file_name, "r")

        # Wrap iterators in PluralityCheckableIterator so we can check emptiness.
        self.sorted_src_urls_it = PluralityCheckableIterator(iter(self.sorted_list_src_file))
        self.sorted_dst_urls_it = PluralityCheckableIterator(iter(self.sorted_list_dst_file))
Developer: feczo, Project: gsutil, Lines: 51, Source: rsync.py

Example 4: __init__

  def __init__(self, command_obj, base_src_url, base_dst_url):
    self.command_obj = command_obj
    self.compute_checksums = command_obj.compute_checksums
    self.delete_extras = command_obj.delete_extras
    self.recursion_requested = command_obj.recursion_requested
    self.logger = self.command_obj.logger
    self.base_src_url = base_src_url
    self.base_dst_url = base_dst_url
    self.logger.info('Building synchronization state...')

    (src_fh, self.sorted_list_src_file_name) = tempfile.mkstemp(
        prefix='gsutil-rsync-src-')
    (dst_fh, self.sorted_list_dst_file_name) = tempfile.mkstemp(
        prefix='gsutil-rsync-dst-')
    # Close the file handles; the file will be opened in write mode by
    # _ListUrlRootFunc.
    os.close(src_fh)
    os.close(dst_fh)

    # Build sorted lists of src and dst URLs in parallel. To do this, pass args
    # to _ListUrlRootFunc as tuple (url_str, out_file_name, desc).
    args_iter = iter([
        (self.base_src_url.GetUrlString(), self.sorted_list_src_file_name,
         'source'),
        (self.base_dst_url.GetUrlString(), self.sorted_list_dst_file_name,
         'destination')
    ])
    if IS_WINDOWS:
      # Don't use multi-processing on Windows (very broken).
      thread_count = 2
      process_count = 1
    else:
      # Otherwise use multi-processing, to avoid Python global thread lock
      # contention.
      thread_count = 1
      process_count = 2
    command_obj.Apply(_ListUrlRootFunc, args_iter, _RootListingExceptionHandler,
                      arg_checker=DummyArgChecker,
                      parallel_operations_override=True,
                      thread_count=thread_count, process_count=process_count,
                      fail_on_error=True)

    self.sorted_list_src_file = open(self.sorted_list_src_file_name, 'r')
    self.sorted_list_dst_file = open(self.sorted_list_dst_file_name, 'r')

    # Wrap iterators in PluralityCheckableIterator so we can check emptiness.
    self.sorted_src_urls_it = PluralityCheckableIterator(
        iter(self.sorted_list_src_file))
    self.sorted_dst_urls_it = PluralityCheckableIterator(
        iter(self.sorted_list_dst_file))
Developer: bsterner, Project: development, Lines: 50, Source: rsync.py

Example 5: __iter__

  def __iter__(self):
    for blr in self.blr_iter:
      uri = blr.GetUri()
      if uri.names_object():
        # URI could be a bucket subdir.
        implicit_subdir_iterator = PluralityCheckableIterator(
            self.name_expansion_instance._WildcardIterator(
                self.name_expansion_instance.suri_builder.StorageUri(
                    '%s/%s' % (uri.uri.rstrip('/'),
                    self.name_expansion_instance._flatness_wildcard[
                        self.flat]))))
        if not implicit_subdir_iterator.is_empty():
          for exp_blr in implicit_subdir_iterator:
            yield (True, exp_blr)
        else:
          yield (False, blr)
      else:
        yield (False, blr)
Developer: davidjamesca, Project: gsutil, Lines: 18, Source: name_expansion.py

Example 6: NameExpansionIterator

def NameExpansionIterator(command_name, proj_id_handler, headers, debug,
                          bucket_storage_uri_class, uri_strs,
                          recursion_requested,
                          have_existing_dst_container=None, flat=True):
  """
  Static factory function for instantiating _NameExpansionIterator, which
  wraps the resulting iterator in a PluralityCheckableIterator and checks
  that it is non-empty.

  Args are as documented in constructor for _NameExpansionIterator class.
  """
  name_expansion_iterator = _NameExpansionIterator(
      command_name, proj_id_handler, headers, debug, bucket_storage_uri_class,
      uri_strs, recursion_requested, have_existing_dst_container, flat)
  name_expansion_iterator = PluralityCheckableIterator(name_expansion_iterator)
  if name_expansion_iterator.is_empty():
    raise CommandException('No URIs matched')
  return name_expansion_iterator
Developer: lygstate, Project: bleeding_edge, Lines: 18, Source: name_expansion.py

Example 7: __init__

  def __init__(self, command_obj, base_src_url, base_dst_url):
    self.command_obj = command_obj
    self.compute_checksums = command_obj.compute_checksums
    self.delete_extras = command_obj.delete_extras
    self.recursion_requested = command_obj.recursion_requested
    self.logger = self.command_obj.logger
    self.base_src_url = base_src_url
    self.base_dst_url = base_dst_url
    self.logger.info('Building synchronization state...')

    (src_fh, self.sorted_list_src_file_name) = tempfile.mkstemp(
        prefix='gsutil-rsync-src-')
    (dst_fh, self.sorted_list_dst_file_name) = tempfile.mkstemp(
        prefix='gsutil-rsync-dst-')
    # Close the file handles; the file will be opened in write mode by
    # _ListUrlRootFunc.
    os.close(src_fh)
    os.close(dst_fh)

    # Build sorted lists of src and dst URLs in parallel. To do this, pass args
    # to _ListUrlRootFunc as tuple (url_str, out_file_name, desc).
    args_iter = iter([
        (self.base_src_url.GetUrlString(), self.sorted_list_src_file_name,
         'source'),
        (self.base_dst_url.GetUrlString(), self.sorted_list_dst_file_name,
         'destination')
    ])
    command_obj.Apply(_ListUrlRootFunc, args_iter, _RootListingExceptionHandler,
                      arg_checker=DummyArgChecker,
                      parallel_operations_override=True,
                      fail_on_error=True)

    self.sorted_list_src_file = open(self.sorted_list_src_file_name, 'rb')
    self.sorted_list_dst_file = open(self.sorted_list_dst_file_name, 'rb')

    # Wrap iterators in PluralityCheckableIterator so we can check emptiness.
    self.sorted_src_urls_it = PluralityCheckableIterator(
        iter(self.sorted_list_src_file))
    self.sorted_dst_urls_it = PluralityCheckableIterator(
        iter(self.sorted_list_dst_file))
Developer: altock, Project: dev, Lines: 40, Source: rsync.py

Example 8: testPluralityCheckableIteratorReadsAheadAsNeeded

  def testPluralityCheckableIteratorReadsAheadAsNeeded(self):
    """Tests that the PCI does not unnecessarily read new elements."""

    class IterTest(object):

      def __init__(self):
        self.position = 0

      def __iter__(self):
        return self

      def next(self):
        if self.position == 3:
          raise StopIteration()
        self.position += 1

    # IsEmpty and PeekException should retrieve only 1 element from the
    # underlying iterator.
    pcit = PluralityCheckableIterator(IterTest())
    pcit.IsEmpty()
    pcit.PeekException()
    self.assertEquals(pcit.orig_iterator.position, 1)
    # HasPlurality requires populating 2 elements into the iterator.
    pcit.HasPlurality()
    self.assertEquals(pcit.orig_iterator.position, 2)
    # next should yield already-populated elements without advancing the
    # iterator.
    pcit.next()  # Yields element 1
    self.assertEquals(pcit.orig_iterator.position, 2)
    pcit.next()  # Yields element 2
    self.assertEquals(pcit.orig_iterator.position, 2)
    pcit.next()  # Yields element 3
    self.assertEquals(pcit.orig_iterator.position, 3)
    try:
      pcit.next()  # Underlying iterator is empty
      self.fail('Expected StopIteration')
    except StopIteration:
      pass
Developer: catapult-project, Project: catapult, Lines: 38, Source: test_plurality_checkable_iterator.py
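The read-ahead behavior verified by this test can be illustrated with a minimal look-ahead wrapper. The sketch below is a simplified reimplementation written for this article, not the actual gsutil class: it buffers only as many elements as each query needs and yields the buffered elements back before advancing the underlying iterator.

class LookaheadIterator(object):
  """Minimal sketch of a plurality-checkable wrapper (not the gsutil code)."""

  def __init__(self, it):
    self.orig_iterator = iter(it)
    self.buffer = []

  def _fill(self, num_elements):
    # Read ahead only as far as needed, mirroring the positions asserted
    # in the test above.
    while len(self.buffer) < num_elements:
      try:
        self.buffer.append(next(self.orig_iterator))
      except StopIteration:
        return

  def is_empty(self):
    self._fill(1)
    return not self.buffer

  def has_plurality(self):
    self._fill(2)
    return len(self.buffer) > 1

  def __iter__(self):
    return self

  def __next__(self):
    self._fill(1)
    if self.buffer:
      return self.buffer.pop(0)
    raise StopIteration

  next = __next__  # Python 2 spelling, matching the examples above.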

Example 9: _DiffIterator

class _DiffIterator(object):
    """Iterator yielding sequence of _DiffToApply objects."""

    def __init__(self, command_obj, base_src_url, base_dst_url):
        self.command_obj = command_obj
        self.compute_file_checksums = command_obj.compute_file_checksums
        self.delete_extras = command_obj.delete_extras
        self.recursion_requested = command_obj.recursion_requested
        self.logger = self.command_obj.logger
        self.base_src_url = base_src_url
        self.base_dst_url = base_dst_url
        self.logger.info("Building synchronization state...")

        (src_fh, self.sorted_list_src_file_name) = tempfile.mkstemp(prefix="gsutil-rsync-src-")
        _tmp_files.append(self.sorted_list_src_file_name)
        (dst_fh, self.sorted_list_dst_file_name) = tempfile.mkstemp(prefix="gsutil-rsync-dst-")
        _tmp_files.append(self.sorted_list_dst_file_name)
        # Close the file handles; the file will be opened in write mode by
        # _ListUrlRootFunc.
        os.close(src_fh)
        os.close(dst_fh)

        # Build sorted lists of src and dst URLs in parallel. To do this, pass args
        # to _ListUrlRootFunc as tuple (base_url_str, out_filename, desc)
        # where base_url_str is the starting URL string for listing.
        args_iter = iter(
            [
                (self.base_src_url.url_string, self.sorted_list_src_file_name, "source"),
                (self.base_dst_url.url_string, self.sorted_list_dst_file_name, "destination"),
            ]
        )

        # Contains error message from non-retryable listing failure.
        command_obj.non_retryable_listing_failures = 0
        shared_attrs = ["non_retryable_listing_failures"]
        command_obj.Apply(
            _ListUrlRootFunc,
            args_iter,
            _RootListingExceptionHandler,
            shared_attrs,
            arg_checker=DummyArgChecker,
            parallel_operations_override=True,
            fail_on_error=True,
        )

        if command_obj.non_retryable_listing_failures:
            raise CommandException("Caught non-retryable exception - aborting rsync")

        self.sorted_list_src_file = open(self.sorted_list_src_file_name, "r")
        self.sorted_list_dst_file = open(self.sorted_list_dst_file_name, "r")

        # Wrap iterators in PluralityCheckableIterator so we can check emptiness.
        self.sorted_src_urls_it = PluralityCheckableIterator(iter(self.sorted_list_src_file))
        self.sorted_dst_urls_it = PluralityCheckableIterator(iter(self.sorted_list_dst_file))

    def _ParseTmpFileLine(self, line):
        """Parses output from _BuildTmpOutputLine.

    Parses into tuple:
      (URL, size, crc32c, md5)
    where crc32c and/or md5 can be _NA.

    Args:
      line: The line to parse.

    Returns:
      Parsed tuple: (url, size, crc32c, md5)
    """
        (encoded_url, size, crc32c, md5) = line.split()
        return (_DecodeUrl(encoded_url), int(size), crc32c, md5.strip())

    def _WarnIfMissingCloudHash(self, url_str, crc32c, md5):
        """Warns if given url_str is a cloud URL and is missing both crc32c and md5.

    Args:
      url_str: Destination URL string.
      crc32c: Destination CRC32c.
      md5: Destination MD5.

    Returns:
      True if issued warning.
    """
        # One known way this can currently happen is when rsync'ing objects larger
        # than 5 GB from S3 (for which the etag is not an MD5).
        if StorageUrlFromString(url_str).IsCloudUrl() and crc32c == _NA and md5 == _NA:
            self.logger.warn("Found no hashes to validate %s. Integrity cannot be assured without " "hashes.", url_str)
            return True
        return False

    def _ObjectsMatch(self, src_url_str, src_size, src_crc32c, src_md5, dst_url_str, dst_size, dst_crc32c, dst_md5):
        """Returns True if src and dst objects are the same.

    Uses size plus whatever checksums are available.

    Args:
      src_url_str: Source URL string.
      src_size: Source size
      src_crc32c: Source CRC32c.
      src_md5: Source MD5.
      dst_url_str: Destination URL string.
#......... (remaining code omitted) .........
Developer: feczo, Project: gsutil, Lines: 101, Source: rsync.py

Example 10: _DiffIterator

class _DiffIterator(object):
  """Iterator yielding sequence of _DiffToApply objects."""

  def __init__(self, command_obj, base_src_url, base_dst_url):
    self.command_obj = command_obj
    self.compute_checksums = command_obj.compute_checksums
    self.delete_extras = command_obj.delete_extras
    self.recursion_requested = command_obj.recursion_requested
    self.logger = self.command_obj.logger
    self.base_src_url = base_src_url
    self.base_dst_url = base_dst_url
    self.logger.info('Building synchronization state...')

    (src_fh, self.sorted_list_src_file_name) = tempfile.mkstemp(
        prefix='gsutil-rsync-src-')
    (dst_fh, self.sorted_list_dst_file_name) = tempfile.mkstemp(
        prefix='gsutil-rsync-dst-')
    # Close the file handles; the file will be opened in write mode by
    # _ListUrlRootFunc.
    os.close(src_fh)
    os.close(dst_fh)

    # Build sorted lists of src and dst URLs in parallel. To do this, pass args
    # to _ListUrlRootFunc as tuple (url_str, out_file_name, desc).
    args_iter = iter([
        (self.base_src_url.GetUrlString(), self.sorted_list_src_file_name,
         'source'),
        (self.base_dst_url.GetUrlString(), self.sorted_list_dst_file_name,
         'destination')
    ])
    command_obj.Apply(_ListUrlRootFunc, args_iter, _RootListingExceptionHandler,
                      arg_checker=DummyArgChecker,
                      parallel_operations_override=True,
                      fail_on_error=True)

    self.sorted_list_src_file = open(self.sorted_list_src_file_name, 'rb')
    self.sorted_list_dst_file = open(self.sorted_list_dst_file_name, 'rb')

    # Wrap iterators in PluralityCheckableIterator so we can check emptiness.
    self.sorted_src_urls_it = PluralityCheckableIterator(
        iter(self.sorted_list_src_file))
    self.sorted_dst_urls_it = PluralityCheckableIterator(
        iter(self.sorted_list_dst_file))

  # pylint: disable=bare-except
  def CleanUpTempFiles(self):
    """Cleans up temp files.

    This function allows the main (RunCommand) function to clean up at end of
    operation. This is necessary because tempfile.NamedTemporaryFile doesn't
    allow the created file to be re-opened in read mode on Windows, so we have
    to use tempfile.mkstemp, which doesn't automatically delete temp files (see
    https://mail.python.org/pipermail/python-list/2005-December/336958.html).
    """
    try:
      self.sorted_list_src_file.close()
      self.sorted_list_dst_file.close()
      for fname in (self.sorted_list_src_file_name,
                    self.sorted_list_dst_file_name):
        os.unlink(fname)
    except:
      pass

  def _ParseTmpFileLine(self, line):
    """Parses output from _BuildTmpOutputLine.

    Parses into tuple:
      (URL, size, crc32c, md5)
    where crc32c and/or md5 can be _NA.

    Args:
      line: The line to parse.

    Returns:
      Parsed tuple: (url, size, crc32c, md5)
    """
    (encoded_url, size, crc32c, md5) = line.split()
    return (urllib.unquote_plus(encoded_url).decode(UTF8),
            int(size), crc32c, md5.strip())

  def _WarnIfMissingCloudHash(self, url_str, crc32c, md5):
    """Warns if given url_str is a cloud URL and is missing both crc32c and md5.

    Args:
      url_str: Destination URL string.
      crc32c: Destination CRC32c.
      md5: Destination MD5.

    Returns:
      True if issued warning.
    """
    # One known way this can currently happen is when rsync'ing objects larger
    # than 5GB from S3 (for which the etag is not an MD5).
    if (StorageUrlFromString(url_str).IsCloudUrl()
        and crc32c == _NA and md5 == _NA):
      self.logger.warn(
          'Found no hashes to validate %s. '
          'Integrity cannot be assured without hashes.' % url_str)
      return True
    return False
#......... (remaining code omitted) .........
Developer: altock, Project: dev, Lines: 101, Source: rsync.py

Example 11: NameExpansionIterator

def NameExpansionIterator(command_name, proj_id_handler, headers, debug,
                          logger, bucket_storage_uri_class, uri_strs,
                          recursion_requested,
                          have_existing_dst_container=None, flat=True,
                          all_versions=False,
                          for_all_version_delete=False,
                          cmd_supports_recursion=True):
  """
  Static factory function for instantiating _NameExpansionIterator, which
  wraps the resulting iterator in a PluralityCheckableIterator and checks
  that it is non-empty. Also allows uri_strs to be either an array or an
  iterator.

  Args:
    command_name: name of command being run.
    proj_id_handler: ProjectIdHandler to use for current command.
    headers: Dictionary containing optional HTTP headers to pass to boto.
    debug: Debug level to pass in to boto connection (range 0..3).
    logger: logging.Logger object.
    bucket_storage_uri_class: Class to instantiate for cloud StorageUris.
        Settable for testing/mocking.
    uri_strs: PluralityCheckableIterator of URI strings needing expansion.
    recursion_requested: True if -R specified on command-line.
    have_existing_dst_container: Bool indicator whether this is a copy
        request to an existing bucket, bucket subdir, or directory. Default
        None value should be used in cases where this is not needed (commands
        other than cp).
    flat: Bool indicating whether bucket listings should be flattened, i.e.,
        so the mapped-to results contain objects spanning subdirectories.
    all_versions: Bool indicating whether to iterate over all object versions.
    for_all_version_delete: Bool indicating whether this is for an all-version
        delete.
    cmd_supports_recursion: Bool indicating whether this command supports a '-R'
        flag. Useful for printing helpful error messages.

  Examples of ExpandWildcardsAndContainers with flat=True:
    - Calling with one of the uri_strs being 'gs://bucket' will enumerate all
      top-level objects, as will 'gs://bucket/' and 'gs://bucket/*'.
    - 'gs://bucket/**' will enumerate all objects in the bucket.
    - 'gs://bucket/abc' will enumerate all next-level objects under directory
      abc (i.e., not including subdirectories of abc) if gs://bucket/abc/*
      matches any objects; otherwise it will enumerate the single name
      gs://bucket/abc
    - 'gs://bucket/abc/**' will enumerate all objects under abc or any of its
      subdirectories.
    - 'file:///tmp' will enumerate all files under /tmp, as will
      'file:///tmp/*'
    - 'file:///tmp/**' will enumerate all files under /tmp or any of its
      subdirectories.

  Example if flat=False: calling with gs://bucket/abc/* lists matching objects
  or subdirs, but not sub-subdirs or objects beneath subdirs.

  Note: In step-by-step comments below we give examples assuming there's a
  gs://bucket with object paths:
    abcd/o1.txt
    abcd/o2.txt
    xyz/o1.txt
    xyz/o2.txt
  and a directory file://dir with file paths:
    dir/a.txt
    dir/b.txt
    dir/c/
  """
  uri_strs = PluralityCheckableIterator(uri_strs)
  name_expansion_iterator = _NameExpansionIterator(
      command_name, proj_id_handler, headers, debug, logger,
      bucket_storage_uri_class, uri_strs, recursion_requested,
      have_existing_dst_container, flat, all_versions=all_versions,
      for_all_version_delete=for_all_version_delete,
      cmd_supports_recursion=cmd_supports_recursion)
  name_expansion_iterator = PluralityCheckableIterator(name_expansion_iterator)
  if name_expansion_iterator.is_empty():
    raise CommandException('No URIs matched')
  return name_expansion_iterator
Developer: davidjamesca, Project: gsutil, Lines: 75, Source: name_expansion.py

Example 12: _ExpandUriAndPrintInfo

  def _ExpandUriAndPrintInfo(self, uri, listing_style, should_recurse=False):
    """
    Expands wildcards and directories/buckets for uri as needed, and
    calls _PrintInfoAboutBucketListingRef() on each.

    Args:
      uri: StorageUri being listed.
      listing_style: ListingStyle enum describing type of output desired.
      should_recurse: bool indicator of whether to expand recursively.

    Returns:
      Tuple (number of matching objects, number of bytes across these objects).
    """
    # We do a two-level loop, with the outer loop iterating level-by-level from
    # blrs_to_expand, and the inner loop iterating the matches at the current
    # level, printing them, and adding any new subdirs that need expanding to
    # blrs_to_expand (to be picked up in the next outer loop iteration).
    blrs_to_expand = [BucketListingRef(uri)]
    num_objs = 0
    num_bytes = 0
    expanding_top_level = True
    printed_one = False
    num_expanded_blrs = 0
    while len(blrs_to_expand):
      if printed_one:
        print
      blr = blrs_to_expand.pop(0)
      if blr.HasKey():
        blr_iterator = iter([blr])
      elif blr.HasPrefix():
        # Bucket subdir from a previous iteration. Print "header" line only if
        # we're listing more than one subdir (or if it's a recursive listing),
        # to be consistent with the way UNIX ls works.
        if num_expanded_blrs > 1 or should_recurse:
          print '%s:' % blr.GetUriString().encode('utf-8')
          printed_one = True
        blr_iterator = self.WildcardIterator('%s/*' %
                                             blr.GetRStrippedUriString(),
                                             all_versions=self.all_versions)
      elif blr.NamesBucket():
        blr_iterator = self.WildcardIterator('%s*' % blr.GetUriString(),
                                             all_versions=self.all_versions)
      else:
        # This BLR didn't come from a bucket listing. This case happens for
        # BLR's instantiated from a user-provided URI.
        blr_iterator = PluralityCheckableIterator(
            _UriOnlyBlrExpansionIterator(
                self, blr, all_versions=self.all_versions))
        if blr_iterator.is_empty() and not ContainsWildcard(uri):
          raise CommandException('No such object %s' % uri)
      for cur_blr in blr_iterator:
        num_expanded_blrs = num_expanded_blrs + 1
        if cur_blr.HasKey():
          # Object listing.
          (no, nb) = self._PrintInfoAboutBucketListingRef(
              cur_blr, listing_style)
          num_objs += no
          num_bytes += nb
          printed_one = True
        else:
          # Subdir listing. If we're at the top level of a bucket subdir
          # listing don't print the list here (corresponding to how UNIX ls
          # dir just prints its contents, not the name followed by its
          # contents).
          if (expanding_top_level and not uri.names_bucket()) or should_recurse:
            if cur_blr.GetUriString().endswith('//'):
              # Expand gs://bucket// into gs://bucket//* so we don't infinite
              # loop. This case happens when user has uploaded an object whose
              # name begins with a /.
              cur_blr = BucketListingRef(self.suri_builder.StorageUri(
                  '%s*' % cur_blr.GetUriString()), None, None, cur_blr.headers)
            blrs_to_expand.append(cur_blr)
          # Don't include the subdir name in the output if we're doing a
          # recursive listing, as it will be printed as 'subdir:' when we get
          # to the prefix expansion, the next iteration of the main loop.
          else:
            if listing_style == ListingStyle.LONG:
              print '%-33s%s' % (
                  '', cur_blr.GetUriString().encode('utf-8'))
            else:
              print cur_blr.GetUriString().encode('utf-8')
      expanding_top_level = False
    return (num_objs, num_bytes)
Developer: 173210, Project: depot_tools, Lines: 83, Source: ls.py


Note: The gslib.plurality_checkable_iterator.PluralityCheckableIterator class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors. Please consult the corresponding project's license before distributing or using the code, and do not republish without permission.