

Python dirutil.safe_rmtree Function Code Examples

This article collects typical usage examples of the Python function twitter.common.dirutil.safe_rmtree. If you are wondering what safe_rmtree does, how to call it, or what real-world usage looks like, the curated examples below should help.


The following presents 15 code examples of the safe_rmtree function, sorted by popularity by default.
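
Before diving into the examples, here is a minimal usage sketch. It assumes safe_rmtree follows the same convention as the other dirutil helpers that appear below (safe_mkdir, safe_delete): it recursively removes a directory tree and silently does nothing if the path is already gone. The paths are hypothetical.

import os

from twitter.common.dirutil import safe_mkdir, safe_rmtree

# Build a small scratch tree (hypothetical path).
scratch_root = '/tmp/safe_rmtree_demo'
safe_mkdir(os.path.join(scratch_root, 'a', 'b'))

# Remove the whole tree in one call.
safe_rmtree(scratch_root)

# A second call on the now-missing path is a no-op, whereas a bare
# shutil.rmtree would raise an OSError by default.
safe_rmtree(scratch_root)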

Example 1: erase_logs

 def erase_logs(self, task_id):
   for fn in self.get_logs(task_id, with_size=False):
     safe_delete(fn)
   state = self.state(task_id)
   if state and state.header:
     safe_rmtree(TaskPath(root=self._root, task_id=task_id, log_dir=state.header.log_dir)
                 .getpath('process_logbase'))
Author: aalzabarah, Project: incubator-aurora, Lines: 7, Source file: garbage.py

Example 2: insert

  def insert(self, cache_key, build_artifacts, artifact_root=None):
    """Cache the output of a build.

    If there is an existing set of artifacts for this key they are deleted.

    TODO: Check that they're equal? If they aren't it's a grave bug, since the key is supposed
    to be a fingerprint of all possible inputs to the build.

    :param cache_key: A CacheKey object.
    :param build_artifacts: List of paths to generated artifacts under artifact_root.
    :param artifact_root: Optional root directory under which artifacts are stored.
    """
    cache_dir = self._cache_dir_for_key(cache_key)
    try:
      safe_rmtree(cache_dir)
      for artifact in build_artifacts or ():
        rel_path = os.path.basename(artifact) \
            if artifact_root is None \
            else os.path.relpath(artifact, artifact_root)
        assert not rel_path.startswith('..'), \
          'Weird: artifact=%s, rel_path=%s' % (artifact, rel_path)
        artifact_dest = os.path.join(cache_dir, rel_path)
        dir_name = os.path.dirname(artifact_dest)
        safe_mkdir(dir_name)
        if os.path.isdir(artifact):
          shutil.copytree(artifact, artifact_dest)
        else:
          shutil.copy(artifact, artifact_dest)
    except Exception as e:
      try:
        safe_rmtree(cache_dir)
      except Exception:
        # Don't rebind 'e' here: shadowing it would break the 'raise e' below in Python 3.
        print('IMPORTANT: failed to delete %s on error. Your artifact cache may be corrupted. '
              'Please delete manually.' % cache_dir)
      raise e
Author: kevints, Project: commons, Lines: 35, Source file: artifact_cache.py

Example 3: _dump_built_library

 def _dump_built_library(self, library, builder):
   # TODO(wickman): Port this over to the Installer+Distiller and stop using ArtifactCache.
   absolute_sources = library.expand_files()
   absolute_sources.sort()
   cache_key = self._key_generator.key_for(library.id, absolute_sources)
   cache_dir = os.path.join(self._egg_cache_root, cache_key.hash)
   if os.path.exists(cache_dir):
     self.debug('  Generating (cached) %s...' % library)
     # We have no idea what the egg path is, so we simply walk the directory.
     for dir_name, _, filenames in os.walk(cache_dir):
       for filename in filenames:
         self._builder.add_egg(os.path.join(dir_name, filename))
   else:
     self.debug('  Generating %s...' % library)
     egg_file = builder.build_egg()
     if not egg_file:
       raise PythonChroot.BuildFailureException("Failed to build %s!" % library)
     src_egg_file = egg_file
     dst_egg_file = os.path.join(os.path.dirname(egg_file),
         cache_key.hash + '_' + os.path.basename(egg_file))
     self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
     os.rename(src_egg_file, dst_egg_file)
     cache_dir = os.path.join(self._egg_cache_root, cache_key.hash)
     cached_egg_file = os.path.join(cache_dir, os.path.relpath(dst_egg_file, self._root))
     try:
       safe_mkdir_for(cached_egg_file)
       shutil.copy(dst_egg_file, cached_egg_file)
     except:
       safe_rmtree(cache_dir)
       raise
     self._builder.add_egg(dst_egg_file)
Author: dynamicguy, Project: commons, Lines: 31, Source file: python_chroot.py

Example 4: _merge_classes_dir

  def _merge_classes_dir(self, state):
    """Merge the classes dirs from the underlying artifacts into a single dir.

    May symlink instead of copying, when it's OK to do so.

    Postcondition: symlinks are of leaf packages only.
    """
    if len(self.underlying_artifacts) <= 1:
      return
    self.log.debug('Merging classes dirs into %s' % self.classes_dir)
    symlinkable_packages = self._symlinkable_packages(state)
    for artifact in self.underlying_artifacts:
      classnames_by_package = defaultdict(list)
      for cls in state.classes_by_target.get(artifact.targets[0], []):
        classnames_by_package[os.path.dirname(cls)].append(os.path.basename(cls))

      for package, classnames in classnames_by_package.items():
        artifact_package_dir = os.path.join(artifact.classes_dir, package)
        merged_package_dir = os.path.join(self.classes_dir, package)

        if package in symlinkable_packages:
          if os.path.islink(merged_package_dir):
            assert os.readlink(merged_package_dir) == artifact_package_dir
          elif os.path.exists(merged_package_dir):
            safe_rmtree(merged_package_dir)
            os.symlink(artifact_package_dir, merged_package_dir)
          else:
            safe_mkdir(os.path.dirname(merged_package_dir))
            os.symlink(artifact_package_dir, merged_package_dir)
        else:
          safe_mkdir(merged_package_dir)
          for classname in classnames:
            src = os.path.join(artifact_package_dir, classname)
            dst = os.path.join(merged_package_dir, classname)
            self._maybe_hardlink(src, dst)
Author: samitny, Project: commons, Lines: 35, Source file: zinc_artifact.py

Example 5: update

  def update(self, cache_key, build_artifacts=None, artifact_root=None):
    """Cache the output of a build.

    If there is an existing set of cached artifacts for this key, it is deleted first; the new
    artifacts are then copied into the cache area and the key's sha is recorded.

    :param cache_key: A CacheKey object (typically returned by BuildCache.key_for()).
    :param build_artifacts: List of paths to generated artifacts under artifact_root.
    :param artifact_root: Optional root directory under which artifacts are stored.
    """
    safe_rmtree(cache_key.filename)
    for artifact in build_artifacts or ():
      rel_path = os.path.basename(artifact) \
          if artifact_root is None \
          else os.path.relpath(artifact, artifact_root)
      assert not rel_path.startswith('..'), \
        'Weird: artifact=%s, rel_path=%s' % (artifact, rel_path)
      artifact_dest = os.path.join(cache_key.filename, rel_path)
      dir_name = os.path.dirname(artifact_dest)
      if not os.path.exists(dir_name):
        os.makedirs(dir_name)
      if os.path.isdir(artifact):
        shutil.copytree(artifact, artifact_dest)
      else:
        shutil.copy(artifact, artifact_dest)
    self._write_sha(cache_key)
Author: adamsxu, Project: commons, Lines: 27, Source file: build_cache.py

Example 6: _merge_artifact

  def _merge_artifact(self, versioned_target_set):
    """Merges artifacts representing the individual targets in a VersionedTargetSet into one artifact for that set.
    Creates an output classes dir, depfile and analysis file for the VersionedTargetSet.
    Note that the merged artifact may be incomplete (e.g., if we have no previous artifacts for some of the
    individual targets). That's OK: We run this right before we invoke zinc, which will fill in what's missing.
    This method is not required for correctness, only for efficiency: it can prevent zinc from doing superfluous work.

    NOTE: This method is reentrant.
    """
    if len(versioned_target_set.targets) <= 1:
      return  # Nothing to do.

    with temporary_dir() as tmpdir:
      dst_classes_dir, dst_depfile, dst_analysis_file = self._output_paths(versioned_target_set.targets)
      safe_rmtree(dst_classes_dir)
      safe_mkdir(dst_classes_dir)
      src_analysis_files = []

      # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
      dst_deps = Dependencies(dst_classes_dir)

      for target in versioned_target_set.targets:
        src_classes_dir, src_depfile, src_analysis_file = self._output_paths([target])
        if os.path.exists(src_depfile):
          src_deps = Dependencies(src_classes_dir)
          src_deps.load(src_depfile)
          dst_deps.merge(src_deps)

          classes_by_source = src_deps.findclasses([target]).get(target, {})
          for source, classes in classes_by_source.items():
            for cls in classes:
              src = os.path.join(src_classes_dir, cls)
              dst = os.path.join(dst_classes_dir, cls)
              # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
              # it's missing and rebuild it.
              # dst may already exist if we have overlapping targets. It's not a good idea
              # to have those, but until we enforce it, we must allow it here.
              if os.path.exists(src) and not os.path.exists(dst):
                # Copy the class file.
                safe_mkdir(os.path.dirname(dst))
                os.link(src, dst)

          # Rebase a copy of the per-target analysis files to reflect the merged classes dir.
          if os.path.exists(src_analysis_file):
            src_analysis_file_tmp = \
                os.path.join(tmpdir, os.path.relpath(src_analysis_file, self._analysis_files_base))
            shutil.copyfile(src_analysis_file, src_analysis_file_tmp)
            src_analysis_files.append(src_analysis_file_tmp)
            if self._zinc_utils.run_zinc_rebase(src_analysis_file_tmp, [(src_classes_dir, dst_classes_dir)]):
              self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. '\
                                    'Target may require a full rebuild.' %\
                                    src_analysis_file_tmp)

      dst_deps.save(dst_depfile)

      if self._zinc_utils.run_zinc_merge(src_analysis_files, dst_analysis_file):
        self.context.log.warn('zinc failed to merge analysis files %s to %s. '\
                              'Target may require a full rebuild.' %\
                             (':'.join(src_analysis_files), dst_analysis_file))
Author: bag-of-projects, Project: commons, Lines: 59, Source file: scala_compile.py

Example 7: _split_classes_dir

  def _split_classes_dir(self, state, diff):
    """Split the merged classes dir into one dir per underlying artifact."""
    if len(self.underlying_artifacts) <= 1:
      return

    def map_classes_by_package(classes):
      # E.g., com/foo/bar/Bar.scala, com/foo/bar/Baz.scala to com/foo/bar -> [Bar.scala, Baz.scala].
      ret = defaultdict(list)
      for cls in classes:
        ret[os.path.dirname(cls)].append(os.path.basename(cls))
      return ret

    self.log.debug('Splitting classes dir %s' % self.classes_dir)
    if diff:
      new_or_changed_classnames_by_package = map_classes_by_package(diff.new_or_changed_classes)
      deleted_classnames_by_package = map_classes_by_package(diff.deleted_classes)
    else:
      new_or_changed_classnames_by_package = None
      deleted_classnames_by_package = None

    symlinkable_packages = self._symlinkable_packages(state)
    for artifact in self.underlying_artifacts:
      classnames_by_package = \
        map_classes_by_package(state.classes_by_target.get(artifact.targets[0], []))

      for package, classnames in classnames_by_package.items():
        if package == "":
          raise TaskError("Found class files %s with empty package" % classnames)
        artifact_package_dir = os.path.join(artifact.classes_dir, package)
        merged_package_dir = os.path.join(self.classes_dir, package)

        if package in symlinkable_packages:
          if os.path.islink(merged_package_dir):
            current_link = os.readlink(merged_package_dir)
            if current_link != artifact_package_dir:
              # The code moved to a different target.
              os.unlink(merged_package_dir)
              safe_rmtree(artifact_package_dir)
              shutil.move(current_link, artifact_package_dir)
              os.symlink(artifact_package_dir, merged_package_dir)
          else:
            safe_rmtree(artifact_package_dir)
            shutil.move(merged_package_dir, artifact_package_dir)
            os.symlink(artifact_package_dir, merged_package_dir)
        else:
          safe_mkdir(artifact_package_dir)
          new_or_changed_classnames = \
            set(new_or_changed_classnames_by_package.get(package, [])) if diff else None
          for classname in classnames:
            if not diff or classname in new_or_changed_classnames:
              src = os.path.join(merged_package_dir, classname)
              dst = os.path.join(artifact_package_dir, classname)
              self._maybe_hardlink(src, dst)
          if diff:
            for classname in deleted_classnames_by_package.get(package, []):
              path = os.path.join(artifact_package_dir, classname)
              if os.path.exists(path):
                os.unlink(path)
Author: magicbill, Project: commons, Lines: 58, Source file: zinc_artifact.py

Example 8: split_artifact

  def split_artifact(self, deps, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return
    buildroot = get_buildroot()
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    src_output_dir, _, src_analysis_cache = self.create_output_paths(versioned_target_set.targets)
    analysis_splits = []  # List of triples of (list of sources, destination output dir, destination analysis cache).
    # for dependency analysis, we need to record the cache files that we create in the split

    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths([target])
      safe_rmtree(dst_output_dir)
      safe_mkdir(dst_output_dir)

      sources = []
      dst_deps = Dependencies(dst_output_dir)

      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
        source_abspath = os.path.join(buildroot, target.target_base, source)
        sources.append(source_abspath)
        for cls in classes:
          # Copy the class file.
          dst = os.path.join(dst_output_dir, cls)
          safe_mkdir(os.path.dirname(dst))
          os.link(os.path.join(src_output_dir, cls), dst)
      dst_deps.save(dst_depfile)
      analysis_splits.append((sources, dst_output_dir, dst_analysis_cache))
      self.generated_caches.add(os.path.join(dst_output_dir, dst_analysis_cache))
    # Use zinc to split the analysis files.
    if os.path.exists(src_analysis_cache):
      analysis_args = []
      analysis_args.extend(self._zinc_jar_args)
      analysis_args.extend([
        '-log-level', self.context.options.log_level or 'info',
        '-analysis',
        '-mirror-analysis'
        ])
      split_args = analysis_args + [
        '-cache', src_analysis_cache,
        '-split', ','.join(['{%s}:%s' % (':'.join(x[0]), x[2]) for x in analysis_splits]),
        ]
      if self.runjava(self._main, classpath=self._zinc_classpath, args=split_args, jvmargs=self._jvm_args):
        raise TaskError('zinc failed to split analysis files %s from %s' %
                        (':'.join([x[2] for x in analysis_splits]), src_analysis_cache))

      # Now rebase the newly created analysis files.
      for split in analysis_splits:
        dst_analysis_cache = split[2]
        if os.path.exists(dst_analysis_cache):
          rebase_args = analysis_args + [
            '-cache', dst_analysis_cache,
            '-rebase', '%s:%s' % (src_output_dir, split[1]),
            ]
          if self.runjava(self._main, classpath=self._zinc_classpath, args=rebase_args, jvmargs=self._jvm_args):
            raise TaskError('In split_artifact: zinc failed to rebase analysis file %s' % dst_analysis_cache)
Author: SeungEun, Project: commons, Lines: 58, Source file: scala_compile.py

Example 9: teardown_class

 def teardown_class(cls):
   if 'THERMOS_DEBUG' not in os.environ:
     safe_rmtree(cls.LOG_DIR)
     if cls.PEX_PATH:
       safe_rmtree(os.path.dirname(cls.PEX_PATH))
   else:
     print('Saving executor logs in %s' % cls.LOG_DIR)
     if cls.PEX_PATH:
       print('Saved thermos executor at %s' % cls.PEX_PATH)
Author: AltanAlpay, Project: aurora, Lines: 9, Source file: test_thermos_task_runner.py

Example 10: erase_data

 def erase_data(self, task_id):
   # TODO(wickman)
   # This could be potentially dangerous if somebody naively runs their sandboxes in e.g.
   # $HOME or / or similar.  Perhaps put a guard somewhere?
   for fn in self.get_data(task_id, with_size=False):
     os.unlink(fn)
   state = self.state(task_id)
   if state and state.header and state.header.sandbox:
     safe_rmtree(state.header.sandbox)
Author: aalzabarah, Project: incubator-aurora, Lines: 9, Source file: garbage.py
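
The TODO in Example 10 asks for a guard against naively deleting sandboxes rooted in dangerous locations such as $HOME or /. Here is a sketch of what such a guard could look like; the function name and the forbidden list are illustrative assumptions, not part of the Aurora codebase:

import os

from twitter.common.dirutil import safe_rmtree

def guarded_rmtree(path):
  # Resolve symlinks so a link pointing at / or $HOME is also caught.
  real = os.path.realpath(path)
  forbidden = {os.sep, os.path.realpath(os.path.expanduser('~'))}
  if real in forbidden:
    raise ValueError('Refusing to recursively delete %s' % real)
  safe_rmtree(real)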

Example 11: _split_artifact

  def _split_artifact(self, deps, versioned_target_set):
    """Splits an artifact representing several targets into target-by-target artifacts.
    Creates an output classes dir, a depfile and an analysis file for each target.
    Note that it's not OK to create incomplete artifacts here: this is run *after* a zinc invocation,
    and the expectation is that the result is complete.

    NOTE: This method is reentrant.
    """
    if len(versioned_target_set.targets) <= 1:
      return
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    src_classes_dir, _, src_analysis_file = self._output_paths(versioned_target_set.targets)

    # Specifies that the list of sources defines a split to the classes dir and analysis file.
    SplitInfo = namedtuple('SplitInfo', ['sources', 'dst_classes_dir', 'dst_analysis_file'])

    analysis_splits = []  # List of SplitInfos.
    portable_analysis_splits = []  # The same, for the portable version of the analysis cache.

    # Prepare the split arguments.
    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_classes_dir, dst_depfile, dst_analysis_file = self._output_paths([target])
      safe_rmtree(dst_classes_dir)
      safe_mkdir(dst_classes_dir)

      sources = []
      dst_deps = Dependencies(dst_classes_dir)

      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
        sources.append(os.path.join(target.target_base, source))
        for cls in classes:
          # Copy the class file.
          dst = os.path.join(dst_classes_dir, cls)
          safe_mkdir(os.path.dirname(dst))
          os.link(os.path.join(src_classes_dir, cls), dst)
      dst_deps.save(dst_depfile)
      analysis_splits.append(SplitInfo(sources, dst_classes_dir, dst_analysis_file))
      portable_analysis_splits.append(SplitInfo(sources, dst_classes_dir, _portable(dst_analysis_file)))

    def do_split(src_analysis_file, splits):
      if os.path.exists(src_analysis_file):
        if self._zinc_utils.run_zinc_split(src_analysis_file, [(x.sources, x.dst_analysis_file) for x in splits]):
          raise TaskError('zinc failed to split analysis files %s from %s' %
                          (':'.join([x.dst_analysis_file for x in splits]), src_analysis_file))
        for split in splits:
          if os.path.exists(split.dst_analysis_file):
            if self._zinc_utils.run_zinc_rebase(split.dst_analysis_file,
                                                [(src_classes_dir, split.dst_classes_dir)]):
              raise TaskError(
                  'In split_artifact: zinc failed to rebase analysis file %s' % split.dst_analysis_file)

    # Now rebase the newly created analysis file(s) to reflect the split classes dirs.
    do_split(src_analysis_file, analysis_splits)
    do_split(_portable(src_analysis_file), portable_analysis_splits)
Author: bag-of-projects, Project: commons, Lines: 57, Source file: scala_compile.py

Example 12: invalidate

  def invalidate(self, cache_key):
    """Invalidates this cache key and any cached files associated with it.

    :param cache_key: A CacheKey object (as returned by BuildCache.key_for()).
    """
    safe_rmtree(cache_key.filename)
    sha_file = self._sha_file(cache_key)
    if os.path.exists(sha_file):
      os.unlink(sha_file)
Author: adamsxu, Project: commons, Lines: 9, Source file: build_cache.py

Example 13: test_split_existing

 def test_split_existing(self):
   td = tempfile.mkdtemp()
   try:
     assert Nested._split_existing(td) == (td, '.')
     assert Nested._split_existing(td + os.sep) == (td, '.')
     assert Nested._split_existing(os.path.join(td, 'a', 'b', 'c')) == (
       td, os.path.join('a', 'b', 'c'))
     assert Nested._split_existing(os.path.join(td, 'a', '..', 'c')) == (td, 'c')
   finally:
     safe_rmtree(td)
Author: JoeEnnever, Project: commons, Lines: 10, Source file: test_nested.py
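
Example 13's mkdtemp/try/finally pattern is exactly what the temporary_dir context manager used in Example 6 packages up. A sketch of how such a helper can be built on top of safe_rmtree follows; it mirrors twitter.common.contextutil.temporary_dir in spirit, but treat it as an illustration rather than the actual implementation:

import tempfile
from contextlib import contextmanager

from twitter.common.dirutil import safe_rmtree

@contextmanager
def temporary_dir():
  # Yield a fresh temp directory and remove it on exit, even if the body raises.
  td = tempfile.mkdtemp()
  try:
    yield td
  finally:
    safe_rmtree(td)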

Example 14: _compile

  def _compile(self, versioned_target_set, classpath, upstream_analysis_files):
    """Actually compile some targets.

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in versioned_target_set are up-to-date, as if each
                   were compiled individually.
    """
    # Note: We actually compile all the targets in the set in a single zinc call, because
    # compiler invocation overhead is high, but this fact is not exposed outside this method.
    classes_dir, depfile, analysis_file = self._output_paths(versioned_target_set.targets)
    safe_mkdir(classes_dir)

    # Get anything we have from previous builds.
    self._merge_artifact(versioned_target_set)

    # Compute the sources we need to compile.
    sources_by_target = ScalaCompile._calculate_sources(versioned_target_set.targets)

    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s' %
                              '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        # Invoke the compiler.
        self.context.log.info('Compiling targets %s' % versioned_target_set.targets)
        if self._zinc_utils.compile(classpath, sources, classes_dir, analysis_file,
                                    upstream_analysis_files, depfile):
          raise TaskError('Compile failed.')

        # Read in the deps we just created.
        self.context.log.debug('Reading dependencies from ' + depfile)
        deps = Dependencies(classes_dir)
        deps.load(depfile)

        # Split the artifact into per-target artifacts.
        self._split_artifact(deps, versioned_target_set)

        # Write to artifact cache, if needed.
        for vt in versioned_target_set.versioned_targets:
          vt_classes_dir, vt_depfile, vt_analysis_file = self._output_paths(vt.targets)
          vt_portable_analysis_file = _portable(vt_analysis_file)
          if self._artifact_cache and self.context.options.write_to_artifact_cache:
            # Relativize the analysis.
            # TODO: Relativize before splitting? This will require changes to Zinc, which currently
            # eliminates paths it doesn't recognize (including our placeholders) when splitting.
            if os.path.exists(vt_analysis_file) and \
                self._zinc_utils.relativize_analysis_file(vt_analysis_file, vt_portable_analysis_file):
              raise TaskError('Zinc failed to relativize analysis file: %s' % vt_analysis_file)
            # Write the per-target artifacts to the cache.
            artifacts = [vt_classes_dir, vt_depfile, vt_portable_analysis_file]
            self.update_artifact_cache(vt, artifacts)
          else:
            safe_rmtree(vt_portable_analysis_file)  # Don't leave cruft lying around.
Author: bag-of-projects, Project: commons, Lines: 55, Source file: scala_compile.py

Example 15: teardown_class

 def teardown_class(cls):
   if 'THERMOS_DEBUG' not in os.environ:
     safe_rmtree(cls.LOG_DIR)
     thermos_path = thermos_runner_path(build=False)
     if thermos_path:
       safe_rmtree(os.path.dirname(thermos_path))
   else:
     print('Saving executor logs in %s' % cls.LOG_DIR)
     thermos_path = thermos_runner_path(build=False)
     if thermos_path:
       print('Saved thermos executor at %s' % thermos_path)
Author: bmhatfield, Project: aurora, Lines: 11, Source file: test_thermos_executor.py
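
For reference, the behavior all of these examples rely on can be approximated in a few lines. This is only a sketch of safe_rmtree's likely semantics (remove the tree if present, tolerate a missing path), not the actual twitter.common.dirutil source:

import os
import shutil

def safe_rmtree(directory):
  # Recursively delete the directory if it exists; do nothing if it does not.
  if os.path.exists(directory):
    shutil.rmtree(directory, ignore_errors=True)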


Note: The twitter.common.dirutil.safe_rmtree examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code belongs to the original authors, and distribution and use must follow the corresponding project's license. Do not reproduce without permission.