

Python Dependencies.add Method Code Examples

This article collects typical usage examples of the Python method twitter.pants.tasks.jvm_compiler_dependencies.Dependencies.add. If you are wondering how exactly to use Dependencies.add, how to call it, or what it looks like in real code, the hand-picked examples below should help. You can also explore further usage examples of the containing module, twitter.pants.tasks.jvm_compiler_dependencies.


A total of 5 code examples of the Dependencies.add method are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Python code examples.
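
Before the examples, here is a minimal sketch of the pattern they all share: construct a Dependencies object rooted at a classes directory, register each source file together with the class files it produced via add, and persist the mapping with save (example 4 additionally reloads an existing depfile with load and queries it per target with findclasses). The directory, depfile, and source paths below are hypothetical placeholders; only the Dependencies calls themselves are taken from the examples.

# Minimal usage sketch; all paths are hypothetical placeholders.
import os

from twitter.pants.tasks.jvm_compiler_dependencies import Dependencies

classes_dir = '/tmp/pants-classes'        # hypothetical dir holding compiled .class files
depfile = '/tmp/pants-classes.depfile'    # hypothetical depfile path

deps = Dependencies(classes_dir)          # dependency mapping rooted at the classes dir
# Record that a source file produced these class files.
src = os.path.join('src/scala', 'com/example/Foo.scala')
deps.add(src, ['com/example/Foo.class', 'com/example/Foo$.class'])
deps.save(depfile)                        # write the source-to-classes mapping to disk

# Later, as in example 4, the mapping can be reloaded and queried per target:
# deps.load(depfile)
# classes_by_source_by_target = deps.findclasses(targets)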

Example 1: split_artifact

# Required import: from twitter.pants.tasks.jvm_compiler_dependencies import Dependencies [as alias]
# Or: from twitter.pants.tasks.jvm_compiler_dependencies.Dependencies import add [as alias]
  def split_artifact(self, deps, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return
    buildroot = get_buildroot()
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    src_output_dir, _, src_analysis_cache = self.create_output_paths(versioned_target_set.targets)
    analysis_splits = []  # List of triples of (list of sources, destination output dir, destination analysis cache).
    # for dependency analysis, we need to record the cache files that we create in the split

    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths([target])
      safe_rmtree(dst_output_dir)
      safe_mkdir(dst_output_dir)

      sources = []
      dst_deps = Dependencies(dst_output_dir)

      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
        source_abspath = os.path.join(buildroot, target.target_base, source)
        sources.append(source_abspath)
        for cls in classes:
          # Copy the class file.
          dst = os.path.join(dst_output_dir, cls)
          safe_mkdir(os.path.dirname(dst))
          os.link(os.path.join(src_output_dir, cls), dst)
      dst_deps.save(dst_depfile)
      analysis_splits.append((sources, dst_output_dir, dst_analysis_cache))
      self.generated_caches.add(os.path.join(dst_output_dir, dst_analysis_cache))
    # Use zinc to split the analysis files.
    if os.path.exists(src_analysis_cache):
      analysis_args = []
      analysis_args.extend(self._zinc_jar_args)
      analysis_args.extend([
        '-log-level', self.context.options.log_level or 'info',
        '-analysis',
        '-mirror-analysis'
        ])
      split_args = analysis_args + [
        '-cache', src_analysis_cache,
        '-split', ','.join(['{%s}:%s' % (':'.join(x[0]), x[2]) for x in analysis_splits]),
        ]
      if self.runjava(self._main, classpath=self._zinc_classpath, args=split_args, jvmargs=self._jvm_args):
        raise TaskError('zinc failed to split analysis files %s from %s'
                        % (':'.join([x[2] for x in analysis_splits]), src_analysis_cache))

      # Now rebase the newly created analysis files.
      for split in analysis_splits:
        dst_analysis_cache = split[2]
        if os.path.exists(dst_analysis_cache):
          rebase_args = analysis_args + [
            '-cache', dst_analysis_cache,
            '-rebase', '%s:%s' % (src_output_dir, split[1]),
            ]
          if self.runjava(self._main, classpath=self._zinc_classpath, args=rebase_args, jvmargs=self._jvm_args):
            raise TaskError('In split_artifact: zinc failed to rebase analysis file %s' % dst_analysis_cache)
Author: SeungEun, Project: commons, Lines: 60, Source: scala_compile.py

Example 2: _split_artifact

# Required import: from twitter.pants.tasks.jvm_compiler_dependencies import Dependencies [as alias]
# Or: from twitter.pants.tasks.jvm_compiler_dependencies.Dependencies import add [as alias]
  def _split_artifact(self, deps, versioned_target_set):
    """Splits an artifact representing several targets into target-by-target artifacts.
    Creates an output classes dir, a depfile and an analysis file for each target.
    Note that it's not OK to create incomplete artifacts here: this is run *after* a zinc invocation,
    and the expectation is that the result is complete.

    NOTE: This method is reentrant.
    """
    if len(versioned_target_set.targets) <= 1:
      return
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    src_classes_dir, _, src_analysis_file = self._output_paths(versioned_target_set.targets)

    # Specifies that the list of sources defines a split to the classes dir and analysis file.
    SplitInfo = namedtuple('SplitInfo', ['sources', 'dst_classes_dir', 'dst_analysis_file'])

    analysis_splits = []  # List of SplitInfos.
    portable_analysis_splits = []  # The same, for the portable version of the analysis cache.

    # Prepare the split arguments.
    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_classes_dir, dst_depfile, dst_analysis_file = self._output_paths([target])
      safe_rmtree(dst_classes_dir)
      safe_mkdir(dst_classes_dir)

      sources = []
      dst_deps = Dependencies(dst_classes_dir)

      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
        sources.append(os.path.join(target.target_base, source))
        for cls in classes:
          # Copy the class file.
          dst = os.path.join(dst_classes_dir, cls)
          safe_mkdir(os.path.dirname(dst))
          os.link(os.path.join(src_classes_dir, cls), dst)
      dst_deps.save(dst_depfile)
      analysis_splits.append(SplitInfo(sources, dst_classes_dir, dst_analysis_file))
      portable_analysis_splits.append(SplitInfo(sources, dst_classes_dir, _portable(dst_analysis_file)))

    def do_split(src_analysis_file, splits):
      if os.path.exists(src_analysis_file):
        if self._zinc_utils.run_zinc_split(src_analysis_file, [(x.sources, x.dst_analysis_file) for x in splits]):
          raise TaskError('zinc failed to split analysis files %s from %s'
                          % (':'.join([x.dst_analysis_file for x in splits]), src_analysis_file))
        for split in splits:
          if os.path.exists(split.dst_analysis_file):
            if self._zinc_utils.run_zinc_rebase(split.dst_analysis_file,
                                                [(src_classes_dir, split.dst_classes_dir)]):
              raise TaskError(
                'In split_artifact: zinc failed to rebase analysis file %s' % split.dst_analysis_file)

    # Now rebase the newly created analysis file(s) to reflect the split classes dirs.
    do_split(src_analysis_file, analysis_splits)
    do_split(_portable(src_analysis_file), portable_analysis_splits)
Author: bag-of-projects, Project: commons, Lines: 59, Source: scala_compile.py

Example 3: split_depfile

# Required import: from twitter.pants.tasks.jvm_compiler_dependencies import Dependencies [as alias]
# Or: from twitter.pants.tasks.jvm_compiler_dependencies.Dependencies import add [as alias]
  def split_depfile(self, deps, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_depfile = self.create_depfile_path([target])
      dst_deps = Dependencies(self._classes_dir)
      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
      dst_deps.save(dst_depfile)
Author: samitny, Project: commons, Lines: 14, Source: java_compile.py

Example 4: split_depfile

# Required import: from twitter.pants.tasks.jvm_compiler_dependencies import Dependencies [as alias]
# Or: from twitter.pants.tasks.jvm_compiler_dependencies.Dependencies import add [as alias]
  def split_depfile(self, vt):
    depfile = self.create_depfile_path(vt.targets)
    if len(vt.targets) <= 1 or not os.path.exists(depfile) or self.dry_run:
      return

    deps = Dependencies(self._classes_dir)
    deps.load(depfile)

    classes_by_source_by_target = deps.findclasses(vt.targets)
    for target in vt.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_depfile = self.create_depfile_path([target])
      dst_deps = Dependencies(self._classes_dir)
      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
      dst_deps.save(dst_depfile)
Author: wickman, Project: commons, Lines: 19, Source: java_compile.py

Example 5: split_artifact

# Required import: from twitter.pants.tasks.jvm_compiler_dependencies import Dependencies [as alias]
# Or: from twitter.pants.tasks.jvm_compiler_dependencies.Dependencies import add [as alias]
  def split_artifact(self, deps, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    src_output_dir, _, src_analysis_cache = self.create_output_paths(versioned_target_set.targets)
    analysis_splits = []  # List of triples of (list of sources, destination output dir, destination analysis cache).
    # for dependency analysis, we need to record the cache files that we create in the split

    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths([target])
      safe_rmtree(dst_output_dir)
      safe_mkdir(dst_output_dir)

      sources = []
      dst_deps = Dependencies(dst_output_dir)

      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
        sources.append(os.path.join(target.target_base, source))
        for cls in classes:
          # Copy the class file.
          dst = os.path.join(dst_output_dir, cls)
          safe_mkdir(os.path.dirname(dst))
          os.link(os.path.join(src_output_dir, cls), dst)
      dst_deps.save(dst_depfile)
      analysis_splits.append((sources, dst_output_dir, dst_analysis_cache))
      self.generated_caches.add(os.path.join(dst_output_dir, dst_analysis_cache))
    # Split the analysis files.
    if os.path.exists(src_analysis_cache):
      if self._zinc_utils.run_zinc_split(src_cache=src_analysis_cache, splits=[(x[0], x[2]) for x in analysis_splits]):
        raise TaskError('zinc failed to split analysis files %s from %s'
                        % (':'.join([x[2] for x in analysis_splits]), src_analysis_cache))

      # Now rebase the newly created analysis files.
      for split in analysis_splits:
        dst_analysis_cache = split[2]
        dst_output_dir = split[1]
        if os.path.exists(dst_analysis_cache):
          rebasings = [(src_output_dir, dst_output_dir)]
          if self._zinc_utils.run_zinc_rebase(cache=dst_analysis_cache, rebasings=rebasings):
            raise TaskError('In split_artifact: zinc failed to rebase analysis file %s' % dst_analysis_cache)
Author: forestlzj, Project: commons, Lines: 45, Source: scala_compile.py


Note: The twitter.pants.tasks.jvm_compiler_dependencies.Dependencies.add examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. Please refer to the corresponding project's License when distributing or using the code; do not reproduce without permission.