本文整理匯總了Python中pants.base.build_invalidator.BuildInvalidator類的典型用法代碼示例。如果您正苦於以下問題:Python BuildInvalidator類的具體用法?Python BuildInvalidator怎麽用?Python BuildInvalidator使用的例子?那麽, 這裏精選的類代碼示例或許可以為您提供幫助。
在下文中一共展示了BuildInvalidator類的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Python代碼示例。
示例1: __init__
def __init__(self,
             context,
             python_setup,
             python_repos,
             targets,
             extra_requirements=None,
             builder=None,
             platforms=None,
             interpreter=None):
  """Set up a chroot helper over the given targets.

  :param context: The pants run context.
  :param python_setup: PythonSetup subsystem instance.
  :param python_repos: PythonRepos subsystem instance.
  :param targets: Targets whose contents will be dumped.
  :param extra_requirements: Optional extra python requirements to include.
  :param builder: Optional PEXBuilder; a temp-dir builder is created if omitted.
  :param platforms: Optional platforms to resolve for.
  :param interpreter: Optional interpreter; defaults to the current one.
  """
  self.context = context
  self._python_setup = python_setup
  self._python_repos = python_repos
  self._targets = targets
  self._extra_requirements = list(extra_requirements or [])
  self._platforms = platforms
  self._interpreter = interpreter or PythonInterpreter.get()
  if builder is None:
    builder = PEXBuilder(os.path.realpath(tempfile.mkdtemp()),
                         interpreter=self._interpreter)
  self._builder = builder
  # Note: unrelated to the general pants artifact cache.
  self._egg_cache_root = os.path.join(
      self._python_setup.scratch_dir, 'artifacts', str(self._interpreter.identity))
  self._key_generator = CacheKeyGenerator()
  self._build_invalidator = BuildInvalidator(self._egg_cache_root)
示例2: __init__
def __init__(self,
             python_setup,
             python_repos,
             ivy_bootstrapper,
             thrift_binary_factory,
             interpreter,
             builder,
             targets,
             platforms,
             extra_requirements=None,
             log=None):
  """Bind this chroot helper to the given subsystems, builder and targets.

  :param python_setup: PythonSetup subsystem instance.
  :param python_repos: PythonRepos subsystem instance.
  :param ivy_bootstrapper: Bootstrapper used for ivy-based resolution.
  :param thrift_binary_factory: Factory that produces thrift binaries.
  :param interpreter: The python interpreter to build against.
  :param builder: The PEXBuilder to dump into.
  :param targets: Targets whose contents will be dumped.
  :param platforms: Platforms to resolve distributions for.
  :param extra_requirements: Optional extra python requirements to include.
  :param log: Optional logger; falls back to the module-level logger.
  """
  self._python_setup = python_setup
  self._python_repos = python_repos
  self._ivy_bootstrapper = ivy_bootstrapper
  self._thrift_binary_factory = thrift_binary_factory
  self._interpreter = interpreter
  self._builder = builder
  self._targets = targets
  self._platforms = platforms
  self._extra_requirements = list(extra_requirements or [])
  self._logger = log if log else logger
  # Note: unrelated to the general pants artifact cache.
  cache_root = os.path.join(self._python_setup.artifact_cache_dir,
                            str(self._interpreter.identity))
  self._artifact_cache_root = cache_root
  self._key_generator = CacheKeyGenerator()
  self._build_invalidator = BuildInvalidator(cache_root)
示例3: __init__
def __init__(self,
             context,
             targets,
             extra_requirements=None,
             builder=None,
             platforms=None,
             interpreter=None):
  """Set up a chroot helper, deriving python subsystems from the context config.

  :param context: The pants run context (its config is used to build subsystems).
  :param targets: Targets whose contents will be dumped.
  :param extra_requirements: Optional extra python requirements to include.
  :param builder: Optional PEXBuilder; a temp-dir builder is created if omitted.
  :param platforms: Optional platforms to resolve for.
  :param interpreter: Optional interpreter; defaults to the current one.
  """
  self.context = context
  # TODO: These should come from the caller, and we should not know about config.
  self._python_setup = PythonSetup(self.context.config)
  self._python_repos = PythonRepos(self.context.config)
  self._targets = targets
  self._extra_requirements = list(extra_requirements or [])
  self._platforms = platforms
  self._interpreter = interpreter if interpreter else PythonInterpreter.get()
  if builder is None:
    builder = PEXBuilder(os.path.realpath(tempfile.mkdtemp()),
                         interpreter=self._interpreter)
  self._builder = builder
  # Note: unrelated to the general pants artifact cache.
  self._egg_cache_root = os.path.join(
      self._python_setup.scratch_dir, 'artifacts', str(self._interpreter.identity))
  self._key_generator = CacheKeyGenerator()
  self._build_invalidator = BuildInvalidator(self._egg_cache_root)
示例4: __init__
def __init__(self,
             target,
             root_dir,
             extra_targets=None,
             extra_requirements=None,
             builder=None,
             platforms=None,
             interpreter=None,
             conn_timeout=None):
  """Set up a chroot helper rooted at root_dir for the given target.

  :param target: The primary target to dump.
  :param root_dir: The build root directory.
  :param extra_targets: Optional additional targets to include.
  :param extra_requirements: Optional extra python requirements to include.
  :param builder: Optional PEXBuilder; a temp-dir builder is created if omitted.
  :param platforms: Optional platforms to resolve for.
  :param interpreter: Optional interpreter; defaults to the current one.
  :param conn_timeout: Accepted for interface compatibility; unused in this body.
  """
  self._config = Config.load()
  self._target = target
  self._root = root_dir
  self._platforms = platforms
  self._interpreter = interpreter or PythonInterpreter.get()
  self._extra_targets = [] if extra_targets is None else list(extra_targets)
  self._extra_requirements = [] if extra_requirements is None else list(extra_requirements)
  self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)
  # Note: unrelated to the general pants artifact cache.
  scratch = PythonSetup(self._config).scratch_dir('artifact_cache', default_name='artifacts')
  self._egg_cache_root = os.path.join(scratch, str(self._interpreter.identity))
  self._key_generator = CacheKeyGenerator()
  self._build_invalidator = BuildInvalidator(self._egg_cache_root)
示例5: __init__
def __init__(self, cache_key_generator, build_invalidator_dir,
             invalidate_dependents, extra_data, only_externaldeps):
  """Set up cache management state.

  :param cache_key_generator: Generator used to fingerprint targets.
  :param build_invalidator_dir: Directory backing the BuildInvalidator.
  :param invalidate_dependents: Whether dependents of changed targets are invalidated.
  :param extra_data: Arbitrary extra data folded into keys; may be None.
  :param only_externaldeps: When true, target sources do not contribute to keys.
  """
  self._cache_key_generator = cache_key_generator
  self._invalidate_dependents = invalidate_dependents
  # Pickling normalizes extra_data (which may be None) to a stable byte string.
  self._extra_data = pickle.dumps(extra_data)
  if only_externaldeps:
    self._sources = NO_SOURCES
  else:
    self._sources = TARGET_SOURCES
  self._invalidator = BuildInvalidator(build_invalidator_dir)
示例6: __init__
def __init__(self,
             cache_key_generator,
             build_invalidator_dir,
             invalidate_dependents,
             fingerprint_strategy=None):
  """Set up cache management state.

  :param cache_key_generator: Generator used to fingerprint targets.
  :param build_invalidator_dir: Directory backing the BuildInvalidator.
  :param invalidate_dependents: Whether dependents of changed targets are invalidated.
  :param fingerprint_strategy: Optional strategy customizing target fingerprints.
  """
  self._cache_key_generator = cache_key_generator
  self._fingerprint_strategy = fingerprint_strategy
  self._invalidate_dependents = invalidate_dependents
  self._invalidator = BuildInvalidator(build_invalidator_dir)
示例7: __init__
def __init__(self,
             cache_key_generator,
             build_invalidator_dir,
             invalidate_dependents,
             extra_data):
  """Set up cache management state.

  :param cache_key_generator: Generator used to fingerprint targets.
  :param build_invalidator_dir: Directory backing the BuildInvalidator.
  :param invalidate_dependents: Whether dependents of changed targets are invalidated.
  :param extra_data: Arbitrary extra data folded into keys; may be None.
  """
  self._cache_key_generator = cache_key_generator
  self._invalidate_dependents = invalidate_dependents
  # Pickling normalizes extra_data (which may be None) to a stable byte string.
  self._extra_data = pickle.dumps(extra_data)
  self._invalidator = BuildInvalidator(build_invalidator_dir)
示例8: __init__
def __init__(self,
             cache_key_generator,
             build_invalidator_dir,
             invalidate_dependents,
             fingerprint_strategy=None,
             invalidation_report=None,
             task_name=None):
  """Set up cache management state.

  :param cache_key_generator: Generator used to fingerprint targets.
  :param build_invalidator_dir: Directory backing the BuildInvalidator.
  :param invalidate_dependents: Whether dependents of changed targets are invalidated.
  :param fingerprint_strategy: Optional strategy customizing target fingerprints.
  :param invalidation_report: Optional report object; exposed as a public attribute.
  :param task_name: Optional task name for reporting; 'UNKNOWN' when absent.
  """
  self._cache_key_generator = cache_key_generator
  if task_name:
    self._task_name = task_name
  else:
    self._task_name = 'UNKNOWN'
  self._invalidate_dependents = invalidate_dependents
  self._invalidator = BuildInvalidator(build_invalidator_dir)
  self._fingerprint_strategy = fingerprint_strategy
  self.invalidation_report = invalidation_report
示例9: __init__
def __init__(self,
             context,
             python_setup,
             python_repos,
             interpreter,
             builder,
             targets,
             platforms,
             extra_requirements=None):
  """Bind this chroot helper to the given context, subsystems and targets.

  :param context: The pants run context.
  :param python_setup: PythonSetup subsystem instance.
  :param python_repos: PythonRepos subsystem instance.
  :param interpreter: The python interpreter to build against.
  :param builder: The PEXBuilder to dump into.
  :param targets: Targets whose contents will be dumped.
  :param platforms: Platforms to resolve distributions for.
  :param extra_requirements: Optional extra python requirements to include.
  """
  self.context = context
  self._python_setup = python_setup
  self._python_repos = python_repos
  self._interpreter = interpreter
  self._builder = builder
  self._targets = targets
  self._platforms = platforms
  self._extra_requirements = list(extra_requirements or [])
  # Note: unrelated to the general pants artifact cache.
  cache_root = os.path.join(self._python_setup.artifact_cache_dir,
                            str(self._interpreter.identity))
  self._artifact_cache_root = cache_root
  self._key_generator = CacheKeyGenerator()
  self._build_invalidator = BuildInvalidator(cache_root)
示例10: InvalidationCacheManager
class InvalidationCacheManager(object):
  """Manages cache checks, updates and invalidation keeping track of basic change
  and invalidation statistics.
  Note that this is distinct from the ArtifactCache concept, and should probably be renamed.
  """

  class CacheValidationError(Exception):
    """Indicates a problem accessing the cache."""

  def __init__(self,
               cache_key_generator,
               build_invalidator_dir,
               invalidate_dependents,
               fingerprint_strategy=None):
    # Collaborators: key generation, on-disk invalidation state, and an
    # optional strategy customizing target fingerprints.
    self._cache_key_generator = cache_key_generator
    self._invalidate_dependents = invalidate_dependents
    self._invalidator = BuildInvalidator(build_invalidator_dir)
    self._fingerprint_strategy = fingerprint_strategy

  def update(self, vts):
    """Mark a changed or invalidated VersionedTargetSet as successfully processed."""
    # Record each member target's key first, then the set's own key.
    for vt in vts.versioned_targets:
      self._invalidator.update(vt.cache_key)
      vt.valid = True
    self._invalidator.update(vts.cache_key)
    vts.valid = True

  def force_invalidate(self, vts):
    """Force invalidation of a VersionedTargetSet."""
    for vt in vts.versioned_targets:
      self._invalidator.force_invalidate(vt.cache_key)
      vt.valid = False
    self._invalidator.force_invalidate(vts.cache_key)
    vts.valid = False

  def check(self,
            targets,
            partition_size_hint=None,
            target_colors=None):
    """Checks whether each of the targets has changed and invalidates it if so.

    Returns a list of VersionedTargetSet objects (either valid or invalid). The returned sets
    'cover' the input targets, possibly partitioning them, and are in topological order.
    The caller can inspect these in order and, e.g., rebuild the invalid ones.
    If target_colors is specified, it must be a map from Target -> opaque 'color' values.
    Two Targets will be in the same partition only if they have the same color.
    """
    all_vts = self._sort_and_validate_targets(targets)
    # NOTE(review): filter() returns a lazy iterator on Python 3; this code
    # appears to assume the eager Python 2 list behavior -- confirm.
    invalid_vts = filter(lambda vt: not vt.valid, all_vts)
    return InvalidationCheck(all_vts, invalid_vts, partition_size_hint, target_colors)

  def _sort_and_validate_targets(self, targets):
    """Validate each target.

    Returns a topologically ordered set of VersionedTargets, each representing one input target.
    """
    # We must check the targets in this order, to ensure correctness if invalidate_dependents=True,
    # since we use earlier cache keys to compute later cache keys in this case.
    ordered_targets = self._order_target_list(targets)
    # This will be a list of VersionedTargets that correspond to @targets.
    versioned_targets = []
    # This will be a mapping from each target to its corresponding VersionedTarget.
    # NOTE(review): never populated in this excerpt -- appears unused here.
    versioned_targets_by_target = {}
    # Map from id to current fingerprint of the target with that id. We update this as we iterate,
    # in topological order, so when handling a target, this will already contain all its deps (in
    # this round).
    id_to_hash = {}
    for target in ordered_targets:
      cache_key = self._key_for(target, transitive=self._invalidate_dependents)
      id_to_hash[target.id] = cache_key.hash
      # Create a VersionedTarget corresponding to @target.
      versioned_target = VersionedTarget(self, target, cache_key)
      # Add the new VersionedTarget to the list of computed VersionedTargets.
      versioned_targets.append(versioned_target)
    return versioned_targets

  def needs_update(self, cache_key):
    # Thin delegation to the underlying BuildInvalidator.
    return self._invalidator.needs_update(cache_key)

  def _order_target_list(self, targets):
    """Orders the targets topologically, from least to most dependent."""
    return filter(targets.__contains__, reversed(sort_targets(targets)))

  def _key_for(self, target, transitive=False):
    # Compute this target's cache key, wrapping any failure in CacheValidationError.
    try:
      return self._cache_key_generator.key_for_target(target,
                                                      transitive=transitive,
                                                      fingerprint_strategy=self._fingerprint_strategy)
    except Exception as e:
      # This is a catch-all for problems we haven't caught up with and given a better diagnostic.
      # TODO(Eric Ayers): If you see this exception, add a fix to catch the problem earlier.
      exc_info = sys.exc_info()
      # ......... remainder of this method omitted in this excerpt .........
示例11: PythonChroot
class PythonChroot(object):
  """Collects target sources, resources, requirements and distributions into a
  PEXBuilder chroot."""

  # Maps each supported dependency target type to the bucket name it resolves into.
  _VALID_DEPENDENCIES = {
    PrepCommand: 'prep',
    PythonLibrary: 'libraries',
    PythonRequirementLibrary: 'reqs',
    PythonBinary: 'binaries',
    PythonThriftLibrary: 'thrifts',
    PythonAntlrLibrary: 'antlrs',
    PythonTests: 'tests'
  }

  # Class-level dict shared by all instances; its use is not visible in this excerpt.
  MEMOIZED_THRIFTS = {}

  class InvalidDependencyException(Exception):
    # Raised for dependency targets outside _VALID_DEPENDENCIES.
    def __init__(self, target):
      Exception.__init__(self, "Not a valid Python dependency! Found: %s" % target)

  def __init__(self,
               context,
               targets,
               extra_requirements=None,
               builder=None,
               platforms=None,
               interpreter=None):
    self.context = context
    self._targets = targets
    self._extra_requirements = list(extra_requirements) if extra_requirements else []
    self._platforms = platforms
    self._interpreter = interpreter or PythonInterpreter.get()
    self._builder = builder or PEXBuilder(os.path.realpath(tempfile.mkdtemp()),
                                          interpreter=self._interpreter)
    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(
        PythonSetup(self.context.config).scratch_dir('artifact_cache', default_name='artifacts'),
        str(self._interpreter.identity))
    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)

  def delete(self):
    """Deletes this chroot from disk if it has been dumped."""
    safe_rmtree(self.path())

  def __del__(self):
    # Keep the chroot on disk for debugging when PANTS_LEAVE_CHROOT is set.
    if os.getenv('PANTS_LEAVE_CHROOT') is None:
      self.delete()
    else:
      self.debug('Left chroot at %s' % self.path())

  @property
  def builder(self):
    # The underlying PEXBuilder this chroot dumps into.
    return self._builder

  def debug(self, msg, indent=0):
    # Prints only when PANTS_VERBOSE is set in the environment.
    if os.getenv('PANTS_VERBOSE') is not None:
      print('%s%s' % (' ' * indent, msg))

  def path(self):
    return os.path.realpath(self._builder.path())

  def _dump_library(self, library):
    # Copy each source and resource file of `library` into the chroot,
    # preserving its source-root-relative path.
    def copy_to_chroot(base, path, add_function):
      src = os.path.join(get_buildroot(), base, path)
      add_function(src, path)

    self.debug(' Dumping library: %s' % library)
    for relpath in library.sources_relative_to_source_root():
      try:
        copy_to_chroot(library.target_base, relpath, self._builder.add_source)
      except OSError as e:
        logger.error("Failed to copy {path} for library {library}"
                     .format(path=os.path.join(library.target_base, relpath),
                             library=library))
        raise
    for resources_tgt in library.resources:
      for resource_file_from_source_root in resources_tgt.sources_relative_to_source_root():
        try:
          copy_to_chroot(resources_tgt.target_base, resource_file_from_source_root,
                         self._builder.add_resource)
        except OSError as e:
          logger.error("Failed to copy {path} for resource {resource}"
                       .format(path=os.path.join(resources_tgt.target_base,
                                                 resource_file_from_source_root),
                               resource=resources_tgt.address.spec))
          raise

  def _dump_requirement(self, req):
    self.debug(' Dumping requirement: %s' % req)
    self._builder.add_requirement(req)

  def _dump_distribution(self, dist):
    self.debug(' Dumping distribution: .../%s' % os.path.basename(dist.location))
    self._builder.add_distribution(dist)

  def _generate_requirement(self, library, builder_cls):
    # Build a requirement for a codegen library, keyed by its fingerprint hash.
    library_key = self._key_generator.key_for_target(library)
    builder = builder_cls(library, get_buildroot(),
                          self.context.options, '-' + library_key.hash[:8])
    # ......... remainder of this method omitted in this excerpt .........
示例12: InvalidationCacheManager
class InvalidationCacheManager(object):
  """Manages cache checks, updates and invalidation keeping track of basic change
  and invalidation statistics.
  Note that this is distinct from the ArtifactCache concept, and should probably be renamed.
  """

  class CacheValidationError(Exception):
    """Indicates a problem accessing the cache."""

  def __init__(self,
               cache_key_generator,
               build_invalidator_dir,
               invalidate_dependents,
               fingerprint_strategy=None):
    # Collaborators: key generation, on-disk invalidation state, and an
    # optional strategy customizing target fingerprints.
    self._cache_key_generator = cache_key_generator
    self._invalidate_dependents = invalidate_dependents
    self._invalidator = BuildInvalidator(build_invalidator_dir)
    self._fingerprint_strategy = fingerprint_strategy

  def update(self, vts):
    """Mark a changed or invalidated VersionedTargetSet as successfully processed."""
    # Record each member target's key first, then the set's own key.
    for vt in vts.versioned_targets:
      self._invalidator.update(vt.cache_key)
      vt.valid = True
    self._invalidator.update(vts.cache_key)
    vts.valid = True

  def force_invalidate(self, vts):
    """Force invalidation of a VersionedTargetSet."""
    for vt in vts.versioned_targets:
      self._invalidator.force_invalidate(vt.cache_key)
      vt.valid = False
    self._invalidator.force_invalidate(vts.cache_key)
    vts.valid = False

  def check(self,
            targets,
            partition_size_hint=None,
            target_colors=None,
            topological_order=False):
    """Checks whether each of the targets has changed and invalidates it if so.

    Returns a list of VersionedTargetSet objects (either valid or invalid). The returned sets
    'cover' the input targets, possibly partitioning them, with one caveat: if the FingerprintStrategy
    opted out of fingerprinting a target because it doesn't contribute to invalidation, then that
    target will be excluded from all_vts, invalid_vts, and the partitioned VTS.
    Callers can inspect these vts and rebuild the invalid ones, for example.
    If target_colors is specified, it must be a map from Target -> opaque 'color' values.
    Two Targets will be in the same partition only if they have the same color.
    """
    all_vts = self.wrap_targets(targets, topological_order=topological_order)
    # NOTE(review): filter() returns a lazy iterator on Python 3; this code
    # appears to assume the eager Python 2 list behavior -- confirm.
    invalid_vts = filter(lambda vt: not vt.valid, all_vts)
    return InvalidationCheck(all_vts, invalid_vts, partition_size_hint, target_colors)

  def wrap_targets(self, targets, topological_order=False):
    """Wrap targets and their computed cache keys in VersionedTargets.

    If the FingerprintStrategy opted out of providing a fingerprint for a target, that target will not
    have an associated VersionedTarget returned.
    Returns a list of VersionedTargets, each representing one input target.
    """
    def vt_iter():
      if topological_order:
        sorted_targets = [t for t in reversed(sort_targets(targets)) if t in targets]
      else:
        sorted_targets = sorted(targets)
      for target in sorted_targets:
        target_key = self._key_for(target)
        # A None key means the fingerprint strategy opted this target out.
        if target_key is not None:
          yield VersionedTarget(self, target, target_key)
    return list(vt_iter())

  def needs_update(self, cache_key):
    # Thin delegation to the underlying BuildInvalidator.
    return self._invalidator.needs_update(cache_key)

  def _key_for(self, target):
    # Compute this target's cache key, wrapping any failure in CacheValidationError.
    try:
      return self._cache_key_generator.key_for_target(target,
                                                      transitive=self._invalidate_dependents,
                                                      fingerprint_strategy=self._fingerprint_strategy)
    except Exception as e:
      # This is a catch-all for problems we haven't caught up with and given a better diagnostic.
      # TODO(Eric Ayers): If you see this exception, add a fix to catch the problem earlier.
      exc_info = sys.exc_info()
      new_exception = self.CacheValidationError("Problem validating target {} in {}: {}"
                                                .format(target.id, target.address.spec_path, e))
      # Python 2 three-argument raise: re-raise with the original traceback.
      # NOTE(review): this is a SyntaxError on Python 3; the Py3 form is `raise ... from e`.
      raise self.CacheValidationError, new_exception, exc_info[2]
示例13: PythonChroot
class PythonChroot(object):
  """Collects target sources, resources, requirements and distributions into a
  PEXBuilder chroot, and can package the result as a PEX."""

  # Maps each supported dependency target type to the bucket name it resolves into.
  _VALID_DEPENDENCIES = {
    PrepCommand: 'prep',
    PythonLibrary: 'libraries',
    PythonRequirementLibrary: 'reqs',
    PythonBinary: 'binaries',
    PythonThriftLibrary: 'thrifts',
    PythonAntlrLibrary: 'antlrs',
    PythonTests: 'tests'
  }

  class InvalidDependencyException(Exception):
    # Raised for dependency targets outside _VALID_DEPENDENCIES.
    def __init__(self, target):
      Exception.__init__(self, "Not a valid Python dependency! Found: {}".format(target))

  @staticmethod
  def get_platforms(platform_list):
    # Replace the 'current' sentinel with the concrete current platform;
    # the set comprehension deduplicates before freezing into a tuple.
    return tuple({Platform.current() if p == 'current' else p for p in platform_list})

  def __init__(self,
               python_setup,
               python_repos,
               ivy_bootstrapper,
               thrift_binary_factory,
               interpreter,
               builder,
               targets,
               platforms,
               extra_requirements=None,
               log=None):
    self._python_setup = python_setup
    self._python_repos = python_repos
    self._ivy_bootstrapper = ivy_bootstrapper
    self._thrift_binary_factory = thrift_binary_factory
    self._interpreter = interpreter
    self._builder = builder
    self._targets = targets
    self._platforms = platforms
    self._extra_requirements = list(extra_requirements) if extra_requirements else []
    self._logger = log or logger
    # Note: unrelated to the general pants artifact cache.
    self._artifact_cache_root = os.path.join(
        self._python_setup.artifact_cache_dir, str(self._interpreter.identity))
    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._artifact_cache_root)

  def delete(self):
    """Deletes this chroot from disk if it has been dumped."""
    safe_rmtree(self.path())

  def debug(self, msg):
    self._logger.debug(msg)

  def path(self):
    return os.path.realpath(self._builder.path())

  def pex(self):
    # Wrap the dumped chroot as an executable PEX.
    return PEX(self.path(), interpreter=self._interpreter)

  def package_pex(self, filename):
    """Package into a PEX zipfile.

    :param filename: The filename where the PEX should be stored.
    """
    self._builder.build(filename)

  def _dump_library(self, library):
    # Copy each source and resource file of `library` into the chroot,
    # preserving its source-root-relative path.
    def copy_to_chroot(base, path, add_function):
      src = os.path.join(get_buildroot(), base, path)
      add_function(src, path)

    self.debug(' Dumping library: {}'.format(library))
    for relpath in library.sources_relative_to_source_root():
      try:
        copy_to_chroot(library.target_base, relpath, self._builder.add_source)
      except OSError:
        logger.error("Failed to copy {path} for library {library}"
                     .format(path=os.path.join(library.target_base, relpath),
                             library=library))
        raise
    for resources_tgt in library.resources:
      for resource_file_from_source_root in resources_tgt.sources_relative_to_source_root():
        try:
          copy_to_chroot(resources_tgt.target_base, resource_file_from_source_root,
                         self._builder.add_resource)
        except OSError:
          logger.error("Failed to copy {path} for resource {resource}"
                       .format(path=os.path.join(resources_tgt.target_base,
                                                 resource_file_from_source_root),
                               resource=resources_tgt.address.spec))
          raise

  def _dump_requirement(self, req):
    self.debug(' Dumping requirement: {}'.format(req))
    self._builder.add_requirement(req)

  def _dump_distribution(self, dist):
    # ......... body of this method omitted in this excerpt .........
示例14: CacheManager
class CacheManager(object):
  """Manages cache checks, updates and invalidation keeping track of basic change
  and invalidation statistics.
  Note that this is distinct from the ArtifactCache concept, and should probably be renamed.
  """

  def __init__(self, cache_key_generator, build_invalidator_dir,
               invalidate_dependents, extra_data, only_externaldeps):
    self._cache_key_generator = cache_key_generator
    self._invalidate_dependents = invalidate_dependents
    self._extra_data = pickle.dumps(extra_data)  # extra_data may be None.
    # When only_externaldeps is set, target sources do not contribute to keys.
    self._sources = NO_SOURCES if only_externaldeps else TARGET_SOURCES
    self._invalidator = BuildInvalidator(build_invalidator_dir)

  def update(self, vts):
    """Mark a changed or invalidated VersionedTargetSet as successfully processed."""
    # Record each member target's key first, then the set's own key.
    for vt in vts.versioned_targets:
      self._invalidator.update(vt.cache_key)
      vt.valid = True
    self._invalidator.update(vts.cache_key)
    vts.valid = True

  def force_invalidate(self, vts):
    """Force invalidation of a VersionedTargetSet."""
    for vt in vts.versioned_targets:
      self._invalidator.force_invalidate(vt.cache_key)
      vt.valid = False
    self._invalidator.force_invalidate(vts.cache_key)
    vts.valid = False

  def check(self, targets, partition_size_hint=None):
    """Checks whether each of the targets has changed and invalidates it if so.

    Returns a list of VersionedTargetSet objects (either valid or invalid). The returned sets
    'cover' the input targets, possibly partitioning them, and are in topological order.
    The caller can inspect these in order and, e.g., rebuild the invalid ones.
    """
    all_vts = self._sort_and_validate_targets(targets)
    # NOTE(review): filter() returns a lazy iterator on Python 3; this code
    # appears to assume the eager Python 2 list behavior -- confirm.
    invalid_vts = filter(lambda vt: not vt.valid, all_vts)
    return InvalidationCheck(all_vts, invalid_vts, partition_size_hint)

  def _sort_and_validate_targets(self, targets):
    """Validate each target.

    Returns a topologically ordered set of VersionedTargets, each representing one input target.
    """
    # We must check the targets in this order, to ensure correctness if invalidate_dependents=True,
    # since we use earlier cache keys to compute later cache keys in this case.
    ordered_targets = self._order_target_list(targets)
    # This will be a list of VersionedTargets that correspond to @targets.
    versioned_targets = []
    # This will be a mapping from each target to its corresponding VersionedTarget.
    # NOTE(review): never populated in this excerpt -- appears unused here.
    versioned_targets_by_target = {}
    # Map from id to current fingerprint of the target with that id. We update this as we iterate,
    # in topological order, so when handling a target, this will already contain all its deps (in
    # this round).
    id_to_hash = {}
    for target in ordered_targets:
      dependency_keys = set()
      if self._invalidate_dependents and hasattr(target, 'dependencies'):
        # Note that we only need to do this for the immediate deps, because those will already
        # reflect changes in their own deps.
        for dep in target.dependencies:
          # We rely on the fact that any deps have already been processed, either in an earlier
          # round or because they came first in ordered_targets.
          # Note that only external deps (e.g., JarDependency) or targets with sources can
          # affect invalidation. Other targets (JarLibrary, Pants) are just dependency scaffolding.
          if isinstance(dep, ExternalDependency):
            dependency_keys.add(dep.cache_key())
          elif isinstance(dep, TargetWithSources):
            fprint = id_to_hash.get(dep.id, None)
            if fprint is None:
              # It may have been processed in a prior round, and therefore the fprint should
              # have been written out by the invalidator.
              fprint = self._invalidator.existing_hash(dep.id)
              # Note that fprint may still be None here. E.g., a codegen target is in the list
              # of deps, but its fprint is not visible to our self._invalidator (that of the
              # target synthesized from it is visible, so invalidation will still be correct.)
              #
              # Another case where this can happen is a dep of a codegen target on, say,
              # a java target that hasn't been built yet (again, the synthesized target will
              # depend on that same java target, so invalidation will still be correct.)
              # TODO(benjy): Make this simpler and more obviously correct.
            if fprint is not None:
              dependency_keys.add(fprint)
          elif isinstance(dep, JarLibrary) or isinstance(dep, Pants):
            pass
          else:
            raise ValueError('Cannot calculate a cache_key for a dependency: %s' % dep)
      cache_key = self._key_for(target, dependency_keys)
      id_to_hash[target.id] = cache_key.hash
      # Create a VersionedTarget corresponding to @target.
      versioned_target = VersionedTarget(self, target, cache_key)
      # Add the new VersionedTarget to the list of computed VersionedTargets.
      # ......... remainder of this method omitted in this excerpt .........
示例15: PythonChroot
class PythonChroot(object):
  """Collects target sources, resources, requirements and distributions into a
  PEXBuilder chroot."""

  # Maps each supported dependency target type to the bucket name it resolves into.
  _VALID_DEPENDENCIES = {
    PythonLibrary: 'libraries',
    PythonRequirement: 'reqs',
    PythonBinary: 'binaries',
    PythonThriftLibrary: 'thrifts',
    PythonAntlrLibrary: 'antlrs',
    PythonTests: 'tests'
  }

  # Class-level dict shared by all instances; its use is not visible in this excerpt.
  MEMOIZED_THRIFTS = {}

  class InvalidDependencyException(Exception):
    # Raised for dependency targets outside _VALID_DEPENDENCIES.
    def __init__(self, target):
      Exception.__init__(self, "Not a valid Python dependency! Found: %s" % target)

  def __init__(self,
               target,
               root_dir,
               extra_targets=None,
               builder=None,
               platforms=None,
               interpreter=None,
               conn_timeout=None):
    # conn_timeout is accepted but not stored in this excerpt.
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._platforms = platforms
    self._interpreter = interpreter or PythonInterpreter.get()
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)
    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(
        PythonSetup(self._config).scratch_dir('artifact_cache', default_name='artifacts'),
        str(self._interpreter.identity))
    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)

  def __del__(self):
    # Keep the chroot on disk for debugging when PANTS_LEAVE_CHROOT is set.
    if os.getenv('PANTS_LEAVE_CHROOT') is None:
      safe_rmtree(self.path())
    else:
      self.debug('Left chroot at %s' % self.path())

  @property
  def builder(self):
    # The underlying PEXBuilder this chroot dumps into.
    return self._builder

  def debug(self, msg, indent=0):
    # Prints only when PANTS_VERBOSE is set in the environment.
    if os.getenv('PANTS_VERBOSE') is not None:
      print('%s%s' % (' ' * indent, msg))

  def path(self):
    return self._builder.path()

  def _dump_library(self, library):
    # Copy each source and resource file of `library` into the chroot.
    def copy_to_chroot(base, path, add_function):
      src = os.path.join(self._root, base, path)
      add_function(src, path)

    self.debug(' Dumping library: %s' % library)
    for filename in library.sources:
      copy_to_chroot(library.target_base, filename, self._builder.add_source)
    for filename in library.resources:
      copy_to_chroot(library.target_base, filename, self._builder.add_resource)

  def _dump_requirement(self, req, dynamic, repo):
    self.debug(' Dumping requirement: %s%s%s' % (str(req),
        ' (dynamic)' if dynamic else '', ' (repo: %s)' % repo if repo else ''))
    self._builder.add_requirement(req, dynamic, repo)

  def _dump_distribution(self, dist):
    self.debug(' Dumping distribution: .../%s' % os.path.basename(dist.location))
    self._builder.add_distribution(dist)

  def _generate_requirement(self, library, builder_cls):
    # Build (or reuse) a cached sdist for a codegen library, keyed by fingerprint.
    library_key = self._key_generator.key_for_target(library)
    builder = builder_cls(library, self._root, self._config, '-' + library_key.hash[:8])
    cache_dir = os.path.join(self._egg_cache_root, library_key.id)
    # Rebuild only when the fingerprint changed; otherwise the cached copy is reused.
    if self._build_invalidator.needs_update(library_key):
      sdist = builder.build(interpreter=self._interpreter)
      safe_mkdir(cache_dir)
      shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
      self._build_invalidator.update(library_key)
    with ParseContext.temp():
      return PythonRequirement(builder.requirement_string(), repository=cache_dir, use_2to3=True)

  def _generate_thrift_requirement(self, library):
    return self._generate_requirement(library, PythonThriftBuilder)

  def _generate_antlr_requirement(self, library):
    return self._generate_requirement(library, PythonAntlrBuilder)

  def resolve(self, targets):
    children = defaultdict(OrderedSet)
    # ......... remainder of this method omitted in this excerpt .........