This article collects typical usage examples of the Python method twitter.pants.targets.InternalTarget.coalesce_targets. If you have been wondering what InternalTarget.coalesce_targets does, how to call it, or what it looks like in real code, the hand-picked examples below may help. You can also read further about its containing class, twitter.pants.targets.InternalTarget.
The listing below shows 3 code examples of the InternalTarget.coalesce_targets method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python code examples.
Example 1: _create_chunks
# Required import: from twitter.pants.targets import InternalTarget [as alias]
# Or: from twitter.pants.targets.InternalTarget import coalesce_targets [as alias]
def _create_chunks(context, goals):
  def discriminator(target):
    for i, goal in enumerate(goals):
      if goal.group.predicate(target):
        return i
    return 'other'

  # First, divide the set of all targets to be built into compatible chunks, based
  # on their declared exclusives. Then, for each chunk of compatible exclusives, do
  # further subchunking. At the end, we'll have a list of chunks to be built,
  # which will go through the chunks of each exclusives-compatible group separately.

  # TODO(markcc); chunks with incompatible exclusives require separate ivy resolves.
  # Either interleave the ivy task in this group so that it runs once for each batch of
  # chunks with compatible exclusives, or make the compilation tasks do their own ivy resolves
  # for each batch of targets they're asked to compile.
  exclusives = Group._get_exclusives_product(context)
  sorted_excl_group_keys = exclusives.get_ordered_group_keys()

  all_chunks = []

  for excl_group_key in sorted_excl_group_keys:
    # TODO(John Sirois): coalescing should be made available in another spot, InternalTarget is jvm
    # specific, and all we care is that the Targets have dependencies defined
    chunk_targets = exclusives.get_targets_for_group_key(excl_group_key)
    # need to extract the targets for this chunk that are internal.
    ## TODO(markcc): right here, we're using "context.targets", which doesn't respect any of the
    ## exclusives rubbish going on around here.
    #coalesced = InternalTarget.coalesce_targets(context.targets(is_internal), discriminator)
    coalesced = InternalTarget.coalesce_targets(filter(is_internal, chunk_targets), discriminator)
    coalesced = list(reversed(coalesced))

    def not_internal(target):
      return not is_internal(target)
    # got targets that aren't internal.
    #rest = OrderedSet(context.targets(not_internal))
    rest = OrderedSet(filter(not_internal, chunk_targets))

    chunks = [rest] if rest else []
    flavor = None
    chunk_start = 0
    for i, target in enumerate(coalesced):
      target_flavor = discriminator(target)
      if target_flavor != flavor and i > chunk_start:
        chunks.append(OrderedSet(coalesced[chunk_start:i]))
        chunk_start = i
      flavor = target_flavor
    if chunk_start < len(coalesced):
      chunks.append(OrderedSet(coalesced[chunk_start:]))
    all_chunks += chunks

  context.log.debug('::: created chunks(%d)' % len(all_chunks))
  for i, chunk in enumerate(all_chunks):
    context.log.debug(' chunk(%d):\n\t%s' % (i, '\n\t'.join(sorted(map(str, chunk)))))

  return all_chunks
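
To see the flavor-splitting loop at the end of _create_chunks in isolation, here is a minimal, self-contained sketch (hypothetical string items stand in for pants targets) that groups an already-ordered sequence into contiguous chunks sharing the same discriminator value, mirroring the loop above:

def split_into_chunks(coalesced, discriminator):
  # Group a pre-ordered sequence into contiguous runs that share a discriminator value.
  chunks = []
  flavor = None
  chunk_start = 0
  for i, item in enumerate(coalesced):
    item_flavor = discriminator(item)
    if item_flavor != flavor and i > chunk_start:
      chunks.append(coalesced[chunk_start:i])
      chunk_start = i
    flavor = item_flavor
  if chunk_start < len(coalesced):
    chunks.append(coalesced[chunk_start:])
  return chunks

# Toy usage: 'j' and 's' stand in for two goal groups, e.g. java vs. scala.
print(split_into_chunks(['j1', 'j2', 's1', 's2', 'j3'], lambda t: t[0]))
# [['j1', 'j2'], ['s1', 's2'], ['j3']]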
Example 2: create_chunks
# Required import: from twitter.pants.targets import InternalTarget [as alias]
# Or: from twitter.pants.targets.InternalTarget import coalesce_targets [as alias]
def create_chunks(context, goals):
  def discriminator(target):
    for i, goal in enumerate(goals):
      if goal.group.predicate(target):
        return i
    return 'other'

  # TODO(John Sirois): coalescing should be made available in another spot, InternalTarget is jvm
  # specific, and all we care is that the Targets have dependencies defined
  coalesced = InternalTarget.coalesce_targets(context.targets(is_internal), discriminator)
  coalesced = list(reversed(coalesced))

  def not_internal(target):
    return not is_internal(target)
  rest = OrderedSet(context.targets(not_internal))

  chunks = [rest] if rest else []
  flavor = None
  chunk_start = 0
  for i, target in enumerate(coalesced):
    target_flavor = discriminator(target)
    if target_flavor != flavor and i > chunk_start:
      chunks.append(OrderedSet(coalesced[chunk_start:i]))
      chunk_start = i
    flavor = target_flavor
  if chunk_start < len(coalesced):
    chunks.append(OrderedSet(coalesced[chunk_start:]))

  context.log.debug('::: created chunks(%d)' % len(chunks))
  for i, chunk in enumerate(chunks):
    context.log.debug(' chunk(%d):\n\t%s' % (i, '\n\t'.join(sorted(map(str, chunk)))))

  return chunks
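
The discriminator shared by both chunking examples maps each target to the index of the first goal whose group predicate accepts it, falling back to 'other'. A simplified, runnable toy version (hypothetical Goal stand-ins with the predicate attached directly, rather than the pants goal.group.predicate attribute) looks like this:

from collections import namedtuple

# Hypothetical stand-ins for goals; each carries a predicate over targets.
Goal = namedtuple('Goal', ['name', 'predicate'])
goals = [
  Goal('java', lambda t: t.endswith('.java')),
  Goal('scala', lambda t: t.endswith('.scala')),
]

def discriminator(target):
  # Return the index of the first matching goal, or 'other' when nothing matches.
  for i, goal in enumerate(goals):
    if goal.predicate(target):
      return i
  return 'other'

print([discriminator(t) for t in ['A.java', 'B.scala', 'C.txt']])
# [0, 1, 'other']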
Example 3: extract_target
# Required import: from twitter.pants.targets import InternalTarget [as alias]
# Or: from twitter.pants.targets.InternalTarget import coalesce_targets [as alias]
def extract_target(java_targets, name = None):
  """Extracts a minimal set of linked targets from the given target's internal transitive dependency
  set. The root target in the extracted target set is returned. The algorithm does a topological
  sort of the internal targets and then tries to coalesce targets of a given type. Any target with
  a custom ant build xml will be excluded from the coalescing."""

  # TODO(John Sirois): this is broken - representative_target is not necessarily representative
  representative_target = list(java_targets)[0]

  meta_target_base_name = "fast-%s" % (name if name else representative_target.name)
  provides = None
  deployjar = hasattr(representative_target, 'deployjar') and representative_target.deployjar
  buildflags = representative_target.buildflags

  def discriminator(tgt):
    # Chunk up our targets by (type, src base) - the javac task in the ant build relies upon a
    # single srcdir that points to the root of a package tree to ensure differential compilation
    # works.
    return type(tgt), tgt.target_base

  def create_target(category, target_name, target_index, targets):
    def name(name):
      return "%s-%s-%d" % (target_name, name, target_index)

    # TODO(John Sirois): JavaLibrary and ScalaLibrary can float here between src/ and tests/ - add
    # ant build support to allow the same treatment for JavaThriftLibrary and JavaProtobufLibrary
    # so that tests can house test IDL in tests/
    target_type, base = category
    if target_type == JavaProtobufLibrary:
      return JavaProtobufLibrary._aggregate(name('protobuf'), provides, buildflags, targets)
    elif target_type == JavaThriftLibrary:
      return JavaThriftLibrary._aggregate(name('thrift'), provides, buildflags, targets)
    elif target_type == AnnotationProcessor:
      return AnnotationProcessor._aggregate(name('apt'), provides, targets)
    elif target_type == JavaLibrary:
      return JavaLibrary._aggregate(name('java'), provides, deployjar, buildflags, targets, base)
    elif target_type == ScalaLibrary:
      return ScalaLibrary._aggregate(name('scala'), provides, deployjar, buildflags, targets, base)
    elif target_type == JavaTests:
      return JavaTests._aggregate(name('java-tests'), buildflags, targets)
    elif target_type == ScalaTests:
      return ScalaTests._aggregate(name('scala-tests'), buildflags, targets)
    else:
      raise Exception("Cannot aggregate targets of type: %s" % target_type)

  # TODO(John Sirois): support a flag that selects conflict resolution policy - this currently
  # happens to mirror the ivy policy we use
  def resolve_conflicts(target):
    dependencies = {}
    for dependency in target.resolved_dependencies:
      for jar in dependency._as_jar_dependencies():
        key = jar.org, jar.name
        previous = dependencies.get(key, jar)
        if jar.rev >= previous.rev:
          if jar != previous:
            print "WARNING: replacing %s with %s for %s" % (previous, jar, target._id)
            target.resolved_dependencies.remove(previous)
            target.jar_dependencies.remove(previous)
          dependencies[key] = jar
    return target

  # chunk up our targets by type & custom build xml
  coalesced = InternalTarget.coalesce_targets(java_targets, discriminator)
  coalesced = list(reversed(coalesced))

  start_type = discriminator(coalesced[0])
  start = 0
  descriptors = []

  for current in range(0, len(coalesced)):
    current_target = coalesced[current]
    current_type = discriminator(current_target)

    if current_target.custom_antxml_path:
      if start < current:
        # if we have a type chunk to our left, record it
        descriptors.append((start_type, coalesced[start:current]))

      # record a chunk containing just the target that has the custom build xml to be conservative
      descriptors.append((current_type, [current_target]))
      start = current + 1
      if current < (len(coalesced) - 1):
        start_type = discriminator(coalesced[start])

    elif start_type != current_type:
      # record the type chunk we just left
      descriptors.append((start_type, coalesced[start:current]))
      start = current
      start_type = current_type

  if start < len(coalesced):
    # record the tail chunk
    descriptors.append((start_type, coalesced[start:]))

  # build meta targets aggregated from the chunks and keep track of which targets end up in which
  # meta targets
  meta_targets_by_target_id = dict()
  targets_by_meta_target = []
  for (ttype, targets), index in zip(descriptors, reversed(range(0, len(descriptors)))):
    meta_target = resolve_conflicts(create_target(ttype, meta_target_base_name, index, targets))
#......... the rest of the code is omitted here .........
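
The descriptor-building loop in extract_target closes a chunk whenever the (type, src base) discriminator changes, and isolates any target carrying a custom ant build xml into its own single-element chunk. A self-contained toy sketch of that partitioning (hypothetical string items stand in for targets; an uppercase first letter marks a "custom antxml" item) behaves as follows:

def build_descriptors(coalesced, discriminator, has_custom_antxml):
  # Partition a pre-ordered list into (type, chunk) descriptors, giving any
  # item with a custom build xml its own single-element chunk.
  descriptors = []
  start_type = discriminator(coalesced[0])
  start = 0
  for current, item in enumerate(coalesced):
    current_type = discriminator(item)
    if has_custom_antxml(item):
      if start < current:
        # record the type chunk to our left, if any
        descriptors.append((start_type, coalesced[start:current]))
      descriptors.append((current_type, [item]))
      start = current + 1
      if current < (len(coalesced) - 1):
        start_type = discriminator(coalesced[start])
    elif start_type != current_type:
      # record the type chunk we just left
      descriptors.append((start_type, coalesced[start:current]))
      start = current
      start_type = current_type
  if start < len(coalesced):
    # record the tail chunk
    descriptors.append((start_type, coalesced[start:]))
  return descriptors

# Toy usage: items are classified by their first letter; 'X1' has a custom build xml.
items = ['j1', 'j2', 'X1', 's1', 's2']
print(build_descriptors(items, lambda t: t[0].lower(), lambda t: t[0].isupper()))
# [('j', ['j1', 'j2']), ('x', ['X1']), ('s', ['s1', 's2'])]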