This article collects typical usage examples of the Python function mercurial.scmutil.revrange. If you are unsure what revrange does, how to call it, or what real-world uses look like, the selected code examples here may help.
Fifteen code examples of the revrange function are shown below, ordered roughly by popularity.
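Before the examples, here is a minimal sketch of how scmutil.revrange is typically called from inside an extension command. The command below is hypothetical: its name, arguments, and output format are assumptions for illustration, and the exact return type varies between Mercurial releases (a plain list of revision numbers in older versions, a lazy smartset in newer ones).

def listrevs(ui, repo, *revspecs, **opts):
    """hypothetical command: resolve revision specs and print them"""
    from mercurial import scmutil
    # revrange accepts a list of revision/revset specs such as "1:5",
    # ".", or "draft()" and resolves them against the repository
    revs = scmutil.revrange(repo, revspecs or ['.'])
    for r in revs:
        # each entry is an integer revision number; repo[r] is its changectx
        ui.status("%d:%s\n" % (r, repo[r].hex()[:12]))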
Example 1: getlogrevs
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is a list of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match object
    filtering the files to be detailed when displaying the revision.
    """
    if not len(repo):
        return [], None, None
    # Default --rev value depends on --follow but --follow behaviour
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    else:
        if follow and len(repo) > 0:
            revs = scmutil.revrange(repo, ['.:0'])
        else:
            revs = range(len(repo) - 1, -1, -1)
    if not revs:
        return [], None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Evaluate revisions in changelog order for performance
        # reasons but preserve the original sequence order in the
        # filtered result.
        matched = set(revset.match(repo.ui, expr)(repo, sorted(revs)))
        revs = [r for r in revs if r in matched]
    if not opts.get('hidden'):
        # --hidden is still experimental and not worth a dedicated revset
        # yet. Fortunately, filtering revision number is fast.
        revs = [r for r in revs if r not in repo.changelog.hiddenrevs]
    return revs, expr, filematcher
Example 2: delete
def delete(self, repo, patches, opts):
    if not patches and not opts.get('rev'):
        raise util.Abort(_('qdelete requires at least one revision or '
                           'patch name'))

    realpatches = []
    for patch in patches:
        patch = self.lookup(patch, strict=True)
        info = self.isapplied(patch)
        if info:
            raise util.Abort(_("cannot delete applied patch %s") % patch)
        if patch not in self.series:
            raise util.Abort(_("patch %s not in series file") % patch)
        if patch not in realpatches:
            realpatches.append(patch)

    numrevs = 0
    if opts.get('rev'):
        if not self.applied:
            raise util.Abort(_('no patches applied'))
        revs = scmutil.revrange(repo, opts.get('rev'))
        if len(revs) > 1 and revs[0] > revs[1]:
            revs.reverse()
        revpatches = self._revpatches(repo, revs)
        realpatches += revpatches
        numrevs = len(revpatches)

    self._cleanup(realpatches, numrevs, opts.get('keep'))
Example 3: squash
def squash(ui, repo, **opts):
    """Simple extension that squashes multiple revisions into a single one"""
    revrange = scmutil.revrange(repo, opts["rev"])
    if not revrange:
        raise util.Abort(_("Please specify a revision"))

    start = revrange[0]
    end = revrange[-1]

    revs = find_revisions(start, end, repo)
    if not revs:
        raise util.Abort(_("The revision %s is not an ancestor of %s\n") % (start, end))
    elif len(revs) == 1:
        raise util.Abort(_("Please specify a start and an end revision"))

    verify_user(ui, repo, revs)
    no_children(ui, repo, end)
    has_parent(ui, repo, start)
    verify_pending_commits(repo)
    squash_revisions(ui, repo, revs, start, end)

    for r in revs:
        ui.status("rev: %s, owner: %s\n" % (repo[r], ui.username()))
Example 4: check_hook
def check_hook(ui, repo, hooktype, node, **kwargs):
    '''blocks commits/changesets containing tabs or trailing whitespace'''
    if hooktype == 'pretxncommit':
        ui.note('checkfiles: checking commit for tabs or trailing whitespace...\n')
        cf = CheckFiles(ui, repo, repo.changectx(node))
        return cf.check()
    elif hooktype == 'pretxnchangegroup':
        try:
            from mercurial.scmutil import revrange
        except ImportError:
            # 1.8 and earlier
            from mercurial.cmdutil import revrange

        ui.note('checkfiles: checking incoming changes for tabs or trailing whitespace...\n')
        cf = CheckFiles(ui, repo, repo[None])
        fail = False
        for rev in revrange(repo, ['%s::' % node]):
            cf.set_changectx(repo.changectx(rev))
            cf.files = cf.ctx.files()
            fail = cf.check() or fail
        return fail
    else:
        from mercurial import util
        raise util.Abort(_('checkfiles: check_hook installed as unsupported hooktype: %s') %
                         hooktype)
Example 5: pull
def pull(orig, ui, repo, *pats, **opts):
    result = orig(ui, repo, *pats, **opts)

    if shallowrepo.requirement in repo.requirements:
        # prefetch if it's configured
        prefetchrevset = ui.config('remotefilelog', 'pullprefetch', None)
        bgrepack = repo.ui.configbool('remotefilelog',
                                      'backgroundrepack', False)
        bgprefetch = repo.ui.configbool('remotefilelog',
                                        'backgroundprefetch', False)

        if prefetchrevset:
            ui.status(_("prefetching file contents\n"))
            revs = scmutil.revrange(repo, [prefetchrevset])
            base = repo['.'].rev()
            if bgprefetch:
                repo.backgroundprefetch(prefetchrevset, repack=bgrepack)
            else:
                repo.prefetch(revs, base=base)
                if bgrepack:
                    repackmod.backgroundrepack(repo, incremental=True)
        elif bgrepack:
            repackmod.backgroundrepack(repo, incremental=True)

    return result
Example 6: graphlog
def graphlog(ui, repo, *pats, **opts):
    """show revision history alongside an ASCII revision graph

    Print a revision history alongside a revision graph drawn with
    ASCII characters.

    Nodes printed as an @ character are parents of the working
    directory.
    """

    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    revs = sorted(revs, reverse=1)
    limit = cmdutil.loglimit(opts)
    if limit is not None:
        revs = revs[:limit]
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    showparents = [ctx.node() for ctx in repo[None].parents()]
    generate(ui, revdag, displayer, showparents, asciiedges, getrenamed,
             filematcher)
Example 7: _revive
def _revive(repo, rev):
    """Brings the given rev back into the repository, finding it in backup
    bundles if necessary.
    """
    unfi = repo.unfiltered()
    try:
        ctx = unfi[rev]
    except error.RepoLookupError:
        # It could either be a revset or a stripped commit.
        pass
    else:
        if ctx.obsolete():
            try:
                inhibit = extensions.find('inhibit')
            except KeyError:
                raise error.Abort(_('cannot revive %s - inhibit extension '
                                    'is not enabled') % ctx)
            else:
                torevive = unfi.set('::%d & obsolete()', ctx.rev())
                inhibit.revive(torevive, operation='reset')

    try:
        revs = scmutil.revrange(repo, [rev])
        if len(revs) > 1:
            raise error.Abort(_('exactly one revision must be specified'))
        if len(revs) == 1:
            return repo[revs.first()]
    except error.RepoLookupError:
        revs = []

    return _pullbundle(repo, rev)
Example 8: keepset
def keepset(repo, keyfn, lastkeepkeys=None):
    """Computes a keepset which is not garbage collected.
    'keyfn' is a function that maps filename, node to a unique key.
    'lastkeepkeys' is an optional argument and if provided the keepset
    function updates lastkeepkeys with more keys and returns the result.
    """
    if not lastkeepkeys:
        keepkeys = set()
    else:
        keepkeys = lastkeepkeys

    # We want to keep:
    # 1. Working copy parent
    # 2. Draft commits
    # 3. Parents of draft commits
    # 4. Pullprefetch and bgprefetchrevs revsets if specified
    revs = ['.', 'draft()', 'parents(draft())']
    prefetchrevs = repo.ui.config('remotefilelog', 'pullprefetch', None)
    if prefetchrevs:
        revs.append('(%s)' % prefetchrevs)
    prefetchrevs = repo.ui.config('remotefilelog', 'bgprefetchrevs', None)
    if prefetchrevs:
        revs.append('(%s)' % prefetchrevs)

    revs = '+'.join(revs)
    revs = ['sort((%s), "topo")' % revs]
    keep = scmutil.revrange(repo, revs)

    processed = set()
    lastmanifest = None

    # process the commits in toposorted order starting from the oldest
    for r in reversed(keep._list):
        if repo[r].p1().rev() in processed:
            # if the direct parent has already been processed
            # then we only need to process the delta
            m = repo[r].manifestctx().readdelta()
        else:
            # otherwise take the manifest and diff it
            # with the previous manifest if one exists
            if lastmanifest:
                m = repo[r].manifest().diff(lastmanifest)
            else:
                m = repo[r].manifest()
            lastmanifest = repo[r].manifest()
        processed.add(r)

        # populate keepkeys with keys from the current manifest
        if type(m) is dict:
            # m is a result of diff of two manifests and is a dictionary that
            # maps filename to ((newnode, newflag), (oldnode, oldflag)) tuple
            for filename, diff in m.iteritems():
                if diff[0][0] is not None:
                    keepkeys.add(keyfn(filename, diff[0][0]))
        else:
            # m is a manifest object
            for filename, filenode in m.iteritems():
                keepkeys.add(keyfn(filename, filenode))

    return keepkeys
Example 9: lfpull
def lfpull(ui, repo, source="default", **opts):
    """pull largefiles for the specified revisions from the specified source

    Pull largefiles that are referenced from local changesets but missing
    locally, pulling from a remote repository to the local cache.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    .. container:: verbose

      Some examples:

      - pull largefiles for all branch heads::

          hg lfpull -r "head() and not closed()"

      - pull largefiles on the default branch::

          hg lfpull -r "branch(default)"
    """
    repo.lfpullsource = source

    revs = opts.get('rev', [])
    if not revs:
        raise util.Abort(_('no revisions specified'))
    revs = scmutil.revrange(repo, revs)

    numcached = 0
    for rev in revs:
        ui.note(_('pulling largefiles for revision %s\n') % rev)
        (cached, missing) = cachelfiles(ui, repo, rev)
        numcached += len(cached)
    ui.status(_("%d largefiles cached\n") % numcached)
Example 10: fastmanifesttocache
def fastmanifesttocache(repo, subset, x):
    """Revset of the interesting revisions to cache. This returns:
    - Drafts
    - Revisions with a bookmark
    - Revisions with some selected remote bookmarks (master, stable ...)
    - Their parents (to make diff -c faster)
    - TODO The base of potential rebase operations
    - Filtering all of the above to only include recent changes
    """

    # Add relevant remotenames to the list of interesting revs
    revs = _relevantremonamesrevs(repo)

    # Add all the other relevant revs
    query = "(not public() & not hidden()) + bookmark()"
    cutoff = repo.ui.configint("fastmanifest", "cachecutoffdays", 60)
    if cutoff == -1:  # no cutoff
        datelimit = ""
    else:
        datelimit = "and date(-%d)" % cutoff

    revs.update(scmutil.revrange(repo, ["(%s + parents(%s)) %s"
                                        % (query, query, datelimit)]))
    metricscollector.get().recordsample("revsetsize", size=len(revs))
    return subset & revs
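fastmanifesttocache above is written as a revset predicate (it takes repo, subset, x and returns a filtered subset), which is what allows it to be referenced by name inside revrange/revset expressions. For comparison, a minimal registration sketch using the registrar API of newer Mercurial releases follows; the predicate name and body are made up for illustration, and older extensions (including fastmanifest itself) may wire predicates up differently.

from mercurial import registrar

revsetpredicate = registrar.revsetpredicate()

@revsetpredicate('onlydrafts()')
def onlydrafts(repo, subset, x):
    # toy predicate: keep only draft revisions from the incoming subset,
    # so "hg log -r 'onlydrafts()'" (or scmutil.revrange) can use it by name
    return subset & repo.revs('draft()')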
Example 11: check_hook
def check_hook(ui, repo, hooktype, **kwargs):
    """blocks commits/changesets containing tabs or trailing whitespace"""
    if hooktype == 'precommit':
        ui.note('checkfiles: checking for tabs and/or trailing whitespace '
                'in changed files...\n')
        cf = CheckFiles(ui, repo, repo[None])
        return cf.check()
    if hooktype == 'pretxncommit':
        ui.note('checkfiles: checking commit for tabs or trailing '
                'whitespace...\n')
        node = kwargs['node']
        cf = CheckFiles(ui, repo, repo.changectx(node))
        return cf.check()
    elif hooktype == 'pretxnchangegroup':
        ui.note('checkfiles: checking incoming changes for tabs or trailing '
                'whitespace...\n')
        cf = CheckFiles(ui, repo, repo[None])
        fail = False
        node = kwargs['node']
        for rev in revrange(repo, ['%s::' % node]):
            cf.set_changectx(repo.changectx(rev))
            cf.files = cf.ctx.files()
            fail = cf.check() or fail
        return fail
    else:
        raise util.Abort(_('checkfiles: check_hook installed as unsupported '
                           'hooktype: %s') % hooktype)
Example 12: critic
def critic(ui, repo, rev='.', entire=False, **opts):
    """Perform a critique of changeset(s).

    This will perform static analysis on changeset(s) and report any issues
    found.
    """
    for r in scmutil.revrange(repo, [rev]):
        critique(ui, repo, node=r, entire=entire, **opts)
Example 13: get_revs
def get_revs(repo, rev_opt):
    if rev_opt:
        revs = scmutil.revrange(repo, rev_opt)
        if len(revs) == 0:
            return (nullrev, nullrev)
        return (max(revs), min(revs))
    else:
        return (len(repo) - 1, 0)
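get_revs is the sort of helper a hook or command might use to turn a --rev option into a (newest, oldest) pair. A hypothetical caller, with made-up option handling, could look like this:

from mercurial.node import nullrev

def summarize(ui, repo, **opts):
    # hypothetical caller: resolve --rev into a (newest, oldest) pair
    start, end = get_revs(repo, opts.get('rev'))
    if start == nullrev:
        ui.status("no matching revisions\n")
    else:
        ui.status("examining revisions %d down to %d\n" % (start, end))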
Example 14: get_hgtags_from_heads
def get_hgtags_from_heads(ui, repo, rev):
    from mercurial import scmutil
    heads = scmutil.revrange(repo, ['heads(%d::)' % (rev)])
    head_hgtags = dict()
    for h in heads:
        if '.hgtags' in repo[h]:
            hgtags = repo[h]['.hgtags'].data()
            head_hgtags[hex(repo[h].node())] = hgtags
    return head_hgtags
Example 15: obsoleteinfo
def obsoleteinfo(repo, hgcmd):
    """Return obsolescence markers that are relevant to smartlog revset"""
    unfi = repo.unfiltered()
    revs = scmutil.revrange(unfi, ["smartlog()"])
    hashes = '|'.join(unfi[rev].hex() for rev in revs)
    markers = hgcmd(debugcommands.debugobsolete, rev=[])
    pat = re.compile('(^.*(?:' + hashes + ').*$)', re.MULTILINE)
    relevant = pat.findall(markers)
    return "\n".join(relevant)