1# scmutil.py - Mercurial core utility functions
2#
3#  Copyright Olivia Mackall <olivia@selenic.com>
4#
5# This software may be used and distributed according to the terms of the
6# GNU General Public License version 2 or any later version.
7
8from __future__ import absolute_import
9
10import errno
11import glob
12import os
13import posixpath
14import re
15import subprocess
16import weakref
17
18from .i18n import _
19from .node import (
20    bin,
21    hex,
22    nullrev,
23    short,
24    wdirrev,
25)
26from .pycompat import getattr
27from .thirdparty import attr
28from . import (
29    copies as copiesmod,
30    encoding,
31    error,
32    match as matchmod,
33    obsolete,
34    obsutil,
35    pathutil,
36    phases,
37    policy,
38    pycompat,
39    requirements as requirementsmod,
40    revsetlang,
41    similar,
42    smartset,
43    url,
44    util,
45    vfs,
46)
47
48from .utils import (
49    hashutil,
50    procutil,
51    stringutil,
52)
53
54if pycompat.iswindows:
55    from . import scmwindows as scmplatform
56else:
57    from . import scmposix as scmplatform
58
59parsers = policy.importmod('parsers')
60rustrevlog = policy.importrust('revlog')
61
62termsize = scmplatform.termsize
63
64
65@attr.s(slots=True, repr=False)
66class status(object):
67    """Struct with a list of files per status.
68
69    The 'deleted', 'unknown' and 'ignored' properties are only
70    relevant to the working copy.
71    """
72
73    modified = attr.ib(default=attr.Factory(list))
74    added = attr.ib(default=attr.Factory(list))
75    removed = attr.ib(default=attr.Factory(list))
76    deleted = attr.ib(default=attr.Factory(list))
77    unknown = attr.ib(default=attr.Factory(list))
78    ignored = attr.ib(default=attr.Factory(list))
79    clean = attr.ib(default=attr.Factory(list))
80
81    def __iter__(self):
82        yield self.modified
83        yield self.added
84        yield self.removed
85        yield self.deleted
86        yield self.unknown
87        yield self.ignored
88        yield self.clean
89
90    def __repr__(self):
91        return (
92            r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
93            r'unknown=%s, ignored=%s, clean=%s>'
94        ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
95
96
97def itersubrepos(ctx1, ctx2):
98    """find subrepos in ctx1 or ctx2"""
99    # Create a (subpath, ctx) mapping where we prefer subpaths from
100    # ctx1. The subpaths from ctx2 are important when the .hgsub file
101    # has been modified (in ctx2) but not yet committed (in ctx1).
102    subpaths = dict.fromkeys(ctx2.substate, ctx2)
103    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
104
105    missing = set()
106
107    for subpath in ctx2.substate:
108        if subpath not in ctx1.substate:
109            del subpaths[subpath]
110            missing.add(subpath)
111
112    for subpath, ctx in sorted(pycompat.iteritems(subpaths)):
113        yield subpath, ctx.sub(subpath)
114
115    # Yield an empty subrepo based on ctx1 for anything only in ctx2.  That way,
116    # status and diff will have an accurate result when it does
117    # 'sub.{status|diff}(rev2)'.  Otherwise, the ctx2 subrepo is compared
118    # against itself.
119    for subpath in missing:
120        yield subpath, ctx2.nullsub(subpath, ctx1)
121
122
123def nochangesfound(ui, repo, excluded=None):
124    """Report no changes for push/pull, excluded is None or a list of
125    nodes excluded from the push/pull.
126    """
127    secretlist = []
128    if excluded:
129        for n in excluded:
130            ctx = repo[n]
131            if ctx.phase() >= phases.secret and not ctx.extinct():
132                secretlist.append(n)
133
134    if secretlist:
135        ui.status(
136            _(b"no changes found (ignored %d secret changesets)\n")
137            % len(secretlist)
138        )
139    else:
140        ui.status(_(b"no changes found\n"))
141
142
143def callcatch(ui, func):
144    """call func() with global exception handling
145
146    return func() if no exception happens. otherwise do some error handling
147    and return an exit code accordingly. does not handle all exceptions.
148    """
149    coarse_exit_code = -1
150    detailed_exit_code = -1
151    try:
152        try:
153            return func()
154        except:  # re-raises
155            ui.traceback()
156            raise
157    # Global exception handling, alphabetically
158    # Mercurial-specific first, followed by built-in and library exceptions
159    except error.LockHeld as inst:
160        detailed_exit_code = 20
161        if inst.errno == errno.ETIMEDOUT:
162            reason = _(b'timed out waiting for lock held by %r') % (
163                pycompat.bytestr(inst.locker)
164            )
165        else:
166            reason = _(b'lock held by %r') % inst.locker
167        ui.error(
168            _(b"abort: %s: %s\n")
169            % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
170        )
171        if not inst.locker:
172            ui.error(_(b"(lock might be very busy)\n"))
173    except error.LockUnavailable as inst:
174        detailed_exit_code = 20
175        ui.error(
176            _(b"abort: could not lock %s: %s\n")
177            % (
178                inst.desc or stringutil.forcebytestr(inst.filename),
179                encoding.strtolocal(inst.strerror),
180            )
181        )
182    except error.RepoError as inst:
183        ui.error(_(b"abort: %s\n") % inst)
184        if inst.hint:
185            ui.error(_(b"(%s)\n") % inst.hint)
186    except error.ResponseError as inst:
187        ui.error(_(b"abort: %s") % inst.args[0])
188        msg = inst.args[1]
189        if isinstance(msg, type(u'')):
190            msg = pycompat.sysbytes(msg)
191        if msg is None:
192            ui.error(b"\n")
193        elif not isinstance(msg, bytes):
194            ui.error(b" %r\n" % (msg,))
195        elif not msg:
196            ui.error(_(b" empty string\n"))
197        else:
198            ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
199    except error.CensoredNodeError as inst:
200        ui.error(_(b"abort: file censored %s\n") % inst)
201    except error.WdirUnsupported:
202        ui.error(_(b"abort: working directory revision cannot be specified\n"))
203    except error.Error as inst:
204        if inst.detailed_exit_code is not None:
205            detailed_exit_code = inst.detailed_exit_code
206        if inst.coarse_exit_code is not None:
207            coarse_exit_code = inst.coarse_exit_code
208        ui.error(inst.format())
209    except error.WorkerError as inst:
210        # Don't print a message -- the worker already should have
211        return inst.status_code
212    except ImportError as inst:
213        ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
214        m = stringutil.forcebytestr(inst).split()[-1]
215        if m in b"mpatch bdiff".split():
216            ui.error(_(b"(did you forget to compile extensions?)\n"))
217        elif m in b"zlib".split():
218            ui.error(_(b"(is your Python install correct?)\n"))
219    except util.urlerr.httperror as inst:
220        detailed_exit_code = 100
221        ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
222    except util.urlerr.urlerror as inst:
223        detailed_exit_code = 100
224        try:  # usually it is in the form (errno, strerror)
225            reason = inst.reason.args[1]
226        except (AttributeError, IndexError):
227            # it might be anything, for example a string
228            reason = inst.reason
229        if isinstance(reason, pycompat.unicode):
230            # SSLError of Python 2.7.9 contains a unicode
231            reason = encoding.unitolocal(reason)
232        ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
233    except (IOError, OSError) as inst:
234        if (
235            util.safehasattr(inst, b"args")
236            and inst.args
237            and inst.args[0] == errno.EPIPE
238        ):
239            pass
240        elif getattr(inst, "strerror", None):  # common IOError or OSError
241            if getattr(inst, "filename", None) is not None:
242                ui.error(
243                    _(b"abort: %s: '%s'\n")
244                    % (
245                        encoding.strtolocal(inst.strerror),
246                        stringutil.forcebytestr(inst.filename),
247                    )
248                )
249            else:
250                ui.error(_(b"abort: %s\n") % encoding.strtolocal(inst.strerror))
251        else:  # suspicious IOError
252            raise
253    except MemoryError:
254        ui.error(_(b"abort: out of memory\n"))
255    except SystemExit as inst:
256        # Commands shouldn't sys.exit directly, but give a return code.
257        # Just in case catch this and and pass exit code to caller.
258        detailed_exit_code = 254
259        coarse_exit_code = inst.code
260
261    if ui.configbool(b'ui', b'detailed-exit-code'):
262        return detailed_exit_code
263    else:
264        return coarse_exit_code
265
266
267def checknewlabel(repo, lbl, kind):
268    # Do not use the "kind" parameter in ui output.
269    # It makes strings difficult to translate.
270    if lbl in [b'tip', b'.', b'null']:
271        raise error.InputError(_(b"the name '%s' is reserved") % lbl)
272    for c in (b':', b'\0', b'\n', b'\r'):
273        if c in lbl:
274            raise error.InputError(
275                _(b"%r cannot be used in a name") % pycompat.bytestr(c)
276            )
277    try:
278        int(lbl)
279        raise error.InputError(_(b"cannot use an integer as a name"))
280    except ValueError:
281        pass
282    if lbl.strip() != lbl:
283        raise error.InputError(
284            _(b"leading or trailing whitespace in name %r") % lbl
285        )
286
287
288def checkfilename(f):
289    '''Check that the filename f is an acceptable filename for a tracked file'''
290    if b'\r' in f or b'\n' in f:
291        raise error.InputError(
292            _(b"'\\n' and '\\r' disallowed in filenames: %r")
293            % pycompat.bytestr(f)
294        )
295
296
297def checkportable(ui, f):
298    '''Check if filename f is portable and warn or abort depending on config'''
299    checkfilename(f)
300    abort, warn = checkportabilityalert(ui)
301    if abort or warn:
302        msg = util.checkwinfilename(f)
303        if msg:
304            msg = b"%s: %s" % (msg, procutil.shellquote(f))
305            if abort:
306                raise error.InputError(msg)
307            ui.warn(_(b"warning: %s\n") % msg)
308
309
310def checkportabilityalert(ui):
311    """check if the user's config requests nothing, a warning, or abort for
312    non-portable filenames"""
313    val = ui.config(b'ui', b'portablefilenames')
314    lval = val.lower()
315    bval = stringutil.parsebool(val)
316    abort = pycompat.iswindows or lval == b'abort'
317    warn = bval or lval == b'warn'
318    if bval is None and not (warn or abort or lval == b'ignore'):
319        raise error.ConfigError(
320            _(b"ui.portablefilenames value is invalid ('%s')") % val
321        )
322    return abort, warn
323
324
325class casecollisionauditor(object):
326    def __init__(self, ui, abort, dirstate):
327        self._ui = ui
328        self._abort = abort
329        allfiles = b'\0'.join(dirstate)
330        self._loweredfiles = set(encoding.lower(allfiles).split(b'\0'))
331        self._dirstate = dirstate
332        # The purpose of _newfiles is so that we don't complain about
333        # case collisions if someone were to call this object with the
334        # same filename twice.
335        self._newfiles = set()
336
337    def __call__(self, f):
338        if f in self._newfiles:
339            return
340        fl = encoding.lower(f)
341        if fl in self._loweredfiles and f not in self._dirstate:
342            msg = _(b'possible case-folding collision for %s') % f
343            if self._abort:
344                raise error.Abort(msg)
345            self._ui.warn(_(b"warning: %s\n") % msg)
346        self._loweredfiles.add(fl)
347        self._newfiles.add(f)
348
349
350def filteredhash(repo, maxrev):
351    """build hash of filtered revisions in the current repoview.
352
353    Multiple caches perform up-to-date validation by checking that the
354    tiprev and tipnode stored in the cache file match the current repository.
355    However, this is not sufficient for validating repoviews because the set
356    of revisions in the view may change without the repository tiprev and
357    tipnode changing.
358
359    This function hashes all the revs filtered from the view and returns
360    that SHA-1 digest.
361    """
362    cl = repo.changelog
363    if not cl.filteredrevs:
364        return None
365    key = cl._filteredrevs_hashcache.get(maxrev)
366    if not key:
367        revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
368        if revs:
369            s = hashutil.sha1()
370            for rev in revs:
371                s.update(b'%d;' % rev)
372            key = s.digest()
373            cl._filteredrevs_hashcache[maxrev] = key
374    return key
375
376
377def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
378    """yield every hg repository under path, always recursively.
379    The recurse flag will only control recursion into repo working dirs"""
380
381    def errhandler(err):
382        if err.filename == path:
383            raise err
384
385    samestat = getattr(os.path, 'samestat', None)
386    if followsym and samestat is not None:
387
388        def adddir(dirlst, dirname):
389            dirstat = os.stat(dirname)
390            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
391            if not match:
392                dirlst.append(dirstat)
393            return not match
394
395    else:
396        followsym = False
397
398    if (seen_dirs is None) and followsym:
399        seen_dirs = []
400        adddir(seen_dirs, path)
401    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
402        dirs.sort()
403        if b'.hg' in dirs:
404            yield root  # found a repository
405            qroot = os.path.join(root, b'.hg', b'patches')
406            if os.path.isdir(os.path.join(qroot, b'.hg')):
407                yield qroot  # we have a patch queue repo here
408            if recurse:
409                # avoid recursing inside the .hg directory
410                dirs.remove(b'.hg')
411            else:
412                dirs[:] = []  # don't descend further
413        elif followsym:
414            newdirs = []
415            for d in dirs:
416                fname = os.path.join(root, d)
417                if adddir(seen_dirs, fname):
418                    if os.path.islink(fname):
419                        for hgname in walkrepos(fname, True, seen_dirs):
420                            yield hgname
421                    else:
422                        newdirs.append(d)
423            dirs[:] = newdirs
424
425
426def binnode(ctx):
427    """Return binary node id for a given basectx"""
428    node = ctx.node()
429    if node is None:
430        return ctx.repo().nodeconstants.wdirid
431    return node
432
433
434def intrev(ctx):
435    """Return integer for a given basectx that can be used in comparison or
436    arithmetic operation"""
437    rev = ctx.rev()
438    if rev is None:
439        return wdirrev
440    return rev
441
442
443def formatchangeid(ctx):
444    """Format changectx as '{rev}:{node|formatnode}', which is the default
445    template provided by logcmdutil.changesettemplater"""
446    repo = ctx.repo()
447    return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
448
449
450def formatrevnode(ui, rev, node):
451    """Format given revision and node depending on the current verbosity"""
452    if ui.debugflag:
453        hexfunc = hex
454    else:
455        hexfunc = short
456    return b'%d:%s' % (rev, hexfunc(node))
457
458
459def resolvehexnodeidprefix(repo, prefix):
460    if prefix.startswith(b'x'):
461        prefix = prefix[1:]
462    try:
463        # Uses unfiltered repo because it's faster when prefix is ambiguous/
464        # This matches the shortesthexnodeidprefix() function below.
465        node = repo.unfiltered().changelog._partialmatch(prefix)
466    except error.AmbiguousPrefixLookupError:
467        revset = repo.ui.config(
468            b'experimental', b'revisions.disambiguatewithin'
469        )
470        if revset:
471            # Clear config to avoid infinite recursion
472            configoverrides = {
473                (b'experimental', b'revisions.disambiguatewithin'): None
474            }
475            with repo.ui.configoverride(configoverrides):
476                revs = repo.anyrevs([revset], user=True)
477                matches = []
478                for rev in revs:
479                    node = repo.changelog.node(rev)
480                    if hex(node).startswith(prefix):
481                        matches.append(node)
482                if len(matches) == 1:
483                    return matches[0]
484        raise
485    if node is None:
486        return
487    repo.changelog.rev(node)  # make sure node isn't filtered
488    return node
489
490
491def mayberevnum(repo, prefix):
492    """Checks if the given prefix may be mistaken for a revision number"""
493    try:
494        i = int(prefix)
495        # if we are a pure int, then starting with zero will not be
496        # confused as a rev; or, obviously, if the int is larger
497        # than the value of the tip rev. We still need to disambiguate if
498        # prefix == '0', since that *is* a valid revnum.
499        if (prefix != b'0' and prefix[0:1] == b'0') or i >= len(repo):
500            return False
501        return True
502    except ValueError:
503        return False
504
505
506def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
507    """Find the shortest unambiguous prefix that matches hexnode.
508
509    If "cache" is not None, it must be a dictionary that can be used for
510    caching between calls to this method.
511    """
512    # _partialmatch() of filtered changelog could take O(len(repo)) time,
513    # which would be unacceptably slow. so we look for hash collision in
514    # unfiltered space, which means some hashes may be slightly longer.
515
516    minlength = max(minlength, 1)
517
518    def disambiguate(prefix):
519        """Disambiguate against revnums."""
520        if repo.ui.configbool(b'experimental', b'revisions.prefixhexnode'):
521            if mayberevnum(repo, prefix):
522                return b'x' + prefix
523            else:
524                return prefix
525
526        hexnode = hex(node)
527        for length in range(len(prefix), len(hexnode) + 1):
528            prefix = hexnode[:length]
529            if not mayberevnum(repo, prefix):
530                return prefix
531
532    cl = repo.unfiltered().changelog
533    revset = repo.ui.config(b'experimental', b'revisions.disambiguatewithin')
534    if revset:
535        revs = None
536        if cache is not None:
537            revs = cache.get(b'disambiguationrevset')
538        if revs is None:
539            revs = repo.anyrevs([revset], user=True)
540            if cache is not None:
541                cache[b'disambiguationrevset'] = revs
542        if cl.rev(node) in revs:
543            hexnode = hex(node)
544            nodetree = None
545            if cache is not None:
546                nodetree = cache.get(b'disambiguationnodetree')
547            if not nodetree:
548                if util.safehasattr(parsers, 'nodetree'):
549                    # The CExt is the only implementation to provide a nodetree
550                    # class so far.
551                    index = cl.index
552                    if util.safehasattr(index, 'get_cindex'):
553                        # the rust wrapped need to give access to its internal index
554                        index = index.get_cindex()
555                    nodetree = parsers.nodetree(index, len(revs))
556                    for r in revs:
557                        nodetree.insert(r)
558                    if cache is not None:
559                        cache[b'disambiguationnodetree'] = nodetree
560            if nodetree is not None:
561                length = max(nodetree.shortest(node), minlength)
562                prefix = hexnode[:length]
563                return disambiguate(prefix)
564            for length in range(minlength, len(hexnode) + 1):
565                matches = []
566                prefix = hexnode[:length]
567                for rev in revs:
568                    otherhexnode = repo[rev].hex()
569                    if prefix == otherhexnode[:length]:
570                        matches.append(otherhexnode)
571                if len(matches) == 1:
572                    return disambiguate(prefix)
573
574    try:
575        return disambiguate(cl.shortest(node, minlength))
576    except error.LookupError:
577        raise error.RepoLookupError()
578
579
580def isrevsymbol(repo, symbol):
581    """Checks if a symbol exists in the repo.
582
583    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
584    symbol is an ambiguous nodeid prefix.
585    """
586    try:
587        revsymbol(repo, symbol)
588        return True
589    except error.RepoLookupError:
590        return False
591
592
593def revsymbol(repo, symbol):
594    """Returns a context given a single revision symbol (as string).
595
596    This is similar to revsingle(), but accepts only a single revision symbol,
597    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
598    not "max(public())".
599    """
600    if not isinstance(symbol, bytes):
601        msg = (
602            b"symbol (%s of type %s) was not a string, did you mean "
603            b"repo[symbol]?" % (symbol, type(symbol))
604        )
605        raise error.ProgrammingError(msg)
606    try:
607        if symbol in (b'.', b'tip', b'null'):
608            return repo[symbol]
609
610        try:
611            r = int(symbol)
612            if b'%d' % r != symbol:
613                raise ValueError
614            l = len(repo.changelog)
615            if r < 0:
616                r += l
617            if r < 0 or r >= l and r != wdirrev:
618                raise ValueError
619            return repo[r]
620        except error.FilteredIndexError:
621            raise
622        except (ValueError, OverflowError, IndexError):
623            pass
624
625        if len(symbol) == 2 * repo.nodeconstants.nodelen:
626            try:
627                node = bin(symbol)
628                rev = repo.changelog.rev(node)
629                return repo[rev]
630            except error.FilteredLookupError:
631                raise
632            except (TypeError, LookupError):
633                pass
634
635        # look up bookmarks through the name interface
636        try:
637            node = repo.names.singlenode(repo, symbol)
638            rev = repo.changelog.rev(node)
639            return repo[rev]
640        except KeyError:
641            pass
642
643        node = resolvehexnodeidprefix(repo, symbol)
644        if node is not None:
645            rev = repo.changelog.rev(node)
646            return repo[rev]
647
648        raise error.RepoLookupError(_(b"unknown revision '%s'") % symbol)
649
650    except error.WdirUnsupported:
651        return repo[None]
652    except (
653        error.FilteredIndexError,
654        error.FilteredLookupError,
655        error.FilteredRepoLookupError,
656    ):
657        raise _filterederror(repo, symbol)
658
659
660def _filterederror(repo, changeid):
661    """build an exception to be raised about a filtered changeid
662
663    This is extracted in a function to help extensions (eg: evolve) to
664    experiment with various message variants."""
665    if repo.filtername.startswith(b'visible'):
666
667        # Check if the changeset is obsolete
668        unfilteredrepo = repo.unfiltered()
669        ctx = revsymbol(unfilteredrepo, changeid)
670
671        # If the changeset is obsolete, enrich the message with the reason
672        # that made this changeset not visible
673        if ctx.obsolete():
674            msg = obsutil._getfilteredreason(repo, changeid, ctx)
675        else:
676            msg = _(b"hidden revision '%s'") % changeid
677
678        hint = _(b'use --hidden to access hidden revisions')
679
680        return error.FilteredRepoLookupError(msg, hint=hint)
681    msg = _(b"filtered revision '%s' (not in '%s' subset)")
682    msg %= (changeid, repo.filtername)
683    return error.FilteredRepoLookupError(msg)
684
685
686def revsingle(repo, revspec, default=b'.', localalias=None):
687    if not revspec and revspec != 0:
688        return repo[default]
689
690    l = revrange(repo, [revspec], localalias=localalias)
691    if not l:
692        raise error.InputError(_(b'empty revision set'))
693    return repo[l.last()]
694
695
696def _pairspec(revspec):
697    tree = revsetlang.parse(revspec)
698    return tree and tree[0] in (
699        b'range',
700        b'rangepre',
701        b'rangepost',
702        b'rangeall',
703    )
704
705
706def revpair(repo, revs):
707    if not revs:
708        return repo[b'.'], repo[None]
709
710    l = revrange(repo, revs)
711
712    if not l:
713        raise error.InputError(_(b'empty revision range'))
714
715    first = l.first()
716    second = l.last()
717
718    if (
719        first == second
720        and len(revs) >= 2
721        and not all(revrange(repo, [r]) for r in revs)
722    ):
723        raise error.InputError(_(b'empty revision on one side of range'))
724
725    # if top-level is range expression, the result must always be a pair
726    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
727        return repo[first], repo[None]
728
729    return repo[first], repo[second]
730
731
732def revrange(repo, specs, localalias=None):
733    """Execute 1 to many revsets and return the union.
734
735    This is the preferred mechanism for executing revsets using user-specified
736    config options, such as revset aliases.
737
738    The revsets specified by ``specs`` will be executed via a chained ``OR``
739    expression. If ``specs`` is empty, an empty result is returned.
740
741    ``specs`` can contain integers, in which case they are assumed to be
742    revision numbers.
743
744    It is assumed the revsets are already formatted. If you have arguments
745    that need to be expanded in the revset, call ``revsetlang.formatspec()``
746    and pass the result as an element of ``specs``.
747
748    Specifying a single revset is allowed.
749
750    Returns a ``smartset.abstractsmartset`` which is a list-like interface over
751    integer revisions.
752    """
753    allspecs = []
754    for spec in specs:
755        if isinstance(spec, int):
756            spec = revsetlang.formatspec(b'%d', spec)
757        allspecs.append(spec)
758    return repo.anyrevs(allspecs, user=True, localalias=localalias)
759
760
761def increasingwindows(windowsize=8, sizelimit=512):
762    while True:
763        yield windowsize
764        if windowsize < sizelimit:
765            windowsize *= 2
766
767
768def walkchangerevs(repo, revs, makefilematcher, prepare):
769    """Iterate over files and the revs in a "windowed" way.
770
771    Callers most commonly need to iterate backwards over the history
772    in which they are interested. Doing so has awful (quadratic-looking)
773    performance, so we use iterators in a "windowed" way.
774
775    We walk a window of revisions in the desired order.  Within the
776    window, we first walk forwards to gather data, then in the desired
777    order (usually backwards) to display it.
778
779    This function returns an iterator yielding contexts. Before
780    yielding each context, the iterator will first call the prepare
781    function on each context in the window in forward order."""
782
783    if not revs:
784        return []
785    change = repo.__getitem__
786
787    def iterate():
788        it = iter(revs)
789        stopiteration = False
790        for windowsize in increasingwindows():
791            nrevs = []
792            for i in pycompat.xrange(windowsize):
793                rev = next(it, None)
794                if rev is None:
795                    stopiteration = True
796                    break
797                nrevs.append(rev)
798            for rev in sorted(nrevs):
799                ctx = change(rev)
800                prepare(ctx, makefilematcher(ctx))
801            for rev in nrevs:
802                yield change(rev)
803
804            if stopiteration:
805                break
806
807    return iterate()
808
809
810def meaningfulparents(repo, ctx):
811    """Return list of meaningful (or all if debug) parentrevs for rev.
812
813    For merges (two non-nullrev revisions) both parents are meaningful.
814    Otherwise the first parent revision is considered meaningful if it
815    is not the preceding revision.
816    """
817    parents = ctx.parents()
818    if len(parents) > 1:
819        return parents
820    if repo.ui.debugflag:
821        return [parents[0], repo[nullrev]]
822    if parents[0].rev() >= intrev(ctx) - 1:
823        return []
824    return parents
825
826
827def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
828    """Return a function that produced paths for presenting to the user.
829
830    The returned function takes a repo-relative path and produces a path
831    that can be presented in the UI.
832
833    Depending on the value of ui.relative-paths, either a repo-relative or
834    cwd-relative path will be produced.
835
836    legacyrelativevalue is the value to use if ui.relative-paths=legacy
837
838    If forcerelativevalue is not None, then that value will be used regardless
839    of what ui.relative-paths is set to.
840    """
841    if forcerelativevalue is not None:
842        relative = forcerelativevalue
843    else:
844        config = repo.ui.config(b'ui', b'relative-paths')
845        if config == b'legacy':
846            relative = legacyrelativevalue
847        else:
848            relative = stringutil.parsebool(config)
849            if relative is None:
850                raise error.ConfigError(
851                    _(b"ui.relative-paths is not a boolean ('%s')") % config
852                )
853
854    if relative:
855        cwd = repo.getcwd()
856        if cwd != b'':
857            # this branch would work even if cwd == b'' (ie cwd = repo
858            # root), but its generality makes the returned function slower
859            pathto = repo.pathto
860            return lambda f: pathto(f, cwd)
861    if repo.ui.configbool(b'ui', b'slash'):
862        return lambda f: f
863    else:
864        return util.localpath
865
866
867def subdiruipathfn(subpath, uipathfn):
868    '''Create a new uipathfn that treats the file as relative to subpath.'''
869    return lambda f: uipathfn(posixpath.join(subpath, f))
870
871
872def anypats(pats, opts):
873    """Checks if any patterns, including --include and --exclude were given.
874
875    Some commands (e.g. addremove) use this condition for deciding whether to
876    print absolute or relative paths.
877    """
878    return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
879
880
881def expandpats(pats):
882    """Expand bare globs when running on windows.
883    On posix we assume it already has already been done by sh."""
884    if not util.expandglobs:
885        return list(pats)
886    ret = []
887    for kindpat in pats:
888        kind, pat = matchmod._patsplit(kindpat, None)
889        if kind is None:
890            try:
891                globbed = glob.glob(pat)
892            except re.error:
893                globbed = [pat]
894            if globbed:
895                ret.extend(globbed)
896                continue
897        ret.append(kindpat)
898    return ret
899
900
901def matchandpats(
902    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
903):
904    """Return a matcher and the patterns that were used.
905    The matcher will warn about bad matches, unless an alternate badfn callback
906    is provided."""
907    if opts is None:
908        opts = {}
909    if not globbed and default == b'relpath':
910        pats = expandpats(pats or [])
911
912    uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
913
914    def bad(f, msg):
915        ctx.repo().ui.warn(b"%s: %s\n" % (uipathfn(f), msg))
916
917    if badfn is None:
918        badfn = bad
919
920    m = ctx.match(
921        pats,
922        opts.get(b'include'),
923        opts.get(b'exclude'),
924        default,
925        listsubrepos=opts.get(b'subrepos'),
926        badfn=badfn,
927    )
928
929    if m.always():
930        pats = []
931    return m, pats
932
933
934def match(
935    ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
936):
937    '''Return a matcher that will warn about bad matches.'''
938    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
939
940
941def matchall(repo):
942    '''Return a matcher that will efficiently match everything.'''
943    return matchmod.always()
944
945
946def matchfiles(repo, files, badfn=None):
947    '''Return a matcher that will efficiently match exactly these files.'''
948    return matchmod.exact(files, badfn=badfn)
949
950
951def parsefollowlinespattern(repo, rev, pat, msg):
952    """Return a file name from `pat` pattern suitable for usage in followlines
953    logic.
954    """
955    if not matchmod.patkind(pat):
956        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
957    else:
958        ctx = repo[rev]
959        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
960        files = [f for f in ctx if m(f)]
961        if len(files) != 1:
962            raise error.ParseError(msg)
963        return files[0]
964
965
966def getorigvfs(ui, repo):
967    """return a vfs suitable to save 'orig' file
968
969    return None if no special directory is configured"""
970    origbackuppath = ui.config(b'ui', b'origbackuppath')
971    if not origbackuppath:
972        return None
973    return vfs.vfs(repo.wvfs.join(origbackuppath))
974
975
976def backuppath(ui, repo, filepath):
977    """customize where working copy backup files (.orig files) are created
978
979    Fetch user defined path from config file: [ui] origbackuppath = <path>
980    Fall back to default (filepath with .orig suffix) if not specified
981
982    filepath is repo-relative
983
984    Returns an absolute path
985    """
986    origvfs = getorigvfs(ui, repo)
987    if origvfs is None:
988        return repo.wjoin(filepath + b".orig")
989
990    origbackupdir = origvfs.dirname(filepath)
991    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
992        ui.note(_(b'creating directory: %s\n') % origvfs.join(origbackupdir))
993
994        # Remove any files that conflict with the backup file's path
995        for f in reversed(list(pathutil.finddirs(filepath))):
996            if origvfs.isfileorlink(f):
997                ui.note(_(b'removing conflicting file: %s\n') % origvfs.join(f))
998                origvfs.unlink(f)
999                break
1000
1001        origvfs.makedirs(origbackupdir)
1002
1003    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
1004        ui.note(
1005            _(b'removing conflicting directory: %s\n') % origvfs.join(filepath)
1006        )
1007        origvfs.rmtree(filepath, forcibly=True)
1008
1009    return origvfs.join(filepath)
1010
1011
1012class _containsnode(object):
1013    """proxy __contains__(node) to container.__contains__ which accepts revs"""
1014
1015    def __init__(self, repo, revcontainer):
1016        self._torev = repo.changelog.rev
1017        self._revcontains = revcontainer.__contains__
1018
1019    def __contains__(self, node):
1020        return self._revcontains(self._torev(node))
1021
1022
1023def cleanupnodes(
1024    repo,
1025    replacements,
1026    operation,
1027    moves=None,
1028    metadata=None,
1029    fixphase=False,
1030    targetphase=None,
1031    backup=True,
1032):
1033    """do common cleanups when old nodes are replaced by new nodes
1034
1035    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
1036    (we might also want to move working directory parent in the future)
1037
1038    By default, bookmark moves are calculated automatically from 'replacements',
1039    but 'moves' can be used to override that. Also, 'moves' may include
1040    additional bookmark moves that should not have associated obsmarkers.
1041
1042    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
1043    have replacements. operation is a string, like "rebase".
1044
1045    metadata is dictionary containing metadata to be stored in obsmarker if
1046    obsolescence is enabled.
1047    """
1048    assert fixphase or targetphase is None
1049    if not replacements and not moves:
1050        return
1051
1052    # translate mapping's other forms
1053    if not util.safehasattr(replacements, b'items'):
1054        replacements = {(n,): () for n in replacements}
1055    else:
1056        # upgrading non tuple "source" to tuple ones for BC
1057        repls = {}
1058        for key, value in replacements.items():
1059            if not isinstance(key, tuple):
1060                key = (key,)
1061            repls[key] = value
1062        replacements = repls
1063
1064    # Unfiltered repo is needed since nodes in replacements might be hidden.
1065    unfi = repo.unfiltered()
1066
1067    # Calculate bookmark movements
1068    if moves is None:
1069        moves = {}
1070        for oldnodes, newnodes in replacements.items():
1071            for oldnode in oldnodes:
1072                if oldnode in moves:
1073                    continue
1074                if len(newnodes) > 1:
1075                    # usually a split, take the one with biggest rev number
1076                    newnode = next(unfi.set(b'max(%ln)', newnodes)).node()
1077                elif len(newnodes) == 0:
1078                    # move bookmark backwards
1079                    allreplaced = []
1080                    for rep in replacements:
1081                        allreplaced.extend(rep)
1082                    roots = list(
1083                        unfi.set(b'max((::%n) - %ln)', oldnode, allreplaced)
1084                    )
1085                    if roots:
1086                        newnode = roots[0].node()
1087                    else:
1088                        newnode = repo.nullid
1089                else:
1090                    newnode = newnodes[0]
1091                moves[oldnode] = newnode
1092
1093    allnewnodes = [n for ns in replacements.values() for n in ns]
1094    toretract = {}
1095    toadvance = {}
1096    if fixphase:
1097        precursors = {}
1098        for oldnodes, newnodes in replacements.items():
1099            for oldnode in oldnodes:
1100                for newnode in newnodes:
1101                    precursors.setdefault(newnode, []).append(oldnode)
1102
1103        allnewnodes.sort(key=lambda n: unfi[n].rev())
1104        newphases = {}
1105
1106        def phase(ctx):
1107            return newphases.get(ctx.node(), ctx.phase())
1108
1109        for newnode in allnewnodes:
1110            ctx = unfi[newnode]
1111            parentphase = max(phase(p) for p in ctx.parents())
1112            if targetphase is None:
1113                oldphase = max(
1114                    unfi[oldnode].phase() for oldnode in precursors[newnode]
1115                )
1116                newphase = max(oldphase, parentphase)
1117            else:
1118                newphase = max(targetphase, parentphase)
1119            newphases[newnode] = newphase
1120            if newphase > ctx.phase():
1121                toretract.setdefault(newphase, []).append(newnode)
1122            elif newphase < ctx.phase():
1123                toadvance.setdefault(newphase, []).append(newnode)
1124
1125    with repo.transaction(b'cleanup') as tr:
1126        # Move bookmarks
1127        bmarks = repo._bookmarks
1128        bmarkchanges = []
1129        for oldnode, newnode in moves.items():
1130            oldbmarks = repo.nodebookmarks(oldnode)
1131            if not oldbmarks:
1132                continue
1133            from . import bookmarks  # avoid import cycle
1134
1135            repo.ui.debug(
1136                b'moving bookmarks %r from %s to %s\n'
1137                % (
1138                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
1139                    hex(oldnode),
1140                    hex(newnode),
1141                )
1142            )
1143            # Delete divergent bookmarks being parents of related newnodes
1144            deleterevs = repo.revs(
1145                b'parents(roots(%ln & (::%n))) - parents(%n)',
1146                allnewnodes,
1147                newnode,
1148                oldnode,
1149            )
1150            deletenodes = _containsnode(repo, deleterevs)
1151            for name in oldbmarks:
1152                bmarkchanges.append((name, newnode))
1153                for b in bookmarks.divergent2delete(repo, deletenodes, name):
1154                    bmarkchanges.append((b, None))
1155
1156        if bmarkchanges:
1157            bmarks.applychanges(repo, tr, bmarkchanges)
1158
1159        for phase, nodes in toretract.items():
1160            phases.retractboundary(repo, tr, phase, nodes)
1161        for phase, nodes in toadvance.items():
1162            phases.advanceboundary(repo, tr, phase, nodes)
1163
1164        mayusearchived = repo.ui.config(b'experimental', b'cleanup-as-archived')
1165        # Obsolete or strip nodes
1166        if obsolete.isenabled(repo, obsolete.createmarkersopt):
1167            # If a node is already obsoleted, and we want to obsolete it
1168            # without a successor, skip that obssolete request since it's
1169            # unnecessary. That's the "if s or not isobs(n)" check below.
1170            # Also sort the node in topology order, that might be useful for
1171            # some obsstore logic.
1172            # NOTE: the sorting might belong to createmarkers.
1173            torev = unfi.changelog.rev
1174            sortfunc = lambda ns: torev(ns[0][0])
1175            rels = []
1176            for ns, s in sorted(replacements.items(), key=sortfunc):
1177                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1178                rels.append(rel)
1179            if rels:
1180                obsolete.createmarkers(
1181                    repo, rels, operation=operation, metadata=metadata
1182                )
1183        elif phases.supportinternal(repo) and mayusearchived:
1184            # this assume we do not have "unstable" nodes above the cleaned ones
1185            allreplaced = set()
1186            for ns in replacements.keys():
1187                allreplaced.update(ns)
1188            if backup:
1189                from . import repair  # avoid import cycle
1190
1191                node = min(allreplaced, key=repo.changelog.rev)
1192                repair.backupbundle(
1193                    repo, allreplaced, allreplaced, node, operation
1194                )
1195            phases.retractboundary(repo, tr, phases.archived, allreplaced)
1196        else:
1197            from . import repair  # avoid import cycle
1198
1199            tostrip = list(n for ns in replacements for n in ns)
1200            if tostrip:
1201                repair.delayedstrip(
1202                    repo.ui, repo, tostrip, operation, backup=backup
1203                )
1204
1205
1206def addremove(repo, matcher, prefix, uipathfn, opts=None):
1207    if opts is None:
1208        opts = {}
1209    m = matcher
1210    dry_run = opts.get(b'dry_run')
1211    try:
1212        similarity = float(opts.get(b'similarity') or 0)
1213    except ValueError:
1214        raise error.InputError(_(b'similarity must be a number'))
1215    if similarity < 0 or similarity > 100:
1216        raise error.InputError(_(b'similarity must be between 0 and 100'))
1217    similarity /= 100.0
1218
1219    ret = 0
1220
1221    wctx = repo[None]
1222    for subpath in sorted(wctx.substate):
1223        submatch = matchmod.subdirmatcher(subpath, m)
1224        if opts.get(b'subrepos') or m.exact(subpath) or any(submatch.files()):
1225            sub = wctx.sub(subpath)
1226            subprefix = repo.wvfs.reljoin(prefix, subpath)
1227            subuipathfn = subdiruipathfn(subpath, uipathfn)
1228            try:
1229                if sub.addremove(submatch, subprefix, subuipathfn, opts):
1230                    ret = 1
1231            except error.LookupError:
1232                repo.ui.status(
1233                    _(b"skipping missing subrepository: %s\n")
1234                    % uipathfn(subpath)
1235                )
1236
1237    rejected = []
1238
1239    def badfn(f, msg):
1240        if f in m.files():
1241            m.bad(f, msg)
1242        rejected.append(f)
1243
1244    badmatch = matchmod.badmatch(m, badfn)
1245    added, unknown, deleted, removed, forgotten = _interestingfiles(
1246        repo, badmatch
1247    )
1248
1249    unknownset = set(unknown + forgotten)
1250    toprint = unknownset.copy()
1251    toprint.update(deleted)
1252    for abs in sorted(toprint):
1253        if repo.ui.verbose or not m.exact(abs):
1254            if abs in unknownset:
1255                status = _(b'adding %s\n') % uipathfn(abs)
1256                label = b'ui.addremove.added'
1257            else:
1258                status = _(b'removing %s\n') % uipathfn(abs)
1259                label = b'ui.addremove.removed'
1260            repo.ui.status(status, label=label)
1261
1262    renames = _findrenames(
1263        repo, m, added + unknown, removed + deleted, similarity, uipathfn
1264    )
1265
1266    if not dry_run:
1267        _markchanges(repo, unknown + forgotten, deleted, renames)
1268
1269    for f in rejected:
1270        if f in m.files():
1271            return 1
1272    return ret
1273
1274
1275def marktouched(repo, files, similarity=0.0):
1276    """Assert that files have somehow been operated upon. files are relative to
1277    the repo root."""
1278    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1279    rejected = []
1280
1281    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1282
1283    if repo.ui.verbose:
1284        unknownset = set(unknown + forgotten)
1285        toprint = unknownset.copy()
1286        toprint.update(deleted)
1287        for abs in sorted(toprint):
1288            if abs in unknownset:
1289                status = _(b'adding %s\n') % abs
1290            else:
1291                status = _(b'removing %s\n') % abs
1292            repo.ui.status(status)
1293
1294    # TODO: We should probably have the caller pass in uipathfn and apply it to
1295    # the messages above too. legacyrelativevalue=True is consistent with how
1296    # it used to work.
1297    uipathfn = getuipathfn(repo, legacyrelativevalue=True)
1298    renames = _findrenames(
1299        repo, m, added + unknown, removed + deleted, similarity, uipathfn
1300    )
1301
1302    _markchanges(repo, unknown + forgotten, deleted, renames)
1303
1304    for f in rejected:
1305        if f in m.files():
1306            return 1
1307    return 0
1308
1309
1310def _interestingfiles(repo, matcher):
1311    """Walk dirstate with matcher, looking for files that addremove would care
1312    about.
1313
1314    This is different from dirstate.status because it doesn't care about
1315    whether files are modified or clean."""
1316    added, unknown, deleted, removed, forgotten = [], [], [], [], []
1317    audit_path = pathutil.pathauditor(repo.root, cached=True)
1318
1319    ctx = repo[None]
1320    dirstate = repo.dirstate
1321    matcher = repo.narrowmatch(matcher, includeexact=True)
1322    walkresults = dirstate.walk(
1323        matcher,
1324        subrepos=sorted(ctx.substate),
1325        unknown=True,
1326        ignored=False,
1327        full=False,
1328    )
1329    for abs, st in pycompat.iteritems(walkresults):
1330        entry = dirstate.get_entry(abs)
1331        if (not entry.any_tracked) and audit_path.check(abs):
1332            unknown.append(abs)
1333        elif (not entry.removed) and not st:
1334            deleted.append(abs)
1335        elif entry.removed and st:
1336            forgotten.append(abs)
1337        # for finding renames
1338        elif entry.removed and not st:
1339            removed.append(abs)
1340        elif entry.added:
1341            added.append(abs)
1342
1343    return added, unknown, deleted, removed, forgotten
1344
1345
1346def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
1347    '''Find renames from removed files to added ones.'''
1348    renames = {}
1349    if similarity > 0:
1350        for old, new, score in similar.findrenames(
1351            repo, added, removed, similarity
1352        ):
1353            if (
1354                repo.ui.verbose
1355                or not matcher.exact(old)
1356                or not matcher.exact(new)
1357            ):
1358                repo.ui.status(
1359                    _(
1360                        b'recording removal of %s as rename to %s '
1361                        b'(%d%% similar)\n'
1362                    )
1363                    % (uipathfn(old), uipathfn(new), score * 100)
1364                )
1365            renames[new] = old
1366    return renames
1367
1368
1369def _markchanges(repo, unknown, deleted, renames):
1370    """Marks the files in unknown as added, the files in deleted as removed,
1371    and the files in renames as copied."""
1372    wctx = repo[None]
1373    with repo.wlock():
1374        wctx.forget(deleted)
1375        wctx.add(unknown)
1376        for new, old in pycompat.iteritems(renames):
1377            wctx.copy(old, new)
1378
1379
1380def getrenamedfn(repo, endrev=None):
1381    if copiesmod.usechangesetcentricalgo(repo):
1382
1383        def getrenamed(fn, rev):
1384            ctx = repo[rev]
1385            p1copies = ctx.p1copies()
1386            if fn in p1copies:
1387                return p1copies[fn]
1388            p2copies = ctx.p2copies()
1389            if fn in p2copies:
1390                return p2copies[fn]
1391            return None
1392
1393        return getrenamed
1394
1395    rcache = {}
1396    if endrev is None:
1397        endrev = len(repo)
1398
1399    def getrenamed(fn, rev):
1400        """looks up all renames for a file (up to endrev) the first
1401        time the file is given. It indexes on the changerev and only
1402        parses the manifest if linkrev != changerev.
1403        Returns rename info for fn at changerev rev."""
1404        if fn not in rcache:
1405            rcache[fn] = {}
1406            fl = repo.file(fn)
1407            for i in fl:
1408                lr = fl.linkrev(i)
1409                renamed = fl.renamed(fl.node(i))
1410                rcache[fn][lr] = renamed and renamed[0]
1411                if lr >= endrev:
1412                    break
1413        if rev in rcache[fn]:
1414            return rcache[fn][rev]
1415
1416        # If linkrev != rev (i.e. rev not found in rcache) fallback to
1417        # filectx logic.
1418        try:
1419            return repo[rev][fn].copysource()
1420        except error.LookupError:
1421            return None
1422
1423    return getrenamed
1424
1425
1426def getcopiesfn(repo, endrev=None):
1427    if copiesmod.usechangesetcentricalgo(repo):
1428
1429        def copiesfn(ctx):
1430            if ctx.p2copies():
1431                allcopies = ctx.p1copies().copy()
1432                # There should be no overlap
1433                allcopies.update(ctx.p2copies())
1434                return sorted(allcopies.items())
1435            else:
1436                return sorted(ctx.p1copies().items())
1437
1438    else:
1439        getrenamed = getrenamedfn(repo, endrev)
1440
1441        def copiesfn(ctx):
1442            copies = []
1443            for fn in ctx.files():
1444                rename = getrenamed(fn, ctx.rev())
1445                if rename:
1446                    copies.append((fn, rename))
1447            return copies
1448
1449    return copiesfn
1450
1451
1452def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1453    """Update the dirstate to reflect the intent of copying src to dst. For
1454    different reasons it might not end with dst being marked as copied from src.
1455    """
1456    origsrc = repo.dirstate.copied(src) or src
1457    if dst == origsrc:  # copying back a copy?
1458        entry = repo.dirstate.get_entry(dst)
1459        if (entry.added or not entry.tracked) and not dryrun:
1460            repo.dirstate.set_tracked(dst)
1461    else:
1462        if repo.dirstate.get_entry(origsrc).added and origsrc == src:
1463            if not ui.quiet:
1464                ui.warn(
1465                    _(
1466                        b"%s has not been committed yet, so no copy "
1467                        b"data will be stored for %s.\n"
1468                    )
1469                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
1470                )
1471            if not repo.dirstate.get_entry(dst).tracked and not dryrun:
1472                wctx.add([dst])
1473        elif not dryrun:
1474            wctx.copy(origsrc, dst)
1475
1476
1477def movedirstate(repo, newctx, match=None):
1478    """Move the dirstate to newctx and adjust it as necessary.
1479
1480    A matcher can be provided as an optimization. It is probably a bug to pass
1481    a matcher that doesn't match all the differences between the parent of the
1482    working copy and newctx.
1483    """
1484    oldctx = repo[b'.']
1485    ds = repo.dirstate
1486    copies = dict(ds.copies())
1487    ds.setparents(newctx.node(), repo.nullid)
1488    s = newctx.status(oldctx, match=match)
1489
1490    for f in s.modified:
1491        ds.update_file_p1(f, p1_tracked=True)
1492
1493    for f in s.added:
1494        ds.update_file_p1(f, p1_tracked=False)
1495
1496    for f in s.removed:
1497        ds.update_file_p1(f, p1_tracked=True)
1498
1499    # Merge old parent and old working dir copies
1500    oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
1501    oldcopies.update(copies)
1502    copies = {
1503        dst: oldcopies.get(src, src)
1504        for dst, src in pycompat.iteritems(oldcopies)
1505    }
1506    # Adjust the dirstate copies
1507    for dst, src in pycompat.iteritems(copies):
1508        if src not in newctx or dst in newctx or not ds.get_entry(dst).added:
1509            src = None
1510        ds.copy(src, dst)
1511    repo._quick_access_changeid_invalidate()
1512
1513
1514def filterrequirements(requirements):
1515    """filters the requirements into two sets:
1516
1517    wcreq: requirements which should be written in .hg/requires
1518    storereq: which should be written in .hg/store/requires
1519
1520    Returns (wcreq, storereq)
1521    """
1522    if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
1523        wc, store = set(), set()
1524        for r in requirements:
1525            if r in requirementsmod.WORKING_DIR_REQUIREMENTS:
1526                wc.add(r)
1527            else:
1528                store.add(r)
1529        return wc, store
1530    return requirements, None
1531
1532
1533def istreemanifest(repo):
1534    """returns whether the repository is using treemanifest or not"""
1535    return requirementsmod.TREEMANIFEST_REQUIREMENT in repo.requirements
1536
1537
1538def writereporequirements(repo, requirements=None):
1539    """writes requirements for the repo
1540
1541    Requirements are written to .hg/requires and .hg/store/requires based
1542    on whether share-safe mode is enabled and which requirements are wdir
1543    requirements and which are store requirements
1544    """
1545    if requirements:
1546        repo.requirements = requirements
1547    wcreq, storereq = filterrequirements(repo.requirements)
1548    if wcreq is not None:
1549        writerequires(repo.vfs, wcreq)
1550    if storereq is not None:
1551        writerequires(repo.svfs, storereq)
1552    elif repo.ui.configbool(b'format', b'usestore'):
1553        # only remove store requires if we are using store
1554        repo.svfs.tryunlink(b'requires')
1555
1556
1557def writerequires(opener, requirements):
1558    with opener(b'requires', b'w', atomictemp=True) as fp:
1559        for r in sorted(requirements):
1560            fp.write(b"%s\n" % r)
1561
1562
1563class filecachesubentry(object):
1564    def __init__(self, path, stat):
1565        self.path = path
1566        self.cachestat = None
1567        self._cacheable = None
1568
1569        if stat:
1570            self.cachestat = filecachesubentry.stat(self.path)
1571
1572            if self.cachestat:
1573                self._cacheable = self.cachestat.cacheable()
1574            else:
1575                # None means we don't know yet
1576                self._cacheable = None
1577
1578    def refresh(self):
1579        if self.cacheable():
1580            self.cachestat = filecachesubentry.stat(self.path)
1581
1582    def cacheable(self):
1583        if self._cacheable is not None:
1584            return self._cacheable
1585
1586        # we don't know yet, assume it is for now
1587        return True
1588
1589    def changed(self):
1590        # no point in going further if we can't cache it
1591        if not self.cacheable():
1592            return True
1593
1594        newstat = filecachesubentry.stat(self.path)
1595
1596        # we may not know if it's cacheable yet, check again now
1597        if newstat and self._cacheable is None:
1598            self._cacheable = newstat.cacheable()
1599
1600            # check again
1601            if not self._cacheable:
1602                return True
1603
1604        if self.cachestat != newstat:
1605            self.cachestat = newstat
1606            return True
1607        else:
1608            return False
1609
1610    @staticmethod
1611    def stat(path):
1612        try:
1613            return util.cachestat(path)
1614        except OSError as e:
1615            if e.errno != errno.ENOENT:
1616                raise
1617
1618
1619class filecacheentry(object):
1620    def __init__(self, paths, stat=True):
1621        self._entries = []
1622        for path in paths:
1623            self._entries.append(filecachesubentry(path, stat))
1624
1625    def changed(self):
1626        '''true if any entry has changed'''
1627        for entry in self._entries:
1628            if entry.changed():
1629                return True
1630        return False
1631
1632    def refresh(self):
1633        for entry in self._entries:
1634            entry.refresh()
1635
1636
1637class filecache(object):
1638    """A property like decorator that tracks files under .hg/ for updates.
1639
1640    On first access, the files defined as arguments are stat()ed and the
1641    results cached. The decorated function is called. The results are stashed
1642    away in a ``_filecache`` dict on the object whose method is decorated.
1643
1644    On subsequent access, the cached result is used as it is set to the
1645    instance dictionary.
1646
1647    On external property set/delete operations, the caller must update the
1648    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
1649    instead of directly setting <attr>.
1650
1651    When using the property API, the cached data is always used if available.
1652    No stat() is performed to check if the file has changed.
1653
1654    Others can muck about with the state of the ``_filecache`` dict. e.g. they
1655    can populate an entry before the property's getter is called. In this case,
1656    entries in ``_filecache`` will be used during property operations,
1657    if available. If the underlying file changes, it is up to external callers
1658    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1659    method result as well as possibly calling ``del obj._filecache[attr]`` to
1660    remove the ``filecacheentry``.
1661    """
1662
1663    def __init__(self, *paths):
1664        self.paths = paths
1665
1666    def tracked_paths(self, obj):
1667        return [self.join(obj, path) for path in self.paths]
1668
1669    def join(self, obj, fname):
1670        """Used to compute the runtime path of a cached file.
1671
1672        Users should subclass filecache and provide their own version of this
1673        function to call the appropriate join function on 'obj' (an instance
1674        of the class that its member function was decorated).
1675        """
1676        raise NotImplementedError
1677
1678    def __call__(self, func):
1679        self.func = func
1680        self.sname = func.__name__
1681        self.name = pycompat.sysbytes(self.sname)
1682        return self
1683
1684    def __get__(self, obj, type=None):
1685        # if accessed on the class, return the descriptor itself.
1686        if obj is None:
1687            return self
1688
1689        assert self.sname not in obj.__dict__
1690
1691        entry = obj._filecache.get(self.name)
1692
1693        if entry:
1694            if entry.changed():
1695                entry.obj = self.func(obj)
1696        else:
1697            paths = self.tracked_paths(obj)
1698
1699            # We stat -before- creating the object so our cache doesn't lie if
1700            # a writer modified between the time we read and stat
1701            entry = filecacheentry(paths, True)
1702            entry.obj = self.func(obj)
1703
1704            obj._filecache[self.name] = entry
1705
1706        obj.__dict__[self.sname] = entry.obj
1707        return entry.obj
1708
1709    # don't implement __set__(), which would make __dict__ lookup as slow as
1710    # function call.
1711
1712    def set(self, obj, value):
1713        if self.name not in obj._filecache:
1714            # we add an entry for the missing value because X in __dict__
1715            # implies X in _filecache
1716            paths = self.tracked_paths(obj)
1717            ce = filecacheentry(paths, False)
1718            obj._filecache[self.name] = ce
1719        else:
1720            ce = obj._filecache[self.name]
1721
1722        ce.obj = value  # update cached copy
1723        obj.__dict__[self.sname] = value  # update copy returned by obj.x
1724
1725
1726def extdatasource(repo, source):
1727    """Gather a map of rev -> value dict from the specified source
1728
1729    A source spec is treated as a URL, with a special case shell: type
1730    for parsing the output from a shell command.
1731
1732    The data is parsed as a series of newline-separated records where
1733    each record is a revision specifier optionally followed by a space
1734    and a freeform string value. If the revision is known locally, it
1735    is converted to a rev, otherwise the record is skipped.
1736
1737    Note that both key and value are treated as UTF-8 and converted to
1738    the local encoding. This allows uniformity between local and
1739    remote data sources.
1740    """
1741
1742    spec = repo.ui.config(b"extdata", source)
1743    if not spec:
1744        raise error.Abort(_(b"unknown extdata source '%s'") % source)
1745
1746    data = {}
1747    src = proc = None
1748    try:
1749        if spec.startswith(b"shell:"):
1750            # external commands should be run relative to the repo root
1751            cmd = spec[6:]
1752            proc = subprocess.Popen(
1753                procutil.tonativestr(cmd),
1754                shell=True,
1755                bufsize=-1,
1756                close_fds=procutil.closefds,
1757                stdout=subprocess.PIPE,
1758                cwd=procutil.tonativestr(repo.root),
1759            )
1760            src = proc.stdout
1761        else:
1762            # treat as a URL or file
1763            src = url.open(repo.ui, spec)
1764        for l in src:
1765            if b" " in l:
1766                k, v = l.strip().split(b" ", 1)
1767            else:
1768                k, v = l.strip(), b""
1769
1770            k = encoding.tolocal(k)
1771            try:
1772                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1773            except (error.LookupError, error.RepoLookupError, error.InputError):
1774                pass  # we ignore data for nodes that don't exist locally
1775    finally:
1776        if proc:
1777            try:
1778                proc.communicate()
1779            except ValueError:
1780                # This happens if we started iterating src and then
1781                # get a parse error on a line. It should be safe to ignore.
1782                pass
1783        if src:
1784            src.close()
1785    if proc and proc.returncode != 0:
1786        raise error.Abort(
1787            _(b"extdata command '%s' failed: %s")
1788            % (cmd, procutil.explainexit(proc.returncode))
1789        )
1790
1791    return data
1792
1793
1794class progress(object):
1795    def __init__(self, ui, updatebar, topic, unit=b"", total=None):
1796        self.ui = ui
1797        self.pos = 0
1798        self.topic = topic
1799        self.unit = unit
1800        self.total = total
1801        self.debug = ui.configbool(b'progress', b'debug')
1802        self._updatebar = updatebar
1803
1804    def __enter__(self):
1805        return self
1806
1807    def __exit__(self, exc_type, exc_value, exc_tb):
1808        self.complete()
1809
1810    def update(self, pos, item=b"", total=None):
1811        assert pos is not None
1812        if total:
1813            self.total = total
1814        self.pos = pos
1815        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
1816        if self.debug:
1817            self._printdebug(item)
1818
1819    def increment(self, step=1, item=b"", total=None):
1820        self.update(self.pos + step, item, total)
1821
1822    def complete(self):
1823        self.pos = None
1824        self.unit = b""
1825        self.total = None
1826        self._updatebar(self.topic, self.pos, b"", self.unit, self.total)
1827
1828    def _printdebug(self, item):
1829        unit = b''
1830        if self.unit:
1831            unit = b' ' + self.unit
1832        if item:
1833            item = b' ' + item
1834
1835        if self.total:
1836            pct = 100.0 * self.pos / self.total
1837            self.ui.debug(
1838                b'%s:%s %d/%d%s (%4.2f%%)\n'
1839                % (self.topic, item, self.pos, self.total, unit, pct)
1840            )
1841        else:
1842            self.ui.debug(b'%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1843
1844
1845def gdinitconfig(ui):
1846    """helper function to know if a repo should be created as general delta"""
1847    # experimental config: format.generaldelta
1848    return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1849        b'format', b'usegeneraldelta'
1850    )
1851
1852
1853def gddeltaconfig(ui):
1854    """helper function to know if incoming delta should be optimised"""
1855    # experimental config: format.generaldelta
1856    return ui.configbool(b'format', b'generaldelta')
1857
1858
1859class simplekeyvaluefile(object):
1860    """A simple file with key=value lines
1861
1862    Keys must be alphanumerics and start with a letter, values must not
1863    contain '\n' characters"""
1864
1865    firstlinekey = b'__firstline'
1866
1867    def __init__(self, vfs, path, keys=None):
1868        self.vfs = vfs
1869        self.path = path
1870
1871    def read(self, firstlinenonkeyval=False):
1872        """Read the contents of a simple key-value file
1873
1874        'firstlinenonkeyval' indicates whether the first line of file should
1875        be treated as a key-value pair or reuturned fully under the
1876        __firstline key."""
1877        lines = self.vfs.readlines(self.path)
1878        d = {}
1879        if firstlinenonkeyval:
1880            if not lines:
1881                e = _(b"empty simplekeyvalue file")
1882                raise error.CorruptedState(e)
1883            # we don't want to include '\n' in the __firstline
1884            d[self.firstlinekey] = lines[0][:-1]
1885            del lines[0]
1886
1887        try:
1888            # the 'if line.strip()' part prevents us from failing on empty
1889            # lines which only contain '\n' therefore are not skipped
1890            # by 'if line'
1891            updatedict = dict(
1892                line[:-1].split(b'=', 1) for line in lines if line.strip()
1893            )
1894            if self.firstlinekey in updatedict:
1895                e = _(b"%r can't be used as a key")
1896                raise error.CorruptedState(e % self.firstlinekey)
1897            d.update(updatedict)
1898        except ValueError as e:
1899            raise error.CorruptedState(stringutil.forcebytestr(e))
1900        return d
1901
1902    def write(self, data, firstline=None):
1903        """Write key=>value mapping to a file
1904        data is a dict. Keys must be alphanumerical and start with a letter.
1905        Values must not contain newline characters.
1906
1907        If 'firstline' is not None, it is written to file before
1908        everything else, as it is, not in a key=value form"""
1909        lines = []
1910        if firstline is not None:
1911            lines.append(b'%s\n' % firstline)
1912
1913        for k, v in data.items():
1914            if k == self.firstlinekey:
1915                e = b"key name '%s' is reserved" % self.firstlinekey
1916                raise error.ProgrammingError(e)
1917            if not k[0:1].isalpha():
1918                e = b"keys must start with a letter in a key-value file"
1919                raise error.ProgrammingError(e)
1920            if not k.isalnum():
1921                e = b"invalid key name in a simple key-value file"
1922                raise error.ProgrammingError(e)
1923            if b'\n' in v:
1924                e = b"invalid value in a simple key-value file"
1925                raise error.ProgrammingError(e)
1926            lines.append(b"%s=%s\n" % (k, v))
1927        with self.vfs(self.path, mode=b'wb', atomictemp=True) as fp:
1928            fp.write(b''.join(lines))
1929
1930
1931_reportobsoletedsource = [
1932    b'debugobsolete',
1933    b'pull',
1934    b'push',
1935    b'serve',
1936    b'unbundle',
1937]
1938
1939_reportnewcssource = [
1940    b'pull',
1941    b'unbundle',
1942]
1943
1944
1945def prefetchfiles(repo, revmatches):
1946    """Invokes the registered file prefetch functions, allowing extensions to
1947    ensure the corresponding files are available locally, before the command
1948    uses them.
1949
1950    Args:
1951      revmatches: a list of (revision, match) tuples to indicate the files to
1952      fetch at each revision. If any of the match elements is None, it matches
1953      all files.
1954    """
1955
1956    def _matcher(m):
1957        if m:
1958            assert isinstance(m, matchmod.basematcher)
1959            # The command itself will complain about files that don't exist, so
1960            # don't duplicate the message.
1961            return matchmod.badmatch(m, lambda fn, msg: None)
1962        else:
1963            return matchall(repo)
1964
1965    revbadmatches = [(rev, _matcher(match)) for (rev, match) in revmatches]
1966
1967    fileprefetchhooks(repo, revbadmatches)
1968
1969
1970# a list of (repo, revs, match) prefetch functions
1971fileprefetchhooks = util.hooks()
1972
1973# A marker that tells the evolve extension to suppress its own reporting
1974_reportstroubledchangesets = True
1975
1976
1977def registersummarycallback(repo, otr, txnname=b'', as_validator=False):
1978    """register a callback to issue a summary after the transaction is closed
1979
1980    If as_validator is true, then the callbacks are registered as transaction
1981    validators instead
1982    """
1983
1984    def txmatch(sources):
1985        return any(txnname.startswith(source) for source in sources)
1986
1987    categories = []
1988
1989    def reportsummary(func):
1990        """decorator for report callbacks."""
1991        # The repoview life cycle is shorter than the one of the actual
1992        # underlying repository. So the filtered object can die before the
1993        # weakref is used leading to troubles. We keep a reference to the
1994        # unfiltered object and restore the filtering when retrieving the
1995        # repository through the weakref.
1996        filtername = repo.filtername
1997        reporef = weakref.ref(repo.unfiltered())
1998
1999        def wrapped(tr):
2000            repo = reporef()
2001            if filtername:
2002                assert repo is not None  # help pytype
2003                repo = repo.filtered(filtername)
2004            func(repo, tr)
2005
2006        newcat = b'%02i-txnreport' % len(categories)
2007        if as_validator:
2008            otr.addvalidator(newcat, wrapped)
2009        else:
2010            otr.addpostclose(newcat, wrapped)
2011        categories.append(newcat)
2012        return wrapped
2013
2014    @reportsummary
2015    def reportchangegroup(repo, tr):
2016        cgchangesets = tr.changes.get(b'changegroup-count-changesets', 0)
2017        cgrevisions = tr.changes.get(b'changegroup-count-revisions', 0)
2018        cgfiles = tr.changes.get(b'changegroup-count-files', 0)
2019        cgheads = tr.changes.get(b'changegroup-count-heads', 0)
2020        if cgchangesets or cgrevisions or cgfiles:
2021            htext = b""
2022            if cgheads:
2023                htext = _(b" (%+d heads)") % cgheads
2024            msg = _(b"added %d changesets with %d changes to %d files%s\n")
2025            if as_validator:
2026                msg = _(b"adding %d changesets with %d changes to %d files%s\n")
2027            assert repo is not None  # help pytype
2028            repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
2029
2030    if txmatch(_reportobsoletedsource):
2031
2032        @reportsummary
2033        def reportobsoleted(repo, tr):
2034            obsoleted = obsutil.getobsoleted(repo, tr)
2035            newmarkers = len(tr.changes.get(b'obsmarkers', ()))
2036            if newmarkers:
2037                repo.ui.status(_(b'%i new obsolescence markers\n') % newmarkers)
2038            if obsoleted:
2039                msg = _(b'obsoleted %i changesets\n')
2040                if as_validator:
2041                    msg = _(b'obsoleting %i changesets\n')
2042                repo.ui.status(msg % len(obsoleted))
2043
2044    if obsolete.isenabled(
2045        repo, obsolete.createmarkersopt
2046    ) and repo.ui.configbool(
2047        b'experimental', b'evolution.report-instabilities'
2048    ):
2049        instabilitytypes = [
2050            (b'orphan', b'orphan'),
2051            (b'phase-divergent', b'phasedivergent'),
2052            (b'content-divergent', b'contentdivergent'),
2053        ]
2054
2055        def getinstabilitycounts(repo):
2056            filtered = repo.changelog.filteredrevs
2057            counts = {}
2058            for instability, revset in instabilitytypes:
2059                counts[instability] = len(
2060                    set(obsolete.getrevs(repo, revset)) - filtered
2061                )
2062            return counts
2063
2064        oldinstabilitycounts = getinstabilitycounts(repo)
2065
2066        @reportsummary
2067        def reportnewinstabilities(repo, tr):
2068            newinstabilitycounts = getinstabilitycounts(repo)
2069            for instability, revset in instabilitytypes:
2070                delta = (
2071                    newinstabilitycounts[instability]
2072                    - oldinstabilitycounts[instability]
2073                )
2074                msg = getinstabilitymessage(delta, instability)
2075                if msg:
2076                    repo.ui.warn(msg)
2077
2078    if txmatch(_reportnewcssource):
2079
2080        @reportsummary
2081        def reportnewcs(repo, tr):
2082            """Report the range of new revisions pulled/unbundled."""
2083            origrepolen = tr.changes.get(b'origrepolen', len(repo))
2084            unfi = repo.unfiltered()
2085            if origrepolen >= len(unfi):
2086                return
2087
2088            # Compute the bounds of new visible revisions' range.
2089            revs = smartset.spanset(repo, start=origrepolen)
2090            if revs:
2091                minrev, maxrev = repo[revs.min()], repo[revs.max()]
2092
2093                if minrev == maxrev:
2094                    revrange = minrev
2095                else:
2096                    revrange = b'%s:%s' % (minrev, maxrev)
2097                draft = len(repo.revs(b'%ld and draft()', revs))
2098                secret = len(repo.revs(b'%ld and secret()', revs))
2099                if not (draft or secret):
2100                    msg = _(b'new changesets %s\n') % revrange
2101                elif draft and secret:
2102                    msg = _(b'new changesets %s (%d drafts, %d secrets)\n')
2103                    msg %= (revrange, draft, secret)
2104                elif draft:
2105                    msg = _(b'new changesets %s (%d drafts)\n')
2106                    msg %= (revrange, draft)
2107                elif secret:
2108                    msg = _(b'new changesets %s (%d secrets)\n')
2109                    msg %= (revrange, secret)
2110                else:
2111                    errormsg = b'entered unreachable condition'
2112                    raise error.ProgrammingError(errormsg)
2113                repo.ui.status(msg)
2114
2115            # search new changesets directly pulled as obsolete
2116            duplicates = tr.changes.get(b'revduplicates', ())
2117            obsadded = unfi.revs(
2118                b'(%d: + %ld) and obsolete()', origrepolen, duplicates
2119            )
2120            cl = repo.changelog
2121            extinctadded = [r for r in obsadded if r not in cl]
2122            if extinctadded:
2123                # They are not just obsolete, but obsolete and invisible
2124                # we call them "extinct" internally but the terms have not been
2125                # exposed to users.
2126                msg = b'(%d other changesets obsolete on arrival)\n'
2127                repo.ui.status(msg % len(extinctadded))
2128
2129        @reportsummary
2130        def reportphasechanges(repo, tr):
2131            """Report statistics of phase changes for changesets pre-existing
2132            pull/unbundle.
2133            """
2134            origrepolen = tr.changes.get(b'origrepolen', len(repo))
2135            published = []
2136            for revs, (old, new) in tr.changes.get(b'phases', []):
2137                if new != phases.public:
2138                    continue
2139                published.extend(rev for rev in revs if rev < origrepolen)
2140            if not published:
2141                return
2142            msg = _(b'%d local changesets published\n')
2143            if as_validator:
2144                msg = _(b'%d local changesets will be published\n')
2145            repo.ui.status(msg % len(published))
2146
2147
2148def getinstabilitymessage(delta, instability):
2149    """function to return the message to show warning about new instabilities
2150
2151    exists as a separate function so that extension can wrap to show more
2152    information like how to fix instabilities"""
2153    if delta > 0:
2154        return _(b'%i new %s changesets\n') % (delta, instability)
2155
2156
2157def nodesummaries(repo, nodes, maxnumnodes=4):
2158    if len(nodes) <= maxnumnodes or repo.ui.verbose:
2159        return b' '.join(short(h) for h in nodes)
2160    first = b' '.join(short(h) for h in nodes[:maxnumnodes])
2161    return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
2162
2163
2164def enforcesinglehead(repo, tr, desc, accountclosed, filtername):
2165    """check that no named branch has multiple heads"""
2166    if desc in (b'strip', b'repair'):
2167        # skip the logic during strip
2168        return
2169    visible = repo.filtered(filtername)
2170    # possible improvement: we could restrict the check to affected branch
2171    bm = visible.branchmap()
2172    for name in bm:
2173        heads = bm.branchheads(name, closed=accountclosed)
2174        if len(heads) > 1:
2175            msg = _(b'rejecting multiple heads on branch "%s"')
2176            msg %= name
2177            hint = _(b'%d heads: %s')
2178            hint %= (len(heads), nodesummaries(repo, heads))
2179            raise error.Abort(msg, hint=hint)
2180
2181
2182def wrapconvertsink(sink):
2183    """Allow extensions to wrap the sink returned by convcmd.convertsink()
2184    before it is used, whether or not the convert extension was formally loaded.
2185    """
2186    return sink
2187
2188
2189def unhidehashlikerevs(repo, specs, hiddentype):
2190    """parse the user specs and unhide changesets whose hash or revision number
2191    is passed.
2192
2193    hiddentype can be: 1) 'warn': warn while unhiding changesets
2194                       2) 'nowarn': don't warn while unhiding changesets
2195
2196    returns a repo object with the required changesets unhidden
2197    """
2198    if not repo.filtername or not repo.ui.configbool(
2199        b'experimental', b'directaccess'
2200    ):
2201        return repo
2202
2203    if repo.filtername not in (b'visible', b'visible-hidden'):
2204        return repo
2205
2206    symbols = set()
2207    for spec in specs:
2208        try:
2209            tree = revsetlang.parse(spec)
2210        except error.ParseError:  # will be reported by scmutil.revrange()
2211            continue
2212
2213        symbols.update(revsetlang.gethashlikesymbols(tree))
2214
2215    if not symbols:
2216        return repo
2217
2218    revs = _getrevsfromsymbols(repo, symbols)
2219
2220    if not revs:
2221        return repo
2222
2223    if hiddentype == b'warn':
2224        unfi = repo.unfiltered()
2225        revstr = b", ".join([pycompat.bytestr(unfi[l]) for l in revs])
2226        repo.ui.warn(
2227            _(
2228                b"warning: accessing hidden changesets for write "
2229                b"operation: %s\n"
2230            )
2231            % revstr
2232        )
2233
2234    # we have to use new filtername to separate branch/tags cache until we can
2235    # disbale these cache when revisions are dynamically pinned.
2236    return repo.filtered(b'visible-hidden', revs)
2237
2238
2239def _getrevsfromsymbols(repo, symbols):
2240    """parse the list of symbols and returns a set of revision numbers of hidden
2241    changesets present in symbols"""
2242    revs = set()
2243    unfi = repo.unfiltered()
2244    unficl = unfi.changelog
2245    cl = repo.changelog
2246    tiprev = len(unficl)
2247    allowrevnums = repo.ui.configbool(b'experimental', b'directaccess.revnums')
2248    for s in symbols:
2249        try:
2250            n = int(s)
2251            if n <= tiprev:
2252                if not allowrevnums:
2253                    continue
2254                else:
2255                    if n not in cl:
2256                        revs.add(n)
2257                    continue
2258        except ValueError:
2259            pass
2260
2261        try:
2262            s = resolvehexnodeidprefix(unfi, s)
2263        except (error.LookupError, error.WdirUnsupported):
2264            s = None
2265
2266        if s is not None:
2267            rev = unficl.rev(s)
2268            if rev not in cl:
2269                revs.add(rev)
2270
2271    return revs
2272
2273
2274def bookmarkrevs(repo, mark):
2275    """Select revisions reachable by a given bookmark
2276
2277    If the bookmarked revision isn't a head, an empty set will be returned.
2278    """
2279    return repo.revs(format_bookmark_revspec(mark))
2280
2281
2282def format_bookmark_revspec(mark):
2283    """Build a revset expression to select revisions reachable by a given
2284    bookmark"""
2285    mark = b'literal:' + mark
2286    return revsetlang.formatspec(
2287        b"ancestors(bookmark(%s)) - "
2288        b"ancestors(head() and not bookmark(%s)) - "
2289        b"ancestors(bookmark() and not bookmark(%s))",
2290        mark,
2291        mark,
2292        mark,
2293    )
2294