1# -*- coding: utf-8 -*-
2"""Misc. xonsh tools.
3
4The following implementations were forked from the IPython project:
5
6* Copyright (c) 2008-2014, IPython Development Team
7* Copyright (C) 2001-2007 Fernando Perez <fperez@colorado.edu>
8* Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
9* Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
10
11Implementations:
12
13* decode()
14* encode()
15* cast_unicode()
16* safe_hasattr()
17* indent()
18
19"""
20import builtins
21import collections
22import collections.abc as cabc
23import contextlib
24import ctypes
25import datetime
26from distutils.version import LooseVersion
27import functools
28import glob
29import itertools
30import os
31import pathlib
32import re
33import subprocess
34import sys
35import threading
36import traceback
37import warnings
38import operator
39
40# adding imports from further xonsh modules is discouraged to avoid circular
41# dependencies
42from xonsh import __version__
43from xonsh.lazyasd import LazyObject, LazyDict, lazyobject
44from xonsh.platform import (
45    scandir,
46    DEFAULT_ENCODING,
47    ON_LINUX,
48    ON_WINDOWS,
49    PYTHON_VERSION_INFO,
50    expanduser,
51    os_environ,
52)
53
54
55@functools.lru_cache(1)
56def is_superuser():
57    if ON_WINDOWS:
58        rtn = ctypes.windll.shell32.IsUserAnAdmin() != 0
59    else:
60        rtn = os.getuid() == 0
61    return rtn
62
63
64class XonshError(Exception):
65    pass
66
67
68class XonshCalledProcessError(XonshError, subprocess.CalledProcessError):
69    """Raised when there's an error with a called process
70
71    Inherits from XonshError and subprocess.CalledProcessError, catching
72    either will also catch this error.
73
74    Raised *after* iterating over stdout of a captured command, if the
75    returncode of the command is nonzero.
76
77    Example:
78        try:
79            for line in !(ls):
80                print(line)
81        except subprocess.CalledProcessError as error:
82            print("Error in process: {}.format(error.completed_command.pid))
83
84    This also handles differences between Python3.4 and 3.5 where
85    CalledProcessError is concerned.
86    """
87
88    def __init__(
89        self, returncode, command, output=None, stderr=None, completed_command=None
90    ):
91        super().__init__(returncode, command, output)
92        self.stderr = stderr
93        self.completed_command = completed_command
94
95
96def expand_path(s, expand_user=True):
97    """Takes a string path and expands ~ to home if expand_user is set
98    and environment vars if EXPAND_ENV_VARS is set."""
99    env = getattr(builtins, "__xonsh_env__", os_environ)
100    if env.get("EXPAND_ENV_VARS", False):
101        s = expandvars(s)
102    if expand_user:
103        # expand ~ according to Bash unquoted rules "Each variable assignment is
104        # checked for unquoted tilde-prefixes immediately following a ':' or the
105        # first '='". See the following for more details.
106        # https://www.gnu.org/software/bash/manual/html_node/Tilde-Expansion.html
107        pre, char, post = s.partition("=")
108        if char:
109            s = expanduser(pre) + char
110            s += os.pathsep.join(map(expanduser, post.split(os.pathsep)))
111        else:
112            s = expanduser(s)
113    return s
114
115
116def _expandpath(path):
117    """Performs environment variable / user expansion on a given path
118    if EXPAND_ENV_VARS is set.
119    """
120    env = getattr(builtins, "__xonsh_env__", os_environ)
121    expand_user = env.get("EXPAND_ENV_VARS", False)
122    return expand_path(path, expand_user=expand_user)
123
124
125def decode_bytes(b):
126    """Tries to decode the bytes using XONSH_ENCODING if available,
127    otherwise using sys.getdefaultencoding().
128    """
129    env = getattr(builtins, "__xonsh_env__", os_environ)
130    enc = env.get("XONSH_ENCODING") or DEFAULT_ENCODING
131    err = env.get("XONSH_ENCODING_ERRORS") or "strict"
132    return b.decode(encoding=enc, errors=err)
133
134
135def findfirst(s, substrs):
136    """Finds whichever of the given substrings occurs first in the given string
137    and returns that substring, or returns None if no such strings occur.
138    """
139    i = len(s)
140    result = None
141    for substr in substrs:
142        pos = s.find(substr)
143        if -1 < pos < i:
144            i = pos
145            result = substr
146    return i, result
147
148
149class EnvPath(collections.MutableSequence):
150    """A class that implements an environment path, which is a list of
151    strings. Provides a custom method that expands all paths if the
152    relevant env variable has been set.
153    """
154
155    def __init__(self, args=None):
156        if not args:
157            self._l = []
158        else:
159            if isinstance(args, str):
160                self._l = args.split(os.pathsep)
161            elif isinstance(args, pathlib.Path):
162                self._l = [args]
163            elif isinstance(args, bytes):
164                # decode bytes to a string and then split based on
165                # the default path separator
166                self._l = decode_bytes(args).split(os.pathsep)
167            elif isinstance(args, collections.Iterable):
168                # put everything in a list -before- performing the type check
169                # in order to be able to retrieve it later, for cases such as
170                # when a generator expression was passed as an argument
171                args = list(args)
172                if not all(isinstance(i, (str, bytes, pathlib.Path)) for i in args):
173                    # make TypeError's message as informative as possible
174                    # when given an invalid initialization sequence
175                    raise TypeError(
176                        "EnvPath's initialization sequence should only "
177                        "contain str, bytes and pathlib.Path entries"
178                    )
179                self._l = args
180            else:
181                raise TypeError(
182                    "EnvPath cannot be initialized with items "
183                    "of type %s" % type(args)
184                )
185
186    def __getitem__(self, item):
187        # handle slices separately
188        if isinstance(item, slice):
189            return [_expandpath(i) for i in self._l[item]]
190        else:
191            return _expandpath(self._l[item])
192
193    def __setitem__(self, index, item):
194        self._l.__setitem__(index, item)
195
196    def __len__(self):
197        return len(self._l)
198
199    def __delitem__(self, key):
200        self._l.__delitem__(key)
201
202    def insert(self, index, value):
203        self._l.insert(index, value)
204
205    @property
206    def paths(self):
207        """
208        Returns the list of directories that this EnvPath contains.
209        """
210        return list(self)
211
212    def __repr__(self):
213        return repr(self._l)
214
215    def __eq__(self, other):
216        if len(self) != len(other):
217            return False
218        return all(map(operator.eq, self, other))
219
220    def _repr_pretty_(self, p, cycle):
221        """ Pretty print path list """
222        if cycle:
223            p.text("EnvPath(...)")
224        else:
225            with p.group(1, "EnvPath(\n[", "]\n)"):
226                for idx, item in enumerate(self):
227                    if idx:
228                        p.text(",")
229                        p.breakable()
230                    p.pretty(item)
231
232    def __add__(self, other):
233        if isinstance(other, EnvPath):
234            other = other._l
235        return EnvPath(self._l + other)
236
237    def __radd__(self, other):
238        if isinstance(other, EnvPath):
239            other = other._l
240        return EnvPath(other + self._l)
241
242    def add(self, data, front=False, replace=False):
243        """Add a value to this EnvPath,
244
245        path.add(data, front=bool, replace=bool) -> ensures that path contains data, with position determined by kwargs
246
247        Parameters
248        ----------
249        data : string or bytes or pathlib.Path
250            value to be added
251        front : bool
252            whether the value should be added to the front, will be
253            ignored if the data already exists in this EnvPath and
254            replace is False
255            Default : False
256        replace : bool
257            If True, the value will be removed and added to the
258            start or end(depending on the value of front)
259            Default : False
260
261        Returns
262        -------
263        None
264
265        """
266        if data not in self._l:
267            self._l.insert(0 if front else len(self._l), data)
268        elif replace:
269            self._l.remove(data)
270            self._l.insert(0 if front else len(self._l), data)
271
272
273class DefaultNotGivenType(object):
274    """Singleton for representing when no default value is given."""
275
276    __inst = None
277
278    def __new__(cls):
279        if DefaultNotGivenType.__inst is None:
280            DefaultNotGivenType.__inst = object.__new__(cls)
281        return DefaultNotGivenType.__inst
282
283
284DefaultNotGiven = DefaultNotGivenType()
285
286BEG_TOK_SKIPS = LazyObject(
287    lambda: frozenset(["WS", "INDENT", "NOT", "LPAREN"]), globals(), "BEG_TOK_SKIPS"
288)
289END_TOK_TYPES = LazyObject(
290    lambda: frozenset(["SEMI", "AND", "OR", "RPAREN"]), globals(), "END_TOK_TYPES"
291)
292RE_END_TOKS = LazyObject(
293    lambda: re.compile("(;|and|\&\&|or|\|\||\))"), globals(), "RE_END_TOKS"
294)
295LPARENS = LazyObject(
296    lambda: frozenset(
297        ["LPAREN", "AT_LPAREN", "BANG_LPAREN", "DOLLAR_LPAREN", "ATDOLLAR_LPAREN"]
298    ),
299    globals(),
300    "LPARENS",
301)
302
303
304def _is_not_lparen_and_rparen(lparens, rtok):
305    """Tests if an RPAREN token is matched with something other than a plain old
306    LPAREN type.
307    """
308    # note that any([]) is False, so this covers len(lparens) == 0
309    return rtok.type == "RPAREN" and any(x != "LPAREN" for x in lparens)
310
311
312def balanced_parens(line, mincol=0, maxcol=None, lexer=None):
313    """Determines if parentheses are balanced in an expression."""
314    line = line[mincol:maxcol]
315    if lexer is None:
316        lexer = builtins.__xonsh_execer__.parser.lexer
317    if "(" not in line and ")" not in line:
318        return True
319    cnt = 0
320    lexer.input(line)
321    for tok in lexer:
322        if tok.type in LPARENS:
323            cnt += 1
324        elif tok.type == "RPAREN":
325            cnt -= 1
326        elif tok.type == "ERRORTOKEN" and ")" in tok.value:
327            cnt -= 1
328    return cnt == 0
329
330
331def find_next_break(line, mincol=0, lexer=None):
332    """Returns the column number of the next logical break in subproc mode.
333    This function may be useful in finding the maxcol argument of
334    subproc_toks().
335    """
336    if mincol >= 1:
337        line = line[mincol:]
338    if lexer is None:
339        lexer = builtins.__xonsh_execer__.parser.lexer
340    if RE_END_TOKS.search(line) is None:
341        return None
342    maxcol = None
343    lparens = []
344    lexer.input(line)
345    for tok in lexer:
346        if tok.type in LPARENS:
347            lparens.append(tok.type)
348        elif tok.type in END_TOK_TYPES:
349            if _is_not_lparen_and_rparen(lparens, tok):
350                lparens.pop()
351            else:
352                maxcol = tok.lexpos + mincol + 1
353                break
354        elif tok.type == "ERRORTOKEN" and ")" in tok.value:
355            maxcol = tok.lexpos + mincol + 1
356            break
357        elif tok.type == "BANG":
358            maxcol = mincol + len(line) + 1
359            break
360    return maxcol
361
362
363def _offset_from_prev_lines(line, last):
364    lines = line.splitlines(keepends=True)[:last]
365    return sum(map(len, lines))
366
367
368def subproc_toks(
369    line, mincol=-1, maxcol=None, lexer=None, returnline=False, greedy=False
370):
371    """Encapsulates tokens in a source code line in a uncaptured
372    subprocess ![] starting at a minimum column. If there are no tokens
373    (ie in a comment line) this returns None. If greedy is True, it will encapsulate
374    normal parentheses. Greedy is False by default.
375    """
376    if lexer is None:
377        lexer = builtins.__xonsh_execer__.parser.lexer
378    if maxcol is None:
379        maxcol = len(line) + 1
380    lexer.reset()
381    lexer.input(line)
382    toks = []
383    lparens = []
384    saw_macro = False
385    end_offset = 0
386    for tok in lexer:
387        pos = tok.lexpos
388        if tok.type not in END_TOK_TYPES and pos >= maxcol:
389            break
390        if tok.type == "BANG":
391            saw_macro = True
392        if saw_macro and tok.type not in ("NEWLINE", "DEDENT"):
393            toks.append(tok)
394            continue
395        if tok.type in LPARENS:
396            lparens.append(tok.type)
397        if greedy and len(lparens) > 0 and "LPAREN" in lparens:
398            toks.append(tok)
399            if tok.type == "RPAREN":
400                lparens.pop()
401            continue
402        if len(toks) == 0 and tok.type in BEG_TOK_SKIPS:
403            continue  # handle indentation
404        elif len(toks) > 0 and toks[-1].type in END_TOK_TYPES:
405            if _is_not_lparen_and_rparen(lparens, toks[-1]):
406                lparens.pop()  # don't continue or break
407            elif pos < maxcol and tok.type not in ("NEWLINE", "DEDENT", "WS"):
408                if not greedy:
409                    toks.clear()
410                if tok.type in BEG_TOK_SKIPS:
411                    continue
412            else:
413                break
414        if pos < mincol:
415            continue
416        toks.append(tok)
417        if tok.type == "WS" and tok.value == "\\":
418            pass  # line continuation
419        elif tok.type == "NEWLINE":
420            break
421        elif tok.type == "DEDENT":
422            # fake a newline when dedenting without a newline
423            tok.type = "NEWLINE"
424            tok.value = "\n"
425            tok.lineno -= 1
426            if len(toks) >= 2:
427                prev_tok_end = toks[-2].lexpos + len(toks[-2].value)
428            else:
429                prev_tok_end = len(line)
430            if "#" in line[prev_tok_end:]:
431                tok.lexpos = prev_tok_end  # prevents wrapping comments
432            else:
433                tok.lexpos = len(line)
434            break
435        elif check_bad_str_token(tok):
436            return
437    else:
438        if len(toks) > 0 and toks[-1].type in END_TOK_TYPES:
439            if _is_not_lparen_and_rparen(lparens, toks[-1]):
440                pass
441            elif greedy and toks[-1].type == "RPAREN":
442                pass
443            else:
444                toks.pop()
445        if len(toks) == 0:
446            return  # handle comment lines
447        tok = toks[-1]
448        pos = tok.lexpos
449        if isinstance(tok.value, str):
450            end_offset = len(tok.value.rstrip())
451        else:
452            el = line[pos:].split("#")[0].rstrip()
453            end_offset = len(el)
454    if len(toks) == 0:
455        return  # handle comment lines
456    elif saw_macro or greedy:
457        end_offset = len(toks[-1].value.rstrip()) + 1
458    if toks[0].lineno != toks[-1].lineno:
459        # handle multiline cases
460        end_offset += _offset_from_prev_lines(line, toks[-1].lineno)
461    beg, end = toks[0].lexpos, (toks[-1].lexpos + end_offset)
462    end = len(line[:end].rstrip())
463    rtn = "![" + line[beg:end] + "]"
464    if returnline:
465        rtn = line[:beg] + rtn + line[end:]
466    return rtn
467
468
469def check_bad_str_token(tok):
470    """Checks if a token is a bad string."""
471    if tok.type == "ERRORTOKEN" and tok.value == "EOF in multi-line string":
472        return True
473    elif isinstance(tok.value, str) and not check_quotes(tok.value):
474        return True
475    else:
476        return False
477
478
479def check_quotes(s):
480    """Checks a string to make sure that if it starts with quotes, it also
481    ends with quotes.
482    """
483    starts_as_str = RE_BEGIN_STRING.match(s) is not None
484    ends_as_str = s.endswith('"') or s.endswith("'")
485    if not starts_as_str and not ends_as_str:
486        ok = True
487    elif starts_as_str and not ends_as_str:
488        ok = False
489    elif not starts_as_str and ends_as_str:
490        ok = False
491    else:
492        m = RE_COMPLETE_STRING.match(s)
493        ok = m is not None
494    return ok
495
496
497def _have_open_triple_quotes(s):
498    if s.count('"""') % 2 == 1:
499        open_triple = '"""'
500    elif s.count("'''") % 2 == 1:
501        open_triple = "'''"
502    else:
503        open_triple = False
504    return open_triple
505
506
507def get_line_continuation():
508    """ The line continuation characters used in subproc mode. In interactive
509         mode on Windows the backslash must be preceded by a space. This is because
510         paths on Windows may end in a backslash.
511    """
512    if (
513        ON_WINDOWS
514        and hasattr(builtins, "__xonsh_env__")
515        and builtins.__xonsh_env__.get("XONSH_INTERACTIVE", False)
516    ):
517        return " \\"
518    else:
519        return "\\"
520
521
522def get_logical_line(lines, idx):
523    """Returns a single logical line (i.e. one without line continuations)
524    from a list of lines.  This line should begin at index idx. This also
525    returns the number of physical lines the logical line spans. The lines
526    should not contain newlines
527    """
528    n = 1
529    nlines = len(lines)
530    linecont = get_line_continuation()
531    while idx > 0 and lines[idx - 1].endswith(linecont):
532        idx -= 1
533    start = idx
534    line = lines[idx]
535    open_triple = _have_open_triple_quotes(line)
536    while (line.endswith(linecont) or open_triple) and idx < nlines - 1:
537        n += 1
538        idx += 1
539        if line.endswith(linecont):
540            line = line[:-1] + lines[idx]
541        else:
542            line = line + "\n" + lines[idx]
543        open_triple = _have_open_triple_quotes(line)
544    return line, n, start
545
546
547def replace_logical_line(lines, logical, idx, n):
548    """Replaces lines at idx that may end in line continuation with a logical
549    line that spans n lines.
550    """
551    linecont = get_line_continuation()
552    if n == 1:
553        lines[idx] = logical
554        return
555    space = " "
556    for i in range(idx, idx + n - 1):
557        a = len(lines[i])
558        b = logical.find(space, a - 1)
559        if b < 0:
560            # no space found
561            lines[i] = logical
562            logical = ""
563        else:
564            # found space to split on
565            lines[i] = logical[:b] + linecont
566            logical = logical[b:]
567    lines[idx + n - 1] = logical
568
569
570def is_balanced(expr, ltok, rtok):
571    """Determines whether an expression has unbalanced opening and closing tokens."""
572    lcnt = expr.count(ltok)
573    if lcnt == 0:
574        return True
575    rcnt = expr.count(rtok)
576    if lcnt == rcnt:
577        return True
578    else:
579        return False
580
581
582def subexpr_from_unbalanced(expr, ltok, rtok):
583    """Attempts to pull out a valid subexpression for unbalanced grouping,
584    based on opening tokens, eg. '(', and closing tokens, eg. ')'.  This
585    does not do full tokenization, but should be good enough for tab
586    completion.
587    """
588    if is_balanced(expr, ltok, rtok):
589        return expr
590    subexpr = expr.rsplit(ltok, 1)[-1]
591    subexpr = subexpr.rsplit(",", 1)[-1]
592    subexpr = subexpr.rsplit(":", 1)[-1]
593    return subexpr
594
595
596def subexpr_before_unbalanced(expr, ltok, rtok):
597    """Obtains the expression prior to last unbalanced left token."""
598    subexpr, _, post = expr.rpartition(ltok)
599    nrtoks_in_post = post.count(rtok)
600    while nrtoks_in_post != 0:
601        for i in range(nrtoks_in_post):
602            subexpr, _, post = subexpr.rpartition(ltok)
603        nrtoks_in_post = post.count(rtok)
604    _, _, subexpr = subexpr.rpartition(rtok)
605    _, _, subexpr = subexpr.rpartition(ltok)
606    return subexpr
607
608
609def decode(s, encoding=None):
610    encoding = encoding or DEFAULT_ENCODING
611    return s.decode(encoding, "replace")
612
613
614def encode(u, encoding=None):
615    encoding = encoding or DEFAULT_ENCODING
616    return u.encode(encoding, "replace")
617
618
619def cast_unicode(s, encoding=None):
620    if isinstance(s, bytes):
621        return decode(s, encoding)
622    return s
623
624
625def safe_hasattr(obj, attr):
626    """In recent versions of Python, hasattr() only catches AttributeError.
627    This catches all errors.
628    """
629    try:
630        getattr(obj, attr)
631        return True
632    except Exception:  # pylint:disable=bare-except
633        return False
634
635
636def indent(instr, nspaces=4, ntabs=0, flatten=False):
637    """Indent a string a given number of spaces or tabstops.
638
639    indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
640
641    Parameters
642    ----------
643    instr : basestring
644        The string to be indented.
645    nspaces : int (default: 4)
646        The number of spaces to be indented.
647    ntabs : int (default: 0)
648        The number of tabs to be indented.
649    flatten : bool (default: False)
650        Whether to scrub existing indentation.  If True, all lines will be
651        aligned to the same indentation.  If False, existing indentation will
652        be strictly increased.
653
654    Returns
655    -------
656    outstr : string indented by ntabs and nspaces.
657
658    """
659    if instr is None:
660        return
661    ind = "\t" * ntabs + " " * nspaces
662    if flatten:
663        pat = re.compile(r"^\s*", re.MULTILINE)
664    else:
665        pat = re.compile(r"^", re.MULTILINE)
666    outstr = re.sub(pat, ind, instr)
667    if outstr.endswith(os.linesep + ind):
668        return outstr[: -len(ind)]
669    else:
670        return outstr
671
672
673def get_sep():
674    """ Returns the appropriate filepath separator char depending on OS and
675    xonsh options set
676    """
677    if ON_WINDOWS and builtins.__xonsh_env__.get("FORCE_POSIX_PATHS"):
678        return os.altsep
679    else:
680        return os.sep
681
682
683def fallback(cond, backup):
684    """Decorator for returning the object if cond is true and a backup if cond
685    is false.
686    """
687
688    def dec(obj):
689        return obj if cond else backup
690
691    return dec
692
693
694# The following redirect classes were taken directly from Python 3.5's source
695# code (from the contextlib module). This can be removed when 3.5 is released,
696# although redirect_stdout exists in 3.4, redirect_stderr does not.
697# See the Python software license: https://docs.python.org/3/license.html
698# Copyright (c) Python Software Foundation. All rights reserved.
699class _RedirectStream:
700
701    _stream = None
702
703    def __init__(self, new_target):
704        self._new_target = new_target
705        # We use a list of old targets to make this CM re-entrant
706        self._old_targets = []
707
708    def __enter__(self):
709        self._old_targets.append(getattr(sys, self._stream))
710        setattr(sys, self._stream, self._new_target)
711        return self._new_target
712
713    def __exit__(self, exctype, excinst, exctb):
714        setattr(sys, self._stream, self._old_targets.pop())
715
716
717class redirect_stdout(_RedirectStream):
718    """Context manager for temporarily redirecting stdout to another file::
719
720        # How to send help() to stderr
721        with redirect_stdout(sys.stderr):
722            help(dir)
723
724        # How to write help() to a file
725        with open('help.txt', 'w') as f:
726            with redirect_stdout(f):
727                help(pow)
728
729    Mostly for backwards compatibility.
730    """
731
732    _stream = "stdout"
733
734
735class redirect_stderr(_RedirectStream):
736    """Context manager for temporarily redirecting stderr to another file."""
737
738    _stream = "stderr"
739
740
741def _yield_accessible_unix_file_names(path):
742    """yield file names of executable files in path."""
743    if not os.path.exists(path):
744        return
745    for file_ in scandir(path):
746        try:
747            if file_.is_file() and os.access(file_.path, os.X_OK):
748                yield file_.name
749        except (FileNotFoundError, NotADirectoryError):
750            # broken Symlink are neither dir not files
751            pass
752
753
754def _executables_in_posix(path):
755    if not os.path.exists(path):
756        return
757    elif PYTHON_VERSION_INFO < (3, 5, 0):
758        for fname in os.listdir(path):
759            fpath = os.path.join(path, fname)
760            if (
761                os.path.exists(fpath)
762                and os.access(fpath, os.X_OK)
763                and (not os.path.isdir(fpath))
764            ):
765                yield fname
766    else:
767        yield from _yield_accessible_unix_file_names(path)
768
769
770def _executables_in_windows(path):
771    if not os.path.isdir(path):
772        return
773    extensions = builtins.__xonsh_env__["PATHEXT"]
774    if PYTHON_VERSION_INFO < (3, 5, 0):
775        for fname in os.listdir(path):
776            fpath = os.path.join(path, fname)
777            if os.path.exists(fpath) and not os.path.isdir(fpath):
778                base_name, ext = os.path.splitext(fname)
779                if ext.upper() in extensions:
780                    yield fname
781    else:
782        for x in scandir(path):
783            if x.is_file():
784                fname = x.name
785            else:
786                continue
787            base_name, ext = os.path.splitext(fname)
788            if ext.upper() in extensions:
789                yield fname
790
791
792def executables_in(path):
793    """Returns a generator of files in path that the user could execute. """
794    if ON_WINDOWS:
795        func = _executables_in_windows
796    else:
797        func = _executables_in_posix
798    try:
799        yield from func(path)
800    except PermissionError:
801        return
802
803
804def command_not_found(cmd):
805    """Uses the debian/ubuntu command-not-found utility to suggest packages for a
806    command that cannot currently be found.
807    """
808    if not ON_LINUX:
809        return ""
810    elif not os.path.isfile("/usr/lib/command-not-found"):
811        # utility is not on PATH
812        return ""
813    c = "/usr/lib/command-not-found {0}; exit 0"
814    s = subprocess.check_output(
815        c.format(cmd), universal_newlines=True, stderr=subprocess.STDOUT, shell=True
816    )
817    s = "\n".join(s.rstrip().splitlines()).strip()
818    return s
819
820
821def suggest_commands(cmd, env, aliases):
822    """Suggests alternative commands given an environment and aliases."""
823    if not env.get("SUGGEST_COMMANDS"):
824        return ""
825    thresh = env.get("SUGGEST_THRESHOLD")
826    max_sugg = env.get("SUGGEST_MAX_NUM")
827    if max_sugg < 0:
828        max_sugg = float("inf")
829    cmd = cmd.lower()
830    suggested = {}
831
832    for alias in builtins.aliases:
833        if alias not in suggested:
834            if levenshtein(alias.lower(), cmd, thresh) < thresh:
835                suggested[alias] = "Alias"
836
837    for path in filter(os.path.isdir, env.get("PATH")):
838        for _file in executables_in(path):
839            if (
840                _file not in suggested
841                and levenshtein(_file.lower(), cmd, thresh) < thresh
842            ):
843                suggested[_file] = "Command ({0})".format(os.path.join(path, _file))
844
845    suggested = collections.OrderedDict(
846        sorted(
847            suggested.items(), key=lambda x: suggestion_sort_helper(x[0].lower(), cmd)
848        )
849    )
850    num = min(len(suggested), max_sugg)
851
852    if num == 0:
853        rtn = command_not_found(cmd)
854    else:
855        oneof = "" if num == 1 else "one of "
856        tips = "Did you mean {}the following?".format(oneof)
857        items = list(suggested.popitem(False) for _ in range(num))
858        length = max(len(key) for key, _ in items) + 2
859        alternatives = "\n".join(
860            "    {: <{}} {}".format(key + ":", length, val) for key, val in items
861        )
862        rtn = "{}\n{}".format(tips, alternatives)
863        c = command_not_found(cmd)
864        rtn += ("\n\n" + c) if len(c) > 0 else ""
865    return rtn
866
867
868def print_exception(msg=None):
869    """Print exceptions with/without traceback."""
870    env = getattr(builtins, "__xonsh_env__", None)
871    # flags indicating whether the traceback options have been manually set
872    if env is None:
873        env = os_environ
874        manually_set_trace = "XONSH_SHOW_TRACEBACK" in env
875        manually_set_logfile = "XONSH_TRACEBACK_LOGFILE" in env
876    else:
877        manually_set_trace = env.is_manually_set("XONSH_SHOW_TRACEBACK")
878        manually_set_logfile = env.is_manually_set("XONSH_TRACEBACK_LOGFILE")
879    if (not manually_set_trace) and (not manually_set_logfile):
880        # Notify about the traceback output possibility if neither of
881        # the two options have been manually set
882        sys.stderr.write(
883            "xonsh: For full traceback set: " "$XONSH_SHOW_TRACEBACK = True\n"
884        )
885    # get env option for traceback and convert it if necessary
886    show_trace = env.get("XONSH_SHOW_TRACEBACK", False)
887    if not is_bool(show_trace):
888        show_trace = to_bool(show_trace)
889    # if the trace option has been set, print all traceback info to stderr
890    if show_trace:
891        # notify user about XONSH_TRACEBACK_LOGFILE if it has
892        # not been set manually
893        if not manually_set_logfile:
894            sys.stderr.write(
895                "xonsh: To log full traceback to a file set: "
896                "$XONSH_TRACEBACK_LOGFILE = <filename>\n"
897            )
898        traceback.print_exc()
899    # additionally, check if a file for traceback logging has been
900    # specified and convert to a proper option if needed
901    log_file = env.get("XONSH_TRACEBACK_LOGFILE", None)
902    log_file = to_logfile_opt(log_file)
903    if log_file:
904        # if log_file <> '' or log_file <> None, append
905        # traceback log there as well
906        with open(os.path.abspath(log_file), "a") as f:
907            traceback.print_exc(file=f)
908
909    if not show_trace:
910        # if traceback output is disabled, print the exception's
911        # error message on stderr.
912        display_error_message()
913    if msg:
914        msg = msg if msg.endswith("\n") else msg + "\n"
915        sys.stderr.write(msg)
916
917
918def display_error_message(strip_xonsh_error_types=True):
919    """
920    Prints the error message of the current exception on stderr.
921    """
922    exc_type, exc_value, exc_traceback = sys.exc_info()
923    exception_only = traceback.format_exception_only(exc_type, exc_value)
924    if exc_type is XonshError and strip_xonsh_error_types:
925        exception_only[0] = exception_only[0].partition(": ")[-1]
926    sys.stderr.write("".join(exception_only))
927
928
929def is_writable_file(filepath):
930    """
931    Checks if a filepath is valid for writing.
932    """
933    filepath = expand_path(filepath)
934    # convert to absolute path if needed
935    if not os.path.isabs(filepath):
936        filepath = os.path.abspath(filepath)
937    # cannot write to directories
938    if os.path.isdir(filepath):
939        return False
940    # if the file exists and is writable, we're fine
941    if os.path.exists(filepath):
942        return True if os.access(filepath, os.W_OK) else False
943    # if the path doesn't exist, isolate its directory component
944    # and ensure that directory is writable instead
945    return os.access(os.path.dirname(filepath), os.W_OK)
946
947
948# Modified from Public Domain code, by Magnus Lie Hetland
949# from http://hetland.org/coding/python/levenshtein.py
950def levenshtein(a, b, max_dist=float("inf")):
951    """Calculates the Levenshtein distance between a and b."""
952    n, m = len(a), len(b)
953    if abs(n - m) > max_dist:
954        return float("inf")
955    if n > m:
956        # Make sure n <= m, to use O(min(n,m)) space
957        a, b = b, a
958        n, m = m, n
959    current = range(n + 1)
960    for i in range(1, m + 1):
961        previous, current = current, [i] + [0] * n
962        for j in range(1, n + 1):
963            add, delete = previous[j] + 1, current[j - 1] + 1
964            change = previous[j - 1]
965            if a[j - 1] != b[i - 1]:
966                change = change + 1
967            current[j] = min(add, delete, change)
968    return current[n]
969
970
971def suggestion_sort_helper(x, y):
972    """Returns a score (lower is better) for x based on how similar
973    it is to y.  Used to rank suggestions."""
974    x = x.lower()
975    y = y.lower()
976    lendiff = len(x) + len(y)
977    inx = len([i for i in x if i not in y])
978    iny = len([i for i in y if i not in x])
979    return lendiff + inx + iny
980
981
982def escape_windows_cmd_string(s):
983    """Returns a string that is usable by the Windows cmd.exe.
984    The escaping is based on details here and empirical testing:
985    http://www.robvanderwoude.com/escapechars.php
986    """
987    for c in '^()%!<>&|"':
988        s = s.replace(c, "^" + c)
989    return s
990
991
992def argvquote(arg, force=False):
993    """ Returns an argument quoted in such a way that that CommandLineToArgvW
994    on Windows will return the argument string unchanged.
995    This is the same thing Popen does when supplied with an list of arguments.
996    Arguments in a command line should be separated by spaces; this
997    function does not add these spaces. This implementation follows the
998    suggestions outlined here:
999    https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
1000    """
1001    if not force and len(arg) != 0 and not any([c in arg for c in ' \t\n\v"']):
1002        return arg
1003    else:
1004        n_backslashes = 0
1005        cmdline = '"'
1006        for c in arg:
1007            if c == "\\":
1008                # first count the number of current backslashes
1009                n_backslashes += 1
1010                continue
1011            if c == '"':
1012                # Escape all backslashes and the following double quotation mark
1013                cmdline += (n_backslashes * 2 + 1) * "\\"
1014            else:
1015                # backslashes are not special here
1016                cmdline += n_backslashes * "\\"
1017            n_backslashes = 0
1018            cmdline += c
1019        # Escape all backslashes, but let the terminating
1020        # double quotation mark we add below be interpreted
1021        # as a metacharacter
1022        cmdline += +n_backslashes * 2 * "\\" + '"'
1023        return cmdline
1024
1025
1026def on_main_thread():
1027    """Checks if we are on the main thread or not."""
1028    return threading.current_thread() is threading.main_thread()
1029
1030
1031_DEFAULT_SENTINEL = object()
1032
1033
1034@contextlib.contextmanager
1035def swap(namespace, name, value, default=_DEFAULT_SENTINEL):
1036    """Swaps a current variable name in a namespace for another value, and then
1037    replaces it when the context is exited.
1038    """
1039    old = getattr(namespace, name, default)
1040    setattr(namespace, name, value)
1041    yield value
1042    if old is default:
1043        delattr(namespace, name)
1044    else:
1045        setattr(namespace, name, old)
1046
1047
1048@contextlib.contextmanager
1049def swap_values(d, updates, default=_DEFAULT_SENTINEL):
1050    """Updates a dictionary (or other mapping) with values from another mapping,
1051    and then restores the original mapping when the context is exited.
1052    """
1053    old = {k: d.get(k, default) for k in updates}
1054    d.update(updates)
1055    yield
1056    for k, v in old.items():
1057        if v is default and k in d:
1058            del d[k]
1059        else:
1060            d[k] = v
1061
1062
1063#
1064# Validators and converters
1065#
1066
1067
1068def is_int(x):
1069    """Tests if something is an integer"""
1070    return isinstance(x, int)
1071
1072
1073def is_float(x):
1074    """Tests if something is a float"""
1075    return isinstance(x, float)
1076
1077
1078def is_string(x):
1079    """Tests if something is a string"""
1080    return isinstance(x, str)
1081
1082
1083def is_slice(x):
1084    """Tests if something is a slice"""
1085    return isinstance(x, slice)
1086
1087
1088def is_callable(x):
1089    """Tests if something is callable"""
1090    return callable(x)
1091
1092
1093def is_string_or_callable(x):
1094    """Tests if something is a string or callable"""
1095    return is_string(x) or is_callable(x)
1096
1097
1098def is_class(x):
1099    """Tests if something is a class"""
1100    return isinstance(x, type)
1101
1102
1103def always_true(x):
1104    """Returns True"""
1105    return True
1106
1107
1108def always_false(x):
1109    """Returns False"""
1110    return False
1111
1112
1113def ensure_string(x):
1114    """Returns a string if x is not a string, and x if it already is."""
1115    return str(x)
1116
1117
1118def is_env_path(x):
1119    """This tests if something is an environment path, ie a list of strings."""
1120    return isinstance(x, EnvPath)
1121
1122
1123def str_to_env_path(x):
1124    """Converts a string to an environment path, ie a list of strings,
1125    splitting on the OS separator.
1126    """
1127    # splitting will be done implicitly in EnvPath's __init__
1128    return EnvPath(x)
1129
1130
1131def env_path_to_str(x):
1132    """Converts an environment path to a string by joining on the OS
1133    separator.
1134    """
1135    return os.pathsep.join(x)
1136
1137
1138def is_bool(x):
1139    """Tests if something is a boolean."""
1140    return isinstance(x, bool)
1141
1142
1143def is_logfile_opt(x):
1144    """
1145    Checks if x is a valid $XONSH_TRACEBACK_LOGFILE option. Returns False
1146    if x is not a writable/creatable file or an empty string or None.
1147    """
1148    if x is None:
1149        return True
1150    if not isinstance(x, str):
1151        return False
1152    else:
1153        return is_writable_file(x) or x == ""
1154
1155
1156def to_logfile_opt(x):
1157    """
1158    Converts a $XONSH_TRACEBACK_LOGFILE option to either a str containing
1159    the filepath if it is a writable file or None if the filepath is not
1160    valid, informing the user on stderr about the invalid choice.
1161    """
1162    if is_logfile_opt(x):
1163        return x
1164    else:
1165        # if option is not valid, return a proper
1166        # option and inform the user on stderr
1167        sys.stderr.write(
1168            "xonsh: $XONSH_TRACEBACK_LOGFILE must be a "
1169            "filepath pointing to a file that either exists "
1170            "and is writable or that can be created.\n"
1171        )
1172        return None
1173
1174
1175def logfile_opt_to_str(x):
1176    """
1177    Detypes a $XONSH_TRACEBACK_LOGFILE option.
1178    """
1179    if x is None:
1180        # None should not be detyped to 'None', as 'None' constitutes
1181        # a perfectly valid filename and retyping it would introduce
1182        # ambiguity. Detype to the empty string instead.
1183        return ""
1184    return str(x)
1185
1186
1187_FALSES = LazyObject(
1188    lambda: frozenset(["", "0", "n", "f", "no", "none", "false"]), globals(), "_FALSES"
1189)
1190
1191
1192def to_bool(x):
1193    """"Converts to a boolean in a semantically meaningful way."""
1194    if isinstance(x, bool):
1195        return x
1196    elif isinstance(x, str):
1197        return False if x.lower() in _FALSES else True
1198    else:
1199        return bool(x)
1200
1201
1202def to_itself(x):
1203    """No conversion, returns itself."""
1204    return x
1205
1206
1207def bool_to_str(x):
1208    """Converts a bool to an empty string if False and the string '1' if
1209    True.
1210    """
1211    return "1" if x else ""
1212
1213
1214_BREAKS = LazyObject(
1215    lambda: frozenset(["b", "break", "s", "skip", "q", "quit"]), globals(), "_BREAKS"
1216)
1217
1218
1219def to_bool_or_break(x):
1220    if isinstance(x, str) and x.lower() in _BREAKS:
1221        return "break"
1222    else:
1223        return to_bool(x)
1224
1225
1226def is_bool_or_int(x):
1227    """Returns whether a value is a boolean or integer."""
1228    return is_bool(x) or is_int(x)
1229
1230
1231def to_bool_or_int(x):
1232    """Converts a value to a boolean or an integer."""
1233    if isinstance(x, str):
1234        return int(x) if x.isdigit() else to_bool(x)
1235    elif is_int(x):  # bools are ints too!
1236        return x
1237    else:
1238        return bool(x)
1239
1240
1241def bool_or_int_to_str(x):
1242    """Converts a boolean or integer to a string."""
1243    return bool_to_str(x) if is_bool(x) else str(x)
1244
1245
1246@lazyobject
1247def SLICE_REG():
1248    return re.compile(
1249        r"(?P<start>(?:-\d)?\d*):(?P<end>(?:-\d)?\d*):?(?P<step>(?:-\d)?\d*)"
1250    )
1251
1252
1253def ensure_slice(x):
1254    """Try to convert an object into a slice, complain on failure"""
1255    if not x and x != 0:
1256        return slice(None)
1257    elif is_slice(x):
1258        return x
1259    try:
1260        x = int(x)
1261        if x != -1:
1262            s = slice(x, x + 1)
1263        else:
1264            s = slice(-1, None, None)
1265    except ValueError:
1266        x = x.strip("[]()")
1267        m = SLICE_REG.fullmatch(x)
1268        if m:
1269            groups = (int(i) if i else None for i in m.groups())
1270            s = slice(*groups)
1271        else:
1272            raise ValueError("cannot convert {!r} to slice".format(x))
1273    except TypeError:
1274        try:
1275            s = slice(*(int(i) for i in x))
1276        except (TypeError, ValueError):
1277            raise ValueError("cannot convert {!r} to slice".format(x))
1278    return s
1279
1280
1281def get_portions(it, slices):
1282    """Yield from portions of an iterable.
1283
1284    Parameters
1285    ----------
1286    it: iterable
1287    slices: a slice or a list of slice objects
1288    """
1289    if is_slice(slices):
1290        slices = [slices]
1291    if len(slices) == 1:
1292        s = slices[0]
1293        try:
1294            yield from itertools.islice(it, s.start, s.stop, s.step)
1295            return
1296        except ValueError:  # islice failed
1297            pass
1298    it = list(it)
1299    for s in slices:
1300        yield from it[s]
1301
1302
1303def is_slice_as_str(x):
1304    """
1305    Test if string x is a slice. If not a string return False.
1306    """
1307    try:
1308        x = x.strip("[]()")
1309        m = SLICE_REG.fullmatch(x)
1310        if m:
1311            return True
1312    except AttributeError:
1313        pass
1314    return False
1315
1316
1317def is_int_as_str(x):
1318    """
1319    Test if string x is an integer. If not a string return False.
1320    """
1321    try:
1322        return x.isdecimal()
1323    except AttributeError:
1324        return False
1325
1326
1327def is_string_set(x):
1328    """Tests if something is a set of strings"""
1329    return isinstance(x, cabc.Set) and all(isinstance(a, str) for a in x)
1330
1331
1332def csv_to_set(x):
1333    """Convert a comma-separated list of strings to a set of strings."""
1334    if not x:
1335        return set()
1336    else:
1337        return set(x.split(","))
1338
1339
1340def set_to_csv(x):
1341    """Convert a set of strings to a comma-separated list of strings."""
1342    return ",".join(x)
1343
1344
1345def pathsep_to_set(x):
1346    """Converts a os.pathsep separated string to a set of strings."""
1347    if not x:
1348        return set()
1349    else:
1350        return set(x.split(os.pathsep))
1351
1352
1353def set_to_pathsep(x, sort=False):
1354    """Converts a set to an os.pathsep separated string. The sort kwarg
1355    specifies whether to sort the set prior to str conversion.
1356    """
1357    if sort:
1358        x = sorted(x)
1359    return os.pathsep.join(x)
1360
1361
1362def is_string_seq(x):
1363    """Tests if something is a sequence of strings"""
1364    return isinstance(x, cabc.Sequence) and all(isinstance(a, str) for a in x)
1365
1366
1367def is_nonstring_seq_of_strings(x):
1368    """Tests if something is a sequence of strings, where the top-level
1369    sequence is not a string itself.
1370    """
1371    return (
1372        isinstance(x, cabc.Sequence)
1373        and not isinstance(x, str)
1374        and all(isinstance(a, str) for a in x)
1375    )
1376
1377
1378def pathsep_to_seq(x):
1379    """Converts a os.pathsep separated string to a sequence of strings."""
1380    if not x:
1381        return []
1382    else:
1383        return x.split(os.pathsep)
1384
1385
1386def seq_to_pathsep(x):
1387    """Converts a sequence to an os.pathsep separated string."""
1388    return os.pathsep.join(x)
1389
1390
1391def pathsep_to_upper_seq(x):
1392    """Converts a os.pathsep separated string to a sequence of
1393    uppercase strings.
1394    """
1395    if not x:
1396        return []
1397    else:
1398        return x.upper().split(os.pathsep)
1399
1400
1401def seq_to_upper_pathsep(x):
1402    """Converts a sequence to an uppercase os.pathsep separated string."""
1403    return os.pathsep.join(x).upper()
1404
1405
1406def is_bool_seq(x):
1407    """Tests if an object is a sequence of bools."""
1408    return isinstance(x, cabc.Sequence) and all(isinstance(y, bool) for y in x)
1409
1410
1411def csv_to_bool_seq(x):
1412    """Takes a comma-separated string and converts it into a list of bools."""
1413    return [to_bool(y) for y in csv_to_set(x)]
1414
1415
1416def bool_seq_to_csv(x):
1417    """Converts a sequence of bools to a comma-separated string."""
1418    return ",".join(map(str, x))
1419
1420
1421def ptk2_color_depth_setter(x):
1422    """ Setter function for $PROMPT_TOOLKIT_COLOR_DEPTH. Also
1423        updates os.environ so prompt toolkit can pickup the value.
1424    """
1425    x = str(x)
1426    if x in {
1427        "DEPTH_1_BIT",
1428        "MONOCHROME",
1429        "DEPTH_4_BIT",
1430        "ANSI_COLORS_ONLY",
1431        "DEPTH_8_BIT",
1432        "DEFAULT",
1433        "DEPTH_24_BIT",
1434        "TRUE_COLOR",
1435    }:
1436        pass
1437    elif x in {"", None}:
1438        x = ""
1439    else:
1440        msg = '"{}" is not a valid value for $PROMPT_TOOLKIT_COLOR_DEPTH. '.format(x)
1441        warnings.warn(msg, RuntimeWarning)
1442        x = ""
1443    if x == "" and "PROMPT_TOOLKIT_COLOR_DEPTH" in os_environ:
1444        del os_environ["PROMPT_TOOLKIT_COLOR_DEPTH"]
1445    else:
1446        os_environ["PROMPT_TOOLKIT_COLOR_DEPTH"] = x
1447    return x
1448
1449
1450def is_completions_display_value(x):
1451    return x in {"none", "single", "multi"}
1452
1453
1454def to_completions_display_value(x):
1455    x = str(x).lower()
1456    if x in {"none", "false"}:
1457        x = "none"
1458    elif x in {"multi", "true"}:
1459        x = "multi"
1460    elif x in {"single", "readline"}:
1461        pass
1462    else:
1463        msg = '"{}" is not a valid value for $COMPLETIONS_DISPLAY. '.format(x)
1464        msg += 'Using "multi".'
1465        warnings.warn(msg, RuntimeWarning)
1466        x = "multi"
1467    return x
1468
1469
1470def setup_win_unicode_console(enable):
1471    """"Enables or disables unicode display on windows."""
1472    try:
1473        import win_unicode_console
1474    except ImportError:
1475        win_unicode_console = False
1476    enable = to_bool(enable)
1477    if ON_WINDOWS and win_unicode_console:
1478        if enable:
1479            win_unicode_console.enable()
1480        else:
1481            win_unicode_console.disable()
1482    return enable
1483
1484
1485# history validation
1486
1487_min_to_sec = lambda x: 60.0 * float(x)
1488_hour_to_sec = lambda x: 60.0 * _min_to_sec(x)
1489_day_to_sec = lambda x: 24.0 * _hour_to_sec(x)
1490_month_to_sec = lambda x: 30.4375 * _day_to_sec(x)
1491_year_to_sec = lambda x: 365.25 * _day_to_sec(x)
1492_kb_to_b = lambda x: 1024 * int(x)
1493_mb_to_b = lambda x: 1024 * _kb_to_b(x)
1494_gb_to_b = lambda x: 1024 * _mb_to_b(x)
1495_tb_to_b = lambda x: 1024 * _tb_to_b(x)
1496
1497CANON_HISTORY_UNITS = LazyObject(
1498    lambda: frozenset(["commands", "files", "s", "b"]), globals(), "CANON_HISTORY_UNITS"
1499)
1500
1501HISTORY_UNITS = LazyObject(
1502    lambda: {
1503        "": ("commands", int),
1504        "c": ("commands", int),
1505        "cmd": ("commands", int),
1506        "cmds": ("commands", int),
1507        "command": ("commands", int),
1508        "commands": ("commands", int),
1509        "f": ("files", int),
1510        "files": ("files", int),
1511        "s": ("s", float),
1512        "sec": ("s", float),
1513        "second": ("s", float),
1514        "seconds": ("s", float),
1515        "m": ("s", _min_to_sec),
1516        "min": ("s", _min_to_sec),
1517        "mins": ("s", _min_to_sec),
1518        "h": ("s", _hour_to_sec),
1519        "hr": ("s", _hour_to_sec),
1520        "hour": ("s", _hour_to_sec),
1521        "hours": ("s", _hour_to_sec),
1522        "d": ("s", _day_to_sec),
1523        "day": ("s", _day_to_sec),
1524        "days": ("s", _day_to_sec),
1525        "mon": ("s", _month_to_sec),
1526        "month": ("s", _month_to_sec),
1527        "months": ("s", _month_to_sec),
1528        "y": ("s", _year_to_sec),
1529        "yr": ("s", _year_to_sec),
1530        "yrs": ("s", _year_to_sec),
1531        "year": ("s", _year_to_sec),
1532        "years": ("s", _year_to_sec),
1533        "b": ("b", int),
1534        "byte": ("b", int),
1535        "bytes": ("b", int),
1536        "kb": ("b", _kb_to_b),
1537        "kilobyte": ("b", _kb_to_b),
1538        "kilobytes": ("b", _kb_to_b),
1539        "mb": ("b", _mb_to_b),
1540        "meg": ("b", _mb_to_b),
1541        "megs": ("b", _mb_to_b),
1542        "megabyte": ("b", _mb_to_b),
1543        "megabytes": ("b", _mb_to_b),
1544        "gb": ("b", _gb_to_b),
1545        "gig": ("b", _gb_to_b),
1546        "gigs": ("b", _gb_to_b),
1547        "gigabyte": ("b", _gb_to_b),
1548        "gigabytes": ("b", _gb_to_b),
1549        "tb": ("b", _tb_to_b),
1550        "terabyte": ("b", _tb_to_b),
1551        "terabytes": ("b", _tb_to_b),
1552    },
1553    globals(),
1554    "HISTORY_UNITS",
1555)
1556"""Maps lowercase unit names to canonical name and conversion utilities."""
1557
1558
1559def is_history_tuple(x):
1560    """Tests if something is a proper history value, units tuple."""
1561    if (
1562        isinstance(x, cabc.Sequence)
1563        and len(x) == 2
1564        and isinstance(x[0], (int, float))
1565        and x[1].lower() in CANON_HISTORY_UNITS
1566    ):
1567        return True
1568    return False
1569
1570
1571def is_history_backend(x):
1572    """Tests if something is a valid history backend."""
1573    return is_string(x) or is_class(x) or isinstance(x, object)
1574
1575
1576def is_dynamic_cwd_width(x):
1577    """ Determine if the input is a valid input for the DYNAMIC_CWD_WIDTH
1578    environment variable.
1579    """
1580    return (
1581        isinstance(x, tuple)
1582        and len(x) == 2
1583        and isinstance(x[0], float)
1584        and x[1] in set("c%")
1585    )
1586
1587
1588def to_dynamic_cwd_tuple(x):
1589    """Convert to a canonical cwd_width tuple."""
1590    unit = "c"
1591    if isinstance(x, str):
1592        if x[-1] == "%":
1593            x = x[:-1]
1594            unit = "%"
1595        else:
1596            unit = "c"
1597        return (float(x), unit)
1598    else:
1599        return (float(x[0]), x[1])
1600
1601
1602def dynamic_cwd_tuple_to_str(x):
1603    """Convert a canonical cwd_width tuple to a string."""
1604    if x[1] == "%":
1605        return str(x[0]) + "%"
1606    else:
1607        return str(x[0])
1608
1609
1610RE_HISTORY_TUPLE = LazyObject(
1611    lambda: re.compile("([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)\s*([A-Za-z]*)"),
1612    globals(),
1613    "RE_HISTORY_TUPLE",
1614)
1615
1616
1617def to_history_tuple(x):
1618    """Converts to a canonical history tuple."""
1619    if not isinstance(x, (cabc.Sequence, float, int)):
1620        raise ValueError("history size must be given as a sequence or number")
1621    if isinstance(x, str):
1622        m = RE_HISTORY_TUPLE.match(x.strip().lower())
1623        return to_history_tuple((m.group(1), m.group(3)))
1624    elif isinstance(x, (float, int)):
1625        return to_history_tuple((x, "commands"))
1626    units, converter = HISTORY_UNITS[x[1]]
1627    value = converter(x[0])
1628    return (value, units)
1629
1630
1631def history_tuple_to_str(x):
1632    """Converts a valid history tuple to a canonical string."""
1633    return "{0} {1}".format(*x)
1634
1635
1636def format_color(string, **kwargs):
1637    """Formats strings that may contain colors. This simply dispatches to the
1638    shell instances method of the same name. The results of this function should
1639    be directly usable by print_color().
1640    """
1641    return builtins.__xonsh_shell__.shell.format_color(string, **kwargs)
1642
1643
1644def print_color(string, **kwargs):
1645    """Prints a string that may contain colors. This dispatched to the shell
1646    method of the same name. Colors will be formatted if they have not already
1647    been.
1648    """
1649    builtins.__xonsh_shell__.shell.print_color(string, **kwargs)
1650
1651
1652def color_style_names():
1653    """Returns an iterable of all available style names."""
1654    return builtins.__xonsh_shell__.shell.color_style_names()
1655
1656
1657def color_style():
1658    """Returns the current color map."""
1659    return builtins.__xonsh_shell__.shell.color_style()
1660
1661
1662def _token_attr_from_stylemap(stylemap):
1663    """yields tokens attr, and index from a stylemap """
1664    import prompt_toolkit as ptk
1665
1666    if builtins.__xonsh_shell__.shell_type == "prompt_toolkit1":
1667        style = ptk.styles.style_from_dict(stylemap)
1668        for token in stylemap:
1669            yield token, style.token_to_attrs[token]
1670    else:
1671        style = ptk.styles.style_from_pygments_dict(stylemap)
1672        for token in stylemap:
1673            style_str = "class:{}".format(
1674                ptk.styles.pygments.pygments_token_to_classname(token)
1675            )
1676            yield (token, style.get_attrs_for_style_str(style_str))
1677
1678
1679def _get_color_lookup_table():
1680    """Returns the prompt_toolkit win32 ColorLookupTable """
1681    if builtins.__xonsh_shell__.shell_type == "prompt_toolkit1":
1682        from prompt_toolkit.terminal.win32_output import ColorLookupTable
1683    else:
1684        from prompt_toolkit.output.win32 import ColorLookupTable
1685    return ColorLookupTable()
1686
1687
1688def _get_color_indexes(style_map):
1689    """Generates the color and windows color index for a style """
1690    table = _get_color_lookup_table()
1691    for token, attr in _token_attr_from_stylemap(style_map):
1692        if attr.color:
1693            index = table.lookup_fg_color(attr.color)
1694            try:
1695                rgb = (
1696                    int(attr.color[0:2], 16),
1697                    int(attr.color[2:4], 16),
1698                    int(attr.color[4:6], 16),
1699                )
1700            except Exception:
1701                rgb = None
1702            yield token, index, rgb
1703
1704
1705# Map of new PTK2 color names to PTK1 variants
1706PTK_NEW_OLD_COLOR_MAP = LazyObject(
1707    lambda: {
1708        "black": "black",
1709        "red": "darkred",
1710        "green": "darkgreen",
1711        "yellow": "brown",
1712        "blue": "darkblue",
1713        "magenta": "purple",
1714        "cyan": "teal",
1715        "gray": "lightgray",
1716        "brightblack": "darkgray",
1717        "brightred": "red",
1718        "brightgreen": "green",
1719        "brightyellow": "yellow",
1720        "brightblue": "blue",
1721        "brightmagenta": "fuchsia",
1722        "brightcyan": "turquoise",
1723        "white": "white",
1724    },
1725    globals(),
1726    "PTK_NEW_OLD_COLOR_MAP",
1727)
1728
1729# Map of new ansicolor names to old PTK1 names
1730ANSICOLOR_NAMES_MAP = LazyObject(
1731    lambda: {"ansi" + k: "#ansi" + v for k, v in PTK_NEW_OLD_COLOR_MAP.items()},
1732    globals(),
1733    "ANSICOLOR_NAMES_MAP",
1734)
1735
1736
1737def _win10_color_map():
1738    cmap = {
1739        "ansiblack": (12, 12, 12),
1740        "ansiblue": (0, 55, 218),
1741        "ansigreen": (19, 161, 14),
1742        "ansicyan": (58, 150, 221),
1743        "ansired": (197, 15, 31),
1744        "ansimagenta": (136, 23, 152),
1745        "ansiyellow": (193, 156, 0),
1746        "ansigray": (204, 204, 204),
1747        "ansibrightblack": (118, 118, 118),
1748        "ansibrightblue": (59, 120, 255),
1749        "ansibrightgreen": (22, 198, 12),
1750        "ansibrightcyan": (97, 214, 214),
1751        "ansibrightred": (231, 72, 86),
1752        "ansibrightmagenta": (180, 0, 158),
1753        "ansibrightyellow": (249, 241, 165),
1754        "ansiwhite": (242, 242, 242),
1755    }
1756    return {
1757        k: "#{0:02x}{1:02x}{2:02x}".format(r, g, b) for k, (r, g, b) in cmap.items()
1758    }
1759
1760
1761WIN10_COLOR_MAP = LazyObject(_win10_color_map, globals(), "WIN10_COLOR_MAP")
1762
1763
1764def _win_bold_color_map():
1765    """ Map dark ansi colors to lighter version. """
1766    return {
1767        "ansiblack": "ansibrightblack",
1768        "ansiblue": "ansibrightblue",
1769        "ansigreen": "ansibrightgreen",
1770        "ansicyan": "ansibrightcyan",
1771        "ansired": "ansibrightred",
1772        "ansimagenta": "ansibrightmagenta",
1773        "ansiyellow": "ansibrightyellow",
1774        "ansigray": "ansiwhite",
1775    }
1776
1777
1778WIN_BOLD_COLOR_MAP = LazyObject(_win_bold_color_map, globals(), "WIN_BOLD_COLOR_MAP")
1779
1780
1781def hardcode_colors_for_win10(style_map):
1782    """Replace all ansi colors with hardcoded colors to avoid unreadable defaults
1783       in conhost.exe
1784    """
1785    modified_style = {}
1786    if not builtins.__xonsh_env__["PROMPT_TOOLKIT_COLOR_DEPTH"]:
1787        builtins.__xonsh_env__["PROMPT_TOOLKIT_COLOR_DEPTH"] = "DEPTH_24_BIT"
1788    # Replace all ansi colors with hardcoded colors to avoid unreadable defaults
1789    # in conhost.exe
1790    for token, style_str in style_map.items():
1791        for ansicolor in WIN10_COLOR_MAP:
1792            if ansicolor in style_str:
1793                if "bold" in style_str and "nobold" not in style_str:
1794                    # Win10  doesn't yet handle bold colors. Instead dark
1795                    # colors are mapped to their lighter version. We simulate
1796                    # the same here.
1797                    style_str.replace("bold", "")
1798                    hexcolor = WIN10_COLOR_MAP[
1799                        WIN_BOLD_COLOR_MAP.get(ansicolor, ansicolor)
1800                    ]
1801                else:
1802                    hexcolor = WIN10_COLOR_MAP[ansicolor]
1803                style_str = style_str.replace(ansicolor, hexcolor)
1804        modified_style[token] = style_str
1805    return modified_style
1806
1807
1808def ansicolors_to_ptk1_names(stylemap):
1809    """Converts ansicolor names in a stylemap to old PTK1 color names
1810    """
1811    modified_stylemap = {}
1812    for token, style_str in stylemap.items():
1813        for color, ptk1_color in ANSICOLOR_NAMES_MAP.items():
1814            if "#" + color not in style_str:
1815                style_str = style_str.replace(color, ptk1_color)
1816        modified_stylemap[token] = style_str
1817    return modified_stylemap
1818
1819
1820def intensify_colors_for_cmd_exe(style_map):
1821    """Returns a modified style to where colors that maps to dark
1822       colors are replaced with brighter versions.
1823    """
1824    modified_style = {}
1825    replace_colors = {
1826        1: "ansibrightcyan",  # subst blue with bright cyan
1827        2: "ansibrightgreen",  # subst green with bright green
1828        4: "ansibrightred",  # subst red with bright red
1829        5: "ansibrightmagenta",  # subst magenta with bright magenta
1830        6: "ansibrightyellow",  # subst yellow with bright yellow
1831        9: "ansicyan",  # subst intense blue with dark cyan (more readable)
1832    }
1833    if builtins.__xonsh_shell__.shell_type == "prompt_toolkit1":
1834        replace_colors = ansicolors_to_ptk1_names(replace_colors)
1835    for token, idx, _ in _get_color_indexes(style_map):
1836        if idx in replace_colors:
1837            modified_style[token] = replace_colors[idx]
1838    return modified_style
1839
1840
1841def intensify_colors_on_win_setter(enable):
1842    """Resets the style when setting the INTENSIFY_COLORS_ON_WIN
1843    environment variable.
1844    """
1845    enable = to_bool(enable)
1846    if hasattr(builtins, "__xonsh_shell__"):
1847        builtins.__xonsh_shell__.shell.styler.trap.clear()
1848        if hasattr(builtins.__xonsh_shell__.shell.styler, "style_name"):
1849            delattr(builtins.__xonsh_shell__.shell.styler, "style_name")
1850    return enable
1851
1852
1853def format_std_prepost(template, env=None):
1854    """Formats a template prefix/postfix string for a standard buffer.
1855    Returns a string suitable for prepending or appending.
1856    """
1857    if not template:
1858        return ""
1859    env = builtins.__xonsh_env__ if env is None else env
1860    shell = builtins.__xonsh_shell__.shell
1861    try:
1862        s = shell.prompt_formatter(template)
1863    except Exception:
1864        print_exception()
1865    # \001\002 is there to fool pygments into not returning an empty string
1866    # for potentially empty input. This happens when the template is just a
1867    # color code with no visible text.
1868    invis = "\001\002"
1869    s = shell.format_color(invis + s + invis, force_string=True)
1870    s = s.replace(invis, "")
1871    return s
1872
1873
1874_RE_STRING_START = "[bBprRuUf]*"
1875_RE_STRING_TRIPLE_DOUBLE = '"""'
1876_RE_STRING_TRIPLE_SINGLE = "'''"
1877_RE_STRING_DOUBLE = '"'
1878_RE_STRING_SINGLE = "'"
1879_STRINGS = (
1880    _RE_STRING_TRIPLE_DOUBLE,
1881    _RE_STRING_TRIPLE_SINGLE,
1882    _RE_STRING_DOUBLE,
1883    _RE_STRING_SINGLE,
1884)
1885RE_BEGIN_STRING = LazyObject(
1886    lambda: re.compile("(" + _RE_STRING_START + "(" + "|".join(_STRINGS) + "))"),
1887    globals(),
1888    "RE_BEGIN_STRING",
1889)
1890"""Regular expression matching the start of a string, including quotes and
1891leading characters (r, b, or u)"""
1892
1893RE_STRING_START = LazyObject(
1894    lambda: re.compile(_RE_STRING_START), globals(), "RE_STRING_START"
1895)
1896"""Regular expression matching the characters before the quotes when starting a
1897string (r, b, or u, case insensitive)"""
1898
1899RE_STRING_CONT = LazyDict(
1900    {
1901        '"': lambda: re.compile(r'((\\(.|\n))|([^"\\]))*'),
1902        "'": lambda: re.compile(r"((\\(.|\n))|([^'\\]))*"),
1903        '"""': lambda: re.compile(r'((\\(.|\n))|([^"\\])|("(?!""))|\n)*'),
1904        "'''": lambda: re.compile(r"((\\(.|\n))|([^'\\])|('(?!''))|\n)*"),
1905    },
1906    globals(),
1907    "RE_STRING_CONT",
1908)
1909"""Dictionary mapping starting quote sequences to regular expressions that
1910match the contents of a string beginning with those quotes (not including the
1911terminating quotes)"""
1912
1913
1914@lazyobject
1915def RE_COMPLETE_STRING():
1916    ptrn = (
1917        "^"
1918        + _RE_STRING_START
1919        + "(?P<quote>"
1920        + "|".join(_STRINGS)
1921        + ")"
1922        + ".*?(?P=quote)$"
1923    )
1924    return re.compile(ptrn, re.DOTALL)
1925
1926
1927def check_for_partial_string(x):
1928    """Returns the starting index (inclusive), ending index (exclusive), and
1929    starting quote string of the most recent Python string found in the input.
1930
1931    check_for_partial_string(x) -> (startix, endix, quote)
1932
1933    Parameters
1934    ----------
1935    x : str
1936        The string to be checked (representing a line of terminal input)
1937
1938    Returns
1939    -------
1940    startix : int (or None)
1941        The index where the most recent Python string found started
1942        (inclusive), or None if no strings exist in the input
1943
1944    endix : int (or None)
1945        The index where the most recent Python string found ended (exclusive),
1946        or None if no strings exist in the input OR if the input ended in the
1947        middle of a Python string
1948
1949    quote : str (or None)
1950        A string containing the quote used to start the string (e.g., b", ",
1951        '''), or None if no string was found.
1952    """
1953    string_indices = []
1954    starting_quote = []
1955    current_index = 0
1956    match = re.search(RE_BEGIN_STRING, x)
1957    while match is not None:
1958        # add the start in
1959        start = match.start()
1960        quote = match.group(0)
1961        lenquote = len(quote)
1962        current_index += start
1963        # store the starting index of the string, as well as the
1964        # characters in the starting quotes (e.g., ", ', """, r", etc)
1965        string_indices.append(current_index)
1966        starting_quote.append(quote)
1967        # determine the string that should terminate this string
1968        ender = re.sub(RE_STRING_START, "", quote)
1969        x = x[start + lenquote :]
1970        current_index += lenquote
1971        # figure out what is inside the string
1972        continuer = RE_STRING_CONT[ender]
1973        contents = re.match(continuer, x)
1974        inside = contents.group(0)
1975        leninside = len(inside)
1976        current_index += contents.start() + leninside + len(ender)
1977        # if we are not at the end of the input string, add the ending index of
1978        # the string to string_indices
1979        if contents.end() < len(x):
1980            string_indices.append(current_index)
1981        x = x[leninside + len(ender) :]
1982        # find the next match
1983        match = re.search(RE_BEGIN_STRING, x)
1984    numquotes = len(string_indices)
1985    if numquotes == 0:
1986        return (None, None, None)
1987    elif numquotes % 2:
1988        return (string_indices[-1], None, starting_quote[-1])
1989    else:
1990        return (string_indices[-2], string_indices[-1], starting_quote[-1])
1991
1992
1993# regular expressions for matching environment variables
1994# i.e $FOO, ${'FOO'}
1995@lazyobject
1996def POSIX_ENVVAR_REGEX():
1997    pat = r"""\$({(?P<quote>['"])|)(?P<envvar>\w+)((?P=quote)}|(?:\1\b))"""
1998    return re.compile(pat)
1999
2000
2001def expandvars(path):
2002    """Expand shell variables of the forms $var, ${var} and %var%.
2003    Unknown variables are left unchanged."""
2004    env = builtins.__xonsh_env__
2005    if isinstance(path, bytes):
2006        path = path.decode(
2007            encoding=env.get("XONSH_ENCODING"), errors=env.get("XONSH_ENCODING_ERRORS")
2008        )
2009    elif isinstance(path, pathlib.Path):
2010        # get the path's string representation
2011        path = str(path)
2012    if "$" in path:
2013        for match in POSIX_ENVVAR_REGEX.finditer(path):
2014            name = match.group("envvar")
2015            if name in env:
2016                ensurer = env.get_ensurer(name)
2017                value = ensurer.detype(env[name])
2018                path = POSIX_ENVVAR_REGEX.sub(value, path, count=1)
2019    return path
2020
2021
2022#
2023# File handling tools
2024#
2025
2026
2027def backup_file(fname):
2028    """Moves an existing file to a new name that has the current time right
2029    before the extension.
2030    """
2031    # lazy imports
2032    import shutil
2033    from datetime import datetime
2034
2035    base, ext = os.path.splitext(fname)
2036    timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S-%f")
2037    newfname = "%s.%s%s" % (base, timestamp, ext)
2038    shutil.move(fname, newfname)
2039
2040
2041def normabspath(p):
2042    """Returns as normalized absolute path, namely, normcase(abspath(p))"""
2043    return os.path.normcase(os.path.abspath(p))
2044
2045
2046def expanduser_abs_path(inp):
2047    """ Provides user expanded absolute path """
2048    return os.path.abspath(expanduser(inp))
2049
2050
2051WINDOWS_DRIVE_MATCHER = LazyObject(
2052    lambda: re.compile(r"^\w:"), globals(), "WINDOWS_DRIVE_MATCHER"
2053)
2054
2055
2056def expand_case_matching(s):
2057    """Expands a string to a case insensitive globable string."""
2058    t = []
2059    openers = {"[", "{"}
2060    closers = {"]", "}"}
2061    nesting = 0
2062
2063    drive_part = WINDOWS_DRIVE_MATCHER.match(s) if ON_WINDOWS else None
2064
2065    if drive_part:
2066        drive_part = drive_part.group(0)
2067        t.append(drive_part)
2068        s = s[len(drive_part) :]
2069
2070    for c in s:
2071        if c in openers:
2072            nesting += 1
2073        elif c in closers:
2074            nesting -= 1
2075        elif nesting > 0:
2076            pass
2077        elif c.isalpha():
2078            folded = c.casefold()
2079            if len(folded) == 1:
2080                c = "[{0}{1}]".format(c.upper(), c.lower())
2081            else:
2082                newc = ["[{0}{1}]?".format(f.upper(), f.lower()) for f in folded[:-1]]
2083                newc = "".join(newc)
2084                newc += "[{0}{1}{2}]".format(folded[-1].upper(), folded[-1].lower(), c)
2085                c = newc
2086        t.append(c)
2087    return "".join(t)
2088
2089
2090def globpath(
2091    s, ignore_case=False, return_empty=False, sort_result=None, include_dotfiles=None
2092):
2093    """Simple wrapper around glob that also expands home and env vars."""
2094    o, s = _iglobpath(
2095        s,
2096        ignore_case=ignore_case,
2097        sort_result=sort_result,
2098        include_dotfiles=include_dotfiles,
2099    )
2100    o = list(o)
2101    no_match = [] if return_empty else [s]
2102    return o if len(o) != 0 else no_match
2103
2104
2105def _dotglobstr(s):
2106    modified = False
2107    dotted_s = s
2108    if "/*" in dotted_s:
2109        dotted_s = dotted_s.replace("/*", "/.*")
2110        dotted_s = dotted_s.replace("/.**/.*", "/**/.*")
2111        modified = True
2112    if dotted_s.startswith("*") and not dotted_s.startswith("**"):
2113        dotted_s = "." + dotted_s
2114        modified = True
2115    return dotted_s, modified
2116
2117
2118def _iglobpath(s, ignore_case=False, sort_result=None, include_dotfiles=None):
2119    s = builtins.__xonsh_expand_path__(s)
2120    if sort_result is None:
2121        sort_result = builtins.__xonsh_env__.get("GLOB_SORTED")
2122    if include_dotfiles is None:
2123        include_dotfiles = builtins.__xonsh_env__.get("DOTGLOB")
2124    if ignore_case:
2125        s = expand_case_matching(s)
2126    if sys.version_info > (3, 5):
2127        if "**" in s and "**/*" not in s:
2128            s = s.replace("**", "**/*")
2129        if include_dotfiles:
2130            dotted_s, dotmodified = _dotglobstr(s)
2131        # `recursive` is only a 3.5+ kwarg.
2132        if sort_result:
2133            paths = glob.glob(s, recursive=True)
2134            if include_dotfiles and dotmodified:
2135                paths.extend(glob.iglob(dotted_s, recursive=True))
2136            paths.sort()
2137            paths = iter(paths)
2138        else:
2139            paths = glob.iglob(s, recursive=True)
2140            if include_dotfiles and dotmodified:
2141                paths = itertools.chain(glob.iglob(dotted_s, recursive=True), paths)
2142        return paths, s
2143    else:
2144        if include_dotfiles:
2145            dotted_s, dotmodified = _dotglobstr(s)
2146        if sort_result:
2147            paths = glob.glob(s)
2148            if include_dotfiles and dotmodified:
2149                paths.extend(glob.iglob(dotted_s))
2150            paths.sort()
2151            paths = iter(paths)
2152        else:
2153            paths = glob.iglob(s)
2154            if include_dotfiles and dotmodified:
2155                paths = itertools.chain(glob.iglob(dotted_s), paths)
2156        return paths, s
2157
2158
2159def iglobpath(s, ignore_case=False, sort_result=None, include_dotfiles=None):
2160    """Simple wrapper around iglob that also expands home and env vars."""
2161    try:
2162        return _iglobpath(
2163            s,
2164            ignore_case=ignore_case,
2165            sort_result=sort_result,
2166            include_dotfiles=include_dotfiles,
2167        )[0]
2168    except IndexError:
2169        # something went wrong in the actual iglob() call
2170        return iter(())
2171
2172
2173def ensure_timestamp(t, datetime_format=None):
2174    if isinstance(t, (int, float)):
2175        return t
2176    try:
2177        return float(t)
2178    except (ValueError, TypeError):
2179        pass
2180    if datetime_format is None:
2181        datetime_format = builtins.__xonsh_env__["XONSH_DATETIME_FORMAT"]
2182    if isinstance(t, datetime.datetime):
2183        t = t.timestamp()
2184    else:
2185        t = datetime.datetime.strptime(t, datetime_format).timestamp()
2186    return t
2187
2188
2189def format_datetime(dt):
2190    """Format datetime object to string base on $XONSH_DATETIME_FORMAT Env."""
2191    format_ = builtins.__xonsh_env__["XONSH_DATETIME_FORMAT"]
2192    return dt.strftime(format_)
2193
2194
2195def columnize(elems, width=80, newline="\n"):
2196    """Takes an iterable of strings and returns a list of lines with the
2197    elements placed in columns. Each line will be at most *width* columns.
2198    The newline character will be appended to the end of each line.
2199    """
2200    sizes = [len(e) + 1 for e in elems]
2201    total = sum(sizes)
2202    nelem = len(elems)
2203    if total - 1 <= width:
2204        ncols = len(sizes)
2205        nrows = 1
2206        columns = [sizes]
2207        last_longest_row = total
2208        enter_loop = False
2209    else:
2210        ncols = 1
2211        nrows = len(sizes)
2212        columns = [sizes]
2213        last_longest_row = max(sizes)
2214        enter_loop = True
2215    while enter_loop:
2216        longest_row = sum(map(max, columns))
2217        if longest_row - 1 <= width:
2218            # we might be able to fit another column.
2219            ncols += 1
2220            nrows = nelem // ncols
2221            columns = [sizes[i * nrows : (i + 1) * nrows] for i in range(ncols)]
2222            last_longest_row = longest_row
2223        else:
2224            # we can't fit another column
2225            ncols -= 1
2226            nrows = nelem // ncols
2227            break
2228    pad = (width - last_longest_row + ncols) // ncols
2229    pad = pad if pad > 1 else 1
2230    data = [elems[i * nrows : (i + 1) * nrows] for i in range(ncols)]
2231    colwidths = [max(map(len, d)) + pad for d in data]
2232    colwidths[-1] -= pad
2233    row_t = "".join(["{{row[{i}]: <{{w[{i}]}}}}".format(i=i) for i in range(ncols)])
2234    row_t += newline
2235    lines = [
2236        row_t.format(row=row, w=colwidths)
2237        for row in itertools.zip_longest(*data, fillvalue="")
2238    ]
2239    return lines
2240
2241
2242def unthreadable(f):
2243    """Decorator that specifies that a callable alias should be run only
2244    on the main thread process. This is often needed for debuggers and
2245    profilers.
2246    """
2247    f.__xonsh_threadable__ = False
2248    return f
2249
2250
2251def uncapturable(f):
2252    """Decorator that specifies that a callable alias should not be run with
2253    any capturing. This is often needed if the alias call interactive
2254    subprocess, like pagers and text editors.
2255    """
2256    f.__xonsh_capturable__ = False
2257    return f
2258
2259
2260def carriage_return():
2261    """Writes a carriage return to stdout, and nothing else."""
2262    print("\r", flush=True, end="")
2263
2264
2265def deprecated(deprecated_in=None, removed_in=None):
2266    """Parametrized decorator that deprecates a function in a graceful manner.
2267
2268    Updates the decorated function's docstring to mention the version
2269    that deprecation occurred in and the version it will be removed
2270    in if both of these values are passed.
2271
2272    When removed_in is not a release equal to or less than the current
2273    release, call ``warnings.warn`` with details, while raising
2274    ``DeprecationWarning``.
2275
2276    When removed_in is a release equal to or less than the current release,
2277    raise an ``AssertionError``.
2278
2279    Parameters
2280    ----------
2281    deprecated_in : str
2282        The version number that deprecated this function.
2283    removed_in : str
2284        The version number that this function will be removed in.
2285    """
2286    message_suffix = _deprecated_message_suffix(deprecated_in, removed_in)
2287    if not message_suffix:
2288        message_suffix = ""
2289
2290    def decorated(func):
2291        warning_message = "{} has been deprecated".format(func.__name__)
2292        warning_message += message_suffix
2293
2294        @functools.wraps(func)
2295        def wrapped(*args, **kwargs):
2296            _deprecated_error_on_expiration(func.__name__, removed_in)
2297            func(*args, **kwargs)
2298            warnings.warn(warning_message, DeprecationWarning)
2299
2300        wrapped.__doc__ = (
2301            "{}\n\n{}".format(wrapped.__doc__, warning_message)
2302            if wrapped.__doc__
2303            else warning_message
2304        )
2305
2306        return wrapped
2307
2308    return decorated
2309
2310
2311def _deprecated_message_suffix(deprecated_in, removed_in):
2312    if deprecated_in and removed_in:
2313        message_suffix = " in version {} and will be removed in version {}".format(
2314            deprecated_in, removed_in
2315        )
2316    elif deprecated_in and not removed_in:
2317        message_suffix = " in version {}".format(deprecated_in)
2318    elif not deprecated_in and removed_in:
2319        message_suffix = " and will be removed in version {}".format(removed_in)
2320    else:
2321        message_suffix = None
2322
2323    return message_suffix
2324
2325
2326def _deprecated_error_on_expiration(name, removed_in):
2327    if not removed_in:
2328        return
2329    elif LooseVersion(__version__) >= LooseVersion(removed_in):
2330        raise AssertionError(
2331            "{} has passed its version {} expiry date!".format(name, removed_in)
2332        )
2333