1# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
2# Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com> 2016
3# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
4
5from __future__ import absolute_import, division, print_function
6__metaclass__ = type
7
8FILE_ATTRIBUTES = {
9    'A': 'noatime',
10    'a': 'append',
11    'c': 'compressed',
12    'C': 'nocow',
13    'd': 'nodump',
14    'D': 'dirsync',
15    'e': 'extents',
16    'E': 'encrypted',
17    'h': 'blocksize',
18    'i': 'immutable',
19    'I': 'indexed',
20    'j': 'journalled',
21    'N': 'inline',
22    's': 'zero',
23    'S': 'synchronous',
24    't': 'notail',
25    'T': 'blockroot',
26    'u': 'undelete',
27    'X': 'compressedraw',
28    'Z': 'compresseddirty',
29}
30
31# Ansible modules can be written in any language.
32# The functions available here can be used to do many common tasks,
33# to simplify development of Python modules.
34
35import __main__
36import atexit
37import errno
38import datetime
39import grp
40import fcntl
41import locale
42import os
43import pwd
44import platform
45import re
46import select
47import shlex
48import shutil
49import signal
50import stat
51import subprocess
52import sys
53import tempfile
54import time
55import traceback
56import types
57
58from collections import deque
59from itertools import chain, repeat
60
61try:
62    import syslog
63    HAS_SYSLOG = True
64except ImportError:
65    HAS_SYSLOG = False
66
67try:
68    from systemd import journal
    # Make sure systemd.journal has the sendv() method (some older packages don't)
71    has_journal = hasattr(journal, 'sendv')
72except ImportError:
73    has_journal = False
74
75HAVE_SELINUX = False
76try:
77    import selinux
78    HAVE_SELINUX = True
79except ImportError:
80    pass
81
82# Python2 & 3 way to get NoneType
83NoneType = type(None)
84
85from ansible.module_utils.compat import selectors
86
87from ._text import to_native, to_bytes, to_text
88from ansible.module_utils.common.text.converters import (
89    jsonify,
90    container_to_bytes as json_dict_unicode_to_bytes,
91    container_to_text as json_dict_bytes_to_unicode,
92)
93
94from ansible.module_utils.common.text.formatters import (
95    lenient_lowercase,
96    bytes_to_human,
97    human_to_bytes,
98    SIZE_RANGES,
99)
100
101try:
102    from ansible.module_utils.common._json_compat import json
103except ImportError as e:
104    print('\n{{"msg": "Error: ansible requires the stdlib json: {0}", "failed": true}}'.format(to_native(e)))
105    sys.exit(1)
106
107
108AVAILABLE_HASH_ALGORITHMS = dict()
109try:
110    import hashlib
111
112    # python 2.7.9+ and 2.7.0+
113    for attribute in ('available_algorithms', 'algorithms'):
114        algorithms = getattr(hashlib, attribute, None)
115        if algorithms:
116            break
117    if algorithms is None:
118        # python 2.5+
119        algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
120    for algorithm in algorithms:
121        AVAILABLE_HASH_ALGORITHMS[algorithm] = getattr(hashlib, algorithm)
122
123    # we may have been able to import md5 but it could still not be available
124    try:
125        hashlib.md5()
126    except ValueError:
127        AVAILABLE_HASH_ALGORITHMS.pop('md5', None)
128except Exception:
129    import sha
130    AVAILABLE_HASH_ALGORITHMS = {'sha1': sha.sha}
131    try:
132        import md5
133        AVAILABLE_HASH_ALGORITHMS['md5'] = md5.md5
134    except Exception:
135        pass
136
137from ansible.module_utils.common._collections_compat import (
138    KeysView,
139    Mapping, MutableMapping,
140    Sequence, MutableSequence,
141    Set, MutableSet,
142)
143from ansible.module_utils.common.process import get_bin_path
144from ansible.module_utils.common.file import (
145    _PERM_BITS as PERM_BITS,
146    _EXEC_PERM_BITS as EXEC_PERM_BITS,
147    _DEFAULT_PERM as DEFAULT_PERM,
148    is_executable,
149    format_attributes,
150    get_flags_from_attributes,
151)
152from ansible.module_utils.common.sys_info import (
153    get_distribution,
154    get_distribution_version,
155    get_platform_subclass,
156)
157from ansible.module_utils.pycompat24 import get_exception, literal_eval
158from ansible.module_utils.common.parameters import (
159    handle_aliases,
160    list_deprecations,
161    list_no_log_values,
162    PASS_VARS,
163    PASS_BOOLS,
164)
165
166from ansible.module_utils.six import (
167    PY2,
168    PY3,
169    b,
170    binary_type,
171    integer_types,
172    iteritems,
173    string_types,
174    text_type,
175)
176from ansible.module_utils.six.moves import map, reduce, shlex_quote
177from ansible.module_utils.common.validation import (
178    check_missing_parameters,
179    check_mutually_exclusive,
180    check_required_arguments,
181    check_required_by,
182    check_required_if,
183    check_required_one_of,
184    check_required_together,
185    count_terms,
186    check_type_bool,
187    check_type_bits,
188    check_type_bytes,
189    check_type_float,
190    check_type_int,
191    check_type_jsonarg,
192    check_type_list,
193    check_type_dict,
194    check_type_path,
195    check_type_raw,
196    check_type_str,
197    safe_eval,
198)
199from ansible.module_utils.common._utils import get_all_subclasses as _get_all_subclasses
200from ansible.module_utils.parsing.convert_bool import BOOLEANS, BOOLEANS_FALSE, BOOLEANS_TRUE, boolean
201from ansible.module_utils.common.warnings import (
202    deprecate,
203    get_deprecation_messages,
204    get_warning_messages,
205    warn,
206)
207
208# Note: When getting Sequence from collections, it matches with strings. If
209# this matters, make sure to check for strings before checking for sequencetype
210SEQUENCETYPE = frozenset, KeysView, Sequence
211
212PASSWORD_MATCH = re.compile(r'^(?:.+[-_\s])?pass(?:[-_\s]?(?:word|phrase|wrd|wd)?)(?:[-_\s].+)?$', re.I)
213
214imap = map
215
216try:
217    # Python 2
218    unicode
219except NameError:
220    # Python 3
221    unicode = text_type
222
223try:
224    # Python 2
225    basestring
226except NameError:
227    # Python 3
228    basestring = string_types
229
230_literal_eval = literal_eval
231
232# End of deprecated names
233
234# Internal global holding passed in params.  This is consulted in case
235# multiple AnsibleModules are created.  Otherwise each AnsibleModule would
236# attempt to read from stdin.  Other code should not use this directly as it
237# is an internal implementation detail
238_ANSIBLE_ARGS = None
239
240
241def env_fallback(*args, **kwargs):
242    ''' Load value from environment '''
243    for arg in args:
244        if arg in os.environ:
245            return os.environ[arg]
246    raise AnsibleFallbackNotFound
247
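# A minimal illustrative sketch (hypothetical argument name): env_fallback is
# normally referenced from an argument_spec via the ``fallback`` key, the same
# way ANSIBLE_UNSAFE_WRITES is wired up below, e.g.::
#
#     argument_spec = dict(
#         api_token=dict(type='str', no_log=True,
#                        fallback=(env_fallback, ['API_TOKEN'])),
#     )
#
# If ``api_token`` is not supplied, AnsibleModule calls
# env_fallback('API_TOKEN') and uses the environment value when the variable
# is set; otherwise AnsibleFallbackNotFound is raised and the normal default
# handling applies.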
248
249FILE_COMMON_ARGUMENTS = dict(
    # Arguments for setting metadata (mode, ownership, permissions in general) on
    # created files (these are used by set_fs_attributes_if_different and included in
    # load_file_common_arguments)
253    mode=dict(type='raw'),
254    owner=dict(type='str'),
255    group=dict(type='str'),
256    seuser=dict(type='str'),
257    serole=dict(type='str'),
258    selevel=dict(type='str'),
259    setype=dict(type='str'),
260    attributes=dict(type='str', aliases=['attr']),
261    unsafe_writes=dict(type='bool', default=False, fallback=(env_fallback, ['ANSIBLE_UNSAFE_WRITES'])),  # should be available to any module using atomic_move
262)
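
# Hedged sketch of how a module typically opts in to these arguments: passing
# ``add_file_common_args=True`` to AnsibleModule merges FILE_COMMON_ARGUMENTS
# into the module's own argument_spec (see __init__ below). ``dest`` here is
# only an illustrative argument name::
#
#     module = AnsibleModule(
#         argument_spec=dict(dest=dict(type='path', required=True)),
#         add_file_common_args=True,
#     )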
263
264PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
265
266# Used for parsing symbolic file perms
267MODE_OPERATOR_RE = re.compile(r'[+=-]')
268USERS_RE = re.compile(r'[^ugo]')
269PERMS_RE = re.compile(r'[^rwxXstugo]')
270
271# Used for determining if the system is running a new enough python version
272# and should only restrict on our documented minimum versions
273_PY3_MIN = sys.version_info[:2] >= (3, 5)
274_PY2_MIN = (2, 6) <= sys.version_info[:2] < (3,)
275_PY_MIN = _PY3_MIN or _PY2_MIN
276if not _PY_MIN:
277    print(
278        '\n{"failed": true, '
279        '"msg": "Ansible requires a minimum of Python2 version 2.6 or Python3 version 3.5. Current version: %s"}' % ''.join(sys.version.splitlines())
280    )
281    sys.exit(1)
282
283
284#
285# Deprecated functions
286#
287
288def get_platform():
289    '''
290    **Deprecated** Use :py:func:`platform.system` directly.
291
292    :returns: Name of the platform the module is running on in a native string
293
294    Returns a native string that labels the platform ("Linux", "Solaris", etc). Currently, this is
295    the result of calling :py:func:`platform.system`.
296    '''
297    return platform.system()
298
299# End deprecated functions
300
301
302#
303# Compat shims
304#
305
306def load_platform_subclass(cls, *args, **kwargs):
307    """**Deprecated**: Use ansible.module_utils.common.sys_info.get_platform_subclass instead"""
308    platform_cls = get_platform_subclass(cls)
309    return super(cls, platform_cls).__new__(platform_cls)
310
311
312def get_all_subclasses(cls):
313    """**Deprecated**: Use ansible.module_utils.common._utils.get_all_subclasses instead"""
314    return list(_get_all_subclasses(cls))
315
316
317# End compat shims
318
319
320def _remove_values_conditions(value, no_log_strings, deferred_removals):
321    """
322    Helper function for :meth:`remove_values`.
323
324    :arg value: The value to check for strings that need to be stripped
325    :arg no_log_strings: set of strings which must be stripped out of any values
326    :arg deferred_removals: List which holds information about nested
327        containers that have to be iterated for removals.  It is passed into
328        this function so that more entries can be added to it if value is
329        a container type.  The format of each entry is a 2-tuple where the first
330        element is the ``value`` parameter and the second value is a new
331        container to copy the elements of ``value`` into once iterated.
332    :returns: if ``value`` is a scalar, returns ``value`` with two exceptions:
333        1. :class:`~datetime.datetime` objects which are changed into a string representation.
334        2. objects which are in no_log_strings are replaced with a placeholder
335            so that no sensitive data is leaked.
336        If ``value`` is a container type, returns a new empty container.
337
338    ``deferred_removals`` is added to as a side-effect of this function.
339
340    .. warning:: It is up to the caller to make sure the order in which value
341        is passed in is correct.  For instance, higher level containers need
342        to be passed in before lower level containers. For example, given
        ``{'level1': {'level2': {'level3': [True]}}}`` first pass in the
344        dictionary for ``level1``, then the dict for ``level2``, and finally
345        the list for ``level3``.
346    """
347    if isinstance(value, (text_type, binary_type)):
348        # Need native str type
349        native_str_value = value
350        if isinstance(value, text_type):
351            value_is_text = True
352            if PY2:
353                native_str_value = to_bytes(value, errors='surrogate_or_strict')
354        elif isinstance(value, binary_type):
355            value_is_text = False
356            if PY3:
357                native_str_value = to_text(value, errors='surrogate_or_strict')
358
359        if native_str_value in no_log_strings:
360            return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
361        for omit_me in no_log_strings:
362            native_str_value = native_str_value.replace(omit_me, '*' * 8)
363
364        if value_is_text and isinstance(native_str_value, binary_type):
365            value = to_text(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
366        elif not value_is_text and isinstance(native_str_value, text_type):
367            value = to_bytes(native_str_value, encoding='utf-8', errors='surrogate_then_replace')
368        else:
369            value = native_str_value
370
371    elif isinstance(value, Sequence):
372        if isinstance(value, MutableSequence):
373            new_value = type(value)()
374        else:
375            new_value = []  # Need a mutable value
376        deferred_removals.append((value, new_value))
377        value = new_value
378
379    elif isinstance(value, Set):
380        if isinstance(value, MutableSet):
381            new_value = type(value)()
382        else:
383            new_value = set()  # Need a mutable value
384        deferred_removals.append((value, new_value))
385        value = new_value
386
387    elif isinstance(value, Mapping):
388        if isinstance(value, MutableMapping):
389            new_value = type(value)()
390        else:
391            new_value = {}  # Need a mutable value
392        deferred_removals.append((value, new_value))
393        value = new_value
394
395    elif isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))):
396        stringy_value = to_native(value, encoding='utf-8', errors='surrogate_or_strict')
397        if stringy_value in no_log_strings:
398            return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
399        for omit_me in no_log_strings:
400            if omit_me in stringy_value:
401                return 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
402
403    elif isinstance(value, datetime.datetime):
404        value = value.isoformat()
405    else:
406        raise TypeError('Value of unknown type: %s, %s' % (type(value), value))
407
408    return value
409
410
411def remove_values(value, no_log_strings):
412    """ Remove strings in no_log_strings from value.  If value is a container
    type, the strings are also removed from its nested elements.
414
415    Use of deferred_removals exists, rather than a pure recursive solution,
416    because of the potential to hit the maximum recursion depth when dealing with
417    large amounts of data (see issue #24560).
418    """
419
420    deferred_removals = deque()
421
422    no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings]
423    new_value = _remove_values_conditions(value, no_log_strings, deferred_removals)
424
425    while deferred_removals:
426        old_data, new_data = deferred_removals.popleft()
427        if isinstance(new_data, Mapping):
428            for old_key, old_elem in old_data.items():
429                new_elem = _remove_values_conditions(old_elem, no_log_strings, deferred_removals)
430                new_data[old_key] = new_elem
431        else:
432            for elem in old_data:
433                new_elem = _remove_values_conditions(elem, no_log_strings, deferred_removals)
434                if isinstance(new_data, MutableSequence):
435                    new_data.append(new_elem)
436                elif isinstance(new_data, MutableSet):
437                    new_data.add(new_elem)
438                else:
439                    raise TypeError('Unknown container type encountered when removing private values from output')
440
441    return new_value
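
# Illustrative example (hypothetical values) of the behaviour described above:
#
#     remove_values({'url': 'https://user:hunter2@host', 'retries': 3},
#                   no_log_strings={'hunter2'})
#     # -> {'url': 'https://user:********@host', 'retries': 3}
#
# Scalars that exactly match a no_log string are replaced wholesale with
# 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'; partial matches inside strings are
# masked with '********'.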
442
443
444def _sanitize_keys_conditions(value, no_log_strings, ignore_keys, deferred_removals):
445    """ Helper method to sanitize_keys() to build deferred_removals and avoid deep recursion. """
446    if isinstance(value, (text_type, binary_type)):
447        return value
448
449    if isinstance(value, Sequence):
450        if isinstance(value, MutableSequence):
451            new_value = type(value)()
452        else:
453            new_value = []  # Need a mutable value
454        deferred_removals.append((value, new_value))
455        return new_value
456
457    if isinstance(value, Set):
458        if isinstance(value, MutableSet):
459            new_value = type(value)()
460        else:
461            new_value = set()  # Need a mutable value
462        deferred_removals.append((value, new_value))
463        return new_value
464
465    if isinstance(value, Mapping):
466        if isinstance(value, MutableMapping):
467            new_value = type(value)()
468        else:
469            new_value = {}  # Need a mutable value
470        deferred_removals.append((value, new_value))
471        return new_value
472
473    if isinstance(value, tuple(chain(integer_types, (float, bool, NoneType)))):
474        return value
475
476    if isinstance(value, (datetime.datetime, datetime.date)):
477        return value
478
479    raise TypeError('Value of unknown type: %s, %s' % (type(value), value))
480
481
482def sanitize_keys(obj, no_log_strings, ignore_keys=frozenset()):
483    """ Sanitize the keys in a container object by removing no_log values from key names.
484
485    This is a companion function to the `remove_values()` function. Similar to that function,
486    we make use of deferred_removals to avoid hitting maximum recursion depth in cases of
487    large data structures.
488
489    :param obj: The container object to sanitize. Non-container objects are returned unmodified.
490    :param no_log_strings: A set of string values we do not want logged.
491    :param ignore_keys: A set of string values of keys to not sanitize.
492
493    :returns: An object with sanitized keys.
494    """
495
496    deferred_removals = deque()
497
498    no_log_strings = [to_native(s, errors='surrogate_or_strict') for s in no_log_strings]
499    new_value = _sanitize_keys_conditions(obj, no_log_strings, ignore_keys, deferred_removals)
500
501    while deferred_removals:
502        old_data, new_data = deferred_removals.popleft()
503
504        if isinstance(new_data, Mapping):
505            for old_key, old_elem in old_data.items():
506                if old_key in ignore_keys or old_key.startswith('_ansible'):
507                    new_data[old_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals)
508                else:
509                    # Sanitize the old key. We take advantage of the sanitizing code in
510                    # _remove_values_conditions() rather than recreating it here.
511                    new_key = _remove_values_conditions(old_key, no_log_strings, None)
512                    new_data[new_key] = _sanitize_keys_conditions(old_elem, no_log_strings, ignore_keys, deferred_removals)
513        else:
514            for elem in old_data:
515                new_elem = _sanitize_keys_conditions(elem, no_log_strings, ignore_keys, deferred_removals)
516                if isinstance(new_data, MutableSequence):
517                    new_data.append(new_elem)
518                elif isinstance(new_data, MutableSet):
519                    new_data.add(new_elem)
520                else:
521                    raise TypeError('Unknown container type encountered when removing private values from keys')
522
523    return new_value
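
# Illustrative example (hypothetical values): only the *keys* are masked here,
# values are left for remove_values() to handle separately:
#
#     sanitize_keys({'secret_key': 'abc'}, no_log_strings={'secret'})
#     # -> {'********_key': 'abc'}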
524
525
526def heuristic_log_sanitize(data, no_log_values=None):
527    ''' Remove strings that look like passwords from log messages '''
528    # Currently filters:
529    # user:pass@foo/whatever and http://username:pass@wherever/foo
530    # This code has false positives and consumes parts of logs that are
531    # not passwds
532
533    # begin: start of a passwd containing string
534    # end: end of a passwd containing string
535    # sep: char between user and passwd
536    # prev_begin: where in the overall string to start a search for
537    #   a passwd
538    # sep_search_end: where in the string to end a search for the sep
539    data = to_native(data)
540
541    output = []
542    begin = len(data)
543    prev_begin = begin
544    sep = 1
545    while sep:
546        # Find the potential end of a passwd
547        try:
548            end = data.rindex('@', 0, begin)
549        except ValueError:
550            # No passwd in the rest of the data
551            output.insert(0, data[0:begin])
552            break
553
554        # Search for the beginning of a passwd
555        sep = None
556        sep_search_end = end
557        while not sep:
558            # URL-style username+password
559            try:
560                begin = data.rindex('://', 0, sep_search_end)
561            except ValueError:
562                # No url style in the data, check for ssh style in the
563                # rest of the string
564                begin = 0
565            # Search for separator
566            try:
567                sep = data.index(':', begin + 3, end)
568            except ValueError:
569                # No separator; choices:
570                if begin == 0:
571                    # Searched the whole string so there's no password
572                    # here.  Return the remaining data
573                    output.insert(0, data[0:begin])
574                    break
575                # Search for a different beginning of the password field.
576                sep_search_end = begin
577                continue
578        if sep:
579            # Password was found; remove it.
580            output.insert(0, data[end:prev_begin])
581            output.insert(0, '********')
582            output.insert(0, data[begin:sep + 1])
583            prev_begin = begin
584
585    output = ''.join(output)
586    if no_log_values:
587        output = remove_values(output, no_log_values)
588    return output
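
# For example (hypothetical credentials), URL/ssh style user:password pairs
# are masked roughly like this:
#
#     heuristic_log_sanitize('ssh://acme:hunter2@example.com/repo')
#     # -> 'ssh://acme:********@example.com/repo'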
589
590
591def _load_params():
    ''' read the module's parameters and store them globally.
593
594    This function may be needed for certain very dynamic custom modules which
    want to process the parameters that are being handed to the module.  Since
596    this is so closely tied to the implementation of modules we cannot
597    guarantee API stability for it (it may change between versions) however we
598    will try not to break it gratuitously.  It is certainly more future-proof
599    to call this function and consume its outputs than to implement the logic
600    inside it as a copy in your own code.
601    '''
602    global _ANSIBLE_ARGS
603    if _ANSIBLE_ARGS is not None:
604        buffer = _ANSIBLE_ARGS
605    else:
606        # debug overrides to read args from file or cmdline
607
608        # Avoid tracebacks when locale is non-utf8
609        # We control the args and we pass them as utf8
610        if len(sys.argv) > 1:
611            if os.path.isfile(sys.argv[1]):
612                fd = open(sys.argv[1], 'rb')
613                buffer = fd.read()
614                fd.close()
615            else:
616                buffer = sys.argv[1]
617                if PY3:
618                    buffer = buffer.encode('utf-8', errors='surrogateescape')
619        # default case, read from stdin
620        else:
621            if PY2:
622                buffer = sys.stdin.read()
623            else:
624                buffer = sys.stdin.buffer.read()
625        _ANSIBLE_ARGS = buffer
626
627    try:
628        params = json.loads(buffer.decode('utf-8'))
629    except ValueError:
        # This helper is used too early for fail_json to work.
631        print('\n{"msg": "Error: Module unable to decode valid JSON on stdin.  Unable to figure out what parameters were passed", "failed": true}')
632        sys.exit(1)
633
634    if PY2:
635        params = json_dict_unicode_to_bytes(params)
636
637    try:
638        return params['ANSIBLE_MODULE_ARGS']
639    except KeyError:
640        # This helper does not have access to fail_json so we have to print
641        # json output on our own.
642        print('\n{"msg": "Error: Module unable to locate ANSIBLE_MODULE_ARGS in json data from stdin.  Unable to figure out what parameters were passed", '
643              '"failed": true}')
644        sys.exit(1)
645
646
647def missing_required_lib(library, reason=None, url=None):
648    hostname = platform.node()
649    msg = "Failed to import the required Python library (%s) on %s's Python %s." % (library, hostname, sys.executable)
650    if reason:
651        msg += " This is required %s." % reason
652    if url:
653        msg += " See %s for more info." % url
654
655    msg += (" Please read the module documentation and install it in the appropriate location."
656            " If the required library is installed, but Ansible is using the wrong Python interpreter,"
657            " please consult the documentation on ansible_python_interpreter")
658    return msg
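
# Typical (hypothetical) usage pattern inside a module, capturing the import
# error at import time and reporting it through fail_json later:
#
#     try:
#         import requests
#         HAS_REQUESTS = True
#     except ImportError:
#         REQUESTS_IMP_ERR = traceback.format_exc()
#         HAS_REQUESTS = False
#     ...
#     if not HAS_REQUESTS:
#         module.fail_json(msg=missing_required_lib('requests'),
#                          exception=REQUESTS_IMP_ERR)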
659
660
661class AnsibleFallbackNotFound(Exception):
662    pass
663
664
665class AnsibleModule(object):
666    def __init__(self, argument_spec, bypass_checks=False, no_log=False,
667                 mutually_exclusive=None, required_together=None,
668                 required_one_of=None, add_file_common_args=False,
669                 supports_check_mode=False, required_if=None, required_by=None):
670
671        '''
672        Common code for quickly building an ansible module in Python
673        (although you can write modules with anything that can return JSON).
674
675        See :ref:`developing_modules_general` for a general introduction
676        and :ref:`developing_program_flow_modules` for more detailed explanation.
677        '''
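
        # A minimal sketch of typical module code (argument names are
        # illustrative only)::
        #
        #     module = AnsibleModule(
        #         argument_spec=dict(
        #             name=dict(type='str', required=True),
        #             state=dict(type='str', default='present',
        #                        choices=['absent', 'present']),
        #         ),
        #         supports_check_mode=True,
        #     )
        #     module.exit_json(changed=False, name=module.params['name'])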
678
679        self._name = os.path.basename(__file__)  # initialize name until we can parse from options
680        self.argument_spec = argument_spec
681        self.supports_check_mode = supports_check_mode
682        self.check_mode = False
683        self.bypass_checks = bypass_checks
684        self.no_log = no_log
685
686        self.mutually_exclusive = mutually_exclusive
687        self.required_together = required_together
688        self.required_one_of = required_one_of
689        self.required_if = required_if
690        self.required_by = required_by
691        self.cleanup_files = []
692        self._debug = False
693        self._diff = False
694        self._socket_path = None
695        self._shell = None
696        self._verbosity = 0
697        # May be used to set modifications to the environment for any
698        # run_command invocation
699        self.run_command_environ_update = {}
700        self._clean = {}
701        self._string_conversion_action = ''
702
703        self.aliases = {}
704        self._legal_inputs = []
705        self._options_context = list()
706        self._tmpdir = None
707
708        self._created_files = set()
709
710        if add_file_common_args:
711            self._uses_common_file_args = True
712            for k, v in FILE_COMMON_ARGUMENTS.items():
713                if k not in self.argument_spec:
714                    self.argument_spec[k] = v
715
716        # Save parameter values that should never be logged
717        self.no_log_values = set()
718
719        self._load_params()
720        self._set_fallbacks()
721
722        # append to legal_inputs and then possibly check against them
723        try:
724            self.aliases = self._handle_aliases()
725        except (ValueError, TypeError) as e:
726            # Use exceptions here because it isn't safe to call fail_json until no_log is processed
727            print('\n{"failed": true, "msg": "Module alias error: %s"}' % to_native(e))
728            sys.exit(1)
729
730        self._handle_no_log_values()
731
732        # check the locale as set by the current environment, and reset to
733        # a known valid (LANG=C) if it's an invalid/unavailable locale
734        self._check_locale()
735
736        self._check_arguments()
737
738        # check exclusive early
739        if not bypass_checks:
740            self._check_mutually_exclusive(mutually_exclusive)
741
742        self._set_defaults(pre=True)
743
744        self._CHECK_ARGUMENT_TYPES_DISPATCHER = {
745            'str': self._check_type_str,
746            'list': self._check_type_list,
747            'dict': self._check_type_dict,
748            'bool': self._check_type_bool,
749            'int': self._check_type_int,
750            'float': self._check_type_float,
751            'path': self._check_type_path,
752            'raw': self._check_type_raw,
753            'jsonarg': self._check_type_jsonarg,
754            'json': self._check_type_jsonarg,
755            'bytes': self._check_type_bytes,
756            'bits': self._check_type_bits,
757        }
758        if not bypass_checks:
759            self._check_required_arguments()
760            self._check_argument_types()
761            self._check_argument_values()
762            self._check_required_together(required_together)
763            self._check_required_one_of(required_one_of)
764            self._check_required_if(required_if)
765            self._check_required_by(required_by)
766
767        self._set_defaults(pre=False)
768
769        # deal with options sub-spec
770        self._handle_options()
771
772        if not self.no_log:
773            self._log_invocation()
774
775        # finally, make sure we're in a sane working dir
776        self._set_cwd()
777
778    @property
779    def tmpdir(self):
780        # if _ansible_tmpdir was not set and we have a remote_tmp,
781        # the module needs to create it and clean it up once finished.
782        # otherwise we create our own module tmp dir from the system defaults
783        if self._tmpdir is None:
784            basedir = None
785
786            if self._remote_tmp is not None:
787                basedir = os.path.expanduser(os.path.expandvars(self._remote_tmp))
788
789            if basedir is not None and not os.path.exists(basedir):
790                try:
791                    os.makedirs(basedir, mode=0o700)
792                except (OSError, IOError) as e:
                    self.warn("Unable to use %s as temporary directory, "
                              "falling back to system: %s" % (basedir, to_native(e)))
795                    basedir = None
796                else:
797                    self.warn("Module remote_tmp %s did not exist and was "
798                              "created with a mode of 0700, this may cause"
799                              " issues when running as another user. To "
800                              "avoid this, create the remote_tmp dir with "
801                              "the correct permissions manually" % basedir)
802
803            basefile = "ansible-moduletmp-%s-" % time.time()
804            try:
805                tmpdir = tempfile.mkdtemp(prefix=basefile, dir=basedir)
806            except (OSError, IOError) as e:
807                self.fail_json(
808                    msg="Failed to create remote module tmp path at dir %s "
809                        "with prefix %s: %s" % (basedir, basefile, to_native(e))
810                )
811            if not self._keep_remote_files:
812                atexit.register(shutil.rmtree, tmpdir)
813            self._tmpdir = tmpdir
814
815        return self._tmpdir
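
    # Illustrative use from module code (hypothetical filename): scratch files
    # created under ``self.tmpdir`` are removed automatically at exit unless
    # keep_remote_files is in effect::
    #
    #     scratch = os.path.join(self.tmpdir, 'rendered.conf')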
816
817    def warn(self, warning):
818        warn(warning)
819        self.log('[WARNING] %s' % warning)
820
821    def deprecate(self, msg, version=None, date=None, collection_name=None):
822        if version is not None and date is not None:
823            raise AssertionError("implementation error -- version and date must not both be set")
824        deprecate(msg, version=version, date=date, collection_name=collection_name)
825        # For compatibility, we accept that neither version nor date is set,
        # and treat that the same as if version had been set
827        if date is not None:
828            self.log('[DEPRECATION WARNING] %s %s' % (msg, date))
829        else:
830            self.log('[DEPRECATION WARNING] %s %s' % (msg, version))
831
832    def load_file_common_arguments(self, params, path=None):
833        '''
        Many modules deal with files; this encapsulates the common
        options that the file module accepts so that they are directly
        available to all modules, which can then share code.

        Allows overriding the path/dest module argument by providing path.
839        '''
840
841        if path is None:
842            path = params.get('path', params.get('dest', None))
843        if path is None:
844            return {}
845        else:
846            path = os.path.expanduser(os.path.expandvars(path))
847
848        b_path = to_bytes(path, errors='surrogate_or_strict')
849        # if the path is a symlink, and we're following links, get
850        # the target of the link instead for testing
851        if params.get('follow', False) and os.path.islink(b_path):
852            b_path = os.path.realpath(b_path)
853            path = to_native(b_path)
854
855        mode = params.get('mode', None)
856        owner = params.get('owner', None)
857        group = params.get('group', None)
858
859        # selinux related options
860        seuser = params.get('seuser', None)
861        serole = params.get('serole', None)
862        setype = params.get('setype', None)
863        selevel = params.get('selevel', None)
864        secontext = [seuser, serole, setype]
865
866        if self.selinux_mls_enabled():
867            secontext.append(selevel)
868
869        default_secontext = self.selinux_default_context(path)
870        for i in range(len(default_secontext)):
871            if i is not None and secontext[i] == '_default':
872                secontext[i] = default_secontext[i]
873
874        attributes = params.get('attributes', None)
875        return dict(
876            path=path, mode=mode, owner=owner, group=group,
877            seuser=seuser, serole=serole, setype=setype,
878            selevel=selevel, secontext=secontext, attributes=attributes,
879        )
880
881    # Detect whether using selinux that is MLS-aware.
882    # While this means you can set the level/range with
883    # selinux.lsetfilecon(), it may or may not mean that you
884    # will get the selevel as part of the context returned
885    # by selinux.lgetfilecon().
886
887    def selinux_mls_enabled(self):
888        if not HAVE_SELINUX:
889            return False
890        if selinux.is_selinux_mls_enabled() == 1:
891            return True
892        else:
893            return False
894
895    def selinux_enabled(self):
896        if not HAVE_SELINUX:
897            seenabled = self.get_bin_path('selinuxenabled')
898            if seenabled is not None:
899                (rc, out, err) = self.run_command(seenabled)
900                if rc == 0:
901                    self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
902            return False
903        if selinux.is_selinux_enabled() == 1:
904            return True
905        else:
906            return False
907
908    # Determine whether we need a placeholder for selevel/mls
909    def selinux_initial_context(self):
910        context = [None, None, None]
911        if self.selinux_mls_enabled():
912            context.append(None)
913        return context
914
915    # If selinux fails to find a default, return an array of None
916    def selinux_default_context(self, path, mode=0):
917        context = self.selinux_initial_context()
918        if not HAVE_SELINUX or not self.selinux_enabled():
919            return context
920        try:
921            ret = selinux.matchpathcon(to_native(path, errors='surrogate_or_strict'), mode)
922        except OSError:
923            return context
924        if ret[0] == -1:
925            return context
926        # Limit split to 4 because the selevel, the last in the list,
927        # may contain ':' characters
928        context = ret[1].split(':', 3)
929        return context
930
931    def selinux_context(self, path):
932        context = self.selinux_initial_context()
933        if not HAVE_SELINUX or not self.selinux_enabled():
934            return context
935        try:
936            ret = selinux.lgetfilecon_raw(to_native(path, errors='surrogate_or_strict'))
937        except OSError as e:
938            if e.errno == errno.ENOENT:
939                self.fail_json(path=path, msg='path %s does not exist' % path)
940            else:
941                self.fail_json(path=path, msg='failed to retrieve selinux context')
942        if ret[0] == -1:
943            return context
944        # Limit split to 4 because the selevel, the last in the list,
945        # may contain ':' characters
946        context = ret[1].split(':', 3)
947        return context
948
949    def user_and_group(self, path, expand=True):
950        b_path = to_bytes(path, errors='surrogate_or_strict')
951        if expand:
952            b_path = os.path.expanduser(os.path.expandvars(b_path))
953        st = os.lstat(b_path)
954        uid = st.st_uid
955        gid = st.st_gid
956        return (uid, gid)
957
958    def find_mount_point(self, path):
959        '''
        Takes a path and returns its mount point
961
962        :param path: a string type with a filesystem path
963        :returns: the path to the mount point as a text type
964        '''
965
966        b_path = os.path.realpath(to_bytes(os.path.expanduser(os.path.expandvars(path)), errors='surrogate_or_strict'))
967        while not os.path.ismount(b_path):
968            b_path = os.path.dirname(b_path)
969
970        return to_text(b_path, errors='surrogate_or_strict')
971
972    def is_special_selinux_path(self, path):
973        """
        Returns a tuple containing (True, selinux_context) if the given path is on an
        NFS or other 'special' filesystem mount point, otherwise the return will be (False, None).
976        """
977        try:
978            f = open('/proc/mounts', 'r')
979            mount_data = f.readlines()
980            f.close()
981        except Exception:
982            return (False, None)
983
984        path_mount_point = self.find_mount_point(path)
985
986        for line in mount_data:
987            (device, mount_point, fstype, options, rest) = line.split(' ', 4)
988            if to_bytes(path_mount_point) == to_bytes(mount_point):
989                for fs in self._selinux_special_fs:
990                    if fs in fstype:
991                        special_context = self.selinux_context(path_mount_point)
992                        return (True, special_context)
993
994        return (False, None)
995
996    def set_default_selinux_context(self, path, changed):
997        if not HAVE_SELINUX or not self.selinux_enabled():
998            return changed
999        context = self.selinux_default_context(path)
1000        return self.set_context_if_different(path, context, False)
1001
1002    def set_context_if_different(self, path, context, changed, diff=None):
1003
1004        if not HAVE_SELINUX or not self.selinux_enabled():
1005            return changed
1006
1007        if self.check_file_absent_if_check_mode(path):
1008            return True
1009
1010        cur_context = self.selinux_context(path)
1011        new_context = list(cur_context)
1012        # Iterate over the current context instead of the
1013        # argument context, which may have selevel.
1014
1015        (is_special_se, sp_context) = self.is_special_selinux_path(path)
1016        if is_special_se:
1017            new_context = sp_context
1018        else:
1019            for i in range(len(cur_context)):
1020                if len(context) > i:
1021                    if context[i] is not None and context[i] != cur_context[i]:
1022                        new_context[i] = context[i]
1023                    elif context[i] is None:
1024                        new_context[i] = cur_context[i]
1025
1026        if cur_context != new_context:
1027            if diff is not None:
1028                if 'before' not in diff:
1029                    diff['before'] = {}
1030                diff['before']['secontext'] = cur_context
1031                if 'after' not in diff:
1032                    diff['after'] = {}
1033                diff['after']['secontext'] = new_context
1034
1035            try:
1036                if self.check_mode:
1037                    return True
1038                rc = selinux.lsetfilecon(to_native(path), ':'.join(new_context))
1039            except OSError as e:
1040                self.fail_json(path=path, msg='invalid selinux context: %s' % to_native(e),
1041                               new_context=new_context, cur_context=cur_context, input_was=context)
1042            if rc != 0:
1043                self.fail_json(path=path, msg='set selinux context failed')
1044            changed = True
1045        return changed
1046
1047    def set_owner_if_different(self, path, owner, changed, diff=None, expand=True):
1048
1049        if owner is None:
1050            return changed
1051
1052        b_path = to_bytes(path, errors='surrogate_or_strict')
1053        if expand:
1054            b_path = os.path.expanduser(os.path.expandvars(b_path))
1055
1056        if self.check_file_absent_if_check_mode(b_path):
1057            return True
1058
1059        orig_uid, orig_gid = self.user_and_group(b_path, expand)
1060        try:
1061            uid = int(owner)
1062        except ValueError:
1063            try:
1064                uid = pwd.getpwnam(owner).pw_uid
1065            except KeyError:
1066                path = to_text(b_path)
1067                self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
1068
1069        if orig_uid != uid:
1070            if diff is not None:
1071                if 'before' not in diff:
1072                    diff['before'] = {}
1073                diff['before']['owner'] = orig_uid
1074                if 'after' not in diff:
1075                    diff['after'] = {}
1076                diff['after']['owner'] = uid
1077
1078            if self.check_mode:
1079                return True
1080            try:
1081                os.lchown(b_path, uid, -1)
1082            except (IOError, OSError) as e:
1083                path = to_text(b_path)
1084                self.fail_json(path=path, msg='chown failed: %s' % (to_text(e)))
1085            changed = True
1086        return changed
1087
1088    def set_group_if_different(self, path, group, changed, diff=None, expand=True):
1089
1090        if group is None:
1091            return changed
1092
1093        b_path = to_bytes(path, errors='surrogate_or_strict')
1094        if expand:
1095            b_path = os.path.expanduser(os.path.expandvars(b_path))
1096
1097        if self.check_file_absent_if_check_mode(b_path):
1098            return True
1099
1100        orig_uid, orig_gid = self.user_and_group(b_path, expand)
1101        try:
1102            gid = int(group)
1103        except ValueError:
1104            try:
1105                gid = grp.getgrnam(group).gr_gid
1106            except KeyError:
1107                path = to_text(b_path)
1108                self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
1109
1110        if orig_gid != gid:
1111            if diff is not None:
1112                if 'before' not in diff:
1113                    diff['before'] = {}
1114                diff['before']['group'] = orig_gid
1115                if 'after' not in diff:
1116                    diff['after'] = {}
1117                diff['after']['group'] = gid
1118
1119            if self.check_mode:
1120                return True
1121            try:
1122                os.lchown(b_path, -1, gid)
1123            except OSError:
1124                path = to_text(b_path)
1125                self.fail_json(path=path, msg='chgrp failed')
1126            changed = True
1127        return changed
1128
1129    def set_mode_if_different(self, path, mode, changed, diff=None, expand=True):
1130
1131        # Remove paths so we do not warn about creating with default permissions
1132        # since we are calling this method on the path and setting the specified mode.
1133        try:
1134            self._created_files.remove(path)
1135        except KeyError:
1136            pass
1137
1138        if mode is None:
1139            return changed
1140
1141        b_path = to_bytes(path, errors='surrogate_or_strict')
1142        if expand:
1143            b_path = os.path.expanduser(os.path.expandvars(b_path))
1144        path_stat = os.lstat(b_path)
1145
1146        if self.check_file_absent_if_check_mode(b_path):
1147            return True
1148
1149        if not isinstance(mode, int):
1150            try:
1151                mode = int(mode, 8)
1152            except Exception:
1153                try:
1154                    mode = self._symbolic_mode_to_octal(path_stat, mode)
1155                except Exception as e:
1156                    path = to_text(b_path)
1157                    self.fail_json(path=path,
1158                                   msg="mode must be in octal or symbolic form",
1159                                   details=to_native(e))
1160
1161                if mode != stat.S_IMODE(mode):
                    # prevent mode from having extra info or being an invalid long number
1163                    path = to_text(b_path)
1164                    self.fail_json(path=path, msg="Invalid mode supplied, only permission info is allowed", details=mode)
1165
1166        prev_mode = stat.S_IMODE(path_stat.st_mode)
1167
1168        if prev_mode != mode:
1169
1170            if diff is not None:
1171                if 'before' not in diff:
1172                    diff['before'] = {}
1173                diff['before']['mode'] = '0%03o' % prev_mode
1174                if 'after' not in diff:
1175                    diff['after'] = {}
1176                diff['after']['mode'] = '0%03o' % mode
1177
1178            if self.check_mode:
1179                return True
1180            # FIXME: comparison against string above will cause this to be executed
1181            # every time
1182            try:
1183                if hasattr(os, 'lchmod'):
1184                    os.lchmod(b_path, mode)
1185                else:
1186                    if not os.path.islink(b_path):
1187                        os.chmod(b_path, mode)
1188                    else:
1189                        # Attempt to set the perms of the symlink but be
1190                        # careful not to change the perms of the underlying
1191                        # file while trying
1192                        underlying_stat = os.stat(b_path)
1193                        os.chmod(b_path, mode)
1194                        new_underlying_stat = os.stat(b_path)
1195                        if underlying_stat.st_mode != new_underlying_stat.st_mode:
1196                            os.chmod(b_path, stat.S_IMODE(underlying_stat.st_mode))
1197            except OSError as e:
1198                if os.path.islink(b_path) and e.errno in (
1199                    errno.EACCES,  # can't access symlink in sticky directory (stat)
1200                    errno.EPERM,  # can't set mode on symbolic links (chmod)
1201                    errno.EROFS,  # can't set mode on read-only filesystem
1202                ):
1203                    pass
1204                elif e.errno in (errno.ENOENT, errno.ELOOP):  # Can't set mode on broken symbolic links
1205                    pass
1206                else:
1207                    raise
1208            except Exception as e:
1209                path = to_text(b_path)
1210                self.fail_json(path=path, msg='chmod failed', details=to_native(e),
1211                               exception=traceback.format_exc())
1212
1213            path_stat = os.lstat(b_path)
1214            new_mode = stat.S_IMODE(path_stat.st_mode)
1215
1216            if new_mode != prev_mode:
1217                changed = True
1218        return changed
1219
1220    def set_attributes_if_different(self, path, attributes, changed, diff=None, expand=True):
1221
1222        if attributes is None:
1223            return changed
1224
1225        b_path = to_bytes(path, errors='surrogate_or_strict')
1226        if expand:
1227            b_path = os.path.expanduser(os.path.expandvars(b_path))
1228
1229        if self.check_file_absent_if_check_mode(b_path):
1230            return True
1231
1232        existing = self.get_file_attributes(b_path)
1233
1234        attr_mod = '='
1235        if attributes.startswith(('-', '+')):
1236            attr_mod = attributes[0]
1237            attributes = attributes[1:]
1238
1239        if existing.get('attr_flags', '') != attributes or attr_mod == '-':
1240            attrcmd = self.get_bin_path('chattr')
1241            if attrcmd:
1242                attrcmd = [attrcmd, '%s%s' % (attr_mod, attributes), b_path]
1243                changed = True
1244
1245                if diff is not None:
1246                    if 'before' not in diff:
1247                        diff['before'] = {}
1248                    diff['before']['attributes'] = existing.get('attr_flags')
1249                    if 'after' not in diff:
1250                        diff['after'] = {}
1251                    diff['after']['attributes'] = '%s%s' % (attr_mod, attributes)
1252
1253                if not self.check_mode:
1254                    try:
1255                        rc, out, err = self.run_command(attrcmd)
1256                        if rc != 0 or err:
1257                            raise Exception("Error while setting attributes: %s" % (out + err))
1258                    except Exception as e:
1259                        self.fail_json(path=to_text(b_path), msg='chattr failed',
1260                                       details=to_native(e), exception=traceback.format_exc())
1261        return changed
1262
1263    def get_file_attributes(self, path):
1264        output = {}
1265        attrcmd = self.get_bin_path('lsattr', False)
1266        if attrcmd:
1267            attrcmd = [attrcmd, '-vd', path]
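            # lsattr -vd output looks roughly like
            # ``123456 ----i--------e-- /some/path`` (version, flags, name);
            # the parsing below keeps the flag letters ('ie' in that example)
            # and the version field.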
1268            try:
1269                rc, out, err = self.run_command(attrcmd)
1270                if rc == 0:
1271                    res = out.split()
1272                    output['attr_flags'] = res[1].replace('-', '').strip()
1273                    output['version'] = res[0].strip()
1274                    output['attributes'] = format_attributes(output['attr_flags'])
1275            except Exception:
1276                pass
1277        return output
1278
1279    @classmethod
1280    def _symbolic_mode_to_octal(cls, path_stat, symbolic_mode):
1281        """
1282        This enables symbolic chmod string parsing as stated in the chmod man-page
1283
1284        This includes things like: "u=rw-x+X,g=r-x+X,o=r-x+X"
1285        """
1286
1287        new_mode = stat.S_IMODE(path_stat.st_mode)
1288
1289        # Now parse all symbolic modes
1290        for mode in symbolic_mode.split(','):
1291            # Per single mode. This always contains a '+', '-' or '='
1292            # Split it on that
1293            permlist = MODE_OPERATOR_RE.split(mode)
1294
1295            # And find all the operators
1296            opers = MODE_OPERATOR_RE.findall(mode)
1297
            # The user(s) the permissions apply to is the first element in the
1299            # 'permlist' list. Take that and remove it from the list.
1300            # An empty user or 'a' means 'all'.
1301            users = permlist.pop(0)
1302            use_umask = (users == '')
1303            if users == 'a' or users == '':
1304                users = 'ugo'
1305
1306            # Check if there are illegal characters in the user list
1307            # They can end up in 'users' because they are not split
1308            if USERS_RE.match(users):
1309                raise ValueError("bad symbolic permission for mode: %s" % mode)
1310
            # Now we have two lists of equal length, one contains the requested
1312            # permissions and one with the corresponding operators.
1313            for idx, perms in enumerate(permlist):
1314                # Check if there are illegal characters in the permissions
1315                if PERMS_RE.match(perms):
1316                    raise ValueError("bad symbolic permission for mode: %s" % mode)
1317
1318                for user in users:
1319                    mode_to_apply = cls._get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask)
1320                    new_mode = cls._apply_operation_to_mode(user, opers[idx], mode_to_apply, new_mode)
1321
1322        return new_mode
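
    # Worked example (illustrative): for a regular file whose current mode is
    # 0o666, the symbolic mode 'u+x,go-w' is parsed as two clauses; 'u+x' ORs
    # in S_IXUSR (0o100) giving 0o766, and 'go-w' clears S_IWGRP and S_IWOTH,
    # so the method returns 0o744.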
1323
1324    @staticmethod
1325    def _apply_operation_to_mode(user, operator, mode_to_apply, current_mode):
1326        if operator == '=':
1327            if user == 'u':
1328                mask = stat.S_IRWXU | stat.S_ISUID
1329            elif user == 'g':
1330                mask = stat.S_IRWXG | stat.S_ISGID
1331            elif user == 'o':
1332                mask = stat.S_IRWXO | stat.S_ISVTX
1333
1334            # mask out u, g, or o permissions from current_mode and apply new permissions
1335            inverse_mask = mask ^ PERM_BITS
1336            new_mode = (current_mode & inverse_mask) | mode_to_apply
1337        elif operator == '+':
1338            new_mode = current_mode | mode_to_apply
1339        elif operator == '-':
1340            new_mode = current_mode - (current_mode & mode_to_apply)
1341        return new_mode
1342
1343    @staticmethod
1344    def _get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask):
1345        prev_mode = stat.S_IMODE(path_stat.st_mode)
1346
1347        is_directory = stat.S_ISDIR(path_stat.st_mode)
1348        has_x_permissions = (prev_mode & EXEC_PERM_BITS) > 0
1349        apply_X_permission = is_directory or has_x_permissions
1350
1351        # Get the umask, if the 'user' part is empty, the effect is as if (a) were
1352        # given, but bits that are set in the umask are not affected.
1353        # We also need the "reversed umask" for masking
1354        umask = os.umask(0)
1355        os.umask(umask)
1356        rev_umask = umask ^ PERM_BITS
1357
1358        # Permission bits constants documented at:
1359        # http://docs.python.org/2/library/stat.html#stat.S_ISUID
1360        if apply_X_permission:
1361            X_perms = {
1362                'u': {'X': stat.S_IXUSR},
1363                'g': {'X': stat.S_IXGRP},
1364                'o': {'X': stat.S_IXOTH},
1365            }
1366        else:
1367            X_perms = {
1368                'u': {'X': 0},
1369                'g': {'X': 0},
1370                'o': {'X': 0},
1371            }
1372
1373        user_perms_to_modes = {
1374            'u': {
1375                'r': rev_umask & stat.S_IRUSR if use_umask else stat.S_IRUSR,
1376                'w': rev_umask & stat.S_IWUSR if use_umask else stat.S_IWUSR,
1377                'x': rev_umask & stat.S_IXUSR if use_umask else stat.S_IXUSR,
1378                's': stat.S_ISUID,
1379                't': 0,
1380                'u': prev_mode & stat.S_IRWXU,
1381                'g': (prev_mode & stat.S_IRWXG) << 3,
1382                'o': (prev_mode & stat.S_IRWXO) << 6},
1383            'g': {
1384                'r': rev_umask & stat.S_IRGRP if use_umask else stat.S_IRGRP,
1385                'w': rev_umask & stat.S_IWGRP if use_umask else stat.S_IWGRP,
1386                'x': rev_umask & stat.S_IXGRP if use_umask else stat.S_IXGRP,
1387                's': stat.S_ISGID,
1388                't': 0,
1389                'u': (prev_mode & stat.S_IRWXU) >> 3,
1390                'g': prev_mode & stat.S_IRWXG,
1391                'o': (prev_mode & stat.S_IRWXO) << 3},
1392            'o': {
1393                'r': rev_umask & stat.S_IROTH if use_umask else stat.S_IROTH,
1394                'w': rev_umask & stat.S_IWOTH if use_umask else stat.S_IWOTH,
1395                'x': rev_umask & stat.S_IXOTH if use_umask else stat.S_IXOTH,
1396                's': 0,
1397                't': stat.S_ISVTX,
1398                'u': (prev_mode & stat.S_IRWXU) >> 6,
1399                'g': (prev_mode & stat.S_IRWXG) >> 3,
1400                'o': prev_mode & stat.S_IRWXO},
1401        }
1402
1403        # Insert X_perms into user_perms_to_modes
1404        for key, value in X_perms.items():
1405            user_perms_to_modes[key].update(value)
1406
1407        def or_reduce(mode, perm):
1408            return mode | user_perms_to_modes[user][perm]
1409
1410        return reduce(or_reduce, perms, 0)
1411
1412    def set_fs_attributes_if_different(self, file_args, changed, diff=None, expand=True):
        # set modes, owners and context as needed
1414        changed = self.set_context_if_different(
1415            file_args['path'], file_args['secontext'], changed, diff
1416        )
1417        changed = self.set_owner_if_different(
1418            file_args['path'], file_args['owner'], changed, diff, expand
1419        )
1420        changed = self.set_group_if_different(
1421            file_args['path'], file_args['group'], changed, diff, expand
1422        )
1423        changed = self.set_mode_if_different(
1424            file_args['path'], file_args['mode'], changed, diff, expand
1425        )
1426        changed = self.set_attributes_if_different(
1427            file_args['path'], file_args['attributes'], changed, diff, expand
1428        )
1429        return changed
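
    # Typical usage in a module (illustrative; assumes 'module' is an
    # AnsibleModule whose argument_spec includes the common file arguments):
    #     file_args = module.load_file_common_arguments(module.params)
    #     changed = module.set_fs_attributes_if_different(file_args, changed)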
1430
1431    def check_file_absent_if_check_mode(self, file_path):
1432        return self.check_mode and not os.path.exists(file_path)
1433
1434    def set_directory_attributes_if_different(self, file_args, changed, diff=None, expand=True):
1435        return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
1436
1437    def set_file_attributes_if_different(self, file_args, changed, diff=None, expand=True):
1438        return self.set_fs_attributes_if_different(file_args, changed, diff, expand)
1439
1440    def add_atomic_move_warnings(self):
1441        for path in sorted(self._created_files):
1442            self.warn("File '{0}' created with default permissions '{1:o}'. The previous default was '666'. "
1443                      "Specify 'mode' to avoid this warning.".format(to_native(path), DEFAULT_PERM))
1444
1445    def add_path_info(self, kwargs):
1446        '''
1447        for results that are files, supplement the info about the file
1448        in the return path with stats about the file path.
1449        '''
1450
1451        path = kwargs.get('path', kwargs.get('dest', None))
1452        if path is None:
1453            return kwargs
1454        b_path = to_bytes(path, errors='surrogate_or_strict')
1455        if os.path.exists(b_path):
1456            (uid, gid) = self.user_and_group(path)
1457            kwargs['uid'] = uid
1458            kwargs['gid'] = gid
1459            try:
1460                user = pwd.getpwuid(uid)[0]
1461            except KeyError:
1462                user = str(uid)
1463            try:
1464                group = grp.getgrgid(gid)[0]
1465            except KeyError:
1466                group = str(gid)
1467            kwargs['owner'] = user
1468            kwargs['group'] = group
1469            st = os.lstat(b_path)
1470            kwargs['mode'] = '0%03o' % stat.S_IMODE(st[stat.ST_MODE])
1471            # secontext not yet supported
1472            if os.path.islink(b_path):
1473                kwargs['state'] = 'link'
1474            elif os.path.isdir(b_path):
1475                kwargs['state'] = 'directory'
1476            elif os.stat(b_path).st_nlink > 1:
1477                kwargs['state'] = 'hard'
1478            else:
1479                kwargs['state'] = 'file'
1480            if HAVE_SELINUX and self.selinux_enabled():
1481                kwargs['secontext'] = ':'.join(self.selinux_context(path))
1482            kwargs['size'] = st[stat.ST_SIZE]
1483        return kwargs
1484
1485    def _check_locale(self):
1486        '''
1487        Uses the locale module to test the currently set locale
1488        (per the LANG and LC_CTYPE environment settings)
1489        '''
1490        try:
1491            # setting the locale to '' uses the default locale
1492            # as it would be returned by locale.getdefaultlocale()
1493            locale.setlocale(locale.LC_ALL, '')
1494        except locale.Error:
1495            # fallback to the 'C' locale, which may cause unicode
1496            # issues but is preferable to simply failing because
1497            # of an unknown locale
1498            locale.setlocale(locale.LC_ALL, 'C')
1499            os.environ['LANG'] = 'C'
1500            os.environ['LC_ALL'] = 'C'
1501            os.environ['LC_MESSAGES'] = 'C'
1502        except Exception as e:
1503            self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" %
1504                           to_native(e), exception=traceback.format_exc())
1505
1506    def _handle_aliases(self, spec=None, param=None, option_prefix=''):
1507        if spec is None:
1508            spec = self.argument_spec
1509        if param is None:
1510            param = self.params
1511
1512        # this uses exceptions as it happens before we can safely call fail_json
1513        alias_warnings = []
1514        alias_results, self._legal_inputs = handle_aliases(spec, param, alias_warnings=alias_warnings)
1515        for option, alias in alias_warnings:
1516            warn('Both option %s and its alias %s are set.' % (option_prefix + option, option_prefix + alias))
1517
1518        deprecated_aliases = []
1519        for i in spec.keys():
1520            if 'deprecated_aliases' in spec[i].keys():
1521                for alias in spec[i]['deprecated_aliases']:
1522                    deprecated_aliases.append(alias)
1523
1524        for deprecation in deprecated_aliases:
1525            if deprecation['name'] in param.keys():
1526                deprecate("Alias '%s' is deprecated. See the module docs for more information" % deprecation['name'],
1527                          version=deprecation.get('version'), date=deprecation.get('date'),
1528                          collection_name=deprecation.get('collection_name'))
1529        return alias_results
1530
1531    def _handle_no_log_values(self, spec=None, param=None):
1532        if spec is None:
1533            spec = self.argument_spec
1534        if param is None:
1535            param = self.params
1536
1537        try:
1538            self.no_log_values.update(list_no_log_values(spec, param))
1539        except TypeError as te:
1540            self.fail_json(msg="Failure when processing no_log parameters. Module invocation will be hidden. "
1541                               "%s" % to_native(te), invocation={'module_args': 'HIDDEN DUE TO FAILURE'})
1542
1543        for message in list_deprecations(spec, param):
1544            deprecate(message['msg'], version=message.get('version'), date=message.get('date'),
1545                      collection_name=message.get('collection_name'))
1546
1547    def _check_arguments(self, spec=None, param=None, legal_inputs=None):
1548        self._syslog_facility = 'LOG_USER'
1549        unsupported_parameters = set()
1550        if spec is None:
1551            spec = self.argument_spec
1552        if param is None:
1553            param = self.params
1554        if legal_inputs is None:
1555            legal_inputs = self._legal_inputs
1556
1557        for k in list(param.keys()):
1558
1559            if k not in legal_inputs:
1560                unsupported_parameters.add(k)
1561
1562        for k in PASS_VARS:
1563            # handle setting internal properties from internal ansible vars
1564            param_key = '_ansible_%s' % k
1565            if param_key in param:
1566                if k in PASS_BOOLS:
1567                    setattr(self, PASS_VARS[k][0], self.boolean(param[param_key]))
1568                else:
1569                    setattr(self, PASS_VARS[k][0], param[param_key])
1570
1571                # clean up internal top level params:
1572                if param_key in self.params:
1573                    del self.params[param_key]
1574            else:
1575                # use defaults if not already set
1576                if not hasattr(self, PASS_VARS[k][0]):
1577                    setattr(self, PASS_VARS[k][0], PASS_VARS[k][1])
1578
1579        if unsupported_parameters:
1580            msg = "Unsupported parameters for (%s) module: %s" % (self._name, ', '.join(sorted(list(unsupported_parameters))))
1581            if self._options_context:
1582                msg += " found in %s." % " -> ".join(self._options_context)
1583            msg += " Supported parameters include: %s" % (', '.join(sorted(spec.keys())))
1584            self.fail_json(msg=msg)
1585        if self.check_mode and not self.supports_check_mode:
1586            self.exit_json(skipped=True, msg="remote module (%s) does not support check mode" % self._name)
1587
1588    def _count_terms(self, check, param=None):
1589        if param is None:
1590            param = self.params
1591        return count_terms(check, param)
1592
1593    def _check_mutually_exclusive(self, spec, param=None):
1594        if param is None:
1595            param = self.params
1596
1597        try:
1598            check_mutually_exclusive(spec, param)
1599        except TypeError as e:
1600            msg = to_native(e)
1601            if self._options_context:
1602                msg += " found in %s" % " -> ".join(self._options_context)
1603            self.fail_json(msg=msg)
1604
1605    def _check_required_one_of(self, spec, param=None):
1606        if spec is None:
1607            return
1608
1609        if param is None:
1610            param = self.params
1611
1612        try:
1613            check_required_one_of(spec, param)
1614        except TypeError as e:
1615            msg = to_native(e)
1616            if self._options_context:
1617                msg += " found in %s" % " -> ".join(self._options_context)
1618            self.fail_json(msg=msg)
1619
1620    def _check_required_together(self, spec, param=None):
1621        if spec is None:
1622            return
1623        if param is None:
1624            param = self.params
1625
1626        try:
1627            check_required_together(spec, param)
1628        except TypeError as e:
1629            msg = to_native(e)
1630            if self._options_context:
1631                msg += " found in %s" % " -> ".join(self._options_context)
1632            self.fail_json(msg=msg)
1633
1634    def _check_required_by(self, spec, param=None):
1635        if spec is None:
1636            return
1637        if param is None:
1638            param = self.params
1639
1640        try:
1641            check_required_by(spec, param)
1642        except TypeError as e:
1643            self.fail_json(msg=to_native(e))
1644
1645    def _check_required_arguments(self, spec=None, param=None):
1646        if spec is None:
1647            spec = self.argument_spec
1648        if param is None:
1649            param = self.params
1650
1651        try:
1652            check_required_arguments(spec, param)
1653        except TypeError as e:
1654            msg = to_native(e)
1655            if self._options_context:
1656                msg += " found in %s" % " -> ".join(self._options_context)
1657            self.fail_json(msg=msg)
1658
1659    def _check_required_if(self, spec, param=None):
        ''' ensure that parameters which are conditionally required are present '''
1661        if spec is None:
1662            return
1663        if param is None:
1664            param = self.params
1665
1666        try:
1667            check_required_if(spec, param)
1668        except TypeError as e:
1669            msg = to_native(e)
1670            if self._options_context:
1671                msg += " found in %s" % " -> ".join(self._options_context)
1672            self.fail_json(msg=msg)
1673
1674    def _check_argument_values(self, spec=None, param=None):
1675        ''' ensure all arguments have the requested values, and there are no stray arguments '''
1676        if spec is None:
1677            spec = self.argument_spec
1678        if param is None:
1679            param = self.params
1680        for (k, v) in spec.items():
1681            choices = v.get('choices', None)
1682            if choices is None:
1683                continue
1684            if isinstance(choices, SEQUENCETYPE) and not isinstance(choices, (binary_type, text_type)):
1685                if k in param:
1686                    # Allow one or more when type='list' param with choices
1687                    if isinstance(param[k], list):
1688                        diff_list = ", ".join([item for item in param[k] if item not in choices])
1689                        if diff_list:
1690                            choices_str = ", ".join([to_native(c) for c in choices])
1691                            msg = "value of %s must be one or more of: %s. Got no match for: %s" % (k, choices_str, diff_list)
1692                            if self._options_context:
1693                                msg += " found in %s" % " -> ".join(self._options_context)
1694                            self.fail_json(msg=msg)
1695                    elif param[k] not in choices:
1696                        # PyYaml converts certain strings to bools.  If we can unambiguously convert back, do so before checking
1697                        # the value.  If we can't figure this out, module author is responsible.
1698                        lowered_choices = None
1699                        if param[k] == 'False':
1700                            lowered_choices = lenient_lowercase(choices)
1701                            overlap = BOOLEANS_FALSE.intersection(choices)
1702                            if len(overlap) == 1:
1703                                # Extract from a set
1704                                (param[k],) = overlap
1705
1706                        if param[k] == 'True':
1707                            if lowered_choices is None:
1708                                lowered_choices = lenient_lowercase(choices)
1709                            overlap = BOOLEANS_TRUE.intersection(choices)
1710                            if len(overlap) == 1:
1711                                (param[k],) = overlap
1712
1713                        if param[k] not in choices:
1714                            choices_str = ", ".join([to_native(c) for c in choices])
1715                            msg = "value of %s must be one of: %s, got: %s" % (k, choices_str, param[k])
1716                            if self._options_context:
1717                                msg += " found in %s" % " -> ".join(self._options_context)
1718                            self.fail_json(msg=msg)
1719            else:
1720                msg = "internal error: choices for argument %s are not iterable: %s" % (k, choices)
1721                if self._options_context:
1722                    msg += " found in %s" % " -> ".join(self._options_context)
1723                self.fail_json(msg=msg)
1724
1725    def safe_eval(self, value, locals=None, include_exceptions=False):
1726        return safe_eval(value, locals, include_exceptions)
1727
1728    def _check_type_str(self, value, param=None, prefix=''):
1729        opts = {
1730            'error': False,
1731            'warn': False,
1732            'ignore': True
1733        }
1734
1735        # Ignore, warn, or error when converting to a string.
1736        allow_conversion = opts.get(self._string_conversion_action, True)
1737        try:
1738            return check_type_str(value, allow_conversion)
1739        except TypeError:
1740            common_msg = 'quote the entire value to ensure it does not change.'
1741            from_msg = '{0!r}'.format(value)
1742            to_msg = '{0!r}'.format(to_text(value))
1743
1744            if param is not None:
1745                if prefix:
1746                    param = '{0}{1}'.format(prefix, param)
1747
1748                from_msg = '{0}: {1!r}'.format(param, value)
1749                to_msg = '{0}: {1!r}'.format(param, to_text(value))
1750
1751            if self._string_conversion_action == 'error':
1752                msg = common_msg.capitalize()
1753                raise TypeError(to_native(msg))
1754            elif self._string_conversion_action == 'warn':
1755                msg = ('The value "{0}" (type {1.__class__.__name__}) was converted to "{2}" (type string). '
1756                       'If this does not look like what you expect, {3}').format(from_msg, value, to_msg, common_msg)
1757                self.warn(to_native(msg))
1758                return to_native(value, errors='surrogate_or_strict')
1759
1760    def _check_type_list(self, value):
1761        return check_type_list(value)
1762
1763    def _check_type_dict(self, value):
1764        return check_type_dict(value)
1765
1766    def _check_type_bool(self, value):
1767        return check_type_bool(value)
1768
1769    def _check_type_int(self, value):
1770        return check_type_int(value)
1771
1772    def _check_type_float(self, value):
1773        return check_type_float(value)
1774
1775    def _check_type_path(self, value):
1776        return check_type_path(value)
1777
1778    def _check_type_jsonarg(self, value):
1779        return check_type_jsonarg(value)
1780
1781    def _check_type_raw(self, value):
1782        return check_type_raw(value)
1783
1784    def _check_type_bytes(self, value):
1785        return check_type_bytes(value)
1786
1787    def _check_type_bits(self, value):
1788        return check_type_bits(value)
1789
1790    def _handle_options(self, argument_spec=None, params=None, prefix=''):
1791        ''' deal with options to create sub spec '''
1792        if argument_spec is None:
1793            argument_spec = self.argument_spec
1794        if params is None:
1795            params = self.params
1796
1797        for (k, v) in argument_spec.items():
1798            wanted = v.get('type', None)
1799            if wanted == 'dict' or (wanted == 'list' and v.get('elements', '') == 'dict'):
1800                spec = v.get('options', None)
1801                if v.get('apply_defaults', False):
1802                    if spec is not None:
1803                        if params.get(k) is None:
1804                            params[k] = {}
1805                    else:
1806                        continue
1807                elif spec is None or k not in params or params[k] is None:
1808                    continue
1809
1810                self._options_context.append(k)
1811
1812                if isinstance(params[k], dict):
1813                    elements = [params[k]]
1814                else:
1815                    elements = params[k]
1816
1817                for idx, param in enumerate(elements):
1818                    if not isinstance(param, dict):
1819                        self.fail_json(msg="value of %s must be of type dict or list of dict" % k)
1820
1821                    new_prefix = prefix + k
1822                    if wanted == 'list':
1823                        new_prefix += '[%d]' % idx
1824                    new_prefix += '.'
1825
1826                    self._set_fallbacks(spec, param)
1827                    options_aliases = self._handle_aliases(spec, param, option_prefix=new_prefix)
1828
1829                    options_legal_inputs = list(spec.keys()) + list(options_aliases.keys())
1830
1831                    self._check_arguments(spec, param, options_legal_inputs)
1832
1833                    # check exclusive early
1834                    if not self.bypass_checks:
1835                        self._check_mutually_exclusive(v.get('mutually_exclusive', None), param)
1836
1837                    self._set_defaults(pre=True, spec=spec, param=param)
1838
1839                    if not self.bypass_checks:
1840                        self._check_required_arguments(spec, param)
1841                        self._check_argument_types(spec, param, new_prefix)
1842                        self._check_argument_values(spec, param)
1843
1844                        self._check_required_together(v.get('required_together', None), param)
1845                        self._check_required_one_of(v.get('required_one_of', None), param)
1846                        self._check_required_if(v.get('required_if', None), param)
1847                        self._check_required_by(v.get('required_by', None), param)
1848
1849                    self._set_defaults(pre=False, spec=spec, param=param)
1850
1851                    # handle multi level options (sub argspec)
1852                    self._handle_options(spec, param, new_prefix)
1853                self._options_context.pop()
1854
1855    def _get_wanted_type(self, wanted, k):
1856        if not callable(wanted):
1857            if wanted is None:
1858                # Mostly we want to default to str.
1859                # For values set to None explicitly, return None instead as
1860                # that allows a user to unset a parameter
1861                wanted = 'str'
1862            try:
1863                type_checker = self._CHECK_ARGUMENT_TYPES_DISPATCHER[wanted]
1864            except KeyError:
1865                self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
1866        else:
            # set the type_checker to the callable, and reset wanted to the callable's name (or its type if it doesn't have one, a la MagicMock)
1868            type_checker = wanted
1869            wanted = getattr(wanted, '__name__', to_native(type(wanted)))
1870
1871        return type_checker, wanted
1872
1873    def _handle_elements(self, wanted, param, values):
1874        type_checker, wanted_name = self._get_wanted_type(wanted, param)
1875        validated_params = []
1876        # Get param name for strings so we can later display this value in a useful error message if needed
1877        # Only pass 'kwargs' to our checkers and ignore custom callable checkers
1878        kwargs = {}
1879        if wanted_name == 'str' and isinstance(wanted, string_types):
1880            if isinstance(param, string_types):
1881                kwargs['param'] = param
1882            elif isinstance(param, dict):
1883                kwargs['param'] = list(param.keys())[0]
1884        for value in values:
1885            try:
1886                validated_params.append(type_checker(value, **kwargs))
1887            except (TypeError, ValueError) as e:
1888                msg = "Elements value for option %s" % param
1889                if self._options_context:
1890                    msg += " found in '%s'" % " -> ".join(self._options_context)
1891                msg += " is of type %s and we were unable to convert to %s: %s" % (type(value), wanted_name, to_native(e))
1892                self.fail_json(msg=msg)
1893        return validated_params
1894
1895    def _check_argument_types(self, spec=None, param=None, prefix=''):
1896        ''' ensure all arguments have the requested type '''
1897
1898        if spec is None:
1899            spec = self.argument_spec
1900        if param is None:
1901            param = self.params
1902
1903        for (k, v) in spec.items():
1904            wanted = v.get('type', None)
1905            if k not in param:
1906                continue
1907
1908            value = param[k]
1909            if value is None:
1910                continue
1911
1912            type_checker, wanted_name = self._get_wanted_type(wanted, k)
1913            # Get param name for strings so we can later display this value in a useful error message if needed
1914            # Only pass 'kwargs' to our checkers and ignore custom callable checkers
1915            kwargs = {}
            if wanted_name == 'str' and isinstance(wanted, string_types):
1917                kwargs['param'] = list(param.keys())[0]
1918
1919                # Get the name of the parent key if this is a nested option
1920                if prefix:
1921                    kwargs['prefix'] = prefix
1922
1923            try:
1924                param[k] = type_checker(value, **kwargs)
1925                wanted_elements = v.get('elements', None)
1926                if wanted_elements:
1927                    if wanted != 'list' or not isinstance(param[k], list):
1928                        msg = "Invalid type %s for option '%s'" % (wanted_name, param)
1929                        if self._options_context:
1930                            msg += " found in '%s'." % " -> ".join(self._options_context)
1931                        msg += ", elements value check is supported only with 'list' type"
1932                        self.fail_json(msg=msg)
1933                    param[k] = self._handle_elements(wanted_elements, k, param[k])
1934
1935            except (TypeError, ValueError) as e:
1936                msg = "argument %s is of type %s" % (k, type(value))
1937                if self._options_context:
1938                    msg += " found in '%s'." % " -> ".join(self._options_context)
1939                msg += " and we were unable to convert to %s: %s" % (wanted_name, to_native(e))
1940                self.fail_json(msg=msg)
1941
1942    def _set_defaults(self, pre=True, spec=None, param=None):
1943        if spec is None:
1944            spec = self.argument_spec
1945        if param is None:
1946            param = self.params
1947        for (k, v) in spec.items():
1948            default = v.get('default', None)
1949
            # On the first pass (pre=True) defaults are only applied when they are
            # not None, so required items without a default stay unset; on the
            # second pass (pre=False) anything still missing is set to its default,
            # even if that default is None.
1952            if k not in param and (default is not None or not pre):
1953                # Make sure any default value for no_log fields are masked.
1954                if v.get('no_log', False) and default:
1955                    self.no_log_values.add(default)
1956
1957                param[k] = default
1958
1959    def _set_fallbacks(self, spec=None, param=None):
1960        if spec is None:
1961            spec = self.argument_spec
1962        if param is None:
1963            param = self.params
1964
1965        for (k, v) in spec.items():
1966            fallback = v.get('fallback', (None,))
1967            fallback_strategy = fallback[0]
1968            fallback_args = []
1969            fallback_kwargs = {}
1970            if k not in param and fallback_strategy is not None:
1971                for item in fallback[1:]:
1972                    if isinstance(item, dict):
1973                        fallback_kwargs = item
1974                    else:
1975                        fallback_args = item
1976                try:
1977                    fallback_value = fallback_strategy(*fallback_args, **fallback_kwargs)
1978                except AnsibleFallbackNotFound:
1979                    continue
1980                else:
1981                    if v.get('no_log', False) and fallback_value:
1982                        self.no_log_values.add(fallback_value)
1983                    param[k] = fallback_value
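
    # Illustrative fallback spec (shape assumed for the example): an
    # argument_spec entry such as
    #     url=dict(type='str', fallback=(env_fallback, ['FOO_URL']))
    # makes _set_fallbacks() call env_fallback('FOO_URL') when 'url' was not
    # supplied; AnsibleFallbackNotFound simply leaves the parameter unset.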
1984
1985    def _load_params(self):
1986        ''' read the input and set the params attribute.
1987
1988        This method is for backwards compatibility.  The guts of the function
1989        were moved out in 2.1 so that custom modules could read the parameters.
1990        '''
1991        # debug overrides to read args from file or cmdline
1992        self.params = _load_params()
1993
1994    def _log_to_syslog(self, msg):
1995        if HAS_SYSLOG:
1996            module = 'ansible-%s' % self._name
1997            facility = getattr(syslog, self._syslog_facility, syslog.LOG_USER)
1998            syslog.openlog(str(module), 0, facility)
1999            syslog.syslog(syslog.LOG_INFO, msg)
2000
2001    def debug(self, msg):
2002        if self._debug:
2003            self.log('[debug] %s' % msg)
2004
2005    def log(self, msg, log_args=None):
2006
2007        if not self.no_log:
2008
2009            if log_args is None:
2010                log_args = dict()
2011
2012            module = 'ansible-%s' % self._name
2013            if isinstance(module, binary_type):
2014                module = module.decode('utf-8', 'replace')
2015
2016            # 6655 - allow for accented characters
2017            if not isinstance(msg, (binary_type, text_type)):
2018                raise TypeError("msg should be a string (got %s)" % type(msg))
2019
2020            # We want journal to always take text type
2021            # syslog takes bytes on py2, text type on py3
2022            if isinstance(msg, binary_type):
2023                journal_msg = remove_values(msg.decode('utf-8', 'replace'), self.no_log_values)
2024            else:
2025                # TODO: surrogateescape is a danger here on Py3
2026                journal_msg = remove_values(msg, self.no_log_values)
2027
2028            if PY3:
2029                syslog_msg = journal_msg
2030            else:
2031                syslog_msg = journal_msg.encode('utf-8', 'replace')
2032
2033            if has_journal:
2034                journal_args = [("MODULE", os.path.basename(__file__))]
2035                for arg in log_args:
2036                    journal_args.append((arg.upper(), str(log_args[arg])))
2037                try:
2038                    if HAS_SYSLOG:
                        # If a syslog_facility was specified, convert the
                        # facility name to the facility code and pass it as the
                        # SYSLOG_FACILITY argument to journal.send()
2042                        facility = getattr(syslog,
2043                                           self._syslog_facility,
2044                                           syslog.LOG_USER) >> 3
2045                        journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
2046                                     SYSLOG_FACILITY=facility,
2047                                     **dict(journal_args))
2048                    else:
2049                        journal.send(MESSAGE=u"%s %s" % (module, journal_msg),
2050                                     **dict(journal_args))
2051                except IOError:
2052                    # fall back to syslog since logging to journal failed
2053                    self._log_to_syslog(syslog_msg)
2054            else:
2055                self._log_to_syslog(syslog_msg)
2056
2057    def _log_invocation(self):
2058        ''' log that ansible ran the module '''
2059        # TODO: generalize a separate log function and make log_invocation use it
2060        # Sanitize possible password argument when logging.
2061        log_args = dict()
2062
2063        for param in self.params:
2064            canon = self.aliases.get(param, param)
2065            arg_opts = self.argument_spec.get(canon, {})
2066            no_log = arg_opts.get('no_log', None)
2067
2068            # try to proactively capture password/passphrase fields
2069            if no_log is None and PASSWORD_MATCH.search(param):
2070                log_args[param] = 'NOT_LOGGING_PASSWORD'
2071                self.warn('Module did not set no_log for %s' % param)
2072            elif self.boolean(no_log):
2073                log_args[param] = 'NOT_LOGGING_PARAMETER'
2074            else:
2075                param_val = self.params[param]
2076                if not isinstance(param_val, (text_type, binary_type)):
2077                    param_val = str(param_val)
2078                elif isinstance(param_val, text_type):
2079                    param_val = param_val.encode('utf-8')
2080                log_args[param] = heuristic_log_sanitize(param_val, self.no_log_values)
2081
2082        msg = ['%s=%s' % (to_native(arg), to_native(val)) for arg, val in log_args.items()]
2083        if msg:
2084            msg = 'Invoked with %s' % ' '.join(msg)
2085        else:
2086            msg = 'Invoked'
2087
2088        self.log(msg, log_args=log_args)
2089
2090    def _set_cwd(self):
2091        try:
2092            cwd = os.getcwd()
2093            if not os.access(cwd, os.F_OK | os.R_OK):
2094                raise Exception()
2095            return cwd
2096        except Exception:
            # we don't have access to the cwd, probably because of sudo.
            # Try to move to a neutral location to prevent errors
2099            for cwd in [self.tmpdir, os.path.expandvars('$HOME'), tempfile.gettempdir()]:
2100                try:
2101                    if os.access(cwd, os.F_OK | os.R_OK):
2102                        os.chdir(cwd)
2103                        return cwd
2104                except Exception:
2105                    pass
2106        # we won't error here, as it may *not* be a problem,
2107        # and we don't want to break modules unnecessarily
2108        return None
2109
2110    def get_bin_path(self, arg, required=False, opt_dirs=None):
2111        '''
2112        Find system executable in PATH.
2113
2114        :param arg: The executable to find.
        :param required: if executable is not found and required is ``True``, fail_json() is called
2116        :param opt_dirs: optional list of directories to search in addition to ``PATH``
2117        :returns: if found return full path; otherwise return None
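
        Example (illustrative; assumes ``module`` is an ``AnsibleModule`` instance
        and git is installed on the managed host)::

            git_path = module.get_bin_path('git', required=True, opt_dirs=['/usr/local/bin'])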
2118        '''
2119
2120        bin_path = None
2121        try:
2122            bin_path = get_bin_path(arg=arg, opt_dirs=opt_dirs)
2123        except ValueError as e:
2124            if required:
2125                self.fail_json(msg=to_text(e))
2126            else:
2127                return bin_path
2128
2129        return bin_path
2130
2131    def boolean(self, arg):
2132        '''Convert the argument to a boolean'''
2133        if arg is None:
2134            return arg
2135
2136        try:
2137            return boolean(arg)
2138        except TypeError as e:
2139            self.fail_json(msg=to_native(e))
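
    # Illustrative behaviour of boolean() (typical inputs assumed): values such
    # as 'yes', 'on', '1', 'true' and True convert to True, while 'no', 'off',
    # '0', 'false' and False convert to False; unrecognized values raise
    # TypeError and therefore end up in fail_json() above.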
2140
2141    def jsonify(self, data):
2142        try:
2143            return jsonify(data)
2144        except UnicodeError as e:
2145            self.fail_json(msg=to_text(e))
2146
2147    def from_json(self, data):
2148        return json.loads(data)
2149
2150    def add_cleanup_file(self, path):
2151        if path not in self.cleanup_files:
2152            self.cleanup_files.append(path)
2153
2154    def do_cleanup_files(self):
2155        for path in self.cleanup_files:
2156            self.cleanup(path)
2157
2158    def _return_formatted(self, kwargs):
2159
2160        self.add_atomic_move_warnings()
2161        self.add_path_info(kwargs)
2162
2163        if 'invocation' not in kwargs:
2164            kwargs['invocation'] = {'module_args': self.params}
2165
2166        if 'warnings' in kwargs:
2167            if isinstance(kwargs['warnings'], list):
2168                for w in kwargs['warnings']:
2169                    self.warn(w)
2170            else:
2171                self.warn(kwargs['warnings'])
2172
2173        warnings = get_warning_messages()
2174        if warnings:
2175            kwargs['warnings'] = warnings
2176
2177        if 'deprecations' in kwargs:
2178            if isinstance(kwargs['deprecations'], list):
2179                for d in kwargs['deprecations']:
2180                    if isinstance(d, SEQUENCETYPE) and len(d) == 2:
2181                        self.deprecate(d[0], version=d[1])
2182                    elif isinstance(d, Mapping):
2183                        self.deprecate(d['msg'], version=d.get('version'), date=d.get('date'),
2184                                       collection_name=d.get('collection_name'))
2185                    else:
2186                        self.deprecate(d)  # pylint: disable=ansible-deprecated-no-version
2187            else:
2188                self.deprecate(kwargs['deprecations'])  # pylint: disable=ansible-deprecated-no-version
2189
2190        deprecations = get_deprecation_messages()
2191        if deprecations:
2192            kwargs['deprecations'] = deprecations
2193
2194        kwargs = remove_values(kwargs, self.no_log_values)
2195        print('\n%s' % self.jsonify(kwargs))
2196
2197    def exit_json(self, **kwargs):
2198        ''' return from the module, without error '''
2199
2200        self.do_cleanup_files()
2201        self._return_formatted(kwargs)
2202        sys.exit(0)
2203
2204    def fail_json(self, msg, **kwargs):
2205        ''' return from the module, with an error message '''
2206
2207        kwargs['failed'] = True
2208        kwargs['msg'] = msg
2209
2210        # Add traceback if debug or high verbosity and it is missing
2211        # NOTE: Badly named as exception, it really always has been a traceback
2212        if 'exception' not in kwargs and sys.exc_info()[2] and (self._debug or self._verbosity >= 3):
2213            if PY2:
2214                # On Python 2 this is the last (stack frame) exception and as such may be unrelated to the failure
2215                kwargs['exception'] = 'WARNING: The below traceback may *not* be related to the actual failure.\n' +\
2216                                      ''.join(traceback.format_tb(sys.exc_info()[2]))
2217            else:
2218                kwargs['exception'] = ''.join(traceback.format_tb(sys.exc_info()[2]))
2219
2220        self.do_cleanup_files()
2221        self._return_formatted(kwargs)
2222        sys.exit(1)
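
    # Illustrative end-of-module calls (names assumed for the example):
    #     module.exit_json(changed=True, dest='/etc/example.conf')
    #     module.fail_json(msg='could not write /etc/example.conf', rc=1)
    # Both print a JSON result via _return_formatted() and terminate the process.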
2223
2224    def fail_on_missing_params(self, required_params=None):
2225        if not required_params:
2226            return
2227        try:
2228            check_missing_parameters(self.params, required_params)
2229        except TypeError as e:
2230            self.fail_json(msg=to_native(e))
2231
2232    def digest_from_file(self, filename, algorithm):
        ''' Return hex digest of local file for a digest algorithm specified by name, or None if the file is not present. '''
2234        b_filename = to_bytes(filename, errors='surrogate_or_strict')
2235
2236        if not os.path.exists(b_filename):
2237            return None
2238        if os.path.isdir(b_filename):
2239            self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
2240
2241        # preserve old behaviour where the third parameter was a hash algorithm object
2242        if hasattr(algorithm, 'hexdigest'):
2243            digest_method = algorithm
2244        else:
2245            try:
2246                digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()
2247            except KeyError:
2248                self.fail_json(msg="Could not hash file '%s' with algorithm '%s'. Available algorithms: %s" %
2249                                   (filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
2250
2251        blocksize = 64 * 1024
2252        infile = open(os.path.realpath(b_filename), 'rb')
2253        block = infile.read(blocksize)
2254        while block:
2255            digest_method.update(block)
2256            block = infile.read(blocksize)
2257        infile.close()
2258        return digest_method.hexdigest()
2259
2260    def md5(self, filename):
2261        ''' Return MD5 hex digest of local file using digest_from_file().
2262
2263        Do not use this function unless you have no other choice for:
2264            1) Optional backwards compatibility
2265            2) Compatibility with a third party protocol
2266
2267        This function will not work on systems complying with FIPS-140-2.
2268
2269        Most uses of this function can use the module.sha1 function instead.
2270        '''
2271        if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
2272            raise ValueError('MD5 not available.  Possibly running in FIPS mode')
2273        return self.digest_from_file(filename, 'md5')
2274
2275    def sha1(self, filename):
2276        ''' Return SHA1 hex digest of local file using digest_from_file(). '''
2277        return self.digest_from_file(filename, 'sha1')
2278
2279    def sha256(self, filename):
2280        ''' Return SHA-256 hex digest of local file using digest_from_file(). '''
2281        return self.digest_from_file(filename, 'sha256')
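
    # Illustrative usage of the checksum helpers (assumes 'module' is an
    # AnsibleModule, the file exists, and sha512 is available in hashlib):
    #     checksum = module.sha256('/etc/hosts')
    #     other = module.digest_from_file('/etc/hosts', 'sha512')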
2282
2283    def backup_local(self, fn):
        '''make a date-marked backup of the specified file, return the backup destination path, or '' if the file does not exist'''
2285
2286        backupdest = ''
2287        if os.path.exists(fn):
2288            # backups named basename.PID.YYYY-MM-DD@HH:MM:SS~
2289            ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
2290            backupdest = '%s.%s.%s' % (fn, os.getpid(), ext)
2291
2292            try:
2293                self.preserved_copy(fn, backupdest)
2294            except (shutil.Error, IOError) as e:
2295                self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, to_native(e)))
2296
2297        return backupdest
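
    # Illustrative usage (path assumed to exist for the example): a call like
    #     backup_file = module.backup_local('/etc/ssh/sshd_config')
    # returns a path of the form '/etc/ssh/sshd_config.<pid>.2021-01-01@12:00:00~',
    # or '' when the source file does not exist.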
2298
2299    def cleanup(self, tmpfile):
2300        if os.path.exists(tmpfile):
2301            try:
2302                os.unlink(tmpfile)
2303            except OSError as e:
2304                sys.stderr.write("could not cleanup %s: %s" % (tmpfile, to_native(e)))
2305
2306    def preserved_copy(self, src, dest):
2307        """Copy a file with preserved ownership, permissions and context"""
2308
2309        # shutil.copy2(src, dst)
2310        #   Similar to shutil.copy(), but metadata is copied as well - in fact,
2311        #   this is just shutil.copy() followed by copystat(). This is similar
2312        #   to the Unix command cp -p.
2313        #
2314        # shutil.copystat(src, dst)
2315        #   Copy the permission bits, last access time, last modification time,
2316        #   and flags from src to dst. The file contents, owner, and group are
2317        #   unaffected. src and dst are path names given as strings.
2318
2319        shutil.copy2(src, dest)
2320
2321        # Set the context
2322        if self.selinux_enabled():
2323            context = self.selinux_context(src)
2324            self.set_context_if_different(dest, context, False)
2325
2326        # chown it
2327        try:
2328            dest_stat = os.stat(src)
2329            tmp_stat = os.stat(dest)
2330            if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
2331                os.chown(dest, dest_stat.st_uid, dest_stat.st_gid)
2332        except OSError as e:
2333            if e.errno != errno.EPERM:
2334                raise
2335
2336        # Set the attributes
2337        current_attribs = self.get_file_attributes(src)
2338        current_attribs = current_attribs.get('attr_flags', '')
2339        self.set_attributes_if_different(dest, current_attribs, True)
2340
2341    def atomic_move(self, src, dest, unsafe_writes=False):
        '''atomically move src to dest, copying attributes from dest when it exists.
        os.rename is used where possible because it is atomic; the rest of the function
        works around limitations and corner cases and preserves the selinux context when possible'''
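        # Typical call pattern in a module (illustrative; names assumed):
        #     tmp_fd, tmp_path = tempfile.mkstemp(dir=module.tmpdir)
        #     os.write(tmp_fd, b'new file content\n')
        #     os.close(tmp_fd)
        #     module.atomic_move(tmp_path, '/etc/example.conf', unsafe_writes=False)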
2345        context = None
2346        dest_stat = None
2347        b_src = to_bytes(src, errors='surrogate_or_strict')
2348        b_dest = to_bytes(dest, errors='surrogate_or_strict')
2349        if os.path.exists(b_dest):
2350            try:
2351                dest_stat = os.stat(b_dest)
2352
2353                # copy mode and ownership
2354                os.chmod(b_src, dest_stat.st_mode & PERM_BITS)
2355                os.chown(b_src, dest_stat.st_uid, dest_stat.st_gid)
2356
2357                # try to copy flags if possible
2358                if hasattr(os, 'chflags') and hasattr(dest_stat, 'st_flags'):
2359                    try:
2360                        os.chflags(b_src, dest_stat.st_flags)
2361                    except OSError as e:
2362                        for err in 'EOPNOTSUPP', 'ENOTSUP':
2363                            if hasattr(errno, err) and e.errno == getattr(errno, err):
2364                                break
2365                        else:
2366                            raise
2367            except OSError as e:
2368                if e.errno != errno.EPERM:
2369                    raise
2370            if self.selinux_enabled():
2371                context = self.selinux_context(dest)
2372        else:
2373            if self.selinux_enabled():
2374                context = self.selinux_default_context(dest)
2375
2376        creating = not os.path.exists(b_dest)
2377
2378        try:
            # Optimistically try a rename; it solves some corner cases, can avoid
            # useless work, and raises an exception if the move cannot be done atomically.
2380            os.rename(b_src, b_dest)
2381        except (IOError, OSError) as e:
2382            if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY, errno.EBUSY]:
                # only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied),
                # 16 (device or resource busy) and 26 (text file busy), which happens on vagrant synced folders
                # and other 'exotic' non posix file systems
2385                self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
2386            else:
2387                # Use bytes here.  In the shippable CI, this fails with
2388                # a UnicodeError with surrogateescape'd strings for an unknown
2389                # reason (doesn't happen in a local Ubuntu16.04 VM)
2390                b_dest_dir = os.path.dirname(b_dest)
2391                b_suffix = os.path.basename(b_dest)
2392                error_msg = None
2393                tmp_dest_name = None
2394                try:
2395                    tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=b'.ansible_tmp', dir=b_dest_dir, suffix=b_suffix)
2396                except (OSError, IOError) as e:
2397                    error_msg = 'The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), to_native(e))
2398                except TypeError:
2399                    # We expect that this is happening because python3.4.x and
2400                    # below can't handle byte strings in mkstemp().
2401                    # Traceback would end in something like:
2402                    #     file = _os.path.join(dir, pre + name + suf)
2403                    # TypeError: can't concat bytes to str
                    error_msg = ('Failed creating tmp file for atomic move.  This usually happens when using Python3 older than Python3.5. '
2405                                 'Please use Python2.x or Python3.5 or greater.')
2406                finally:
2407                    if error_msg:
2408                        if unsafe_writes:
2409                            self._unsafe_writes(b_src, b_dest)
2410                        else:
2411                            self.fail_json(msg=error_msg, exception=traceback.format_exc())
2412
2413                if tmp_dest_name:
2414                    b_tmp_dest_name = to_bytes(tmp_dest_name, errors='surrogate_or_strict')
2415
2416                    try:
2417                        try:
2418                            # close tmp file handle before file operations to prevent text file busy errors on vboxfs synced folders (windows host)
2419                            os.close(tmp_dest_fd)
2420                            # leaves tmp file behind when sudo and not root
2421                            try:
2422                                shutil.move(b_src, b_tmp_dest_name)
2423                            except OSError:
2424                                # cleanup will happen by 'rm' of tmpdir
2425                                # copy2 will preserve some metadata
2426                                shutil.copy2(b_src, b_tmp_dest_name)
2427
2428                            if self.selinux_enabled():
2429                                self.set_context_if_different(
2430                                    b_tmp_dest_name, context, False)
2431                            try:
2432                                tmp_stat = os.stat(b_tmp_dest_name)
2433                                if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
2434                                    os.chown(b_tmp_dest_name, dest_stat.st_uid, dest_stat.st_gid)
2435                            except OSError as e:
2436                                if e.errno != errno.EPERM:
2437                                    raise
2438                            try:
2439                                os.rename(b_tmp_dest_name, b_dest)
2440                            except (shutil.Error, OSError, IOError) as e:
2441                                if unsafe_writes and e.errno == errno.EBUSY:
2442                                    self._unsafe_writes(b_tmp_dest_name, b_dest)
2443                                else:
                                    self.fail_json(msg='Unable to make %s into %s, failed final rename from %s: %s' %
2445                                                       (src, dest, b_tmp_dest_name, to_native(e)), exception=traceback.format_exc())
2446                        except (shutil.Error, OSError, IOError) as e:
2447                            if unsafe_writes:
2448                                self._unsafe_writes(b_src, b_dest)
2449                            else:
2450                                self.fail_json(msg='Failed to replace file: %s to %s: %s' % (src, dest, to_native(e)), exception=traceback.format_exc())
2451                    finally:
2452                        self.cleanup(b_tmp_dest_name)
2453
2454        if creating:
2455            # Keep track of what files we create here with default permissions so later we can see if the permissions
2456            # are explicitly set with a follow up call to set_mode_if_different().
2457            #
2458            # Only warn if the module accepts 'mode' parameter so the user can take action.
2459            # If the module does not allow the user to set 'mode', then the warning is useless to the
2460            # user since it provides no actionable information.
2461            #
2462            if self.argument_spec.get('mode') and self.params.get('mode') is None:
2463                self._created_files.add(dest)
2464
2465            # make sure the file has the correct permissions
2466            # based on the current value of umask
2467            umask = os.umask(0)
2468            os.umask(umask)
2469            os.chmod(b_dest, DEFAULT_PERM & ~umask)
2470            try:
2471                os.chown(b_dest, os.geteuid(), os.getegid())
2472            except OSError:
2473                # We're okay with trying our best here.  If the user is not
2474                # root (or old Unices) they won't be able to chown.
2475                pass
2476
2477        if self.selinux_enabled():
2478            # rename might not preserve context
2479            self.set_context_if_different(dest, context, False)
2480
2481    def _unsafe_writes(self, src, dest):
        # sadly there are some situations where we cannot ensure atomicity; only when
        # the user insists and we get the appropriate error do we update the file unsafely
2484        try:
2485            out_dest = in_src = None
2486            try:
2487                out_dest = open(dest, 'wb')
2488                in_src = open(src, 'rb')
2489                shutil.copyfileobj(in_src, out_dest)
            finally:  # ensure files are closed in a 2.4 compatible way
2491                if out_dest:
2492                    out_dest.close()
2493                if in_src:
2494                    in_src.close()
2495        except (shutil.Error, OSError, IOError) as e:
2496            self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, to_native(e)),
2497                           exception=traceback.format_exc())
2498
2499    def _clean_args(self, args):
2500
2501        if not self._clean:
2502            # create a printable version of the command for use in reporting later,
2503            # which strips out things like passwords from the args list
2504            to_clean_args = args
2505            if PY2:
2506                if isinstance(args, text_type):
2507                    to_clean_args = to_bytes(args)
2508            else:
2509                if isinstance(args, binary_type):
2510                    to_clean_args = to_text(args)
2511            if isinstance(args, (text_type, binary_type)):
2512                to_clean_args = shlex.split(to_clean_args)
2513
2514            clean_args = []
2515            is_passwd = False
2516            for arg in (to_native(a) for a in to_clean_args):
2517                if is_passwd:
2518                    is_passwd = False
2519                    clean_args.append('********')
2520                    continue
2521                if PASSWD_ARG_RE.match(arg):
2522                    sep_idx = arg.find('=')
2523                    if sep_idx > -1:
2524                        clean_args.append('%s=********' % arg[:sep_idx])
2525                        continue
2526                    else:
2527                        is_passwd = True
2528                arg = heuristic_log_sanitize(arg, self.no_log_values)
2529                clean_args.append(arg)
2530            self._clean = ' '.join(shlex_quote(arg) for arg in clean_args)
2531
2532        return self._clean
2533
2534    def _restore_signal_handlers(self):
2535        # Reset SIGPIPE to SIG_DFL, otherwise in Python2.7 it gets ignored in subprocesses.
2536        if PY2 and sys.platform != 'win32':
2537            signal.signal(signal.SIGPIPE, signal.SIG_DFL)
2538
2539    def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None,
2540                    use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict',
2541                    expand_user_and_vars=True, pass_fds=None, before_communicate_callback=None, ignore_invalid_cwd=True):
2542        '''
2543        Execute a command, returns rc, stdout, and stderr.
2544
2545        :arg args: is the command to run
2546            * If args is a list, the command will be run with shell=False.
            * If args is a string and use_unsafe_shell=False it will split args into a list and run with shell=False
2548            * If args is a string and use_unsafe_shell=True it runs with shell=True.
        :kw check_rc: Whether to call fail_json in case of non-zero RC.
2550            Default False
2551        :kw close_fds: See documentation for subprocess.Popen(). Default True
2552        :kw executable: See documentation for subprocess.Popen(). Default None
2553        :kw data: If given, information to write to the stdin of the command
2554        :kw binary_data: If False, append a newline to the data.  Default False
2555        :kw path_prefix: If given, additional path to find the command in.
2556            This adds to the PATH environment variable so helper commands in
2557            the same directory can also be found
2558        :kw cwd: If given, working directory to run the command inside
2559        :kw use_unsafe_shell: See `args` parameter.  Default False
2560        :kw prompt_regex: Regex string (not a compiled regex) which can be
2561            used to detect prompts in the stdout which would otherwise cause
2562            the execution to hang (especially if no input data is specified)
2563        :kw environ_update: dictionary to *update* os.environ with
2564        :kw umask: Umask to be used when running the command. Default None
2565        :kw encoding: Since we return native strings, on python3 we need to
2566            know the encoding to use to transform from bytes to text.  If you
2567            want to always get bytes back, use encoding=None.  The default is
2568            "utf-8".  This does not affect transformation of strings given as
2569            args.
2570        :kw errors: Since we return native strings, on python3 we need to
2571            transform stdout and stderr from bytes to text.  If the bytes are
2572            undecodable in the ``encoding`` specified, then use this error
2573            handler to deal with them.  The default is ``surrogate_or_strict``
2574            which means that the bytes will be decoded using the
2575            surrogateescape error handler if available (available on all
2576            python3 versions we support) otherwise a UnicodeError traceback
2577            will be raised.  This does not affect transformations of strings
2578            given as args.
2579        :kw expand_user_and_vars: When ``use_unsafe_shell=False`` this argument
2580            dictates whether ``~`` is expanded in paths and environment variables
2581            are expanded before running the command. When ``True`` a string such as
2582            ``$SHELL`` will be expanded regardless of escaping. When ``False`` and
2583            ``use_unsafe_shell=False`` no path or variable expansion will be done.
2584        :kw pass_fds: When running on Python 3 this argument
2585            dictates which file descriptors should be passed
2586            to an underlying ``Popen`` constructor. On Python 2, this will
2587            set ``close_fds`` to False.
        :kw before_communicate_callback: This function will be called
            after the ``Popen`` object is created
            but before communicating with the process
            (the ``Popen`` object is passed to the callback as its first argument).
2592        :kw ignore_invalid_cwd: This flag indicates whether an invalid ``cwd``
2593            (non-existent or not a directory) should be ignored or should raise
2594            an exception.
2595        :returns: A 3-tuple of return code (integer), stdout (native string),
2596            and stderr (native string).  On python2, stdout and stderr are both
2597            byte strings.  On python3, stdout and stderr are text strings converted
2598            according to the encoding and errors parameters.  If you want byte
2599            strings on python3, use encoding=None to turn decoding to text off.
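
        Illustrative example (``module`` stands for an ``AnsibleModule``
        instance; the commands shown are only placeholders)::

            rc, out, err = module.run_command(['/bin/ls', '-l', '/tmp'])
            if rc != 0:
                module.fail_json(msg='ls failed', rc=rc, stdout=out, stderr=err)

            # a plain string is shlex-split and run without a shell unless
            # use_unsafe_shell=True is requested
            rc, out, err = module.run_command('echo $HOME', use_unsafe_shell=True)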
2600        '''
2601        # used by clean args later on
2602        self._clean = None
2603
2604        if not isinstance(args, (list, binary_type, text_type)):
2605            msg = "Argument 'args' to run_command must be list or string"
2606            self.fail_json(rc=257, cmd=args, msg=msg)
2607
2608        shell = False
2609        if use_unsafe_shell:
2610
2611            # stringify args for unsafe/direct shell usage
2612            if isinstance(args, list):
2613                args = b" ".join([to_bytes(shlex_quote(x), errors='surrogate_or_strict') for x in args])
2614            else:
2615                args = to_bytes(args, errors='surrogate_or_strict')
2616
            # prefer an explicitly requested executable, then a non-default
            # shell configured by the controller; otherwise fall back to
            # shell=True (/bin/sh)
2618            if executable:
2619                executable = to_bytes(executable, errors='surrogate_or_strict')
2620                args = [executable, b'-c', args]
2621            elif self._shell not in (None, '/bin/sh'):
2622                args = [to_bytes(self._shell, errors='surrogate_or_strict'), b'-c', args]
2623            else:
2624                shell = True
2625        else:
2626            # ensure args are a list
2627            if isinstance(args, (binary_type, text_type)):
2628                # On python2.6 and below, shlex has problems with text type
2629                # On python3, shlex needs a text type.
2630                if PY2:
2631                    args = to_bytes(args, errors='surrogate_or_strict')
2632                elif PY3:
2633                    args = to_text(args, errors='surrogateescape')
2634                args = shlex.split(args)
2635
2636            # expand ``~`` in paths, and all environment vars
2637            if expand_user_and_vars:
2638                args = [to_bytes(os.path.expanduser(os.path.expandvars(x)), errors='surrogate_or_strict') for x in args if x is not None]
2639            else:
2640                args = [to_bytes(x, errors='surrogate_or_strict') for x in args if x is not None]
2641
2642        prompt_re = None
2643        if prompt_regex:
2644            if isinstance(prompt_regex, text_type):
2645                if PY3:
2646                    prompt_regex = to_bytes(prompt_regex, errors='surrogateescape')
2647                elif PY2:
2648                    prompt_regex = to_bytes(prompt_regex, errors='surrogate_or_strict')
2649            try:
2650                prompt_re = re.compile(prompt_regex, re.MULTILINE)
2651            except re.error:
2652                self.fail_json(msg="invalid prompt regular expression given to run_command")
2653
2654        rc = 0
2655        msg = None
2656        st_in = None
2657
2658        # Manipulate the environ we'll send to the new process
2659        old_env_vals = {}
        # Updates can come both from the module-level attribute and from the per-call argument
2661        for key, val in self.run_command_environ_update.items():
2662            old_env_vals[key] = os.environ.get(key, None)
2663            os.environ[key] = val
2664        if environ_update:
2665            for key, val in environ_update.items():
2666                old_env_vals[key] = os.environ.get(key, None)
2667                os.environ[key] = val
2668        if path_prefix:
2669            old_env_vals['PATH'] = os.environ['PATH']
2670            os.environ['PATH'] = "%s:%s" % (path_prefix, os.environ['PATH'])
2671
2672        # If using test-module.py and explode, the remote lib path will resemble:
2673        #   /tmp/test_module_scratch/debug_dir/ansible/module_utils/basic.py
2674        # If using ansible or ansible-playbook with a remote system:
2675        #   /tmp/ansible_vmweLQ/ansible_modlib.zip/ansible/module_utils/basic.py
2676
2677        # Clean out python paths set by ansiballz
2678        if 'PYTHONPATH' in os.environ:
2679            pypaths = os.environ['PYTHONPATH'].split(':')
2680            pypaths = [x for x in pypaths
2681                       if not x.endswith('/ansible_modlib.zip') and
2682                       not x.endswith('/debug_dir')]
2683            os.environ['PYTHONPATH'] = ':'.join(pypaths)
2684            if not os.environ['PYTHONPATH']:
2685                del os.environ['PYTHONPATH']
2686
2687        if data:
2688            st_in = subprocess.PIPE
2689
2690        kwargs = dict(
2691            executable=executable,
2692            shell=shell,
2693            close_fds=close_fds,
2694            stdin=st_in,
2695            stdout=subprocess.PIPE,
2696            stderr=subprocess.PIPE,
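            # preexec_fn runs in the child process between fork() and exec()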
2697            preexec_fn=self._restore_signal_handlers,
2698        )
2699        if PY3 and pass_fds:
2700            kwargs["pass_fds"] = pass_fds
2701        elif PY2 and pass_fds:
2702            kwargs['close_fds'] = False
2703
2704        # store the pwd
2705        prev_dir = os.getcwd()
2706
2707        # make sure we're in the right working directory
2708        if cwd:
2709            if os.path.isdir(cwd):
2710                cwd = to_bytes(os.path.abspath(os.path.expanduser(cwd)), errors='surrogate_or_strict')
2711                kwargs['cwd'] = cwd
2712                try:
2713                    os.chdir(cwd)
2714                except (OSError, IOError) as e:
2715                    self.fail_json(rc=e.errno, msg="Could not chdir to %s, %s" % (cwd, to_native(e)),
2716                                   exception=traceback.format_exc())
2717            elif not ignore_invalid_cwd:
2718                self.fail_json(msg="Provided cwd is not a valid directory: %s" % cwd)
2719
2720        old_umask = None
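        # Apply the caller-supplied umask (inherited by the child process);
        # os.umask() returns the previous mask so it can be restored later.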
2721        if umask:
2722            old_umask = os.umask(umask)
2723
2724        try:
2725            if self._debug:
2726                self.log('Executing: ' + self._clean_args(args))
2727            cmd = subprocess.Popen(args, **kwargs)
2728            if before_communicate_callback:
2729                before_communicate_callback(cmd)
2730
2731            # the communication logic here is essentially taken from that
2732            # of the _communicate() function in ssh.py
2733
2734            stdout = b''
2735            stderr = b''
2736            try:
2737                selector = selectors.DefaultSelector()
2738            except OSError:
2739                # Failed to detect default selector for the given platform
2740                # Select PollSelector which is supported by major platforms
2741                selector = selectors.PollSelector()
2742
2743            selector.register(cmd.stdout, selectors.EVENT_READ)
2744            selector.register(cmd.stderr, selectors.EVENT_READ)
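            # On POSIX, put both pipes into non-blocking mode so that the
            # read() calls below return whatever is currently buffered instead
            # of blocking until the pipe is closed.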
2745            if os.name == 'posix':
2746                fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
2747                fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
2748
2749            if data:
2750                if not binary_data:
2751                    data += '\n'
2752                if isinstance(data, text_type):
2753                    data = to_bytes(data)
2754                cmd.stdin.write(data)
2755                cmd.stdin.close()
2756
2757            while True:
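                # Drain stdout and stderr while the process runs; each pipe is
                # unregistered from the selector once it reports EOF (empty read).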
2758                events = selector.select(1)
2759                for key, event in events:
2760                    b_chunk = key.fileobj.read()
                    if b_chunk == b'':
2762                        selector.unregister(key.fileobj)
2763                    if key.fileobj == cmd.stdout:
2764                        stdout += b_chunk
2765                    elif key.fileobj == cmd.stderr:
2766                        stderr += b_chunk
2767                # if we're checking for prompts, do it now
2768                if prompt_re:
2769                    if prompt_re.search(stdout) and not data:
2770                        if encoding:
2771                            stdout = to_native(stdout, encoding=encoding, errors=errors)
2772                        return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
2773                # only break out if no pipes are left to read or
2774                # the pipes are completely read and
2775                # the process is terminated
2776                if (not events or not selector.get_map()) and cmd.poll() is not None:
2777                    break
                # No pipes are left to read but the process has not terminated
                # yet; only then is it safe to wait for it to finish.
                # NOTE: cmd.poll() is necessarily None here when no selectors
                # are left, since the branch above would have broken otherwise.
2781                elif not selector.get_map() and cmd.poll() is None:
2782                    cmd.wait()
2783                    # The process is terminated. Since no pipes to read from are
2784                    # left, there is no need to call select() again.
2785                    break
2786
2787            cmd.stdout.close()
2788            cmd.stderr.close()
2789            selector.close()
2790
2791            rc = cmd.returncode
2792        except (OSError, IOError) as e:
2793            self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(e)))
2794            self.fail_json(rc=e.errno, msg=to_native(e), cmd=self._clean_args(args))
2795        except Exception as e:
2796            self.log("Error Executing CMD:%s Exception:%s" % (self._clean_args(args), to_native(traceback.format_exc())))
2797            self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=self._clean_args(args))
2798
2799        # Restore env settings
2800        for key, val in old_env_vals.items():
2801            if val is None:
2802                del os.environ[key]
2803            else:
2804                os.environ[key] = val
2805
2806        if old_umask:
2807            os.umask(old_umask)
2808
2809        if rc != 0 and check_rc:
2810            msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
2811            self.fail_json(cmd=self._clean_args(args), rc=rc, stdout=stdout, stderr=stderr, msg=msg)
2812
2813        # reset the pwd
2814        os.chdir(prev_dir)
2815
2816        if encoding is not None:
2817            return (rc, to_native(stdout, encoding=encoding, errors=errors),
2818                    to_native(stderr, encoding=encoding, errors=errors))
2819
2820        return (rc, stdout, stderr)
2821
    def append_to_file(self, filename, str):
        filename = os.path.expandvars(os.path.expanduser(filename))
        with open(filename, 'a') as fh:
            fh.write(str)
2827
2828    def bytes_to_human(self, size):
2829        return bytes_to_human(size)
2830
2831    # for backwards compatibility
2832    pretty_bytes = bytes_to_human
2833
2834    def human_to_bytes(self, number, isbits=False):
2835        return human_to_bytes(number, isbits)
2836
2837    #
2838    # Backwards compat
2839    #
2840
2841    # In 2.0, moved from inside the module to the toplevel
2842    is_executable = is_executable
2843
2844    @staticmethod
2845    def get_buffer_size(fd):
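        '''Best-effort pipe buffer size for ``fd``: try the Linux F_GETPIPE_SZ
        fcntl first, then ``select.PIPE_BUF``, then a conservative default.'''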
2846        try:
            # 1032 == F_GETPIPE_SZ (Linux fcntl that reports the pipe buffer size)
2848            buffer_size = fcntl.fcntl(fd, 1032)
2849        except Exception:
2850            try:
2851                # not as exact as above, but should be good enough for most platforms that fail the previous call
2852                buffer_size = select.PIPE_BUF
2853            except Exception:
                buffer_size = 9000  # use a sane default just in case
2855
2856        return buffer_size
2857
2858
2859def get_module_path():
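    '''Return the directory that contains this file at runtime.'''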
2860    return os.path.dirname(os.path.realpath(__file__))
2861