# coding: utf-8
# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import base64
import json
import os
import random
import re
import stat
import tempfile
from abc import ABCMeta, abstractmethod

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleActionSkip, AnsibleActionFail, AnsibleAuthenticationFailure
from ansible.executor.module_common import modify_module
from ansible.executor.interpreter_discovery import discover_interpreter, InterpreterDiscoveryRequiredError
from ansible.module_utils.common._collections_compat import Sequence
from ansible.module_utils.json_utils import _filter_non_json_lines
from ansible.module_utils.six import binary_type, string_types, text_type, iteritems, with_metaclass
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.utils.jsonify import jsonify
from ansible.release import __version__
from ansible.utils.collection_loader import resource_from_fqcr
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import wrap_var, AnsibleUnsafeText
from ansible.vars.clean import remove_internal_keys
from ansible.utils.plugin_docs import get_versioned_doclink

display = Display()


class ActionBase(with_metaclass(ABCMeta, object)):

    '''
    This class is the base class for all action plugins, and defines
    code common to all actions. The base class handles the connection
    by putting/getting files and executing commands based on the current
    action in use.
    '''

    # A set of valid arguments
    _VALID_ARGS = frozenset([])

    def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
        self._task = task
        self._connection = connection
        self._play_context = play_context
        self._loader = loader
        self._templar = templar
        self._shared_loader_obj = shared_loader_obj
        self._cleanup_remote_tmp = False

        self._supports_check_mode = True
        self._supports_async = False

        # interpreter discovery state
        self._discovered_interpreter_key = None
        self._discovered_interpreter = False
        self._discovery_deprecation_warnings = []
        self._discovery_warnings = []

        # Backwards compat: self._display isn't really needed, just import the global display and use that.
        self._display = display

        self._used_interpreter = None

    @abstractmethod
    def run(self, tmp=None, task_vars=None):
        """ Action Plugins should implement this method to perform their
        tasks.  Everything else in this base class is a helper method for the
        action plugin to do that.

        :kwarg tmp: Deprecated parameter.  This is no longer used.  An action plugin that calls
            another one and wants to use the same remote tmp for both should set
            self._connection._shell.tmpdir rather than this parameter.
        :kwarg task_vars: The variables (host vars, group vars, config vars,
            etc) associated with this task.
        :returns: dictionary of results from the module

        Implementors of action modules may find the following variables especially useful:

        * Module parameters.  These are stored in self._task.args
        """

        # does not default to {'changed': False, 'failed': False}, as it breaks async
        result = {}

        if tmp is not None:
            result['warning'] = ['ActionModule.run() no longer honors the tmp parameter. Action'
                                 ' plugins should set self._connection._shell.tmpdir to share'
                                 ' the tmpdir']
        del tmp

        if self._task.async_val and not self._supports_async:
            raise AnsibleActionFail('async is not supported for this task.')
        elif self._play_context.check_mode and not self._supports_check_mode:
            raise AnsibleActionSkip('check mode is not supported for this task.')
        elif self._task.async_val and self._play_context.check_mode:
            raise AnsibleActionFail('check mode and async cannot be used on same task.')

        # Error if invalid argument is passed
        if self._VALID_ARGS:
            task_opts = frozenset(self._task.args.keys())
            bad_opts = task_opts.difference(self._VALID_ARGS)
            if bad_opts:
                raise AnsibleActionFail('Invalid options for %s: %s' % (self._task.action, ','.join(list(bad_opts))))

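        # create the remote tmp dir up front when this action declares that it transfers
        # files (see _early_needs_tmp_path() and the TRANSFERS_FILES class attribute)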
        if self._connection._shell.tmpdir is None and self._early_needs_tmp_path():
            self._make_tmp_path()

        return result

    def cleanup(self, force=False):
        """Method to perform a clean up at the end of an action plugin execution

        By default this is designed to clean up the shell tmpdir, and is toggled based on whether
        async is in use

        Action plugins may override this if they deem necessary, but should still call this method
        via super
        """
        if force or not self._task.async_val:
            self._remove_tmp_path(self._connection._shell.tmpdir)

    def get_plugin_option(self, plugin, option, default=None):
        """Helper to get an option from a plugin without having to use
        the try/except dance everywhere to set a default
        """
        try:
            return plugin.get_option(option)
        except (AttributeError, KeyError):
            return default

    def get_become_option(self, option, default=None):
        return self.get_plugin_option(self._connection.become, option, default=default)

    def get_connection_option(self, option, default=None):
        return self.get_plugin_option(self._connection, option, default=default)

    def get_shell_option(self, option, default=None):
        return self.get_plugin_option(self._connection._shell, option, default=default)

    def _remote_file_exists(self, path):
        cmd = self._connection._shell.exists(path)
        result = self._low_level_execute_command(cmd=cmd, sudoable=True)
        if result['rc'] == 0:
            return True
        return False

    def _configure_module(self, module_name, module_args, task_vars):
        '''
        Handles the loading and templating of the module code through the
        modify_module() function.
        '''
        if self._task.delegate_to:
            use_vars = task_vars.get('ansible_delegated_vars')[self._task.delegate_to]
        else:
            use_vars = task_vars

        split_module_name = module_name.split('.')
        collection_name = '.'.join(split_module_name[0:2]) if len(split_module_name) > 2 else ''
        leaf_module_name = resource_from_fqcr(module_name)

        # Search module path(s) for named module.
        for mod_type in self._connection.module_implementation_preferences:
            # Check to determine if PowerShell modules are supported, and apply
            # some fixes (hacks) to module name + args.
            if mod_type == '.ps1':
                # FIXME: This should be temporary and moved to an exec subsystem plugin where we can define the mapping
                # for each subsystem.
                win_collection = 'ansible.windows'
                rewrite_collection_names = ['ansible.builtin', 'ansible.legacy', '']
                # async_status, win_stat, win_file, win_copy, and win_ping are not exactly like
                # their python counterparts, but they are compatible enough for our
                # internal usage
                # NB: we only rewrite the module if it's not being called by the user (eg, an action calling something else)
                # and if it's unqualified or FQ to a builtin
                if leaf_module_name in ('stat', 'file', 'copy', 'ping') and \
                        collection_name in rewrite_collection_names and self._task.action != module_name:
                    module_name = '%s.win_%s' % (win_collection, leaf_module_name)
                elif leaf_module_name == 'async_status' and collection_name in rewrite_collection_names:
                    module_name = '%s.%s' % (win_collection, leaf_module_name)

                # TODO: move this tweak down to the modules, not extensible here
                # Remove extra quotes surrounding path parameters before sending to module.
                if leaf_module_name in ['win_stat', 'win_file', 'win_copy', 'slurp'] and module_args and \
                        hasattr(self._connection._shell, '_unquote'):
                    for key in ('src', 'dest', 'path'):
                        if key in module_args:
                            module_args[key] = self._connection._shell._unquote(module_args[key])

            result = self._shared_loader_obj.module_loader.find_plugin_with_context(module_name, mod_type, collection_list=self._task.collections)

            if not result.resolved:
                if result.redirect_list and len(result.redirect_list) > 1:
                    # take the last one in the redirect list, we may have successfully jumped through N other redirects
                    target_module_name = result.redirect_list[-1]

                    raise AnsibleError("The module {0} was redirected to {1}, which could not be loaded.".format(module_name, target_module_name))

            module_path = result.plugin_resolved_path
            if module_path:
                break
        else:  # This is a for-else: http://bit.ly/1ElPkyg
            raise AnsibleError("The module %s was not found in configured module paths" % (module_name))

        # insert shared code and arguments into the module
        final_environment = dict()
        self._compute_environment_string(final_environment)

        become_kwargs = {}
        if self._connection.become:
            become_kwargs['become'] = True
            become_kwargs['become_method'] = self._connection.become.name
            become_kwargs['become_user'] = self._connection.become.get_option('become_user',
                                                                              playcontext=self._play_context)
            become_kwargs['become_password'] = self._connection.become.get_option('become_pass',
                                                                                  playcontext=self._play_context)
            become_kwargs['become_flags'] = self._connection.become.get_option('become_flags',
                                                                               playcontext=self._play_context)

        # modify_module will exit early if interpreter discovery is required; re-run after if necessary
        for dummy in (1, 2):
            try:
                (module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args, self._templar,
                                                                            task_vars=use_vars,
                                                                            module_compression=self._play_context.module_compression,
                                                                            async_timeout=self._task.async_val,
                                                                            environment=final_environment,
                                                                            remote_is_local=bool(getattr(self._connection, '_remote_is_local', False)),
                                                                            **become_kwargs)
                break
            except InterpreterDiscoveryRequiredError as idre:
                self._discovered_interpreter = AnsibleUnsafeText(discover_interpreter(
                    action=self,
                    interpreter_name=idre.interpreter_name,
                    discovery_mode=idre.discovery_mode,
                    task_vars=use_vars))

                # update the local task_vars with the discovered interpreter (which might be None);
                # we'll propagate back to the controller in the task result
                discovered_key = 'discovered_interpreter_%s' % idre.interpreter_name

                # update the local vars copy for the retry
                use_vars['ansible_facts'][discovered_key] = self._discovered_interpreter

                # TODO: this condition prevents 'wrong host' from being updated
                # but in future we would want to be able to update 'delegated host facts'
                # irrespective of task settings
                if not self._task.delegate_to or self._task.delegate_facts:
                    # store in local task_vars facts collection for the retry and any other usages in this worker
                    task_vars['ansible_facts'][discovered_key] = self._discovered_interpreter
                    # preserve this so _execute_module can propagate back to controller as a fact
                    self._discovered_interpreter_key = discovered_key
                else:
                    task_vars['ansible_delegated_vars'][self._task.delegate_to]['ansible_facts'][discovered_key] = self._discovered_interpreter

        return (module_style, module_shebang, module_data, module_path)

    def _compute_environment_string(self, raw_environment_out=None):
        '''
        Builds the environment string to be used when executing the remote task.
        '''

        final_environment = dict()
        if self._task.environment is not None:
            environments = self._task.environment
            if not isinstance(environments, list):
                environments = [environments]

            # The order of environments matters to make sure we merge
            # in the parent's values first, so that those set on the block and
            # then the task 'win' in precedence
            for environment in environments:
                if environment is None or len(environment) == 0:
                    continue
                temp_environment = self._templar.template(environment)
                if not isinstance(temp_environment, dict):
                    raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment)))
                # very deliberately using update here instead of combine_vars, as
                # these environment settings should not need to merge sub-dicts
                final_environment.update(temp_environment)

        if len(final_environment) > 0:
            final_environment = self._templar.template(final_environment)

        if isinstance(raw_environment_out, dict):
            raw_environment_out.clear()
            raw_environment_out.update(final_environment)

        return self._connection._shell.env_prefix(**final_environment)

    def _early_needs_tmp_path(self):
        '''
        Determines if a tmp path should be created before the action is executed.
        '''

        return getattr(self, 'TRANSFERS_FILES', False)

    def _is_pipelining_enabled(self, module_style, wrap_async=False):
        '''
        Determines whether pipelining is both required and possible for this module execution.
        '''

        try:
            is_enabled = self._connection.get_option('pipelining')
        except (KeyError, AttributeError, ValueError):
            is_enabled = self._play_context.pipelining

        # winrm supports async pipeline
        # TODO: make other class property 'has_async_pipelining' to separate cases
        always_pipeline = self._connection.always_pipeline_modules

        # su does not work with pipelining
        # TODO: add has_pipelining class prop to become plugins
        become_exception = (self._connection.become.name if self._connection.become else '') != 'su'

        # all of these conditions must be true for pipelining to be used
        conditions = [
            self._connection.has_pipelining,    # connection class supports it
            is_enabled or always_pipeline,      # enabled via config or forced via connection (eg winrm)
            module_style == "new",              # old style modules do not support pipelining
            not C.DEFAULT_KEEP_REMOTE_FILES,    # user does not want to keep remote files
            not wrap_async or always_pipeline,  # async does not normally support pipelining unless it does (eg winrm)
            become_exception,
        ]

        return all(conditions)

    def _get_admin_users(self):
        '''
        Returns a list of admin users that are configured for the current shell
        plugin
        '''

        return self.get_shell_option('admin_users', ['root'])

    def _get_remote_user(self):
        ''' consistently get the 'remote_user' for the action plugin '''
        # TODO: use 'current user running ansible' as fallback when moving away from play_context
        # pwd.getpwuid(os.getuid()).pw_name
        remote_user = None
        try:
            remote_user = self._connection.get_option('remote_user')
        except KeyError:
            # plugin does not have a remote_user option, fall back to the default and/or play_context
            remote_user = getattr(self._connection, 'default_user', None) or self._play_context.remote_user
        except AttributeError:
            # plugin does not use config system, fallback to old play_context
            remote_user = self._play_context.remote_user
        return remote_user

    def _is_become_unprivileged(self):
        '''
        Returns True when the become user is not the connection user and is not
        one of the shell plugin's configured admin users
        '''
        # if we don't use become then we know we aren't switching to a
        # different unprivileged user
        if not self._connection.become:
            return False

        # if we use become and the user is not an admin (or same user) then
        # we need to return become_unprivileged as True
        admin_users = self._get_admin_users()
        remote_user = self._get_remote_user()
        become_user = self.get_become_option('become_user')
        return bool(become_user and become_user not in admin_users + [remote_user])

    def _make_tmp_path(self, remote_user=None):
        '''
        Create and return a temporary path on a remote box.
        '''

        # Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
        # As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
        # This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
        if getattr(self._connection, '_remote_is_local', False):
            tmpdir = C.DEFAULT_LOCAL_TMP
        else:
            # NOTE: shell plugins should populate this setting anyway, but they don't do remote expansion, which
            # we need for 'non posix' systems like cloud-init and solaris
            tmpdir = self._remote_expand_user(self.get_shell_option('remote_tmp', default='~/.ansible/tmp'), sudoable=False)

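        # NOTE: when becoming an unprivileged user we ask the shell plugin for a 'system' tmp
        # location (mkdtemp(system=True)) rather than the connecting user's remote_tmp, since the
        # become user may not be able to use a directory under another user's home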
        become_unprivileged = self._is_become_unprivileged()
        basefile = self._connection._shell._generate_temp_dir_name()
        cmd = self._connection._shell.mkdtemp(basefile=basefile, system=become_unprivileged, tmpdir=tmpdir)
        result = self._low_level_execute_command(cmd, sudoable=False)

        # error handling on this seems a little aggressive?
        if result['rc'] != 0:
            if result['rc'] == 5:
                output = 'Authentication failure.'
            elif result['rc'] == 255 and self._connection.transport in ('ssh',):

                if self._play_context.verbosity > 3:
                    output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
                else:
                    output = (u'SSH encountered an unknown error during the connection. '
                              'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')

            elif u'No space left on device' in result['stderr']:
                output = result['stderr']
            else:
                output = ('Failed to create temporary directory. '
                          'In some cases, you may have been able to authenticate and did not have permissions on the target directory. '
                          'Consider changing the remote tmp path in ansible.cfg to a path rooted in "/tmp", for more error information use -vvv. '
                          'Failed command was: %s, exited with result %d' % (cmd, result['rc']))
            if 'stdout' in result and result['stdout'] != u'':
                output = output + u", stdout output: %s" % result['stdout']
            if self._play_context.verbosity > 3 and 'stderr' in result and result['stderr'] != u'':
                output += u", stderr output: %s" % result['stderr']
            raise AnsibleConnectionFailure(output)
        else:
            self._cleanup_remote_tmp = True

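        # the mkdtemp command's output is expected to contain '<basefile>=<created path>';
        # the last line after that marker is taken as the temporary directory path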
        try:
            stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1)
            rc = self._connection._shell.join_path(stdout_parts[-1], u'').splitlines()[-1]
        except IndexError:
            # stdout was empty or just space, set to / to trigger error in next if
            rc = '/'

        # Catch failure conditions, files should never be
        # written to locations in /.
        if rc == '/':
            raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))

        self._connection._shell.tmpdir = rc

        return rc

    def _should_remove_tmp_path(self, tmp_path):
        '''Determine if temporary path should be deleted or kept by user request/config'''
        return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path

    def _remove_tmp_path(self, tmp_path):
        '''Remove a temporary path we created. '''

        if tmp_path is None and self._connection._shell.tmpdir:
            tmp_path = self._connection._shell.tmpdir

        if self._should_remove_tmp_path(tmp_path):
            cmd = self._connection._shell.remove(tmp_path, recurse=True)
            # If we have gotten here we have a working ssh configuration.
            # If ssh breaks we could leave tmp directories out on the remote system.
            tmp_rm_res = self._low_level_execute_command(cmd, sudoable=False)

            if tmp_rm_res.get('rc', 0) != 0:
                display.warning('Error deleting remote temporary files (rc: %s, stderr: %s)'
                                % (tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.')))
            else:
                self._connection._shell.tmpdir = None

    def _transfer_file(self, local_path, remote_path):
        """
        Copy a file from the controller to a remote path

        :arg local_path: Path on controller to transfer
        :arg remote_path: Path on the remote system to transfer into

        .. warning::
            * When you use this function you likely want to use fixup_perms2() on the
              remote_path to make sure that the remote file is readable when the user becomes
              a non-privileged user.
            * If you use fixup_perms2() on the file and copy or move the file into place, you will
              need to then remove filesystem acls on the file once it has been copied into place by
              the module.  See how the copy module implements this for help.
        """
        self._connection.put_file(local_path, remote_path)
        return remote_path

    def _transfer_data(self, remote_path, data):
        '''
        Copies the module data out to the temporary module path.
        '''

        if isinstance(data, dict):
            data = jsonify(data)

        afd, afile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
        afo = os.fdopen(afd, 'wb')
        try:
            data = to_bytes(data, errors='surrogate_or_strict')
            afo.write(data)
        except Exception as e:
            raise AnsibleError("failure writing module data to temporary file for transfer: %s" % to_native(e))

        afo.flush()
        afo.close()

        try:
            self._transfer_file(afile, remote_path)
        finally:
            os.unlink(afile)

        return remote_path

    def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
        """
        We need the files we upload to be readable (and sometimes executable)
        by the user being sudo'd to but we want to limit other people's access
        (because the files could contain passwords or other private
        information).  We achieve this in one of these ways:

        * If no sudo is performed or the remote_user is sudo'ing to
          themselves, we don't have to change permissions.
        * If the remote_user sudo's to a privileged user (for instance, root),
          we don't have to change permissions.
        * If the remote_user sudo's to an unprivileged user then we attempt to
          grant the unprivileged user access via file system acls.
        * If granting file system acls fails we try to change the owner of the
          file with chown which only works in case the remote_user is
          privileged or the remote systems allows chown calls by unprivileged
          users (e.g. HP-UX)
        * If the above fails, we next try 'chmod +a' which is a macOS way of
          setting ACLs on files.
        * If the above fails, we check if ansible_common_remote_group is set.
          If it is, we attempt to chgrp the file to its value. This is useful
          if the remote_user has a group in common with the become_user. As the
          remote_user, we can chgrp the file to that group and allow the
          become_user to read it.
        * If (the chown fails AND ansible_common_remote_group is not set) OR
          (ansible_common_remote_group is set AND the chgrp (or following chmod)
          returned non-zero), we can set the file to be world readable so that
          the second unprivileged user can read the file.
          Since this could allow other users to get access to private
          information we only do this if ansible is configured with
          "allow_world_readable_tmpfiles" in the ansible.cfg. Also note that
          when ansible_common_remote_group is set this final fallback is very
          unlikely to ever be triggered, so long as chgrp was successful. But
          just because the chgrp was successful, does not mean Ansible can
          necessarily access the files (if, for example, the variable was set
          to a group that remote_user is in, and can chgrp to, but does not have
          in common with become_user).
        """
        if remote_user is None:
            remote_user = self._get_remote_user()

        # Step 1: Are we on windows?
        if getattr(self._connection._shell, "_IS_WINDOWS", False):
            # This won't work on Powershell as-is, so we'll just completely
            # skip until we have a need for it, at which point we'll have to do
            # something different.
            return remote_paths

        # Step 2: If we're not becoming an unprivileged user, we are roughly
        # done. Make the files +x if we're asked to, and return.
        if not self._is_become_unprivileged():
            if execute:
                # Can't depend on the file being transferred with execute permissions.
                # Only need user perms because no become was used here
                res = self._remote_chmod(remote_paths, 'u+x')
                if res['rc'] != 0:
                    raise AnsibleError(
                        'Failed to set execute bit on remote files '
                        '(rc: {0}, err: {1})'.format(
                            res['rc'],
                            to_native(res['stderr'])))
            return remote_paths

        # If we're still here, we have an unprivileged user that's different
        # than the ssh user.
        become_user = self.get_become_option('become_user')

        # Try to use file system acls to make the files readable for sudo'd
        # user
        if execute:
            chmod_mode = 'rx'
            setfacl_mode = 'r-x'
            # Apple patches their "file_cmds" chmod with ACL support
            chmod_acl_mode = '{0} allow read,execute'.format(become_user)
            # POSIX-draft ACL specification. Solaris, maybe others.
            # See chmod(1) on something Solaris-based for syntax details.
            posix_acl_mode = 'A+user:{0}:rx:allow'.format(become_user)
        else:
            chmod_mode = 'rX'
            # TODO: this form fails silently on freebsd.  We currently
            # never call _fixup_perms2() with execute=False but if we
            # start to we'll have to fix this.
            setfacl_mode = 'r-X'
            # Apple
            chmod_acl_mode = '{0} allow read'.format(become_user)
            # POSIX-draft
            posix_acl_mode = 'A+user:{0}:r:allow'.format(become_user)

        # Step 3a: Are we able to use setfacl to add user ACLs to the file?
        res = self._remote_set_user_facl(
            remote_paths,
            become_user,
            setfacl_mode)

        if res['rc'] == 0:
            return remote_paths

        # Step 3b: Set execute if we need to. We do this before anything else
        # because some of the methods below might work but not let us set +x
        # as part of them.
        if execute:
            res = self._remote_chmod(remote_paths, 'u+x')
            if res['rc'] != 0:
                raise AnsibleError(
                    'Failed to set file mode on remote temporary files '
                    '(rc: {0}, err: {1})'.format(
                        res['rc'],
                        to_native(res['stderr'])))

        # Step 3c: File system ACLs failed above; try falling back to chown.
        res = self._remote_chown(remote_paths, become_user)
        if res['rc'] == 0:
            return remote_paths

        # Check if we are an admin/root user. If we are and got here, it means
        # we failed to chown as root and something weird has happened.
        if remote_user in self._get_admin_users():
            raise AnsibleError(
                'Failed to change ownership of the temporary files Ansible '
                'needs to create despite connecting as a privileged user. '
                'Unprivileged become user would be unable to read the '
                'file.')

        # Step 3d: Try macOS's special chmod + ACL
        # macOS chmod's +a flag takes its own argument. As a slight hack, we
        # pass that argument as the first element of remote_paths. So we end
        # up running `chmod +a [that argument] [file 1] [file 2] ...`
        try:
            res = self._remote_chmod([chmod_acl_mode] + list(remote_paths), '+a')
        except AnsibleAuthenticationFailure as e:
            # Solaris-based chmod will return 5 when it sees an invalid mode,
            # and +a is invalid there. Because it returns 5, which is the same
            # thing sshpass returns on auth failure, our sshpass code will
            # assume that auth failed. If we don't handle that case here, none
            # of the other logic below will get run. This is fairly hacky and a
            # corner case, but probably one that shows up pretty often in
            # Solaris-based environments (and possibly others).
            pass
        else:
            if res['rc'] == 0:
                return remote_paths

        # Step 3e: Try Solaris/OpenSolaris/OpenIndiana-sans-setfacl chmod
        # Similar to macOS above, Solaris 11.4 drops setfacl and takes file ACLs
        # via chmod instead. OpenSolaris and illumos-based distros allow for
        # using either setfacl or chmod, and compatibility depends on filesystem.
        # It should be possible to debug this branch by installing OpenIndiana
        # (use ZFS) and going unpriv -> unpriv.
        res = self._remote_chmod(remote_paths, posix_acl_mode)
        if res['rc'] == 0:
            return remote_paths

        # we'll need this down here
        become_link = get_versioned_doclink('user_guide/become.html')

        # Step 3f: Common group
        # Otherwise, we're a normal user. We failed to chown the paths to the
        # unprivileged user, but if we have a common group with them, we should
        # be able to chown it to that.
        #
        # Note that we have no way of knowing if this will actually work... just
        # because chgrp exits successfully does not mean that Ansible will work.
        # We could check if the become user is in the group, but this would
        # create an extra round trip.
        #
        # Also note that due to the above, this can prevent the
        # ALLOW_WORLD_READABLE_TMPFILES logic below from ever getting called. We
        # leave this up to the user to rectify if they have both of these
        # features enabled.
        group = self.get_shell_option('common_remote_group')
        if group is not None:
            res = self._remote_chgrp(remote_paths, group)
            if res['rc'] == 0:
                # warn user that something might go weirdly here.
                if self.get_shell_option('world_readable_temp'):
                    display.warning(
                        'Both common_remote_group and '
                        'allow_world_readable_tmpfiles are set. chgrp was '
                        'successful, but there is no guarantee that Ansible '
                        'will be able to read the files after this operation, '
                        'particularly if common_remote_group was set to a '
                        'group of which the unprivileged become user is not a '
                        'member. In this situation, '
                        'allow_world_readable_tmpfiles is a no-op. See this '
                        'URL for more details: %s'
                        '#becoming-an-unprivileged-user' % become_link)
                if execute:
                    group_mode = 'g+rwx'
                else:
                    group_mode = 'g+rw'
                res = self._remote_chmod(remote_paths, group_mode)
                if res['rc'] == 0:
                    return remote_paths

        # Step 4: World-readable temp directory
        if self.get_shell_option('world_readable_temp'):
            # chown and fs acls failed -- do things this insecure way only if
            # the user opted in via the config file
            display.warning(
                'Using world-readable permissions for temporary files Ansible '
                'needs to create when becoming an unprivileged user. This may '
                'be insecure. For information on securing this, see %s'
                '#risks-of-becoming-an-unprivileged-user' % become_link)
            res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
            if res['rc'] == 0:
                return remote_paths
            raise AnsibleError(
                'Failed to set file mode on remote files '
                '(rc: {0}, err: {1})'.format(
                    res['rc'],
                    to_native(res['stderr'])))

        raise AnsibleError(
            'Failed to set permissions on the temporary files Ansible needs '
            'to create when becoming an unprivileged user '
            '(rc: %s, err: %s). For information on working around this, see %s'
            '#becoming-an-unprivileged-user' % (
                res['rc'],
                to_native(res['stderr']), become_link))

    def _remote_chmod(self, paths, mode, sudoable=False):
        '''
        Issue a remote chmod command
        '''
        cmd = self._connection._shell.chmod(paths, mode)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _remote_chown(self, paths, user, sudoable=False):
        '''
        Issue a remote chown command
        '''
        cmd = self._connection._shell.chown(paths, user)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _remote_chgrp(self, paths, group, sudoable=False):
        '''
        Issue a remote chgrp command
        '''
        cmd = self._connection._shell.chgrp(paths, group)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _remote_set_user_facl(self, paths, user, mode, sudoable=False):
        '''
        Issue a remote call to setfacl
        '''
        cmd = self._connection._shell.set_user_facl(paths, user, mode)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _execute_remote_stat(self, path, all_vars, follow, tmp=None, checksum=True):
        '''
        Get information from remote file.
        '''
        if tmp is not None:
            display.warning('_execute_remote_stat no longer honors the tmp parameter. Action'
                            ' plugins should set self._connection._shell.tmpdir to share'
                            ' the tmpdir')
        del tmp  # No longer used

        module_args = dict(
            path=path,
            follow=follow,
            get_checksum=checksum,
            checksum_algorithm='sha1',
        )
        mystat = self._execute_module(module_name='ansible.legacy.stat', module_args=module_args, task_vars=all_vars,
                                      wrap_async=False)

        if mystat.get('failed'):
            msg = mystat.get('module_stderr')
            if not msg:
                msg = mystat.get('module_stdout')
            if not msg:
                msg = mystat.get('msg')
            raise AnsibleError('Failed to get information on remote file (%s): %s' % (path, msg))

        if not mystat['stat']['exists']:
            # empty might be matched, 1 should never match, also backwards compatible
            mystat['stat']['checksum'] = '1'

        # happens sometimes when it is a dir and not on bsd
        if 'checksum' not in mystat['stat']:
            mystat['stat']['checksum'] = ''
        elif not isinstance(mystat['stat']['checksum'], string_types):
            raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))

        return mystat['stat']

    def _remote_checksum(self, path, all_vars, follow=False):
        '''
        Produces a remote checksum given a path.
        Returns a number 0-5 for specific errors instead of a checksum, and ensures those
        values cannot be confused with a real checksum:
        0 = unknown error
        1 = file does not exist, this might not be an error
        2 = permissions issue
        3 = it's a directory, not a file
        4 = stat module failed, likely due to not finding python
        5 = appropriate json module not found
        '''
        x = "0"  # unknown error has occurred
        try:
            remote_stat = self._execute_remote_stat(path, all_vars, follow=follow)
            if remote_stat['exists'] and remote_stat['isdir']:
                x = "3"  # it's a directory, not a file
            else:
                x = remote_stat['checksum']  # if 1, file is missing
        except AnsibleError as e:
            errormsg = to_text(e)
            if errormsg.endswith(u'Permission denied'):
                x = "2"  # cannot read file
            elif errormsg.endswith(u'MODULE FAILURE'):
                x = "4"  # python not found or module uncaught exception
            elif 'json' in errormsg:
                x = "5"  # json module needed
        finally:
            return x  # pylint: disable=lost-exception

    def _remote_expand_user(self, path, sudoable=True, pathsep=None):
        ''' takes a remote path and performs tilde/$HOME expansion on the remote host '''

        # We only expand ~/path and ~username/path
        if not path.startswith('~'):
            return path

        # Per Jborean, we don't have to worry about Windows as we don't have a notion of a user's
        # home dir there.
        split_path = path.split(os.path.sep, 1)
        expand_path = split_path[0]

        if expand_path == '~':
            # Network connection plugins (network_cli, netconf, etc.) execute on the controller, rather than the remote host.
            # As such, we want to avoid using remote_user for paths as remote_user may not line up with the local user
            # This is a hack and should be solved by more intelligent handling of remote_tmp in 2.7
            become_user = self.get_become_option('become_user')
            if getattr(self._connection, '_remote_is_local', False):
                pass
            elif sudoable and self._connection.become and become_user:
                expand_path = '~%s' % become_user
            else:
                # use the remote user instead; if none is set, default to the current user
                expand_path = '~%s' % (self._get_remote_user() or '')

        # use shell to construct appropriate command and execute
        cmd = self._connection._shell.expand_user(expand_path)
        data = self._low_level_execute_command(cmd, sudoable=False)

        try:
            initial_fragment = data['stdout'].strip().splitlines()[-1]
        except IndexError:
            initial_fragment = None

        if not initial_fragment:
            # Something went wrong trying to expand the path remotely. Try using pwd; if that
            # also fails, return the original string
            cmd = self._connection._shell.pwd()
            pwd = self._low_level_execute_command(cmd, sudoable=False).get('stdout', '').strip()
            if pwd:
                expanded = pwd
            else:
                expanded = path

        elif len(split_path) > 1:
            expanded = self._connection._shell.join_path(initial_fragment, *split_path[1:])
        else:
            expanded = initial_fragment

        if '..' in os.path.dirname(expanded).split('/'):
            raise AnsibleError("'%s' returned an invalid relative home directory path containing '..'" % self._play_context.remote_addr)

        return expanded

    def _strip_success_message(self, data):
        '''
        Removes the BECOME-SUCCESS message from the data.
        '''
        if data.strip().startswith('BECOME-SUCCESS-'):
            data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
        return data

    def _update_module_args(self, module_name, module_args, task_vars):

        # set check mode in the module arguments, if required
        if self._play_context.check_mode:
            if not self._supports_check_mode:
                raise AnsibleError("check mode is not supported for this operation")
            module_args['_ansible_check_mode'] = True
        else:
            module_args['_ansible_check_mode'] = False

        # set no log in the module arguments, if required
        no_target_syslog = C.config.get_config_value('DEFAULT_NO_TARGET_SYSLOG', variables=task_vars)
        module_args['_ansible_no_log'] = self._play_context.no_log or no_target_syslog

        # set debug in the module arguments, if required
        module_args['_ansible_debug'] = C.DEFAULT_DEBUG

        # let module know we are in diff mode
        module_args['_ansible_diff'] = self._play_context.diff

        # let module know our verbosity
        module_args['_ansible_verbosity'] = display.verbosity

        # give the module information about the ansible version
        module_args['_ansible_version'] = __version__

        # give the module information about its name
        module_args['_ansible_module_name'] = module_name

        # set the syslog facility to be used in the module
        module_args['_ansible_syslog_facility'] = task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY)

        # let module know about filesystems that selinux treats specially
        module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS

        # what to do when parameter values are converted to strings
        module_args['_ansible_string_conversion_action'] = C.STRING_CONVERSION_ACTION

        # give the module the socket for persistent connections
        module_args['_ansible_socket'] = getattr(self._connection, 'socket_path')
        if not module_args['_ansible_socket']:
            module_args['_ansible_socket'] = task_vars.get('ansible_socket')

        # make sure all commands use the designated shell executable
        module_args['_ansible_shell_executable'] = self._play_context.executable

        # make sure modules are aware if they need to keep the remote files
        module_args['_ansible_keep_remote_files'] = C.DEFAULT_KEEP_REMOTE_FILES

        # make sure all commands use the designated temporary directory if created
        if self._is_become_unprivileged():  # force fallback on remote_tmp as user cannot normally write to dir
            module_args['_ansible_tmpdir'] = None
        else:
            module_args['_ansible_tmpdir'] = self._connection._shell.tmpdir

        # make sure the remote_tmp value is sent through in case modules need to create their own
        module_args['_ansible_remote_tmp'] = self.get_shell_option('remote_tmp', default='~/.ansible/tmp')

    def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=None, wrap_async=False):
        '''
        Transfer and run a module along with its arguments.
        '''
        if tmp is not None:
            display.warning('_execute_module no longer honors the tmp parameter. Action plugins'
                            ' should set self._connection._shell.tmpdir to share the tmpdir')
        del tmp  # No longer used
        if delete_remote_tmp is not None:
            display.warning('_execute_module no longer honors the delete_remote_tmp parameter.'
                            ' Action plugins should check self._connection._shell.tmpdir to'
                            ' see if a tmpdir existed before they were called to determine'
                            ' if they are responsible for removing it.')
        del delete_remote_tmp  # No longer used

        tmpdir = self._connection._shell.tmpdir

        # We set the module_style to new here so the remote_tmp is created
        # before the module args are built if remote_tmp is needed (async).
        # If the module_style turns out to not be new and we didn't create the
        # remote tmp here, it will still be created. This must be done before
        # calling self._update_module_args() so the module wrapper has the
        # correct remote_tmp value set
        if not self._is_pipelining_enabled("new", wrap_async) and tmpdir is None:
            self._make_tmp_path()
            tmpdir = self._connection._shell.tmpdir

        if task_vars is None:
            task_vars = dict()

        # if a module name was not specified for this execution, use the action from the task
        if module_name is None:
            module_name = self._task.action
        if module_args is None:
            module_args = self._task.args

        self._update_module_args(module_name, module_args, task_vars)

        # FIXME: convert async_wrapper.py to not rely on environment variables
        # make sure we get the right async_dir variable, backwards compatibility
        # means we need to look up the env value ANSIBLE_ASYNC_DIR first
        remove_async_dir = None
        if wrap_async or self._task.async_val:
            env_async_dir = [e for e in self._task.environment if
                             "ANSIBLE_ASYNC_DIR" in e]
            if len(env_async_dir) > 0:
                msg = "Setting the async dir from the environment keyword " \
                      "ANSIBLE_ASYNC_DIR is deprecated. Set the async_dir " \
                      "shell option instead"
                self._display.deprecated(msg, "2.12", collection_name='ansible.builtin')
            else:
                # ANSIBLE_ASYNC_DIR is not set on the task, we get the value
                # from the shell option and temporarily add to the environment
                # list for async_wrapper to pick up
                async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")
                remove_async_dir = len(self._task.environment)
                self._task.environment.append({"ANSIBLE_ASYNC_DIR": async_dir})

        # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
        (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
        display.vvv("Using module file %s" % module_path)
        if not shebang and module_style != 'binary':
            raise AnsibleError("module (%s) is missing interpreter line" % module_name)

        self._used_interpreter = shebang
        remote_module_path = None

        if not self._is_pipelining_enabled(module_style, wrap_async):
            # we might need remote tmp dir
            if tmpdir is None:
                self._make_tmp_path()
                tmpdir = self._connection._shell.tmpdir

            remote_module_filename = self._connection._shell.get_remote_filename(module_path)
            remote_module_path = self._connection._shell.join_path(tmpdir, 'AnsiballZ_%s' % remote_module_filename)

        args_file_path = None
        if module_style in ('old', 'non_native_want_json', 'binary'):
            # we'll also need a tmp file to hold our module arguments
            args_file_path = self._connection._shell.join_path(tmpdir, 'args')

        if remote_module_path or module_style != 'new':
            display.debug("transferring module to remote %s" % remote_module_path)
            if module_style == 'binary':
                self._transfer_file(module_path, remote_module_path)
            else:
                self._transfer_data(remote_module_path, module_data)
            if module_style == 'old':
                # we need to dump the module args to a k=v string in a file on
                # the remote system, which can be read and parsed by the module
                args_data = ""
                for k, v in iteritems(module_args):
                    args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
                self._transfer_data(args_file_path, args_data)
            elif module_style in ('non_native_want_json', 'binary'):
                self._transfer_data(args_file_path, json.dumps(module_args))
            display.debug("done transferring module to remote")

        environment_string = self._compute_environment_string()

        # remove the ANSIBLE_ASYNC_DIR env entry if we added a temporary one for
        # the async_wrapper task - this is so the async_status plugin doesn't
        # fire a deprecation warning when it runs after this task
        if remove_async_dir is not None:
            del self._task.environment[remove_async_dir]

        remote_files = []
        if tmpdir and remote_module_path:
            remote_files = [tmpdir, remote_module_path]

        if args_file_path:
            remote_files.append(args_file_path)

        sudoable = True
        in_data = None
        cmd = ""

        if wrap_async and not self._connection.always_pipeline_modules:
            # configure, upload, and chmod the async_wrapper module
            (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(
                module_name='ansible.legacy.async_wrapper', module_args=dict(), task_vars=task_vars)
            async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
            remote_async_module_path = self._connection._shell.join_path(tmpdir, async_module_remote_filename)
            self._transfer_data(remote_async_module_path, async_module_data)
            remote_files.append(remote_async_module_path)

            async_limit = self._task.async_val
            async_jid = str(random.randint(0, 999999999999))

            # call the interpreter for async_wrapper directly
            # this permits use of a script for an interpreter on non-Linux platforms
            # TODO: re-implement async_wrapper as a regular module to avoid this special case
            interpreter = shebang.replace('#!', '').strip()
            async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]

            if environment_string:
                async_cmd.insert(0, environment_string)

            if args_file_path:
                async_cmd.append(args_file_path)
            else:
                # maintain a fixed number of positional parameters for async_wrapper
                async_cmd.append('_')

            if not self._should_remove_tmp_path(tmpdir):
                async_cmd.append("-preserve_tmp")

            cmd = " ".join(to_text(x) for x in async_cmd)

        else:

            if self._is_pipelining_enabled(module_style):
                in_data = module_data
                display.vvv("Pipelining is enabled.")
            else:
                cmd = remote_module_path

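            # let the shell plugin assemble the final command line from the environment prefix,
            # the interpreter (shebang) and the module/args paths (cmd is empty when pipelining)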
            cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()

        # Fix permissions of the tmpdir path and tmpdir files. This should be called after all
        # files have been transferred.
        if remote_files:
            # remove None/empty entries
            remote_files = [x for x in remote_files if x]
            self._fixup_perms2(remote_files, self._get_remote_user())

        # actually execute
        res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)

        # parse the main result
        data = self._parse_returned_data(res)

        # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
        # get internal info before cleaning
        if data.pop("_ansible_suppress_tmpdir_delete", False):
            self._cleanup_remote_tmp = False

        # NOTE: yum returns its own 'results' key .. that historically made it 'compatible' with squashing, so we allow mappings for now
        if 'results' in data and (not isinstance(data['results'], Sequence) or isinstance(data['results'], string_types)):
            data['ansible_module_results'] = data['results']
            del data['results']
            display.warning("Found internal 'results' key in module return, renamed to 'ansible_module_results'.")

        # remove internal keys
        remove_internal_keys(data)

        if wrap_async:
            # async_wrapper will clean up its tmpdir on its own so we want the controller side to
            # forget about it now
            self._connection._shell.tmpdir = None

            # FIXME: kept for backwards compat; figure out if this still makes sense
            data['changed'] = True

        # pre-split stdout/stderr into lines if needed
        if 'stdout' in data and 'stdout_lines' not in data:
            # a .get() default won't help if the value itself is falsy (e.g. None), hence the 'or'
            txt = data.get('stdout', None) or u''
            data['stdout_lines'] = txt.splitlines()
        if 'stderr' in data and 'stderr_lines' not in data:
            # a .get() default won't help if the value itself is falsy (e.g. None), hence the 'or'
            txt = data.get('stderr', None) or u''
            data['stderr_lines'] = txt.splitlines()

        # propagate interpreter discovery results back to the controller
        if self._discovered_interpreter_key:
            if data.get('ansible_facts') is None:
                data['ansible_facts'] = {}

            data['ansible_facts'][self._discovered_interpreter_key] = self._discovered_interpreter

        if self._discovery_warnings:
            if data.get('warnings') is None:
                data['warnings'] = []
            data['warnings'].extend(self._discovery_warnings)

        if self._discovery_deprecation_warnings:
            if data.get('deprecations') is None:
                data['deprecations'] = []
            data['deprecations'].extend(self._discovery_deprecation_warnings)

        # mark the entire module results untrusted as a template right here, since the current action could
        # possibly template one of these values.
        data = wrap_var(data)

        display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
        return data

    def _parse_returned_data(self, res):
        try:
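            # _filter_non_json_lines drops any leading/trailing noise around the JSON object,
            # e.g. a stdout of 'sudo: unable to resolve host myhost\n{"changed": false}' (illustrative)
            # is reduced to just the JSON payload before parsing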
            filtered_output, warnings = _filter_non_json_lines(res.get('stdout', u''), objects_only=True)
            for w in warnings:
                display.warning(w)

            data = json.loads(filtered_output)
            data['_ansible_parsed'] = True
        except ValueError:
            # not valid JSON, let's try to capture the error
            data = dict(failed=True, _ansible_parsed=False)
            data['module_stdout'] = res.get('stdout', u'')
            if 'stderr' in res:
                data['module_stderr'] = res['stderr']
                if res['stderr'].startswith(u'Traceback'):
                    data['exception'] = res['stderr']

            # in some cases a traceback will arrive on stdout instead of stderr, such as when using ssh with -tt
            if 'exception' not in data and data['module_stdout'].startswith(u'Traceback'):
                data['exception'] = data['module_stdout']

            # the default failure message
            data['msg'] = "MODULE FAILURE"

            # try to figure out if we are missing the interpreter
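            # e.g. output such as '/usr/bin/python3: No such file or directory' (hypothetical path)
            # is a strong hint that the configured interpreter does not exist on the target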
            if self._used_interpreter is not None:
                match = re.compile('%s: (?:No such file or directory|not found)' % re.escape(self._used_interpreter.lstrip('!#')))
                if match.search(data['module_stderr']) or match.search(data['module_stdout']):
                    data['msg'] = "The module failed to execute correctly, you probably need to set the interpreter."

            # always append hint
            data['msg'] += '\nSee stdout/stderr for the exact error'

            if 'rc' in res:
                data['rc'] = res['rc']
        return data

    # FIXME: move to connection base
    def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_then_replace', chdir=None):
        '''
        This is the function which executes the low level shell command, which
        may be commands to create/remove directories for temporary files, or to
        run the module code or python directly when pipelining.

        :kwarg encoding_errors: If the value returned by the command isn't
            utf-8 then we have to figure out how to transform it to unicode.
            If the value is only going to be displayed to the user (or
            discarded) then the default of 'surrogate_then_replace' is fine.
            If the data is used as a key or is going to be written back out to
            a file verbatim, then this won't work and some other replacement
            strategy is needed (python3 could use surrogateescape).
        :kwarg chdir: cd into this directory before executing the command.
        '''

        display.debug("_low_level_execute_command(): starting")
        # if not cmd:
        #     # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
        #     display.debug("_low_level_execute_command(): no command, exiting")
        #     return dict(stdout='', stderr='', rc=254)

        if chdir:
            display.debug("_low_level_execute_command(): changing cwd to %s for this command" % chdir)
            cmd = self._connection._shell.append_command('cd %s' % chdir, cmd)

        # https://github.com/ansible/ansible/issues/68054
        if executable:
            self._connection._shell.executable = executable

        ruser = self._get_remote_user()
        buser = self.get_become_option('become_user')
        if (sudoable and self._connection.become and  # if sudoable and have become
                resource_from_fqcr(self._connection.transport) != 'network_cli' and  # if not using network_cli
                (C.BECOME_ALLOW_SAME_USER or (buser != ruser or not any((ruser, buser))))):  # if we allow same user PE or users are different and either is set
            display.debug("_low_level_execute_command(): using become for this command")
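            # for the default sudo become method this wraps cmd roughly like (illustrative;
            # exact flags depend on the become configuration):
            #   sudo -H -S -n -u root /bin/sh -c 'echo BECOME-SUCCESS-<random>; <cmd>'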
            cmd = self._connection.become.build_become_command(cmd, self._connection._shell)

        if self._connection.allow_executable:
            if executable is None:
                executable = self._play_context.executable
                # mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
                # only applied for the default executable to avoid interfering with the raw action
                cmd = self._connection._shell.append_command(cmd, 'sleep 0')
            if executable:
                cmd = executable + ' -c ' + shlex_quote(cmd)

        display.debug("_low_level_execute_command(): executing: %s" % (cmd,))

        # Change directory to basedir of task for command execution when connection is local
        if self._connection.transport == 'local':
            self._connection.cwd = to_bytes(self._loader.get_basedir(), errors='surrogate_or_strict')

        rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)

        # stdout and stderr may be either a file-like or a bytes object.
        # Convert either one to a text type
        if isinstance(stdout, binary_type):
            out = to_text(stdout, errors=encoding_errors)
        elif not isinstance(stdout, text_type):
            out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
        else:
            out = stdout

        if isinstance(stderr, binary_type):
            err = to_text(stderr, errors=encoding_errors)
        elif not isinstance(stderr, text_type):
            err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
        else:
            err = stderr

        if rc is None:
            rc = 0

        # be sure to remove the BECOME-SUCCESS message now
        out = self._strip_success_message(out)

        display.debug(u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, out, err))
        return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err, stderr_lines=err.splitlines())

    def _get_diff_data(self, destination, source, task_vars, source_file=True):

        # Note: Since we do not diff the source and destination before we transform from bytes into
        # text, the diff between source and destination may not be accurate.  To fix this, we'd need
        # to move the diffing from the callback plugins into here.
        #
        # Example of data which would cause trouble is src_content == b'\xff' and dest_content ==
        # b'\xfe'.  Neither of those are valid utf-8 so both get turned into the replacement
        # character: diff['before'] = u'�' ; diff['after'] = u'�'  When the callback plugin later
        # diffs before and after it shows an empty diff.

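        # the returned diff dict may contain keys such as 'before'/'after', 'before_header'/'after_header',
        # 'src_binary'/'dst_binary' and 'src_larger'/'dst_larger', depending on what is detected below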
        diff = {}
        display.debug("Going to peek to see if file has changed permissions")
        peek_result = self._execute_module(
            module_name='ansible.legacy.file', module_args=dict(path=destination, _diff_peek=True),
            task_vars=task_vars, persist_files=True)

        if peek_result.get('failed', False):
            display.warning(u"Failed to get diff between '%s' and '%s': %s" % (os.path.basename(source), destination, to_text(peek_result.get(u'msg', u''))))
            return diff

        if peek_result.get('rc', 0) == 0:

            if peek_result.get('state') in (None, 'absent'):
                diff['before'] = u''
            elif peek_result.get('appears_binary'):
                diff['dst_binary'] = 1
            elif peek_result.get('size') and C.MAX_FILE_SIZE_FOR_DIFF > 0 and peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
                diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
            else:
                display.debug(u"Slurping the file %s" % source)
                dest_result = self._execute_module(
                    module_name='ansible.legacy.slurp', module_args=dict(path=destination),
                    task_vars=task_vars, persist_files=True)
                if 'content' in dest_result:
                    dest_contents = dest_result['content']
                    if dest_result['encoding'] == u'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise AnsibleError("unknown encoding in content option, failed: %s" % to_native(dest_result))
                    diff['before_header'] = destination
                    diff['before'] = to_text(dest_contents)

            if source_file:
                st = os.stat(source)
                if C.MAX_FILE_SIZE_FOR_DIFF > 0 and st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
                    diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
                else:
                    display.debug("Reading local copy of the file %s" % source)
                    try:
                        with open(source, 'rb') as src:
                            src_contents = src.read()
                    except Exception as e:
                        raise AnsibleError("Unexpected error while reading source (%s) for diff: %s " % (source, to_native(e)))

                    if b"\x00" in src_contents:
                        diff['src_binary'] = 1
                    else:
                        diff['after_header'] = source
                        diff['after'] = to_text(src_contents)
            else:
                display.debug(u"source of file passed in")
                diff['after_header'] = u'dynamically generated'
                diff['after'] = source

        if self._play_context.no_log:
            if 'before' in diff:
                diff["before"] = u""
            if 'after' in diff:
                diff["after"] = u" [[ Diff output has been hidden because 'no_log: true' was specified for this result ]]\n"

        return diff

    def _find_needle(self, dirname, needle):
        '''
            find a needle in a haystack of paths, optionally using 'dirname' as a subdir.
            This will build the ordered list of paths to search and pass them to dwim
            to get back the first existing file found.
        '''
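        # e.g. _find_needle('files', 'foo.conf') (hypothetical arguments) would typically check each
        # entry of the task's search path for files/foo.conf and then foo.conf itself, returning the
        # first existing match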

        # dwim already deals with playbook basedirs
        path_stack = self._task.get_search_path()

        # if the file is missing this will raise an AnsibleFileNotFound exception
        return self._loader.path_dwim_relative_stack(path_stack, dirname, needle)
