# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
    cache: mongodb
    name: mongodb
    author: Matt Martz
    version_added: "1.0.0"
    short_description: Use MongoDB for caching
    description:
        - This cache uses per-host records saved in MongoDB.
    requirements:
      - pymongo>=3
    options:
      _uri:
        description:
          - MongoDB Connection String URI
        required: False
        env:
          - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
        ini:
          - key: fact_caching_connection
            section: defaults
      _prefix:
        description: User-defined prefix to use when creating the DB entries
        default: ansible_facts
        env:
          - name: ANSIBLE_CACHE_PLUGIN_PREFIX
        ini:
          - key: fact_caching_prefix
            section: defaults
      _timeout:
        default: 86400
        description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire.
        env:
          - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
        ini:
          - key: fact_caching_timeout
            section: defaults
        type: integer
'''
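# A minimal ansible.cfg sketch for enabling this plugin. This is an example, not
# part of the plugin itself: it assumes the plugin is loadable under the name
# ``mongodb`` and uses a placeholder connection URI; the ini keys mirror the
# options documented above.
#
#   [defaults]
#   fact_caching = mongodb
#   fact_caching_connection = mongodb://localhost:27017/ansible
#   fact_caching_prefix = ansible_facts
#   fact_caching_timeout = 86400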

import datetime

from contextlib import contextmanager

from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.cache import BaseCacheModule
from ansible.utils.display import Display
from ansible.module_utils._text import to_native

pymongo_missing = False

try:
    import pymongo
except ImportError:
    pymongo_missing = True

display = Display()


class CacheModule(BaseCacheModule):
    """
    A caching module backed by MongoDB.
    """
    def __init__(self, *args, **kwargs):
        try:
            if pymongo_missing:
                raise AnsibleError("The 'pymongo' python module is required for the mongodb fact cache; install it with 'pip install pymongo>=3.0'")
            super(CacheModule, self).__init__(*args, **kwargs)
            self._connection = self.get_option('_uri')
            self._timeout = int(self.get_option('_timeout'))
            self._prefix = self.get_option('_prefix')
        except KeyError:
            # Fall back to the global cache constants when the plugin options are unavailable
            self._connection = C.CACHE_PLUGIN_CONNECTION
            self._timeout = int(C.CACHE_PLUGIN_TIMEOUT)
            self._prefix = C.CACHE_PLUGIN_PREFIX

        self._cache = {}
        self._managed_indexes = False

    def _ttl_index_exists(self, collection):
        '''
        Returns True if an index named ``ttl`` exists
        on the given collection.
        '''
        exists = False
        try:
            indexes = collection.list_indexes()
            for index in indexes:
                if index["name"] == "ttl":
                    exists = True
                    break
        except pymongo.errors.OperationFailure as excep:
            raise AnsibleError('Error checking MongoDB index: %s' % to_native(excep))
        return exists

    def _manage_indexes(self, collection):
        '''
        This function manages indexes on the mongo collection.
        We only do this once per run, tracked via ``_managed_indexes``,
        rather than on every connection instantiation, as that would be overkill.
        '''
        _timeout = self._timeout
        if _timeout and _timeout > 0:
            try:
                collection.create_index(
                    'date',
                    name='ttl',
                    expireAfterSeconds=_timeout
                )
            except pymongo.errors.OperationFailure:
                # We end up here when fact_caching_timeout was changed between runs;
                # drop the stale TTL index and recreate it with the new timeout
                if self._ttl_index_exists(collection):
                    collection.drop_index('ttl')
                return self._manage_indexes(collection)
        else:
            if self._ttl_index_exists(collection):
                collection.drop_index('ttl')

    @contextmanager
    def _collection(self):
        '''
        This is a context manager for opening and closing mongo connections as needed. It exists so as not to create a global
        connection, since pymongo is not fork safe (http://api.mongodb.com/python/current/faq.html#is-pymongo-fork-safe)
        '''
        mongo = pymongo.MongoClient(self._connection)
        try:
            db = mongo.get_default_database()
        except pymongo.errors.ConfigurationError:
            # We'll fall back to using ``ansible`` as the database if one was not provided
            # in the MongoDB Connection String URI
            db = mongo['ansible']

        # The collection is hard coded as ``cache``; there are no configuration options for this
        collection = db['cache']
        if not self._managed_indexes:
            # Only manage the indexes once per run, not per connection
            self._manage_indexes(collection)
            self._managed_indexes = True

        try:
            yield collection
        finally:
            # Close the client even if the caller raises inside the ``with`` block
            mongo.close()

    def _make_key(self, key):
        # Keys are stored as the prefix concatenated directly with the key, with no separator
        return '%s%s' % (self._prefix, key)

    def get(self, key):
        if key not in self._cache:
            with self._collection() as collection:
                value = collection.find_one({'_id': self._make_key(key)})
            if value is None:
                # No record for this host; raise KeyError like other cache backends do for a missing key
                raise KeyError(key)
            self._cache[key] = value['data']

        return self._cache.get(key)

    def set(self, key, value):
        self._cache[key] = value
        with self._collection() as collection:
            collection.update_one(
                {'_id': self._make_key(key)},
                {
                    '$set': {
                        '_id': self._make_key(key),
                        'data': value,
                        'date': datetime.datetime.utcnow()
                    }
                },
                upsert=True
            )
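            # Sketch of the document written above (mirrors the ``$set`` fields):
            #   {'_id': '<prefix><key>', 'data': <value>, 'date': <UTC datetime>}
            # The ``date`` field is what the optional ``ttl`` index expires on.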

    def keys(self):
        with self._collection() as collection:
            return [doc['_id'] for doc in collection.find({}, {'_id': True})]

    def contains(self, key):
        with self._collection() as collection:
            # count_documents requires pymongo>=3.7; Collection.count was
            # deprecated there and removed in pymongo 4
            return bool(collection.count_documents({'_id': self._make_key(key)}))

    def delete(self, key):
        # Drop the key from the in-memory cache if present, then remove it from MongoDB
        self._cache.pop(key, None)
        with self._collection() as collection:
            collection.delete_one({'_id': self._make_key(key)})

    def flush(self):
        with self._collection() as collection:
            collection.delete_many({})

    def copy(self):
        with self._collection() as collection:
            return dict((d['_id'], d['data']) for d in collection.find({}))

    def __getstate__(self):
        return dict()

    def __setstate__(self, data):
        self.__init__()