1#!/usr/bin/python
2#
3# Copyright: Ansible Project
4# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
5
6from __future__ import absolute_import, division, print_function
7__metaclass__ = type
8
9
10ANSIBLE_METADATA = {'metadata_version': '1.1',
11                    'status': ['preview'],
12                    'supported_by': 'community'}
13
14
15DOCUMENTATION = '''
16---
17module: gc_storage
18version_added: "1.4"
19short_description: This module manages objects/buckets in Google Cloud Storage.
20description:
21    - This module allows users to manage their objects/buckets in Google Cloud Storage.  It allows upload and download operations and can set some
22      canned permissions. It also allows retrieval of URLs for objects for use in playbooks, and retrieval of string contents of objects.  This module
23      requires setting the default project in GCS prior to playbook usage.  See U(https://developers.google.com/storage/docs/reference/v1/apiversion1) for
24      information about setting the default project.
25
26options:
27  bucket:
28    description:
29      - Bucket name.
30    required: true
31  object:
32    description:
33      - Keyname of the object inside the bucket. Can be also be used to create "virtual directories" (see examples).
34  src:
35    description:
36      - The source file path when performing a PUT operation.
37  dest:
38    description:
39      - The destination file path when downloading an object/key with a GET operation.
40  force:
41    description:
42      - Forces an overwrite either locally on the filesystem or remotely with the object/key. Used with PUT and GET operations.
43    type: bool
44    default: 'yes'
45    aliases: [ 'overwrite' ]
46  permission:
47    description:
      - This option lets the user set the canned permissions on the object/bucket that are created. The permissions that can be set are 'private',
49        'public-read', 'authenticated-read'.
50    default: private
51  headers:
52    version_added: "2.0"
53    description:
54      - Headers to attach to object.
55    default: {}
56  expiration:
57    description:
      - Time limit (in seconds) for the URL generated and returned by GCS when performing a mode=put or mode=get_url operation. This URL is only
        available when public-read is the acl for the object.
60  mode:
61    description:
62      - Switches the module behaviour between upload, download, get_url (return download url) , get_str (download object as string), create (bucket) and
63        delete (bucket).
64    required: true
65    choices: [ 'get', 'put', 'get_url', 'get_str', 'delete', 'create' ]
66  gs_secret_key:
67    description:
68      - GS secret key. If not set then the value of the GS_SECRET_ACCESS_KEY environment variable is used.
69    required: true
70  gs_access_key:
71    description:
72      - GS access key. If not set then the value of the GS_ACCESS_KEY_ID environment variable is used.
73    required: true
74  region:
75    version_added: "2.4"
76    description:
77      - The gs region to use. If not defined then the value 'US' will be used. See U(https://cloud.google.com/storage/docs/bucket-locations)
78    default: 'US'
79  versioning:
80    version_added: "2.4"
81    description:
82      - Whether versioning is enabled or disabled (note that once versioning is enabled, it can only be suspended)
83    type: bool
84
85requirements:
86    - "python >= 2.6"
87    - "boto >= 2.9"
88
89author:
90- Benno Joy (@bennojoy)
91- Lukas Beumer (@Nitaco)
92
93'''
94
95EXAMPLES = '''
96- name: Upload some content
97  gc_storage:
98    bucket: mybucket
99    object: key.txt
100    src: /usr/local/myfile.txt
101    mode: put
102    permission: public-read
103
104- name: Upload some headers
105  gc_storage:
106    bucket: mybucket
107    object: key.txt
108    src: /usr/local/myfile.txt
109    headers: '{"Content-Encoding": "gzip"}'
110
111- name: Download some content
112  gc_storage:
113    bucket: mybucket
114    object: key.txt
115    dest: /usr/local/myfile.txt
116    mode: get
117
118- name: Download an object as a string to use else where in your playbook
119  gc_storage:
120    bucket: mybucket
121    object: key.txt
122    mode: get_str
123
124- name: Create an empty bucket
125  gc_storage:
126    bucket: mybucket
127    mode: create
128
129- name: Create a bucket with key as directory
130  gc_storage:
131    bucket: mybucket
132    object: /my/directory/path
133    mode: create
134
135- name: Delete a bucket and all contents
136  gc_storage:
137    bucket: mybucket
138    mode: delete
139
140- name: Create a bucket with versioning enabled
141  gc_storage:
142    bucket: "mybucket"
143    versioning: yes
144    mode: create
145
146- name: Create a bucket located in the eu
147  gc_storage:
148    bucket: "mybucket"
149    region: "europe-west3"
150    mode: create
151
152'''
153
154import os
155
156try:
157    import boto
158    HAS_BOTO = True
159except ImportError:
160    HAS_BOTO = False
161
162from ansible.module_utils.basic import AnsibleModule
163
164
def grant_check(module, gs, obj):
    """Ensure the requested canned ACL is present on *obj*.

    If the module's ``permission`` parameter is 'public-read' or
    'authenticated-read' and the object's current ACL lacks the matching
    grant (AllUsers / AllAuthenticatedUsers scope), the canned ACL is
    applied and the module exits reporting ``changed=True``.

    :param module: AnsibleModule instance (reads ``permission`` param).
    :param gs: boto GS connection (used only for its exception class).
    :param obj: boto bucket or key object exposing get_acl()/set_acl().
    :return: True when no ACL change was needed.
    """
    try:
        acp = obj.get_acl()
        permission = module.params.get('permission')
        if permission == 'public-read':
            grant = [x for x in acp.entries.entry_list if x.scope.type == 'AllUsers']
            if not grant:
                obj.set_acl('public-read')
                # Fixed message grammar ("objects ... as been" -> "object's ... has been").
                module.exit_json(changed=True, result="The object's permission has been set to public-read")
        if permission == 'authenticated-read':
            grant = [x for x in acp.entries.entry_list if x.scope.type == 'AllAuthenticatedUsers']
            if not grant:
                obj.set_acl('authenticated-read')
                module.exit_json(changed=True, result="The object's permission has been set to authenticated-read")
    except gs.provider.storage_response_error as e:
        module.fail_json(msg=str(e))
    return True
181
182
def key_check(module, gs, bucket, obj):
    """Return True if key *obj* exists in *bucket* (also verifying its ACL).

    Fails the module on a storage error; returns False when the key is absent.
    """
    try:
        found = gs.lookup(bucket).get_key(obj)
    except gs.provider.storage_response_error as e:
        module.fail_json(msg=str(e))
    if not found:
        return False
    # Key exists: make sure the requested canned ACL is in place.
    grant_check(module, gs, found)
    return True
194
195
def keysum(module, gs, bucket, obj):
    """Return the remote object's etag (an MD5 hex digest) or None if absent.

    Multipart uploads are rejected: their etag is not a plain MD5 digest,
    so no checksum comparison is possible.
    """
    remote_key = gs.lookup(bucket).get_key(obj)
    if not remote_key:
        return None
    # etag is wrapped in double quotes; strip exactly one from each end.
    md5_remote = remote_key.etag[1:-1]
    if '-' in md5_remote:
        module.fail_json(msg="Files uploaded with multipart of gs are not supported with checksum, unable to compute checksum.")
    return md5_remote
206
207
def bucket_check(module, gs, bucket):
    """Return True if *bucket* exists (also verifying its ACL), else False.

    Fails the module on a storage error.
    """
    try:
        found = gs.lookup(bucket)
    except gs.provider.storage_response_error as e:
        module.fail_json(msg=str(e))
    if not found:
        return False
    # Bucket exists: make sure the requested canned ACL is in place.
    grant_check(module, gs, found)
    return True
218
219
def create_bucket(module, gs, bucket):
    """Create *bucket* with the module's ACL, region and versioning settings.

    Returns True on success; a storage error fails the module.
    """
    params = module.params
    try:
        new_bucket = gs.create_bucket(bucket, transform_headers(params.get('headers')), params.get('region'))
        new_bucket.set_acl(params.get('permission'))
        new_bucket.configure_versioning(params.get('versioning'))
    except gs.provider.storage_response_error as e:
        module.fail_json(msg=str(e))
    if new_bucket:
        return True
229
230
def delete_bucket(module, gs, bucket):
    """Delete *bucket* and every key it contains; return True on success.

    A storage error fails the module instead of raising.
    """
    try:
        doomed = gs.lookup(bucket)
        # GCS refuses to delete a non-empty bucket, so empty it first.
        for key in doomed.list():
            doomed.delete_key(key.name)
        doomed.delete()
        return True
    except gs.provider.storage_response_error as e:
        module.fail_json(msg=str(e))
241
242
def delete_key(module, gs, bucket, obj):
    """Remove key *obj* from *bucket* and exit the module reporting a change."""
    try:
        gs.lookup(bucket).delete_key(obj)
        module.exit_json(msg="Object deleted from bucket ", changed=True)
    except gs.provider.storage_response_error as e:
        module.fail_json(msg=str(e))
250
251
def create_dirkey(module, gs, bucket, obj):
    """Create an empty key *obj* (a "virtual directory") and exit the module."""
    try:
        target = gs.lookup(bucket)
        # A zero-byte object is how GCS represents a directory placeholder.
        target.new_key(obj).set_contents_from_string('')
        module.exit_json(msg="Virtual directory %s created in bucket %s" % (obj, target.name), changed=True)
    except gs.provider.storage_response_error as e:
        module.fail_json(msg=str(e))
260
261
def path_check(path):
    """Return True if *path* exists on the local filesystem."""
    return os.path.exists(path)
267
268
def transform_headers(headers):
    """Return a copy of *headers* with every value coerced to `str`.

    Boto url-encodes values unless the value is a `str`, so doing this
    prevents 'max-age=100000' from being converted to "max-age%3D100000".

    Unlike the previous in-place loop, the input dict is left untouched,
    so the caller's ``module.params['headers']`` is not mutated.

    :param headers: Headers to convert
    :type  headers: dict
    :rtype: dict
    """
    return dict((key, str(value)) for key, value in headers.items())
283
284
def upload_gsfile(module, gs, bucket, obj, src, expiry):
    """Upload local file *src* as *bucket*/*obj*, set its ACL, and exit
    the module with a signed URL valid for *expiry* seconds."""
    try:
        dest_bucket = gs.lookup(bucket)
        key = dest_bucket.new_key(obj)
        key.set_contents_from_filename(
            filename=src,
            headers=transform_headers(module.params.get('headers')),
        )
        key.set_acl(module.params.get('permission'))
        module.exit_json(msg="PUT operation complete", url=key.generate_url(expiry), changed=True)
    except gs.provider.storage_copy_error as e:
        module.fail_json(msg=str(e))
298
299
def download_gsfile(module, gs, bucket, obj, dest):
    """Fetch *obj* from *bucket* into local file *dest* and exit the module."""
    try:
        gs.lookup(bucket).lookup(obj).get_contents_to_filename(dest)
        module.exit_json(msg="GET operation complete", changed=True)
    except gs.provider.storage_copy_error as e:
        module.fail_json(msg=str(e))
308
309
def download_gsstr(module, gs, bucket, obj):
    """Exit the module with the string contents of *bucket*/*obj*."""
    try:
        payload = gs.lookup(bucket).lookup(obj).get_contents_as_string()
        module.exit_json(msg="GET operation complete", contents=payload, changed=True)
    except gs.provider.storage_copy_error as e:
        module.fail_json(msg=str(e))
318
319
def get_download_url(module, gs, bucket, obj, expiry):
    """Exit the module with a signed download URL for *bucket*/*obj*
    valid for *expiry* seconds."""
    try:
        signed = gs.lookup(bucket).lookup(obj).generate_url(expiry)
        module.exit_json(msg="Download url:", url=signed, expiration=expiry, changed=True)
    except gs.provider.storage_response_error as e:
        module.fail_json(msg=str(e))
328
329
def handle_get(module, gs, bucket, obj, overwrite, dest):
    """Download *obj* to *dest* unless the local copy already matches the remote."""
    remote_sum = keysum(module, gs, bucket, obj)
    local_sum = module.md5(dest)
    if local_sum == remote_sum:
        # Local copy is identical; nothing to do.
        module.exit_json(changed=False)
    if local_sum != remote_sum and not overwrite:
        # Refuse to clobber a differing local file unless the user forces it.
        module.exit_json(msg="WARNING: Checksums do not match. Use overwrite parameter to force download.", failed=True)
    else:
        download_gsfile(module, gs, bucket, obj, dest)
339
340
def handle_put(module, gs, bucket, obj, overwrite, src, expiration):
    """Upload *src* as *bucket*/*obj*, creating the bucket when necessary.

    When the key already exists its checksum is compared with the local
    file; identical content exits unchanged, differing content uploads
    only if *overwrite* is set.
    """
    bucket_exists = bucket_check(module, gs, bucket)
    key_exists = key_check(module, gs, bucket, obj)

    if bucket_exists and key_exists:
        # Key already present: compare checksums before re-uploading.
        remote_sum = keysum(module, gs, bucket, obj)
        local_sum = module.md5(src)
        if local_sum == remote_sum:
            module.exit_json(msg="Local and remote object are identical", changed=False)
        if local_sum != remote_sum and not overwrite:
            module.exit_json(msg="WARNING: Checksums do not match. Use overwrite parameter to force upload.", failed=True)
        else:
            upload_gsfile(module, gs, bucket, obj, src, expiration)

    if not bucket_exists:
        # No bucket yet: create it first, then upload.
        create_bucket(module, gs, bucket)
        upload_gsfile(module, gs, bucket, obj, src, expiration)

    if bucket_exists and not key_exists:
        # Bucket exists but the key does not: plain upload.
        upload_gsfile(module, gs, bucket, obj, src, expiration)
364
365
def handle_delete(module, gs, bucket, obj):
    """Delete a whole bucket (no *obj* given) or a single object (*obj* given)."""
    if bucket and not obj:
        if bucket_check(module, gs, bucket):
            module.exit_json(msg="Bucket %s and all keys have been deleted." % bucket, changed=delete_bucket(module, gs, bucket))
        else:
            module.exit_json(msg="Bucket does not exist.", changed=False)
    if bucket and obj:
        if not bucket_check(module, gs, bucket):
            module.exit_json(msg="Bucket does not exist.", changed=False)
        elif key_check(module, gs, bucket, obj):
            module.exit_json(msg="Object has been deleted.", changed=delete_key(module, gs, bucket, obj))
        else:
            module.exit_json(msg="Object does not exist.", changed=False)
    else:
        module.fail_json(msg="Bucket or Bucket & object  parameter is required.", failed=True)
382
383
def handle_create(module, gs, bucket, obj):
    """Create a bucket, and optionally a "virtual directory" key inside it."""
    if bucket and not obj:
        if bucket_check(module, gs, bucket):
            module.exit_json(msg="Bucket already exists.", changed=False)
        else:
            module.exit_json(msg="Bucket created successfully", changed=create_bucket(module, gs, bucket))
    if bucket and obj:
        # Virtual directories are represented as keys ending in '/'.
        dirobj = obj if obj.endswith('/') else obj + "/"

        if not bucket_check(module, gs, bucket):
            create_bucket(module, gs, bucket)
            create_dirkey(module, gs, bucket, dirobj)
        elif key_check(module, gs, bucket, dirobj):
            module.exit_json(msg="Bucket %s and key %s already exists." % (bucket, obj), changed=False)
        else:
            create_dirkey(module, gs, bucket, dirobj)
404
405
def main():
    """Module entry point: parse parameters, connect to GCS, dispatch on mode."""
    module = AnsibleModule(
        argument_spec=dict(
            bucket=dict(required=True),
            object=dict(default=None, type='path'),
            src=dict(default=None),
            dest=dict(default=None, type='path'),
            expiration=dict(type='int', default=600, aliases=['expiry']),
            mode=dict(choices=['get', 'put', 'delete', 'create', 'get_url', 'get_str'], required=True),
            permission=dict(choices=['private', 'public-read', 'authenticated-read'], default='private'),
            headers=dict(type='dict', default={}),
            gs_secret_key=dict(no_log=True, required=True),
            gs_access_key=dict(required=True),
            overwrite=dict(default=True, type='bool', aliases=['force']),
            region=dict(default='US', type='str'),
            versioning=dict(default='no', type='bool')
        ),
    )

    if not HAS_BOTO:
        module.fail_json(msg='`boto` 2.9+ is required for this module. Try: pip install `boto` --upgrade')

    bucket = module.params.get('bucket')
    obj = module.params.get('object')
    src = module.params.get('src')
    dest = module.params.get('dest')
    mode = module.params.get('mode')
    expiry = module.params.get('expiration')
    gs_secret_key = module.params.get('gs_secret_key')
    gs_access_key = module.params.get('gs_access_key')
    overwrite = module.params.get('overwrite')

    # BUGFIX: these checks previously tested `not object` -- the `object`
    # builtin, which is always truthy -- so a missing `object` parameter
    # was never reported. They now test the actual `obj` value.
    if mode == 'put':
        if not src or not obj:
            module.fail_json(msg="When using PUT, src, bucket, object are mandatory parameters")
    if mode == 'get':
        if not dest or not obj:
            module.fail_json(msg="When using GET, dest, bucket, object are mandatory parameters")

    try:
        gs = boto.connect_gs(gs_access_key, gs_secret_key)
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg=str(e))

    if mode == 'get':
        if not bucket_check(module, gs, bucket) or not key_check(module, gs, bucket, obj):
            module.fail_json(msg="Target bucket/key cannot be found", failed=True)
        if not path_check(dest):
            # No local copy yet: download unconditionally.
            download_gsfile(module, gs, bucket, obj, dest)
        else:
            # Local copy exists: compare checksums / honor overwrite.
            handle_get(module, gs, bucket, obj, overwrite, dest)

    if mode == 'put':
        if not path_check(src):
            module.fail_json(msg="Local object for PUT does not exist", failed=True)
        handle_put(module, gs, bucket, obj, overwrite, src, expiry)

    # Support for deleting an object if we have both params.
    if mode == 'delete':
        handle_delete(module, gs, bucket, obj)

    if mode == 'create':
        handle_create(module, gs, bucket, obj)

    if mode == 'get_url':
        if bucket and obj:
            if bucket_check(module, gs, bucket) and key_check(module, gs, bucket, obj):
                get_download_url(module, gs, bucket, obj, expiry)
            else:
                module.fail_json(msg="Key/Bucket does not exist", failed=True)
        else:
            module.fail_json(msg="Bucket and Object parameters must be set", failed=True)

    # --------------------------- Get the String contents of an Object -------------------------
    if mode == 'get_str':
        if bucket and obj:
            if bucket_check(module, gs, bucket) and key_check(module, gs, bucket, obj):
                download_gsstr(module, gs, bucket, obj)
            else:
                module.fail_json(msg="Key/Bucket does not exist", failed=True)
        else:
            module.fail_json(msg="Bucket and Object parameters must be set", failed=True)


if __name__ == '__main__':
    main()
492