1#!/usr/bin/python
2# Copyright: Ansible Project
3# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
4
5from __future__ import absolute_import, division, print_function
6__metaclass__ = type
7
8
# Module metadata consumed by Ansible's documentation and support tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'community'}


# YAML option documentation rendered by `ansible-doc s3_logging`.
DOCUMENTATION = '''
---
module: s3_logging
short_description: Manage logging facility of an s3 bucket in AWS
description:
    - Manage logging facility of an s3 bucket in AWS
version_added: "2.0"
author: Rob White (@wimnat)
options:
  name:
    description:
      - "Name of the s3 bucket."
    required: true
  state:
    description:
      - "Enable or disable logging."
    default: present
    choices: [ 'present', 'absent' ]
  target_bucket:
    description:
      - "The bucket to log to. Required when state=present."
  target_prefix:
    description:
      - "The prefix that should be prepended to the generated log files written to the target_bucket."
    default: ""
extends_documentation_fragment:
    - aws
    - ec2
'''

# Usage examples shown by `ansible-doc`; authentication details are
# deliberately omitted (see the note inside the examples themselves).
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.

- name: Enable logging of s3 bucket mywebsite.com to s3 bucket mylogs
  s3_logging:
    name: mywebsite.com
    target_bucket: mylogs
    target_prefix: logs/mywebsite.com
    state: present

- name: Remove logging on an s3 bucket
  s3_logging:
    name: mywebsite.com
    state: absent

'''
60
61try:
62    import boto.ec2
63    from boto.s3.connection import OrdinaryCallingFormat, Location
64    from boto.exception import S3ResponseError
65    HAS_BOTO = True
66except ImportError:
67    HAS_BOTO = False
68
69from ansible.module_utils.basic import AnsibleModule
70from ansible.module_utils.ec2 import AnsibleAWSError, ec2_argument_spec, get_aws_connection_info
71
72
def compare_bucket_logging(bucket, target_bucket, target_prefix):
    """Return True if the bucket's current logging config matches the desired one.

    bucket is a boto S3 Bucket object; target_bucket and target_prefix may
    both be None to describe the "logging disabled" state (boto reports both
    attributes as None in that case).
    """
    # Idiom fix: return the comparison directly instead of the
    # `if cond: return False / else: return True` construction.
    status = bucket.get_logging_status()
    return status.target == target_bucket and status.prefix == target_prefix
80
81
def enable_bucket_logging(connection, module):
    """Enable S3 server access logging for the bucket named in module params.

    Reads ``name``, ``target_bucket`` and ``target_prefix`` from the module
    parameters and exits via ``module.exit_json(changed=...)`` on success or
    ``module.fail_json(msg=...)`` on any S3 error.

    NOTE(review): the code after each ``fail_json`` call (e.g. the use of
    ``target_bucket_obj`` after the 301 branch) is only safe because
    fail_json terminates module execution — confirm that invariant holds.
    """

    bucket_name = module.params.get("name")
    target_bucket = module.params.get("target_bucket")
    target_prefix = module.params.get("target_prefix")
    changed = False

    try:
        bucket = connection.get_bucket(bucket_name)
    except S3ResponseError as e:
        module.fail_json(msg=e.message)

    try:
        # Only touch AWS when the desired config differs from the current
        # one, keeping the module idempotent (changed=False on a no-op run).
        if not compare_bucket_logging(bucket, target_bucket, target_prefix):
            # Before we can enable logging we must give the log-delivery group WRITE and READ_ACP permissions to the target bucket
            try:
                target_bucket_obj = connection.get_bucket(target_bucket)
            except S3ResponseError as e:
                # boto surfaces a 301 when the target bucket lives in a
                # different region; S3 requires source and target to match.
                if e.status == 301:
                    module.fail_json(msg="the logging target bucket must be in the same region as the bucket being logged")
                else:
                    module.fail_json(msg=e.message)
            target_bucket_obj.set_as_logging_target()

            bucket.enable_logging(target_bucket, target_prefix)
            changed = True

    except S3ResponseError as e:
        module.fail_json(msg=e.message)

    module.exit_json(changed=changed)
113
114
def disable_bucket_logging(connection, module):
    """Turn off S3 server access logging for the bucket named in module params.

    Exits via ``module.exit_json(changed=...)`` on success or
    ``module.fail_json(msg=...)`` on an S3 error.
    """

    changed = False

    try:
        bucket = connection.get_bucket(module.params.get("name"))
        # Logging counts as disabled when both target and prefix are unset,
        # which is exactly the (None, None) comparison below.
        already_disabled = compare_bucket_logging(bucket, None, None)
        if not already_disabled:
            bucket.disable_logging()
            changed = True
    except S3ResponseError as e:
        module.fail_json(msg=e.message)

    module.exit_json(changed=changed)
129
130
def main():
    """Ansible module entry point: parse arguments, connect to S3, dispatch.

    Builds the argument spec, validates parameters, opens a boto S3
    connection for the configured region and calls the enable/disable
    helper for the requested state.
    """

    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(required=True),
            target_bucket=dict(required=False, default=None),
            target_prefix=dict(required=False, default=""),
            state=dict(required=False, default='present', choices=['present', 'absent'])
        )
    )

    # Enforce what the documentation already promises: target_bucket is
    # required when state=present.  Without this, a missing target_bucket
    # only surfaces later as an opaque boto error from get_bucket(None).
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_if=[['state', 'present', ['target_bucket']]],
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)

    if region in ('us-east-1', '', None):
        # S3ism for the US Standard region
        location = Location.DEFAULT
    else:
        # Boto uses symbolic names for locations but region strings will
        # actually work fine for everything except us-east-1 (US Standard)
        location = region
    try:
        connection = boto.s3.connect_to_region(location, is_secure=True, calling_format=OrdinaryCallingFormat(), **aws_connect_params)
        # use this as fallback because connect_to_region seems to fail in boto + non 'classic' aws accounts in some cases
        if connection is None:
            connection = boto.connect_s3(**aws_connect_params)
    except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
        module.fail_json(msg=str(e))

    state = module.params.get("state")

    if state == 'present':
        enable_bucket_logging(connection, module)
    elif state == 'absent':
        disable_bucket_logging(connection, module)
171
172
# Standard boilerplate: invoke main() only when executed as a script.
if __name__ == '__main__':
    main()
175