1"""
2Connection library for Amazon S3
3
4:depends: requests
5"""
6
7import logging
8import urllib.parse
9import xml.etree.ElementTree as ET
10
11import salt.utils.aws
12import salt.utils.files
13import salt.utils.hashutils
14import salt.utils.xmlutil as xml
15from salt.exceptions import CommandExecutionError
16
17try:
18    import requests
19
20    HAS_REQUESTS = True  # pylint: disable=W0612
21except ImportError:
22    HAS_REQUESTS = False  # pylint: disable=W0612
23
24
25log = logging.getLogger(__name__)
26
27
28def query(
29    key,
30    keyid,
31    method="GET",
32    params=None,
33    headers=None,
34    requesturl=None,
35    return_url=False,
36    bucket=None,
37    service_url=None,
38    path="",
39    return_bin=False,
40    action=None,
41    local_file=None,
42    verify_ssl=True,
43    full_headers=False,
44    kms_keyid=None,
45    location=None,
46    role_arn=None,
47    chunk_size=16384,
48    path_style=False,
49    https_enable=True,
50):
51    """
52    Perform a query against an S3-like API. This function requires that a
53    secret key and the id for that key are passed in. For instance:
54
55        s3.keyid: GKTADJGHEIQSXMKKRBJ08H
56        s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
57
58    If keyid or key is not specified, an attempt to fetch them from EC2 IAM
59    metadata service will be made.
60
61    A service_url may also be specified in the configuration:
62
63        s3.service_url: s3.amazonaws.com
64
65    If a service_url is not specified, the default is s3.amazonaws.com. This
66    may appear in various documentation as an "endpoint". A comprehensive list
67    for Amazon S3 may be found at::
68
69        http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
70
71    The service_url will form the basis for the final endpoint that is used to
72    query the service.
73
74    Path style can be enabled:
75
76        s3.path_style: True
77
78    This can be useful if you need to use salt with a proxy for an s3 compatible storage
79
80    You can use either https protocol or http protocol:
81
82        s3.https_enable: True
83
84    SSL verification may also be turned off in the configuration:
85
86        s3.verify_ssl: False
87
88    This is required if using S3 bucket names that contain a period, as
89    these will not match Amazon's S3 wildcard certificates. Certificate
90    verification is enabled by default.
91
92    A region may be specified:
93
94        s3.location: eu-central-1
95
96    If region is not specified, an attempt to fetch the region from EC2 IAM
97    metadata service will be made. Failing that, default is us-east-1
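
    A minimal usage sketch, for example listing the contents of a bucket (the
    key, keyid, and bucket name shown are placeholder values)::

        ret = query(
            key="askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs",
            keyid="GKTADJGHEIQSXMKKRBJ08H",
            bucket="my-bucket",
            method="GET",
        )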
98    """
99    if not HAS_REQUESTS:
100        log.error("There was an error: requests is required for s3 access")
101
102    if not headers:
103        headers = {}
104
105    if not params:
106        params = {}
107
108    if not service_url:
109        service_url = "s3.amazonaws.com"
110
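    # Use virtual-hosted-style addressing (bucket.service_url) by default;
    # with path_style (or no bucket) the bare service_url is used and the
    # bucket name, if any, becomes the leading component of the path.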
    if not bucket or path_style:
        endpoint = service_url
    else:
        endpoint = "{}.{}".format(bucket, service_url)

    if path_style and bucket:
        path = "{}/{}".format(bucket, path)

    # Try grabbing the credentials from the EC2 instance IAM metadata if available
    if not key:
        key = salt.utils.aws.IROLE_CODE

    if not keyid:
        keyid = salt.utils.aws.IROLE_CODE

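    # For uploads, ask S3 to encrypt the object server-side with the given KMS key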
    if kms_keyid is not None and method in ("PUT", "POST"):
        headers["x-amz-server-side-encryption"] = "aws:kms"
        headers["x-amz-server-side-encryption-aws-kms-key-id"] = kms_keyid

    if not location:
        location = salt.utils.aws.get_location()

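    # For PUT uploads of a local file, pre-compute its SHA256 so the payload
    # hash can be passed to the SigV4 signing routine below.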
133    data = ""
134    fh = None
135    payload_hash = None
136    if method == "PUT":
137        if local_file:
138            payload_hash = salt.utils.hashutils.get_hash(local_file, form="sha256")
139
140    if path is None:
141        path = ""
142    path = urllib.parse.quote(path)
143
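    # Build the request URL from the endpoint and quoted path, then have
    # salt.utils.aws.sig4 compute the AWS Signature Version 4 headers and the
    # final request URL for this request.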
    if not requesturl:
        requesturl = "{}://{}/{}".format(
            "https" if https_enable else "http", endpoint, path
        )
        headers, requesturl = salt.utils.aws.sig4(
            method,
            endpoint,
            params,
            data=data,
            uri="/{}".format(path),
            prov_dict={"id": keyid, "key": key},
            role_arn=role_arn,
            location=location,
            product="s3",
            requesturl=requesturl,
            headers=headers,
            payload_hash=payload_hash,
        )

163    log.debug("S3 Request: %s", requesturl)
164    log.debug("S3 Headers::")
165    log.debug("    Authorization: %s", headers["Authorization"])
166
167    if not data:
168        data = None
169
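    # Issue the request. stream=True is used for PUT uploads and for GET
    # downloads that will be written to a local file below.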
    try:
        if method == "PUT":
            if local_file:
                # pylint: disable=resource-leakage
                fh = salt.utils.files.fopen(local_file, "rb")
                # pylint: enable=resource-leakage
                data = fh.read()  # pylint: disable=resource-leakage
            result = requests.request(
                method,
                requesturl,
                headers=headers,
                data=data,
                verify=verify_ssl,
                stream=True,
                timeout=300,
            )
        elif method == "GET" and local_file and not return_bin:
            result = requests.request(
                method,
                requesturl,
                headers=headers,
                data=data,
                verify=verify_ssl,
                stream=True,
                timeout=300,
            )
        else:
            result = requests.request(
                method,
                requesturl,
                headers=headers,
                data=data,
                verify=verify_ssl,
                timeout=300,
            )
    finally:
        if fh is not None:
            fh.close()

    err_code = None
    err_msg = None
    if result.status_code >= 400:
        # On error the S3 API response should contain an error message
        err_text = result.content or "Unknown error"
        log.debug("    Response content: %s", err_text)

        # Try to get error info from the response XML
        try:
            err_data = xml.to_dict(ET.fromstring(err_text))
            err_code = err_data["Code"]
            err_msg = err_data["Message"]
        except (KeyError, ET.ParseError) as err:
            log.debug(
                "Failed to parse s3 err response. %s: %s", type(err).__name__, err
            )
            err_code = "http-{}".format(result.status_code)
            err_msg = err_text

    log.debug("S3 Response Status Code: %s", result.status_code)

    if method == "PUT":
        if result.status_code != 200:
            if local_file:
                raise CommandExecutionError(
                    "Failed to upload from {} to {}. {}: {}".format(
                        local_file, path, err_code, err_msg
                    )
                )
            raise CommandExecutionError(
                "Failed to create bucket {}. {}: {}".format(bucket, err_code, err_msg)
            )

        if local_file:
            log.debug("Uploaded from %s to %s", local_file, path)
        else:
            log.debug("Created bucket %s", bucket)
        return

    if method == "DELETE":
        if not str(result.status_code).startswith("2"):
            if path:
                raise CommandExecutionError(
                    "Failed to delete {} from bucket {}. {}: {}".format(
                        path, bucket, err_code, err_msg
                    )
                )
            raise CommandExecutionError(
                "Failed to delete bucket {}. {}: {}".format(bucket, err_code, err_msg)
            )

        if path:
            log.debug("Deleted %s from bucket %s", path, bucket)
        else:
            log.debug("Deleted bucket %s", bucket)
        return

    # This can be used to save a binary object to disk
    if local_file and method == "GET":
        if result.status_code < 200 or result.status_code >= 300:
            raise CommandExecutionError(
                "Failed to get file. {}: {}".format(err_code, err_msg)
            )

        log.debug("Saving to local file: %s", local_file)
        with salt.utils.files.fopen(local_file, "wb") as out:
            for chunk in result.iter_content(chunk_size=chunk_size):
                out.write(chunk)
        return "Saved to local file: {}".format(local_file)

    if result.status_code < 200 or result.status_code >= 300:
        raise CommandExecutionError(
            "Failed s3 operation. {}: {}".format(err_code, err_msg)
        )

    # This can be used to return a binary object wholesale
    if return_bin:
        return result.content

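    # A non-empty body is S3 XML (e.g. a ListBucketResult); convert each child
    # element to a dict. An empty body is reported through the response
    # headers instead.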
    if result.content:
        items = ET.fromstring(result.content)

        ret = []
        for item in items:
            ret.append(xml.to_dict(item))

        if return_url is True:
            return ret, requesturl
    else:
        if result.status_code != requests.codes.ok:
            return
        ret = {"headers": []}
        if full_headers:
            ret["headers"] = dict(result.headers)
        else:
            for header in result.headers:
                ret["headers"].append(header.strip())

    return ret