# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates.  All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.

from __future__ import absolute_import

import base64
import datetime
import json
import os
import os.path

import pytz

from oci._vendor import six
from oci.exceptions import InvalidConfig

try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

from io import SEEK_SET
import logging

logger = logging.getLogger(__name__)
INSTANCE_PRINCIPAL_AUTHENTICATION_TYPE_VALUE_NAME = 'instance_principal'
DELEGATION_TOKEN_WITH_INSTANCE_PRINCIPAL_AUTHENTICATION_TYPE = 'delegation_token_with_instance_principal'
RESOURCE_PRINCIPAL_AUTHENTICATION_TYPE = 'resource_principal'
DELEGATION_TOKEN_FILE_FIELD_NAME = 'delegation_token_file'
AUTHENTICATION_TYPE_FIELD_NAME = 'authentication_type'
MEBIBYTE = 1024 * 1024
DEFAULT_BUFFER_SIZE = 100 * MEBIBYTE
DEFAULT_PART_SIZE = 128 * MEBIBYTE

try:
    # PY3+
    import collections.abc as abc
except ImportError:
    # PY2
    import collections as abc

missing_attr = object()


def to_dict(obj):
    """Helper to flatten models into dicts for rendering.

    The following conversions are applied:

    * datetime.date, datetime.datetime, datetime.time
      are converted into ISO8601 UTC strings
    """
    # Shortcut strings so they don't count as Iterables
    if isinstance(obj, six.string_types):
        return obj
    elif obj is NONE_SENTINEL:
        return None
    elif isinstance(obj, (datetime.datetime, datetime.time)):
        # always use UTC
        if not obj.tzinfo:
            obj = pytz.utc.localize(obj)
        if isinstance(obj, datetime.datetime):
            # only datetime.datetime takes a separator
            return obj.isoformat(sep="T")
        return obj.isoformat()
    elif isinstance(obj, datetime.date):
        # datetime.date doesn't have a timezone
        return obj.isoformat()
    elif isinstance(obj, abc.Mapping):
        return {k: to_dict(v) for k, v in six.iteritems(obj)}
    elif isinstance(obj, abc.Iterable):
        return [to_dict(v) for v in obj]
    # Not a string, datetime, dict, list, or model - return directly
    elif not hasattr(obj, "swagger_types"):
        return obj

    # Collect attrs from obj according to swagger_types into a dict
    as_dict = {}
    for key in six.iterkeys(obj.swagger_types):
        value = getattr(obj, key, missing_attr)
        if value is not missing_attr:
            as_dict[key] = to_dict(value)
    return as_dict


def formatted_flat_dict(model):
    """Returns a string of the model flattened as a dict, sorted"""
    as_dict = to_dict(model)
    return json.dumps(
        as_dict,
        indent=2,
        sort_keys=True
    )
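# Illustrative usage (a sketch, not part of the original module): plain dicts and
# lists are accepted as well as models, and naive datetimes are rendered as UTC
# ISO8601 strings, e.g.:
#
#     >>> to_dict({'created': datetime.datetime(2021, 1, 1)})
#     {'created': '2021-01-01T00:00:00+00:00'}
#     >>> print(formatted_flat_dict({'created': datetime.datetime(2021, 1, 1)}))
#     {
#       "created": "2021-01-01T00:00:00+00:00"
#     }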


def value_allowed_none_or_none_sentinel(value_to_test, allowed_values):
    return value_to_test is None or value_to_test is NONE_SENTINEL or value_to_test in allowed_values


def file_content_as_launch_instance_user_data(file_path):
    """
    Takes a file path and returns a Base64-encoded string which can be provided as the value of the ``user_data`` key
    in the ``metadata`` dictionary when launching an instance (see :py:class:`~oci.core.models.LaunchInstanceDetails`
    for more information).

    :param str file_path:
      The path to the file whose contents will be Base64-encoded

    :return: The Base64-encoded string
    :rtype: str
    """

    full_path = os.path.expandvars(os.path.expanduser(file_path))
    with open(full_path, 'rb') as f:
        file_contents = f.read()

    return base64.b64encode(file_contents).decode('utf-8')
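# Illustrative usage (a sketch; the file path and the other launch parameters below
# are placeholders, not values defined in this module):
#
#     >>> user_data = file_content_as_launch_instance_user_data('~/cloud-init.sh')
#     >>> details = oci.core.models.LaunchInstanceDetails(
#     ...     metadata={'user_data': user_data},
#     ...     # ...plus the usual required launch parameters...
#     ... )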


class Sentinel(object):
    """Named singletons for clear docstrings.
    Also used to differentiate an explicit param of None from a lack of argument.

    .. code-block:: pycon

        >>> missing = Sentinel("Missing", False)
        >>> also_missing = Sentinel("Missing", False)
        >>> assert missing is also_missing
        >>> repr(missing)
        '<Missing>'
        >>> assert bool(missing) is False
    """
    _symbols = {}

    def __new__(cls, name, truthy=True):
        sentinel = Sentinel._symbols.get(name, None)
        if sentinel is None:
            sentinel = Sentinel._symbols[name] = super(Sentinel, cls).__new__(cls)
        elif sentinel.truthy is not truthy:
            raise ValueError("Tried to get existing Sentinel {!r} with wrong truthy value".format(sentinel))
        return sentinel

    def __init__(self, name, truthy=True):
        self.name = name
        self.truthy = truthy

    def __repr__(self):
        # Sentinel("Missing") -> <Missing>
        return "<{}>".format(self.name)

    def __bool__(self):
        return self.truthy
    # PY2 Compatibility
    __nonzero__ = __bool__


NONE_SENTINEL = Sentinel(name='None', truthy=False)

WAIT_RESOURCE_NOT_FOUND = Sentinel(name='WaitResourceNotFound', truthy=False)


def _get_signer_from_delegation_token_instance_principal(config):
    # Import the signer inside the function to avoid circular imports during initialization
    from oci.auth.signers import InstancePrincipalsDelegationTokenSigner

    signer_kwargs = {}

    delegation_token_file_path = config.get(DELEGATION_TOKEN_FILE_FIELD_NAME)

    if delegation_token_file_path is None:
        raise InvalidConfig('ERROR: {} was not provided.'.format(DELEGATION_TOKEN_FILE_FIELD_NAME))

    expanded_delegation_token_file_path = os.path.expanduser(delegation_token_file_path)
    if not os.path.isfile(expanded_delegation_token_file_path):
        raise InvalidConfig('ERROR: delegation token file not found at {}'.format(expanded_delegation_token_file_path))

    with open(expanded_delegation_token_file_path, mode="r") as f:
        delegation_token = f.read().strip()
    # The stripped token can never be None, but it can be empty
    if not delegation_token:
        raise InvalidConfig('ERROR: delegation_token was empty or not provided.')
    signer_kwargs['delegation_token'] = delegation_token
    # Return signer with delegation token
    return InstancePrincipalsDelegationTokenSigner(**signer_kwargs)


def _get_signer_from_resource_principal(config):
    # Import the signer inside the function to avoid circular imports during initialization
    from oci.auth.signers import get_resource_principal_signer

    return get_resource_principal_signer()


# This map can be extended to support more auth types configured through the config file
AUTH_TYPE_TO_SIGNER_FUNCTION_MAP = {
    DELEGATION_TOKEN_WITH_INSTANCE_PRINCIPAL_AUTHENTICATION_TYPE: _get_signer_from_delegation_token_instance_principal,
    RESOURCE_PRINCIPAL_AUTHENTICATION_TYPE: _get_signer_from_resource_principal,
}


def get_signer_from_authentication_type(config):
    # This currently exists to allow the SDK to run seamlessly in Cloud Shell
    auth_type = get_authentication_type_from_config(config)

    # Get the signer function from the map
    signer_function = AUTH_TYPE_TO_SIGNER_FUNCTION_MAP.get(auth_type)
    return signer_function(config)


def get_authentication_type_from_config(config):
    auth_type = config.get(AUTHENTICATION_TYPE_FIELD_NAME)
    if auth_type is None:
        raise ValueError("{} not provided".format(AUTHENTICATION_TYPE_FIELD_NAME))

    # Currently the SDK supports only the Cloud Shell use case; this can be extended to other auth types
    if auth_type == INSTANCE_PRINCIPAL_AUTHENTICATION_TYPE_VALUE_NAME:
        if DELEGATION_TOKEN_FILE_FIELD_NAME in config:
            return DELEGATION_TOKEN_WITH_INSTANCE_PRINCIPAL_AUTHENTICATION_TYPE
        else:
            raise InvalidConfig("The authentication type {} requires config values for the keys {}".format(DELEGATION_TOKEN_WITH_INSTANCE_PRINCIPAL_AUTHENTICATION_TYPE, DELEGATION_TOKEN_FILE_FIELD_NAME))
    else:
        raise InvalidConfig("The authentication type {} is not supported".format(auth_type))
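# Illustrative usage (a sketch; the token file path is a placeholder). This mirrors
# the Cloud Shell setup described above, where the config carries an authentication
# type plus a delegation token file:
#
#     >>> config = {
#     ...     'authentication_type': 'instance_principal',
#     ...     'delegation_token_file': '/path/to/delegation_token',
#     ... }
#     >>> signer = get_signer_from_authentication_type(config)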


def back_up_body_calculate_stream_content_length(stream, buffer_limit=DEFAULT_BUFFER_SIZE):
    # Note: we need to read in chunks; older versions of Python will fail to read more than 2 GB at once.
    # We pull data in chunks (128 MiB at a time) from the stream until there is no more
    logger.warning("Reading the stream to calculate its content-length. Process may appear to freeze for very large streams. Consider passing in the content length for large objects")
    try:
        keep_reading = True
        part_size = DEFAULT_PART_SIZE
        content_length = 0
        content = ""
        byte_content = b''
        while keep_reading:
            if hasattr(stream, 'buffer'):
                content = stream.buffer.read(part_size)
            elif hasattr(stream, 'read'):
                content = stream.read(part_size)
            else:
                raise TypeError("Stream object does not have a 'read' method. Cannot auto-calculate content length, please pass in content length")
            if len(content) == 0:
                keep_reading = False
            byte_content += content
            content_length += len(content)
            if (buffer_limit and content_length > buffer_limit):
                raise BufferError("Stream size is greater than the buffer_limit, please pass in a bigger buffer_limit or pass in content length to the request")
        return {"content_length": content_length, "byte_content": byte_content}
    except (IOError, OSError):
        raise TypeError("Stream object's content length cannot be calculated, please pass in content length")
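# Illustrative usage (a sketch, not part of the original module): buffering a small
# in-memory stream to recover its length and bytes:
#
#     >>> import io
#     >>> backup = back_up_body_calculate_stream_content_length(io.BytesIO(b'payload'))
#     >>> backup['content_length'], backup['byte_content']
#     (7, b'payload')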


# This helper function checks whether an object's content length can be auto-calculated by the Requests library
def is_content_length_calculable_by_req_util(o):
    if hasattr(o, '__len__') or hasattr(o, 'len') or hasattr(o, 'fileno') or hasattr(o, 'tell'):
        return True
    logger.warning("Request did not contain a content-length and the stream object does not have a 'fileno' or 'tell' attribute. The stream will be read to calculate the content-length")
    return False


def extract_service_endpoint(endpoint_with_base_path):
    """
    Takes a Service Endpoint with the base path embedded and returns the Service Endpoint without it.

    :param str endpoint_with_base_path:
        Service Endpoint with the base path embedded

    :return: The Service Endpoint without the base path.
    :rtype: str
    """
    parsed_endpoint = urlparse(endpoint_with_base_path)
    return parsed_endpoint.scheme + r'://' + parsed_endpoint.netloc
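# Illustrative usage (a sketch; the endpoint shown is just an example URL):
#
#     >>> extract_service_endpoint('https://objectstorage.us-phoenix-1.oraclecloud.com/20160918')
#     'https://objectstorage.us-phoenix-1.oraclecloud.com'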


def should_record_body_position_for_retry(func_ref, **func_kwargs):
    func_name = func_ref.__name__
    # TODO: remove Python 2 requirements, use qualname
    if func_name == 'call_api':
        body = func_kwargs.get('body')
        # A file-like object body should be treated differently for retry
        if body and hasattr(body, 'read'):
            return True
        return False
    return False


def record_body_position_for_rewind(body):
    is_body_rewindable = True
    if getattr(body, 'tell', None) is not None:
        try:
            # Attempt to record current body position
            body_position = body.tell()
        except (IOError, OSError):
            # If we cannot record the current body position for a file-like body, then we should not retry
            is_body_rewindable = False
            body_position = None
            logger.warning("Unable to record body position for rewinding. This request will not be retried/rewound")
    else:
        # If the body does not support tell, then don't retry
        is_body_rewindable = False
        body_position = None
        logger.warning("Unable to record body position for rewinding. This request will not be retried/rewound")
    return is_body_rewindable, body_position


def rewind_body(body, body_position):
    if getattr(body, 'seek', None) is not None:
        try:
            body.seek(body_position, SEEK_SET)
        except (IOError, OSError):
            # If we're unable to reset the body position, then we should not retry
            logger.warning("Unable to reset body position for rewinding. This request will not be retried/rewound")
            return False
        return True
    # if the body does not support seek, then we should not retry
    logger.warning("Unable to reset body position for rewinding. This request will not be retried/rewound")
    return False
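# Illustrative usage (a sketch of how the retry helpers fit together; ``body`` here
# is a stand-in for a request body stream):
#
#     >>> import io
#     >>> body = io.BytesIO(b'payload')
#     >>> is_rewindable, position = record_body_position_for_rewind(body)
#     >>> _ = body.read()  # the first (failed) attempt consumes the stream
#     >>> rewind_body(body, position)  # rewind before retrying
#     True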


def read_stream_for_signing(signing_algorithm, body):
    bytes_read = 0
    try:
        while True:
            chunk = ""
            if hasattr(body, "read"):
                chunk = body.read(DEFAULT_PART_SIZE)
            elif hasattr(body, "buffer"):
                chunk = body.buffer.read(DEFAULT_PART_SIZE)
            if len(chunk) == 0:
                break
            bytes_read += len(chunk)
            signing_algorithm.update(chunk)
    except (IOError, OSError):
        logger.warning("Unable to read stream body for signing")
        bytes_read = -1
    return bytes_read
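# Illustrative usage (a sketch, not part of the original module): the signing
# algorithm only needs an ``update(bytes)`` method, so a hashlib object works:
#
#     >>> import hashlib, io
#     >>> digest = hashlib.sha256()
#     >>> read_stream_for_signing(digest, io.BytesIO(b'payload'))
#     7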