1from __future__ import absolute_import
2from __future__ import unicode_literals
3
4import codecs
5import hashlib
6import json.decoder
7import logging
8import ntpath
9import random
10
11import six
12from docker.errors import DockerException
13from docker.utils import parse_bytes as sdk_parse_bytes
14
15from .errors import StreamParseError
16from .timeparse import MULTIPLIERS
17from .timeparse import timeparse
18
19
# Shared JSON decoder instance, reused by json_splitter()/json_stream().
json_decoder = json.JSONDecoder()
# Module-level logger for this utilities module.
log = logging.getLogger(__name__)
22
23
def get_output_stream(stream):
    """Return *stream* ready to receive unicode text.

    On Python 3 the stream already accepts text and is returned as-is;
    on Python 2 it is wrapped in a UTF-8 codecs writer.
    """
    if not six.PY3:
        stream = codecs.getwriter('utf-8')(stream)
    return stream
28
29
def stream_as_text(stream):
    """Yield every item of *stream* as text.

    Byte items are decoded as UTF-8, replacing undecodable sequences
    instead of raising. This function can be removed once docker-py
    returns text streams instead of byte streams.
    """
    for chunk in stream:
        if isinstance(chunk, six.text_type):
            yield chunk
        else:
            yield chunk.decode('utf-8', 'replace')
41
42
def line_splitter(buffer, separator=u'\n'):
    """Split one line (including its trailing separator) off *buffer*.

    Returns a ``(line, remainder)`` pair, or ``None`` when *buffer*
    contains no separator at all.
    """
    position = buffer.find(six.text_type(separator))
    if position < 0:
        return None
    cut = position + 1
    return buffer[:cut], buffer[cut:]
48
49
def split_buffer(stream, splitter=None, decoder=lambda a: a):
    """Given a generator which yields strings and a splitter function,
    joins all input, splits on the separator and yields each chunk.

    Unlike string.split(), each chunk includes the trailing
    separator, except for the last one if none was found on the end
    of the input.
    """
    if splitter is None:
        splitter = line_splitter
    pending = six.text_type('')

    for chunk in stream_as_text(stream):
        pending += chunk
        result = splitter(pending)
        while result is not None:
            head, pending = result
            yield head
            result = splitter(pending)

    if not pending:
        return
    # Whatever is left never matched the splitter; hand it to the decoder
    # so e.g. a final JSON object without a trailing newline still parses.
    try:
        yield decoder(pending)
    except Exception as e:
        log.error(
            'Compose tried decoding the following data chunk, but failed:'
            '\n%s' % repr(pending)
        )
        raise StreamParseError(e)
80
81
def json_splitter(buffer):
    """Attempt to parse a json object from a buffer. If there is at least one
    object, return it and the rest of the buffer, otherwise return None.
    """
    stripped = buffer.strip()
    try:
        obj, end = json_decoder.raw_decode(stripped)
    except ValueError:
        # No complete JSON object at the front of the buffer yet.
        return None
    # Skip any whitespace between this object and the next chunk.
    tail_start = json.decoder.WHITESPACE.match(stripped, end).end()
    return obj, stripped[tail_start:]
93
94
def json_stream(stream):
    """Given a stream of text, return a stream of json objects.
    This handles streams which are inconsistently buffered (some entries may
    be newline delimited, and others are not).
    """
    return split_buffer(
        stream, splitter=json_splitter, decoder=json_decoder.decode
    )
101
102
def json_hash(obj):
    """Return a stable SHA-256 hex digest of *obj*'s JSON serialization.

    Keys are sorted and separators fixed, so logically equal objects
    always produce the same digest. Values JSON cannot serialize fall
    back to the project convention of calling their ``.repr()`` method.
    """
    digest = hashlib.sha256()
    serialized = json.dumps(
        obj, sort_keys=True, separators=(',', ':'),
        default=lambda value: value.repr()
    )
    digest.update(serialized.encode('utf8'))
    return digest.hexdigest()
108
109
def microseconds_from_time_nano(time_nano):
    """Return the sub-second part of a nanosecond timestamp, in microseconds."""
    sub_second_nanos = time_nano % 1000000000
    return int(sub_second_nanos / 1000)
112
113
def nanoseconds_from_time_seconds(time_seconds):
    """Convert a value in seconds to an integer number of nanoseconds."""
    # MULTIPLIERS['nano'] is presumably the nano scale factor (1e-9), so
    # dividing by it scales seconds up to nanoseconds — see .timeparse.
    nano_scale = MULTIPLIERS['nano']
    return int(time_seconds / nano_scale)
116
117
def parse_seconds_float(value):
    """Parse a duration string into seconds via timeparse; falsy values
    are treated as the empty string.
    """
    text = value if value else ''
    return timeparse(text)
120
121
def parse_nanoseconds_int(value):
    """Parse a duration string and return whole nanoseconds, or None
    when the value cannot be parsed.
    """
    seconds = timeparse(value if value else '')
    if seconds is None:
        return None
    return nanoseconds_from_time_seconds(seconds)
127
128
def build_string_dict(source_dict):
    """Return a copy of *source_dict* with every value stringified;
    None values become the empty string.
    """
    return {
        key: str('' if value is None else value)
        for key, value in source_dict.items()
    }
131
132
def splitdrive(path):
    """Like ntpath.splitdrive, except paths that start with '.', '\\',
    '/' or '~' are never treated as carrying a drive letter.
    """
    if not path:
        return ('', '')
    if path[0] in '.\\/~':
        return ('', path)
    return ntpath.splitdrive(path)
139
140
def parse_bytes(n):
    """Parse a byte-size specification via the docker SDK, returning
    None instead of raising when it cannot be parsed.
    """
    try:
        parsed = sdk_parse_bytes(n)
    except DockerException:
        return None
    return parsed
146
147
def unquote_path(s):
    """Strip one pair of surrounding double quotes from *s*, if present.

    Falsy values (empty string, None) are returned unchanged.
    """
    if s and s[0] == '"' and s[-1] == '"':
        return s[1:-1]
    return s
154
155
def generate_random_id():
    """Generate a random hex id whose 12-character truncation does not
    parse as an integer.

    Ids whose truncated form is purely numeric are discarded and
    regenerated, so the short form can never be mistaken for a number.
    """
    while True:
        # '%x' renders the 256-bit value without a '0x' prefix on both
        # Python 2 and 3. The previous hex(...)[2:-1] slicing assumed
        # Python 2's trailing 'L' on longs and silently dropped the last
        # hex digit on Python 3.
        val = '%x' % random.getrandbits(32 * 8)
        try:
            int(truncate_id(val))
            continue
        except ValueError:
            return val
164
165
def truncate_id(value):
    """Shorten an id to at most 12 characters, first dropping any
    'algorithm:' prefix (e.g. 'sha256:').
    """
    _, sep, digest = value.partition(':')
    if sep:
        value = digest
    return value[:12]
172
173
def unique_everseen(iterable, key=lambda x: x):
    """Yield elements of *iterable* whose key has not appeared before,
    preserving order. Remembers every key ever seen.
    """
    seen_keys = set()
    remember = seen_keys.add
    for item in iterable:
        candidate = key(item)
        if candidate in seen_keys:
            continue
        remember(candidate)
        yield item
182
183
def truncate_string(s, max_chars=35):
    """Truncate *s* to at most *max_chars* characters, appending '...'
    when text was cut.

    Args:
        s: the string to shorten.
        max_chars: maximum length of the returned string.

    Returns:
        *s* unchanged when it already fits, otherwise its first
        ``max_chars - 3`` characters followed by '...'.
    """
    if len(s) > max_chars:
        # Reserve three characters for the ellipsis so the result never
        # exceeds max_chars (the previous `- 2` overshot by one char).
        return s[:max_chars - 3] + '...'
    return s
188