1# -------------------------------------------------------------------------
2# Copyright (c) Microsoft Corporation. All rights reserved.
3# Licensed under the MIT License. See License.txt in the project root for
4# license information.
5# --------------------------------------------------------------------------
6
7import os
8
9from .._models import BlobType
10from .._shared.policies_async import ExponentialRetry, LinearRetry
11from ._blob_client_async import BlobClient
12from ._container_client_async import ContainerClient
13from ._blob_service_client_async import BlobServiceClient
14from ._lease_async import BlobLeaseClient
15from ._download_async import StorageStreamDownloader
16
17
18async def upload_blob_to_url(
19        blob_url,  # type: str
20        data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
21        credential=None,  # type: Any
22        **kwargs):
23    # type: (...) -> dict[str, Any]
24    """Upload data to a given URL
25
26    The data will be uploaded as a block blob.
27
28    :param str blob_url:
29        The full URI to the blob. This can also include a SAS token.
30    :param data:
31        The data to upload. This can be bytes, text, an iterable or a file-like object.
32    :type data: bytes or str or Iterable
33    :param credential:
34        The credentials with which to authenticate. This is optional if the
35        blob URL already has a SAS token. The value can be a SAS token string, an account
36        shared access key, or an instance of a TokenCredentials class from azure.identity.
37        If the URL already has a SAS token, specifying an explicit credential will take priority.
38    :keyword bool overwrite:
39        Whether the blob to be uploaded should overwrite the current data.
40        If True, upload_blob_to_url will overwrite any existing data. If set to False, the
41        operation will fail with a ResourceExistsError.
42    :keyword int max_concurrency:
43        The number of parallel connections with which to download.
44    :keyword int length:
45        Number of bytes to read from the stream. This is optional, but
46        should be supplied for optimal performance.
47    :keyword dict(str,str) metadata:
48        Name-value pairs associated with the blob as metadata.
49    :keyword bool validate_content:
50        If true, calculates an MD5 hash for each chunk of the blob. The storage
51        service checks the hash of the content that has arrived with the hash
52        that was sent. This is primarily valuable for detecting bitflips on
53        the wire if using http instead of https as https (the default) will
54        already validate. Note that this MD5 hash is not stored with the
55        blob. Also note that if enabled, the memory-efficient upload algorithm
56        will not be used, because computing the MD5 hash requires buffering
57        entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
58    :keyword str encoding:
59        Encoding to use if text is supplied as input. Defaults to UTF-8.
60    :returns: Blob-updated property dict (Etag and last modified)
61    :rtype: dict(str, Any)
62    """
63    async with BlobClient.from_blob_url(blob_url, credential=credential) as client:
64        return await client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs)
65
66
67async def _download_to_stream(client, handle, **kwargs):
68    """Download data to specified open file-handle."""
69    stream = await client.download_blob(**kwargs)
70    await stream.readinto(handle)
71
72
73async def download_blob_from_url(
74        blob_url,  # type: str
75        output,  # type: str
76        credential=None,  # type: Any
77        **kwargs):
78    # type: (...) -> None
79    """Download the contents of a blob to a local file or stream.
80
81    :param str blob_url:
82        The full URI to the blob. This can also include a SAS token.
83    :param output:
84        Where the data should be downloaded to. This could be either a file path to write to,
85        or an open IO handle to write to.
86    :type output: str or writable stream
87    :param credential:
88        The credentials with which to authenticate. This is optional if the
89        blob URL already has a SAS token or the blob is public. The value can be a SAS token string,
90        an account shared access key, or an instance of a TokenCredentials class from azure.identity.
91        If the URL already has a SAS token, specifying an explicit credential will take priority.
92    :keyword bool overwrite:
93        Whether the local file should be overwritten if it already exists. The default value is
94        `False` - in which case a ValueError will be raised if the file already exists. If set to
95        `True`, an attempt will be made to write to the existing file. If a stream handle is passed
96        in, this value is ignored.
97    :keyword int max_concurrency:
98        The number of parallel connections with which to download.
99    :keyword int offset:
100        Start of byte range to use for downloading a section of the blob.
101        Must be set if length is provided.
102    :keyword int length:
103        Number of bytes to read from the stream. This is optional, but
104        should be supplied for optimal performance.
105    :keyword bool validate_content:
106        If true, calculates an MD5 hash for each chunk of the blob. The storage
107        service checks the hash of the content that has arrived with the hash
108        that was sent. This is primarily valuable for detecting bitflips on
109        the wire if using http instead of https as https (the default) will
110        already validate. Note that this MD5 hash is not stored with the
111        blob. Also note that if enabled, the memory-efficient upload algorithm
112        will not be used, because computing the MD5 hash requires buffering
113        entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
114    :rtype: None
115    """
116    overwrite = kwargs.pop('overwrite', False)
117    async with BlobClient.from_blob_url(blob_url, credential=credential) as client:
118        if hasattr(output, 'write'):
119            await _download_to_stream(client, output, **kwargs)
120        else:
121            if not overwrite and os.path.isfile(output):
122                raise ValueError("The file '{}' already exists.".format(output))
123            with open(output, 'wb') as file_handle:
124                await _download_to_stream(client, file_handle, **kwargs)
125
126
127__all__ = [
128    'upload_blob_to_url',
129    'download_blob_from_url',
130    'BlobServiceClient',
131    'ContainerClient',
132    'BlobClient',
133    'BlobLeaseClient',
134    'ExponentialRetry',
135    'LinearRetry',
136    'StorageStreamDownloader'
137]
138