1# pylint: disable=too-many-lines
2# -------------------------------------------------------------------------
3# Copyright (c) Microsoft Corporation. All rights reserved.
4# Licensed under the MIT License. See License.txt in the project root for
5# license information.
6# --------------------------------------------------------------------------
7
8import functools
9from typing import (  # pylint: disable=unused-import
10    Union, Optional, Any, Iterable, AnyStr, Dict, List, Tuple, IO, Iterator,
11    TYPE_CHECKING
12)
13
14
15try:
16    from urllib.parse import urlparse, quote, unquote
17except ImportError:
18    from urlparse import urlparse # type: ignore
19    from urllib2 import quote, unquote # type: ignore
20
21import six
22
23from azure.core import MatchConditions
24from azure.core.exceptions import HttpResponseError
25from azure.core.paging import ItemPaged
26from azure.core.tracing.decorator import distributed_trace
27from azure.core.pipeline import Pipeline
28from azure.core.pipeline.transport import HttpRequest
29
30from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query
31from ._shared.request_handlers import add_metadata_headers, serialize_iso
32from ._shared.response_handlers import (
33    process_storage_error,
34    return_response_headers,
35    return_headers_and_deserialized)
36from ._generated import AzureBlobStorage
37from ._generated.models import SignedIdentifier
38from ._deserialize import deserialize_container_properties
39from ._serialize import get_modify_conditions, get_container_cpk_scope_info, get_api_version, get_access_conditions
40from ._models import ( # pylint: disable=unused-import
41    ContainerProperties,
42    BlobProperties,
43    BlobType)
44from ._list_blobs_helper import BlobPrefix, BlobPropertiesPaged
45from ._lease import BlobLeaseClient
46from ._blob_client import BlobClient
47
48if TYPE_CHECKING:
49    from azure.core.pipeline.transport import HttpTransport, HttpResponse  # pylint: disable=ungrouped-imports
50    from azure.core.pipeline.policies import HTTPPolicy # pylint: disable=ungrouped-imports
51    from datetime import datetime
52    from ._models import (  # pylint: disable=unused-import
53        PublicAccess,
54        AccessPolicy,
55        ContentSettings,
56        StandardBlobTier,
57        PremiumPageBlobTier)
58
59
60def _get_blob_name(blob):
61    """Return the blob name.
62
63    :param blob: A blob string or BlobProperties
64    :rtype: str
65    """
66    try:
67        return blob.get('name')
68    except AttributeError:
69        return blob
70
71
72class ContainerClient(StorageAccountHostsMixin):
73    """A client to interact with a specific container, although that container
74    may not yet exist.
75
76    For operations relating to a specific blob within this container, a blob client can be
77    retrieved using the :func:`~get_blob_client` function.
78
79    :param str account_url:
80        The URI to the storage account. In order to create a client given the full URI to the container,
81        use the :func:`from_container_url` classmethod.
82    :param container_name:
83        The name of the container for the blob.
84    :type container_name: str
85    :param credential:
86        The credentials with which to authenticate. This is optional if the
87        account URL already has a SAS token. The value can be a SAS token string,
88        an instance of a AzureSasCredential from azure.core.credentials, an account
89        shared access key, or an instance of a TokenCredentials class from azure.identity.
90        If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
91        - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
92    :keyword str api_version:
93        The Storage API version to use for requests. Default value is '2019-07-07'.
94        Setting to an older version may result in reduced feature compatibility.
95
96        .. versionadded:: 12.2.0
97
98    :keyword str secondary_hostname:
99        The hostname of the secondary endpoint.
100    :keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks.
101        Defaults to 4*1024*1024, or 4MB.
102    :keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be
103        uploaded with only one http PUT request. If the blob size is larger than max_single_put_size,
104        the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB.
105    :keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient
106        algorithm when uploading a block blob. Defaults to 4*1024*1024+1.
107    :keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False.
108    :keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB.
109    :keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call,
110        the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB.
111    :keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
112        or 4MB.
113
114    .. admonition:: Example:
115
116        .. literalinclude:: ../samples/blob_samples_containers.py
117            :start-after: [START create_container_client_from_service]
118            :end-before: [END create_container_client_from_service]
119            :language: python
120            :dedent: 8
121            :caption: Get a ContainerClient from an existing BlobServiceClient.
122
123        .. literalinclude:: ../samples/blob_samples_containers.py
124            :start-after: [START create_container_client_sasurl]
125            :end-before: [END create_container_client_sasurl]
126            :language: python
127            :dedent: 8
128            :caption: Creating the container client directly.
129    """
130    def __init__(
131            self, account_url,  # type: str
132            container_name,  # type: str
133            credential=None,  # type: Optional[Any]
134            **kwargs  # type: Any
135        ):
136        # type: (...) -> None
137        try:
138            if not account_url.lower().startswith('http'):
139                account_url = "https://" + account_url
140        except AttributeError:
141            raise ValueError("Container URL must be a string.")
142        parsed_url = urlparse(account_url.rstrip('/'))
143        if not container_name:
144            raise ValueError("Please specify a container name.")
145        if not parsed_url.netloc:
146            raise ValueError("Invalid URL: {}".format(account_url))
147
148        _, sas_token = parse_query(parsed_url.query)
149        self.container_name = container_name
150        self._query_str, credential = self._format_query_string(sas_token, credential)
151        super(ContainerClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs)
152        self._client = AzureBlobStorage(self.url, pipeline=self._pipeline)
153        default_api_version = self._client._config.version  # pylint: disable=protected-access
154        self._client._config.version = get_api_version(kwargs, default_api_version) # pylint: disable=protected-access
155
156    def _format_url(self, hostname):
157        container_name = self.container_name
158        if isinstance(container_name, six.text_type):
159            container_name = container_name.encode('UTF-8')
160        return "{}://{}/{}{}".format(
161            self.scheme,
162            hostname,
163            quote(container_name),
164            self._query_str)
165
166    @classmethod
167    def from_container_url(cls, container_url, credential=None, **kwargs):
168        # type: (str, Optional[Any], Any) -> ContainerClient
169        """Create ContainerClient from a container url.
170
171        :param str container_url:
172            The full endpoint URL to the Container, including SAS token if used. This could be
173            either the primary endpoint, or the secondary endpoint depending on the current `location_mode`.
174        :type container_url: str
175        :param credential:
176            The credentials with which to authenticate. This is optional if the
177            account URL already has a SAS token, or the connection string already has shared
178            access key values. The value can be a SAS token string,
179            an instance of a AzureSasCredential from azure.core.credentials, an account shared access
180            key, or an instance of a TokenCredentials class from azure.identity.
181            If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
182            - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
183        :returns: A container client.
184        :rtype: ~azure.storage.blob.ContainerClient
185        """
186        try:
187            if not container_url.lower().startswith('http'):
188                container_url = "https://" + container_url
189        except AttributeError:
190            raise ValueError("Container URL must be a string.")
191        parsed_url = urlparse(container_url.rstrip('/'))
192        if not parsed_url.netloc:
193            raise ValueError("Invalid URL: {}".format(container_url))
194
195        container_path = parsed_url.path.lstrip('/').split('/')
196        account_path = ""
197        if len(container_path) > 1:
198            account_path = "/" + "/".join(container_path[:-1])
199        account_url = "{}://{}{}?{}".format(
200            parsed_url.scheme,
201            parsed_url.netloc.rstrip('/'),
202            account_path,
203            parsed_url.query)
204        container_name = unquote(container_path[-1])
205        if not container_name:
206            raise ValueError("Invalid URL. Please provide a URL with a valid container name")
207        return cls(account_url, container_name=container_name, credential=credential, **kwargs)
208
209    @classmethod
210    def from_connection_string(
211            cls, conn_str,  # type: str
212            container_name,  # type: str
213            credential=None,  # type: Optional[Any]
214            **kwargs  # type: Any
215        ):  # type: (...) -> ContainerClient
216        """Create ContainerClient from a Connection String.
217
218        :param str conn_str:
219            A connection string to an Azure Storage account.
220        :param container_name:
221            The container name for the blob.
222        :type container_name: str
223        :param credential:
224            The credentials with which to authenticate. This is optional if the
225            account URL already has a SAS token, or the connection string already has shared
226            access key values. The value can be a SAS token string,
227            an instance of a AzureSasCredential from azure.core.credentials, an account shared access
228            key, or an instance of a TokenCredentials class from azure.identity.
229            Credentials provided here will take precedence over those in the connection string.
230        :returns: A container client.
231        :rtype: ~azure.storage.blob.ContainerClient
232
233        .. admonition:: Example:
234
235            .. literalinclude:: ../samples/blob_samples_authentication.py
236                :start-after: [START auth_from_connection_string_container]
237                :end-before: [END auth_from_connection_string_container]
238                :language: python
239                :dedent: 8
240                :caption: Creating the ContainerClient from a connection string.
241        """
242        account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob')
243        if 'secondary_hostname' not in kwargs:
244            kwargs['secondary_hostname'] = secondary
245        return cls(
246            account_url, container_name=container_name, credential=credential, **kwargs)
247
248    @distributed_trace
249    def create_container(self, metadata=None, public_access=None, **kwargs):
250        # type: (Optional[Dict[str, str]], Optional[Union[PublicAccess, str]], **Any) -> None
251        """
252        Creates a new container under the specified account. If the container
253        with the same name already exists, the operation fails.
254
255        :param metadata:
256            A dict with name_value pairs to associate with the
257            container as metadata. Example:{'Category':'test'}
258        :type metadata: dict[str, str]
259        :param ~azure.storage.blob.PublicAccess public_access:
260            Possible values include: 'container', 'blob'.
261        :keyword container_encryption_scope:
262            Specifies the default encryption scope to set on the container and use for
263            all future writes.
264
265            .. versionadded:: 12.2.0
266
267        :paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
268        :keyword int timeout:
269            The timeout parameter is expressed in seconds.
270        :rtype: None
271
272        .. admonition:: Example:
273
274            .. literalinclude:: ../samples/blob_samples_containers.py
275                :start-after: [START create_container]
276                :end-before: [END create_container]
277                :language: python
278                :dedent: 12
279                :caption: Creating a container to store blobs.
280        """
281        headers = kwargs.pop('headers', {})
282        timeout = kwargs.pop('timeout', None)
283        headers.update(add_metadata_headers(metadata)) # type: ignore
284        container_cpk_scope_info = get_container_cpk_scope_info(kwargs)
285        try:
286            return self._client.container.create( # type: ignore
287                timeout=timeout,
288                access=public_access,
289                container_cpk_scope_info=container_cpk_scope_info,
290                cls=return_response_headers,
291                headers=headers,
292                **kwargs)
293        except HttpResponseError as error:
294            process_storage_error(error)
295
296    @distributed_trace
297    def delete_container(
298            self, **kwargs):
299        # type: (Any) -> None
300        """
301        Marks the specified container for deletion. The container and any blobs
302        contained within it are later deleted during garbage collection.
303
304        :keyword lease:
305            If specified, delete_container only succeeds if the
306            container's lease is active and matches this ID.
307            Required if the container has an active lease.
308        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
309        :keyword ~datetime.datetime if_modified_since:
310            A DateTime value. Azure expects the date value passed in to be UTC.
311            If timezone is included, any non-UTC datetimes will be converted to UTC.
312            If a date is passed in without timezone info, it is assumed to be UTC.
313            Specify this header to perform the operation only
314            if the resource has been modified since the specified time.
315        :keyword ~datetime.datetime if_unmodified_since:
316            A DateTime value. Azure expects the date value passed in to be UTC.
317            If timezone is included, any non-UTC datetimes will be converted to UTC.
318            If a date is passed in without timezone info, it is assumed to be UTC.
319            Specify this header to perform the operation only if
320            the resource has not been modified since the specified date/time.
321        :keyword str etag:
322            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
323            and act according to the condition specified by the `match_condition` parameter.
324        :keyword ~azure.core.MatchConditions match_condition:
325            The match condition to use upon the etag.
326        :keyword int timeout:
327            The timeout parameter is expressed in seconds.
328        :rtype: None
329
330        .. admonition:: Example:
331
332            .. literalinclude:: ../samples/blob_samples_containers.py
333                :start-after: [START delete_container]
334                :end-before: [END delete_container]
335                :language: python
336                :dedent: 12
337                :caption: Delete a container.
338        """
339        lease = kwargs.pop('lease', None)
340        access_conditions = get_access_conditions(lease)
341        mod_conditions = get_modify_conditions(kwargs)
342        timeout = kwargs.pop('timeout', None)
343        try:
344            self._client.container.delete(
345                timeout=timeout,
346                lease_access_conditions=access_conditions,
347                modified_access_conditions=mod_conditions,
348                **kwargs)
349        except HttpResponseError as error:
350            process_storage_error(error)
351
352    @distributed_trace
353    def acquire_lease(
354            self, lease_duration=-1,  # type: int
355            lease_id=None,  # type: Optional[str]
356            **kwargs):
357        # type: (...) -> BlobLeaseClient
358        """
359        Requests a new lease. If the container does not have an active lease,
360        the Blob service creates a lease on the container and returns a new
361        lease ID.
362
363        :param int lease_duration:
364            Specifies the duration of the lease, in seconds, or negative one
365            (-1) for a lease that never expires. A non-infinite lease can be
366            between 15 and 60 seconds. A lease duration cannot be changed
367            using renew or change. Default is -1 (infinite lease).
368        :param str lease_id:
369            Proposed lease ID, in a GUID string format. The Blob service returns
370            400 (Invalid request) if the proposed lease ID is not in the correct format.
371        :keyword ~datetime.datetime if_modified_since:
372            A DateTime value. Azure expects the date value passed in to be UTC.
373            If timezone is included, any non-UTC datetimes will be converted to UTC.
374            If a date is passed in without timezone info, it is assumed to be UTC.
375            Specify this header to perform the operation only
376            if the resource has been modified since the specified time.
377        :keyword ~datetime.datetime if_unmodified_since:
378            A DateTime value. Azure expects the date value passed in to be UTC.
379            If timezone is included, any non-UTC datetimes will be converted to UTC.
380            If a date is passed in without timezone info, it is assumed to be UTC.
381            Specify this header to perform the operation only if
382            the resource has not been modified since the specified date/time.
383        :keyword str etag:
384            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
385            and act according to the condition specified by the `match_condition` parameter.
386        :keyword ~azure.core.MatchConditions match_condition:
387            The match condition to use upon the etag.
388        :keyword int timeout:
389            The timeout parameter is expressed in seconds.
390        :returns: A BlobLeaseClient object, that can be run in a context manager.
391        :rtype: ~azure.storage.blob.BlobLeaseClient
392
393        .. admonition:: Example:
394
395            .. literalinclude:: ../samples/blob_samples_containers.py
396                :start-after: [START acquire_lease_on_container]
397                :end-before: [END acquire_lease_on_container]
398                :language: python
399                :dedent: 8
400                :caption: Acquiring a lease on the container.
401        """
402        lease = BlobLeaseClient(self, lease_id=lease_id) # type: ignore
403        kwargs.setdefault('merge_span', True)
404        timeout = kwargs.pop('timeout', None)
405        lease.acquire(lease_duration=lease_duration, timeout=timeout, **kwargs)
406        return lease
407
408    @distributed_trace
409    def get_account_information(self, **kwargs):
410        # type: (**Any) -> Dict[str, str]
411        """Gets information related to the storage account.
412
413        The information can also be retrieved if the user has a SAS to a container or blob.
414        The keys in the returned dictionary include 'sku_name' and 'account_kind'.
415
416        :returns: A dict of account information (SKU and account type).
417        :rtype: dict(str, str)
418        """
419        try:
420            return self._client.container.get_account_info(cls=return_response_headers, **kwargs) # type: ignore
421        except HttpResponseError as error:
422            process_storage_error(error)
423
424    @distributed_trace
425    def get_container_properties(self, **kwargs):
426        # type: (Any) -> ContainerProperties
427        """Returns all user-defined metadata and system properties for the specified
428        container. The data returned does not include the container's list of blobs.
429
430        :keyword lease:
431            If specified, get_container_properties only succeeds if the
432            container's lease is active and matches this ID.
433        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
434        :keyword int timeout:
435            The timeout parameter is expressed in seconds.
436        :return: Properties for the specified container within a container object.
437        :rtype: ~azure.storage.blob.ContainerProperties
438
439        .. admonition:: Example:
440
441            .. literalinclude:: ../samples/blob_samples_containers.py
442                :start-after: [START get_container_properties]
443                :end-before: [END get_container_properties]
444                :language: python
445                :dedent: 12
446                :caption: Getting properties on the container.
447        """
448        lease = kwargs.pop('lease', None)
449        access_conditions = get_access_conditions(lease)
450        timeout = kwargs.pop('timeout', None)
451        try:
452            response = self._client.container.get_properties(
453                timeout=timeout,
454                lease_access_conditions=access_conditions,
455                cls=deserialize_container_properties,
456                **kwargs)
457        except HttpResponseError as error:
458            process_storage_error(error)
459        response.name = self.container_name
460        return response # type: ignore
461
462    @distributed_trace
463    def set_container_metadata( # type: ignore
464            self, metadata=None,  # type: Optional[Dict[str, str]]
465            **kwargs
466        ):
467        # type: (...) -> Dict[str, Union[str, datetime]]
468        """Sets one or more user-defined name-value pairs for the specified
469        container. Each call to this operation replaces all existing metadata
470        attached to the container. To remove all metadata from the container,
471        call this operation with no metadata dict.
472
473        :param metadata:
474            A dict containing name-value pairs to associate with the container as
475            metadata. Example: {'category':'test'}
476        :type metadata: dict[str, str]
477        :keyword lease:
478            If specified, set_container_metadata only succeeds if the
479            container's lease is active and matches this ID.
480        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
481        :keyword ~datetime.datetime if_modified_since:
482            A DateTime value. Azure expects the date value passed in to be UTC.
483            If timezone is included, any non-UTC datetimes will be converted to UTC.
484            If a date is passed in without timezone info, it is assumed to be UTC.
485            Specify this header to perform the operation only
486            if the resource has been modified since the specified time.
487        :keyword ~datetime.datetime if_unmodified_since:
488            A DateTime value. Azure expects the date value passed in to be UTC.
489            If timezone is included, any non-UTC datetimes will be converted to UTC.
490            If a date is passed in without timezone info, it is assumed to be UTC.
491            Specify this header to perform the operation only if
492            the resource has not been modified since the specified date/time.
493        :keyword str etag:
494            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
495            and act according to the condition specified by the `match_condition` parameter.
496        :keyword int timeout:
497            The timeout parameter is expressed in seconds.
498        :returns: Container-updated property dict (Etag and last modified).
499        :rtype: dict[str, str or datetime]
500
501        .. admonition:: Example:
502
503            .. literalinclude:: ../samples/blob_samples_containers.py
504                :start-after: [START set_container_metadata]
505                :end-before: [END set_container_metadata]
506                :language: python
507                :dedent: 12
508                :caption: Setting metadata on the container.
509        """
510        headers = kwargs.pop('headers', {})
511        headers.update(add_metadata_headers(metadata))
512        lease = kwargs.pop('lease', None)
513        access_conditions = get_access_conditions(lease)
514        mod_conditions = get_modify_conditions(kwargs)
515        timeout = kwargs.pop('timeout', None)
516        try:
517            return self._client.container.set_metadata( # type: ignore
518                timeout=timeout,
519                lease_access_conditions=access_conditions,
520                modified_access_conditions=mod_conditions,
521                cls=return_response_headers,
522                headers=headers,
523                **kwargs)
524        except HttpResponseError as error:
525            process_storage_error(error)
526
527    @distributed_trace
528    def get_container_access_policy(self, **kwargs):
529        # type: (Any) -> Dict[str, Any]
530        """Gets the permissions for the specified container.
531        The permissions indicate whether container data may be accessed publicly.
532
533        :keyword lease:
534            If specified, get_container_access_policy only succeeds if the
535            container's lease is active and matches this ID.
536        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
537        :keyword int timeout:
538            The timeout parameter is expressed in seconds.
539        :returns: Access policy information in a dict.
540        :rtype: dict[str, Any]
541
542        .. admonition:: Example:
543
544            .. literalinclude:: ../samples/blob_samples_containers.py
545                :start-after: [START get_container_access_policy]
546                :end-before: [END get_container_access_policy]
547                :language: python
548                :dedent: 12
549                :caption: Getting the access policy on the container.
550        """
551        lease = kwargs.pop('lease', None)
552        access_conditions = get_access_conditions(lease)
553        timeout = kwargs.pop('timeout', None)
554        try:
555            response, identifiers = self._client.container.get_access_policy(
556                timeout=timeout,
557                lease_access_conditions=access_conditions,
558                cls=return_headers_and_deserialized,
559                **kwargs)
560        except HttpResponseError as error:
561            process_storage_error(error)
562        return {
563            'public_access': response.get('blob_public_access'),
564            'signed_identifiers': identifiers or []
565        }
566
567    @distributed_trace
568    def set_container_access_policy(
569            self, signed_identifiers,  # type: Dict[str, AccessPolicy]
570            public_access=None,  # type: Optional[Union[str, PublicAccess]]
571            **kwargs
572        ):  # type: (...) -> Dict[str, Union[str, datetime]]
573        """Sets the permissions for the specified container or stored access
574        policies that may be used with Shared Access Signatures. The permissions
575        indicate whether blobs in a container may be accessed publicly.
576
577        :param signed_identifiers:
578            A dictionary of access policies to associate with the container. The
579            dictionary may contain up to 5 elements. An empty dictionary
580            will clear the access policies set on the service.
581        :type signed_identifiers: dict[str, ~azure.storage.blob.AccessPolicy]
582        :param ~azure.storage.blob.PublicAccess public_access:
583            Possible values include: 'container', 'blob'.
584        :keyword lease:
585            Required if the container has an active lease. Value can be a BlobLeaseClient object
586            or the lease ID as a string.
587        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
588        :keyword ~datetime.datetime if_modified_since:
589            A datetime value. Azure expects the date value passed in to be UTC.
590            If timezone is included, any non-UTC datetimes will be converted to UTC.
591            If a date is passed in without timezone info, it is assumed to be UTC.
592            Specify this header to perform the operation only
593            if the resource has been modified since the specified date/time.
594        :keyword ~datetime.datetime if_unmodified_since:
595            A datetime value. Azure expects the date value passed in to be UTC.
596            If timezone is included, any non-UTC datetimes will be converted to UTC.
597            If a date is passed in without timezone info, it is assumed to be UTC.
598            Specify this header to perform the operation only if
599            the resource has not been modified since the specified date/time.
600        :keyword int timeout:
601            The timeout parameter is expressed in seconds.
602        :returns: Container-updated property dict (Etag and last modified).
603        :rtype: dict[str, str or ~datetime.datetime]
604
605        .. admonition:: Example:
606
607            .. literalinclude:: ../samples/blob_samples_containers.py
608                :start-after: [START set_container_access_policy]
609                :end-before: [END set_container_access_policy]
610                :language: python
611                :dedent: 12
612                :caption: Setting access policy on the container.
613        """
614        if len(signed_identifiers) > 5:
615            raise ValueError(
616                'Too many access policies provided. The server does not support setting '
617                'more than 5 access policies on a single resource.')
618        identifiers = []
619        for key, value in signed_identifiers.items():
620            if value:
621                value.start = serialize_iso(value.start)
622                value.expiry = serialize_iso(value.expiry)
623            identifiers.append(SignedIdentifier(id=key, access_policy=value)) # type: ignore
624        signed_identifiers = identifiers # type: ignore
625        lease = kwargs.pop('lease', None)
626        mod_conditions = get_modify_conditions(kwargs)
627        access_conditions = get_access_conditions(lease)
628        timeout = kwargs.pop('timeout', None)
629        try:
630            return self._client.container.set_access_policy(
631                container_acl=signed_identifiers or None,
632                timeout=timeout,
633                access=public_access,
634                lease_access_conditions=access_conditions,
635                modified_access_conditions=mod_conditions,
636                cls=return_response_headers,
637                **kwargs)
638        except HttpResponseError as error:
639            process_storage_error(error)
640
641    @distributed_trace
642    def list_blobs(self, name_starts_with=None, include=None, **kwargs):
643        # type: (Optional[str], Optional[Union[str, List[str]]], **Any) -> ItemPaged[BlobProperties]
644        """Returns a generator to list the blobs under the specified container.
645        The generator will lazily follow the continuation tokens returned by
646        the service.
647
648        :param str name_starts_with:
649            Filters the results to return only blobs whose names
650            begin with the specified prefix.
651        :param list[str] or str include:
652            Specifies one or more additional datasets to include in the response.
653            Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted', 'tags'.
654        :keyword int timeout:
655            The timeout parameter is expressed in seconds.
656        :returns: An iterable (auto-paging) response of BlobProperties.
657        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties]
658
659        .. admonition:: Example:
660
661            .. literalinclude:: ../samples/blob_samples_containers.py
662                :start-after: [START list_blobs_in_container]
663                :end-before: [END list_blobs_in_container]
664                :language: python
665                :dedent: 8
666                :caption: List the blobs in the container.
667        """
668        if include and not isinstance(include, list):
669            include = [include]
670
671        results_per_page = kwargs.pop('results_per_page', None)
672        timeout = kwargs.pop('timeout', None)
673        command = functools.partial(
674            self._client.container.list_blob_flat_segment,
675            include=include,
676            timeout=timeout,
677            **kwargs)
678        return ItemPaged(
679            command, prefix=name_starts_with, results_per_page=results_per_page,
680            page_iterator_class=BlobPropertiesPaged)
681
682    @distributed_trace
683    def walk_blobs(
684            self, name_starts_with=None, # type: Optional[str]
685            include=None, # type: Optional[Any]
686            delimiter="/", # type: str
687            **kwargs # type: Optional[Any]
688        ):
689        # type: (...) -> ItemPaged[BlobProperties]
690        """Returns a generator to list the blobs under the specified container.
691        The generator will lazily follow the continuation tokens returned by
692        the service. This operation will list blobs in accordance with a hierarchy,
693        as delimited by the specified delimiter character.
694
695        :param str name_starts_with:
696            Filters the results to return only blobs whose names
697            begin with the specified prefix.
698        :param list[str] include:
699            Specifies one or more additional datasets to include in the response.
700            Options include: 'snapshots', 'metadata', 'uncommittedblobs', 'copy', 'deleted'.
701        :param str delimiter:
702            When the request includes this parameter, the operation returns a BlobPrefix
703            element in the response body that acts as a placeholder for all blobs whose
704            names begin with the same substring up to the appearance of the delimiter
705            character. The delimiter may be a single character or a string.
706        :keyword int timeout:
707            The timeout parameter is expressed in seconds.
708        :returns: An iterable (auto-paging) response of BlobProperties.
709        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.BlobProperties]
710        """
711        if include and not isinstance(include, list):
712            include = [include]
713
714        results_per_page = kwargs.pop('results_per_page', None)
715        timeout = kwargs.pop('timeout', None)
716        command = functools.partial(
717            self._client.container.list_blob_hierarchy_segment,
718            delimiter=delimiter,
719            include=include,
720            timeout=timeout,
721            **kwargs)
722        return BlobPrefix(
723            command,
724            prefix=name_starts_with,
725            results_per_page=results_per_page,
726            delimiter=delimiter)
727
728    @distributed_trace
729    def upload_blob(
730            self, name,  # type: Union[str, BlobProperties]
731            data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
732            blob_type=BlobType.BlockBlob,  # type: Union[str, BlobType]
733            length=None,  # type: Optional[int]
734            metadata=None,  # type: Optional[Dict[str, str]]
735            **kwargs
736        ):
737        # type: (...) -> BlobClient
738        """Creates a new blob from a data source with automatic chunking.
739
740        :param name: The blob with which to interact. If specified, this value will override
741            a blob value specified in the blob URL.
742        :type name: str or ~azure.storage.blob.BlobProperties
743        :param data: The blob data to upload.
744        :param ~azure.storage.blob.BlobType blob_type: The type of the blob. This can be
745            either BlockBlob, PageBlob or AppendBlob. The default value is BlockBlob.
746        :param int length:
747            Number of bytes to read from the stream. This is optional, but
748            should be supplied for optimal performance.
749        :param metadata:
750            Name-value pairs associated with the blob as metadata.
751        :type metadata: dict(str, str)
752        :keyword bool overwrite: Whether the blob to be uploaded should overwrite the current data.
753            If True, upload_blob will overwrite the existing data. If set to False, the
754            operation will fail with ResourceExistsError. The exception to the above is with Append
755            blob types: if set to False and the data already exists, an error will not be raised
756            and the data will be appended to the existing blob. If set overwrite=True, then the existing
757            append blob will be deleted, and a new one created. Defaults to False.
758        :keyword ~azure.storage.blob.ContentSettings content_settings:
759            ContentSettings object used to set blob properties. Used to set content type, encoding,
760            language, disposition, md5, and cache control.
761        :keyword bool validate_content:
762            If true, calculates an MD5 hash for each chunk of the blob. The storage
763            service checks the hash of the content that has arrived with the hash
764            that was sent. This is primarily valuable for detecting bitflips on
765            the wire if using http instead of https, as https (the default), will
766            already validate. Note that this MD5 hash is not stored with the
767            blob. Also note that if enabled, the memory-efficient upload algorithm
768            will not be used, because computing the MD5 hash requires buffering
769            entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
770        :keyword lease:
771            Required if the container has an active lease. Value can be a BlobLeaseClient object
772            or the lease ID as a string.
773        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
774        :keyword ~datetime.datetime if_modified_since:
775            A DateTime value. Azure expects the date value passed in to be UTC.
776            If timezone is included, any non-UTC datetimes will be converted to UTC.
777            If a date is passed in without timezone info, it is assumed to be UTC.
778            Specify this header to perform the operation only
779            if the resource has been modified since the specified time.
780        :keyword ~datetime.datetime if_unmodified_since:
781            A DateTime value. Azure expects the date value passed in to be UTC.
782            If timezone is included, any non-UTC datetimes will be converted to UTC.
783            If a date is passed in without timezone info, it is assumed to be UTC.
784            Specify this header to perform the operation only if
785            the resource has not been modified since the specified date/time.
786        :keyword str etag:
787            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
788            and act according to the condition specified by the `match_condition` parameter.
789        :keyword ~azure.core.MatchConditions match_condition:
790            The match condition to use upon the etag.
791        :keyword str if_tags_match_condition:
792            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
793            eg. ``\"\\\"tagname\\\"='my tag'\"``
794
795            .. versionadded:: 12.4.0
796
797        :keyword int timeout:
798            The timeout parameter is expressed in seconds. This method may make
799            multiple calls to the Azure service and the timeout will apply to
800            each call individually.
801        :keyword ~azure.storage.blob.PremiumPageBlobTier premium_page_blob_tier:
802            A page blob tier value to set the blob to. The tier correlates to the size of the
803            blob and number of allowed IOPS. This is only applicable to page blobs on
804            premium storage accounts.
805        :keyword ~azure.storage.blob.StandardBlobTier standard_blob_tier:
806            A standard blob tier value to set the blob to. For this version of the library,
807            this is only applicable to block blobs on standard storage accounts.
808        :keyword int maxsize_condition:
809            Optional conditional header. The max length in bytes permitted for
810            the append blob. If the Append Block operation would cause the blob
811            to exceed that limit or if the blob size is already greater than the
812            value specified in this header, the request will fail with
813            MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed).
814        :keyword int max_concurrency:
815            Maximum number of parallel connections to use when the blob size exceeds
816            64MB.
817        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
818            Encrypts the data on the service-side with the given key.
819            Use of customer-provided keys must be done over HTTPS.
820            As the encryption key itself is provided in the request,
821            a secure connection must be established to transfer the key.
822        :keyword str encryption_scope:
823            A predefined encryption scope used to encrypt the data on the service. An encryption
824            scope can be created using the Management API and referenced here by name. If a default
825            encryption scope has been defined at the container, this value will override it if the
826            container-level scope is configured to allow overrides. Otherwise an error will be raised.
827
828            .. versionadded:: 12.2.0
829
830        :keyword str encoding:
831            Defaults to UTF-8.
832        :returns: A BlobClient to interact with the newly uploaded blob.
833        :rtype: ~azure.storage.blob.BlobClient
834
835        .. admonition:: Example:
836
837            .. literalinclude:: ../samples/blob_samples_containers.py
838                :start-after: [START upload_blob_to_container]
839                :end-before: [END upload_blob_to_container]
840                :language: python
841                :dedent: 8
842                :caption: Upload blob to the container.
843        """
844        blob = self.get_blob_client(name)
845        kwargs.setdefault('merge_span', True)
846        timeout = kwargs.pop('timeout', None)
847        encoding = kwargs.pop('encoding', 'UTF-8')
848        blob.upload_blob(
849            data,
850            blob_type=blob_type,
851            length=length,
852            metadata=metadata,
853            timeout=timeout,
854            encoding=encoding,
855            **kwargs
856        )
857        return blob
858
859    @distributed_trace
860    def delete_blob(
861            self, blob,  # type: Union[str, BlobProperties]
862            delete_snapshots=None,  # type: Optional[str]
863            **kwargs
864        ):
865        # type: (...) -> None
866        """Marks the specified blob or snapshot for deletion.
867
868        The blob is later deleted during garbage collection.
869        Note that in order to delete a blob, you must delete all of its
870        snapshots. You can delete both at the same time with the delete_blob
871        operation.
872
873        If a delete retention policy is enabled for the service, then this operation soft deletes the blob or snapshot
874        and retains the blob or snapshot for specified number of days.
875        After specified number of days, blob's data is removed from the service during garbage collection.
876        Soft deleted blob or snapshot is accessible through :func:`list_blobs()` specifying `include=["deleted"]`
877        option. Soft-deleted blob or snapshot can be restored using :func:`~BlobClient.undelete()`
878
879        :param blob: The blob with which to interact. If specified, this value will override
880            a blob value specified in the blob URL.
881        :type blob: str or ~azure.storage.blob.BlobProperties
882        :param str delete_snapshots:
883            Required if the blob has associated snapshots. Values include:
884             - "only": Deletes only the blobs snapshots.
885             - "include": Deletes the blob along with all snapshots.
886        :keyword str version_id:
887            The version id parameter is an opaque DateTime
888            value that, when present, specifies the version of the blob to delete.
889
890            .. versionadded:: 12.4.0
891            This keyword argument was introduced in API version '2019-12-12'.
892
893        :keyword lease:
894            Required if the blob has an active lease. Value can be a BlobLeaseClient object
895            or the lease ID as a string.
896        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
897        :keyword ~datetime.datetime if_modified_since:
898            A DateTime value. Azure expects the date value passed in to be UTC.
899            If timezone is included, any non-UTC datetimes will be converted to UTC.
900            If a date is passed in without timezone info, it is assumed to be UTC.
901            Specify this header to perform the operation only
902            if the resource has been modified since the specified time.
903        :keyword ~datetime.datetime if_unmodified_since:
904            A DateTime value. Azure expects the date value passed in to be UTC.
905            If timezone is included, any non-UTC datetimes will be converted to UTC.
906            If a date is passed in without timezone info, it is assumed to be UTC.
907            Specify this header to perform the operation only if
908            the resource has not been modified since the specified date/time.
909        :keyword str etag:
910            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
911            and act according to the condition specified by the `match_condition` parameter.
912        :keyword ~azure.core.MatchConditions match_condition:
913            The match condition to use upon the etag.
914        :keyword str if_tags_match_condition:
915            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
916            eg. ``\"\\\"tagname\\\"='my tag'\"``
917
918            .. versionadded:: 12.4.0
919
920        :keyword int timeout:
921            The timeout parameter is expressed in seconds.
922        :rtype: None
923        """
924        blob_client = self.get_blob_client(blob) # type: ignore
925        kwargs.setdefault('merge_span', True)
926        timeout = kwargs.pop('timeout', None)
927        blob_client.delete_blob( # type: ignore
928            delete_snapshots=delete_snapshots,
929            timeout=timeout,
930            **kwargs)
931
932    @distributed_trace
933    def download_blob(self, blob, offset=None, length=None, **kwargs):
934        # type: (Union[str, BlobProperties], Optional[int], Optional[int], **Any) -> StorageStreamDownloader
935        """Downloads a blob to the StorageStreamDownloader. The readall() method must
936        be used to read all the content or readinto() must be used to download the blob into
937        a stream.
938
939        :param blob: The blob with which to interact. If specified, this value will override
940            a blob value specified in the blob URL.
941        :type blob: str or ~azure.storage.blob.BlobProperties
942        :param int offset:
943            Start of byte range to use for downloading a section of the blob.
944            Must be set if length is provided.
945        :param int length:
946            Number of bytes to read from the stream. This is optional, but
947            should be supplied for optimal performance.
948        :keyword bool validate_content:
949            If true, calculates an MD5 hash for each chunk of the blob. The storage
950            service checks the hash of the content that has arrived with the hash
951            that was sent. This is primarily valuable for detecting bitflips on
952            the wire if using http instead of https, as https (the default), will
953            already validate. Note that this MD5 hash is not stored with the
954            blob. Also note that if enabled, the memory-efficient upload algorithm
955            will not be used because computing the MD5 hash requires buffering
956            entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
957        :keyword lease:
958            Required if the blob has an active lease. If specified, download_blob only
959            succeeds if the blob's lease is active and matches this ID. Value can be a
960            BlobLeaseClient object or the lease ID as a string.
961        :paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
962        :keyword ~datetime.datetime if_modified_since:
963            A DateTime value. Azure expects the date value passed in to be UTC.
964            If timezone is included, any non-UTC datetimes will be converted to UTC.
965            If a date is passed in without timezone info, it is assumed to be UTC.
966            Specify this header to perform the operation only
967            if the resource has been modified since the specified time.
968        :keyword ~datetime.datetime if_unmodified_since:
969            A DateTime value. Azure expects the date value passed in to be UTC.
970            If timezone is included, any non-UTC datetimes will be converted to UTC.
971            If a date is passed in without timezone info, it is assumed to be UTC.
972            Specify this header to perform the operation only if
973            the resource has not been modified since the specified date/time.
974        :keyword str etag:
975            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
976            and act according to the condition specified by the `match_condition` parameter.
977        :keyword ~azure.core.MatchConditions match_condition:
978            The match condition to use upon the etag.
979        :keyword str if_tags_match_condition:
980            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
981            eg. ``\"\\\"tagname\\\"='my tag'\"``
982
983            .. versionadded:: 12.4.0
984
985        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
986            Encrypts the data on the service-side with the given key.
987            Use of customer-provided keys must be done over HTTPS.
988            As the encryption key itself is provided in the request,
989            a secure connection must be established to transfer the key.
990        :keyword int max_concurrency:
991            The number of parallel connections with which to download.
992        :keyword str encoding:
993            Encoding to decode the downloaded bytes. Default is None, i.e. no decoding.
994        :keyword int timeout:
995            The timeout parameter is expressed in seconds. This method may make
996            multiple calls to the Azure service and the timeout will apply to
997            each call individually.
998        :returns: A streaming object (StorageStreamDownloader)
999        :rtype: ~azure.storage.blob.StorageStreamDownloader
1000        """
1001        blob_client = self.get_blob_client(blob) # type: ignore
1002        kwargs.setdefault('merge_span', True)
1003        return blob_client.download_blob(offset=offset, length=length, **kwargs)
1004
1005    def _generate_delete_blobs_subrequest_options(
1006        self, snapshot=None,
1007        delete_snapshots=None,
1008        lease_access_conditions=None,
1009        modified_access_conditions=None,
1010        **kwargs
1011    ):
1012        """This code is a copy from _generated.
1013
1014        Once Autorest is able to provide request preparation this code should be removed.
1015        """
1016        lease_id = None
1017        if lease_access_conditions is not None:
1018            lease_id = lease_access_conditions.lease_id
1019        if_modified_since = None
1020        if modified_access_conditions is not None:
1021            if_modified_since = modified_access_conditions.if_modified_since
1022        if_unmodified_since = None
1023        if modified_access_conditions is not None:
1024            if_unmodified_since = modified_access_conditions.if_unmodified_since
1025        if_match = None
1026        if modified_access_conditions is not None:
1027            if_match = modified_access_conditions.if_match
1028        if_none_match = None
1029        if modified_access_conditions is not None:
1030            if_none_match = modified_access_conditions.if_none_match
1031        if_tags = None
1032        if modified_access_conditions is not None:
1033            if_tags = modified_access_conditions.if_tags
1034
1035        # Construct parameters
1036        timeout = kwargs.pop('timeout', None)
1037        query_parameters = {}
1038        if snapshot is not None:
1039            query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str')  # pylint: disable=protected-access
1040        if timeout is not None:
1041            query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0)  # pylint: disable=protected-access
1042
1043        # Construct headers
1044        header_parameters = {}
1045        if delete_snapshots is not None:
1046            header_parameters['x-ms-delete-snapshots'] = self._client._serialize.header(  # pylint: disable=protected-access
1047                "delete_snapshots", delete_snapshots, 'DeleteSnapshotsOptionType')
1048        if lease_id is not None:
1049            header_parameters['x-ms-lease-id'] = self._client._serialize.header(  # pylint: disable=protected-access
1050                "lease_id", lease_id, 'str')
1051        if if_modified_since is not None:
1052            header_parameters['If-Modified-Since'] = self._client._serialize.header(  # pylint: disable=protected-access
1053                "if_modified_since", if_modified_since, 'rfc-1123')
1054        if if_unmodified_since is not None:
1055            header_parameters['If-Unmodified-Since'] = self._client._serialize.header(  # pylint: disable=protected-access
1056                "if_unmodified_since", if_unmodified_since, 'rfc-1123')
1057        if if_match is not None:
1058            header_parameters['If-Match'] = self._client._serialize.header(  # pylint: disable=protected-access
1059                "if_match", if_match, 'str')
1060        if if_none_match is not None:
1061            header_parameters['If-None-Match'] = self._client._serialize.header(  # pylint: disable=protected-access
1062                "if_none_match", if_none_match, 'str')
1063        if if_tags is not None:
1064            header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str')  # pylint: disable=protected-access
1065
1066        return query_parameters, header_parameters
1067
1068    def _generate_delete_blobs_options(self,
1069                                       *blobs,  # type: List[Union[str, BlobProperties, dict]]
1070                                       **kwargs
1071                                       ):
1072        timeout = kwargs.pop('timeout', None)
1073        raise_on_any_failure = kwargs.pop('raise_on_any_failure', True)
1074        delete_snapshots = kwargs.pop('delete_snapshots', None)
1075        if_modified_since = kwargs.pop('if_modified_since', None)
1076        if_unmodified_since = kwargs.pop('if_unmodified_since', None)
1077        if_tags_match_condition = kwargs.pop('if_tags_match_condition', None)
1078        kwargs.update({'raise_on_any_failure': raise_on_any_failure,
1079                       'sas': self._query_str.replace('?', '&'),
1080                       'timeout': '&timeout=' + str(timeout) if timeout else ""
1081                       })
1082
1083        reqs = []
1084        for blob in blobs:
1085            blob_name = _get_blob_name(blob)
1086            container_name = self.container_name
1087
1088            try:
1089                options = BlobClient._generic_delete_blob_options(  # pylint: disable=protected-access
1090                    snapshot=blob.get('snapshot'),
1091                    delete_snapshots=delete_snapshots or blob.get('delete_snapshots'),
1092                    lease=blob.get('lease_id'),
1093                    if_modified_since=if_modified_since or blob.get('if_modified_since'),
1094                    if_unmodified_since=if_unmodified_since or blob.get('if_unmodified_since'),
1095                    etag=blob.get('etag'),
1096                    if_tags_match_condition=if_tags_match_condition or blob.get('if_tags_match_condition'),
1097                    match_condition=blob.get('match_condition') or MatchConditions.IfNotModified if blob.get('etag')
1098                    else None,
1099                    timeout=blob.get('timeout'),
1100                )
1101            except AttributeError:
1102                options = BlobClient._generic_delete_blob_options(  # pylint: disable=protected-access
1103                    delete_snapshots=delete_snapshots,
1104                    if_modified_since=if_modified_since,
1105                    if_unmodified_since=if_unmodified_since,
1106                    if_tags_match_condition=if_tags_match_condition
1107                )
1108
1109            query_parameters, header_parameters = self._generate_delete_blobs_subrequest_options(**options)
1110
1111            req = HttpRequest(
1112                "DELETE",
1113                "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str),
1114                headers=header_parameters
1115            )
1116            req.format_parameters(query_parameters)
1117            reqs.append(req)
1118
1119        return reqs, kwargs
1120
1121    @distributed_trace
1122    def delete_blobs(self, *blobs, **kwargs):
1123        # type: (...) -> Iterator[HttpResponse]
1124        """Marks the specified blobs or snapshots for deletion.
1125
1126        The blobs are later deleted during garbage collection.
1127        Note that in order to delete blobs, you must delete all of their
1128        snapshots. You can delete both at the same time with the delete_blobs operation.
1129
1130        If a delete retention policy is enabled for the service, then this operation soft deletes the blobs or snapshots
1131        and retains the blobs or snapshots for specified number of days.
1132        After specified number of days, blobs' data is removed from the service during garbage collection.
1133        Soft deleted blobs or snapshots are accessible through :func:`list_blobs()` specifying `include=["deleted"]`
1134        Soft-deleted blobs or snapshots can be restored using :func:`~BlobClient.undelete()`
1135
1136        :param blobs:
1137            The blobs to delete. This can be a single blob, or multiple values can
1138            be supplied, where each value is either the name of the blob (str) or BlobProperties.
1139
1140            .. note::
1141                When the blob type is dict, here's a list of keys, value rules.
1142
1143                blob name:
1144                    key: 'name', value type: str
1145                snapshot you want to delete:
1146                    key: 'snapshot', value type: str
1147                whether to delete snapthots when deleting blob:
1148                    key: 'delete_snapshots', value: 'include' or 'only'
1149                if the blob modified or not:
1150                    key: 'if_modified_since', 'if_unmodified_since', value type: datetime
1151                etag:
1152                    key: 'etag', value type: str
1153                match the etag or not:
1154                    key: 'match_condition', value type: MatchConditions
1155                tags match condition:
1156                    key: 'if_tags_match_condition', value type: str
1157                lease:
1158                    key: 'lease_id', value type: Union[str, LeaseClient]
1159                timeout for subrequest:
1160                    key: 'timeout', value type: int
1161
1162        :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties]
1163        :keyword str delete_snapshots:
1164            Required if a blob has associated snapshots. Values include:
1165             - "only": Deletes only the blobs snapshots.
1166             - "include": Deletes the blob along with all snapshots.
1167        :keyword ~datetime.datetime if_modified_since:
1168            A DateTime value. Azure expects the date value passed in to be UTC.
1169            If timezone is included, any non-UTC datetimes will be converted to UTC.
1170            If a date is passed in without timezone info, it is assumed to be UTC.
1171            Specify this header to perform the operation only
1172            if the resource has been modified since the specified time.
1173        :keyword ~datetime.datetime if_unmodified_since:
1174            A DateTime value. Azure expects the date value passed in to be UTC.
1175            If timezone is included, any non-UTC datetimes will be converted to UTC.
1176            If a date is passed in without timezone info, it is assumed to be UTC.
1177            Specify this header to perform the operation only if
1178            the resource has not been modified since the specified date/time.
1179        :keyword str if_tags_match_condition:
1180            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
1181            eg. ``\"\\\"tagname\\\"='my tag'\"``
1182
1183            .. versionadded:: 12.4.0
1184
1185        :keyword bool raise_on_any_failure:
1186            This is a boolean param which defaults to True. When this is set, an exception
1187            is raised even if there is a single operation failure.
1188        :keyword int timeout:
1189            The timeout parameter is expressed in seconds.
1190        :return: An iterator of responses, one for each blob in order
1191        :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse]
1192
1193        .. admonition:: Example:
1194
1195            .. literalinclude:: ../samples/blob_samples_common.py
1196                :start-after: [START delete_multiple_blobs]
1197                :end-before: [END delete_multiple_blobs]
1198                :language: python
1199                :dedent: 8
1200                :caption: Deleting multiple blobs.
1201        """
1202        if len(blobs) == 0:
1203            return iter(list())
1204
1205        reqs, options = self._generate_delete_blobs_options(*blobs, **kwargs)
1206
1207        return self._batch_send(*reqs, **options)
1208
1209    def _generate_set_tiers_subrequest_options(
1210        self, tier, snapshot=None, version_id=None, rehydrate_priority=None, lease_access_conditions=None, **kwargs
1211    ):
1212        """This code is a copy from _generated.
1213
1214        Once Autorest is able to provide request preparation this code should be removed.
1215        """
1216        if not tier:
1217            raise ValueError("A blob tier must be specified")
1218        if snapshot and version_id:
1219            raise ValueError("Snapshot and version_id cannot be set at the same time")
1220        if_tags = kwargs.pop('if_tags', None)
1221
1222        lease_id = None
1223        if lease_access_conditions is not None:
1224            lease_id = lease_access_conditions.lease_id
1225
1226        comp = "tier"
1227        timeout = kwargs.pop('timeout', None)
1228        # Construct parameters
1229        query_parameters = {}
1230        if snapshot is not None:
1231            query_parameters['snapshot'] = self._client._serialize.query("snapshot", snapshot, 'str')  # pylint: disable=protected-access
1232        if version_id is not None:
1233            query_parameters['versionid'] = self._client._serialize.query("version_id", version_id, 'str')  # pylint: disable=protected-access
1234        if timeout is not None:
1235            query_parameters['timeout'] = self._client._serialize.query("timeout", timeout, 'int', minimum=0)  # pylint: disable=protected-access
1236        query_parameters['comp'] = self._client._serialize.query("comp", comp, 'str')  # pylint: disable=protected-access, specify-parameter-names-in-call
1237
1238        # Construct headers
1239        header_parameters = {}
1240        header_parameters['x-ms-access-tier'] = self._client._serialize.header("tier", tier, 'str')  # pylint: disable=protected-access, specify-parameter-names-in-call
1241        if rehydrate_priority is not None:
1242            header_parameters['x-ms-rehydrate-priority'] = self._client._serialize.header(  # pylint: disable=protected-access
1243                "rehydrate_priority", rehydrate_priority, 'str')
1244        if lease_id is not None:
1245            header_parameters['x-ms-lease-id'] = self._client._serialize.header("lease_id", lease_id, 'str')  # pylint: disable=protected-access
1246        if if_tags is not None:
1247            header_parameters['x-ms-if-tags'] = self._client._serialize.header("if_tags", if_tags, 'str')  # pylint: disable=protected-access
1248
1249        return query_parameters, header_parameters
1250
1251    def _generate_set_tiers_options(self,
1252                                    blob_tier,  # type: Optional[Union[str, StandardBlobTier, PremiumPageBlobTier]]
1253                                    *blobs,  # type: List[Union[str, BlobProperties, dict]]
1254                                    **kwargs
1255                                    ):
1256        timeout = kwargs.pop('timeout', None)
1257        raise_on_any_failure = kwargs.pop('raise_on_any_failure', True)
1258        rehydrate_priority = kwargs.pop('rehydrate_priority', None)
1259        if_tags = kwargs.pop('if_tags_match_condition', None)
1260        kwargs.update({'raise_on_any_failure': raise_on_any_failure,
1261                       'sas': self._query_str.replace('?', '&'),
1262                       'timeout': '&timeout=' + str(timeout) if timeout else ""
1263                       })
1264
1265        reqs = []
1266        for blob in blobs:
1267            blob_name = _get_blob_name(blob)
1268            container_name = self.container_name
1269
1270            try:
1271                tier = blob_tier or blob.get('blob_tier')
1272                query_parameters, header_parameters = self._generate_set_tiers_subrequest_options(
1273                    tier=tier,
1274                    snapshot=blob.get('snapshot'),
1275                    version_id=blob.get('version_id'),
1276                    rehydrate_priority=rehydrate_priority or blob.get('rehydrate_priority'),
1277                    lease_access_conditions=blob.get('lease_id'),
1278                    if_tags=if_tags or blob.get('if_tags_match_condition'),
1279                    timeout=timeout or blob.get('timeout')
1280                )
1281            except AttributeError:
1282                query_parameters, header_parameters = self._generate_set_tiers_subrequest_options(
1283                    blob_tier, rehydrate_priority=rehydrate_priority, if_tags=if_tags)
1284
1285            req = HttpRequest(
1286                "PUT",
1287                "/{}/{}{}".format(quote(container_name), quote(blob_name, safe='/~'), self._query_str),
1288                headers=header_parameters
1289            )
1290            req.format_parameters(query_parameters)
1291            reqs.append(req)
1292
1293        return reqs, kwargs
1294
1295    @distributed_trace
1296    def set_standard_blob_tier_blobs(
1297        self,
1298        standard_blob_tier,  # type: Optional[Union[str, StandardBlobTier]]
1299        *blobs,  # type: List[Union[str, BlobProperties, dict]]
1300        **kwargs
1301    ):
1302        # type: (...) -> Iterator[HttpResponse]
1303        """This operation sets the tier on block blobs.
1304
1305        A block blob's tier determines Hot/Cool/Archive storage type.
1306        This operation does not update the blob's ETag.
1307
1308        :param standard_blob_tier:
1309            Indicates the tier to be set on all blobs. Options include 'Hot', 'Cool',
1310            'Archive'. The hot tier is optimized for storing data that is accessed
1311            frequently. The cool storage tier is optimized for storing data that
1312            is infrequently accessed and stored for at least a month. The archive
1313            tier is optimized for storing data that is rarely accessed and stored
1314            for at least six months with flexible latency requirements.
1315
1316            .. note::
1317                If you want to set different tier on different blobs please set this positional parameter to None.
1318                Then the blob tier on every BlobProperties will be taken.
1319
1320        :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier
1321        :param blobs:
1322            The blobs with which to interact. This can be a single blob, or multiple values can
1323            be supplied, where each value is either the name of the blob (str) or BlobProperties.
1324
1325            .. note::
1326                When the blob type is dict, here's a list of keys, value rules.
1327
1328                blob name:
1329                    key: 'name', value type: str
1330                standard blob tier:
1331                    key: 'blob_tier', value type: StandardBlobTier
1332                rehydrate priority:
1333                    key: 'rehydrate_priority', value type: RehydratePriority
1334                lease:
1335                    key: 'lease_id', value type: Union[str, LeaseClient]
1336                snapshot:
1337                    key: "snapshost", value type: str
1338                version id:
1339                    key: "version_id", value type: str
1340                tags match condition:
1341                    key: 'if_tags_match_condition', value type: str
1342                timeout for subrequest:
1343                    key: 'timeout', value type: int
1344
1345        :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties]
1346        :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority:
1347            Indicates the priority with which to rehydrate an archived blob
1348        :keyword str if_tags_match_condition:
1349            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
1350            eg. ``\"\\\"tagname\\\"='my tag'\"``
1351
1352            .. versionadded:: 12.4.0
1353
1354        :keyword int timeout:
1355            The timeout parameter is expressed in seconds.
1356        :keyword bool raise_on_any_failure:
1357            This is a boolean param which defaults to True. When this is set, an exception
1358            is raised even if there is a single operation failure.
1359        :return: An iterator of responses, one for each blob in order
1360        :rtype: Iterator[~azure.core.pipeline.transport.HttpResponse]
1361        """
1362        reqs, options = self._generate_set_tiers_options(standard_blob_tier, *blobs, **kwargs)
1363
1364        return self._batch_send(*reqs, **options)
1365
1366    @distributed_trace
1367    def set_premium_page_blob_tier_blobs(
1368        self,
1369        premium_page_blob_tier,  # type: Optional[Union[str, PremiumPageBlobTier]]
1370        *blobs,  # type: List[Union[str, BlobProperties, dict]]
1371        **kwargs
1372    ):
1373        # type: (...) -> Iterator[HttpResponse]
1374        """Sets the page blob tiers on all blobs. This API is only supported for page blobs on premium accounts.
1375
1376        :param premium_page_blob_tier:
1377            A page blob tier value to set the blob to. The tier correlates to the size of the
1378            blob and number of allowed IOPS. This is only applicable to page blobs on
1379            premium storage accounts.
1380
1381            .. note::
1382                If you want to set different tier on different blobs please set this positional parameter to None.
1383                Then the blob tier on every BlobProperties will be taken.
1384
1385        :type premium_page_blob_tier: ~azure.storage.blob.PremiumPageBlobTier
1386        :param blobs:
1387            The blobs with which to interact. This can be a single blob, or multiple values can
1388            be supplied, where each value is either the name of the blob (str) or BlobProperties.
1389
1390            .. note::
1391                When the blob type is dict, here's a list of keys, value rules.
1392
1393                blob name:
1394                    key: 'name', value type: str
1395                premium blob tier:
1396                    key: 'blob_tier', value type: PremiumPageBlobTier
1397                lease:
1398                    key: 'lease_id', value type: Union[str, LeaseClient]
1399                timeout for subrequest:
1400                    key: 'timeout', value type: int
1401
1402        :type blobs: list[str], list[dict], or list[~azure.storage.blob.BlobProperties]
1403        :keyword int timeout:
1404            The timeout parameter is expressed in seconds. This method may make
1405            multiple calls to the Azure service and the timeout will apply to
1406            each call individually.
1407        :keyword bool raise_on_any_failure:
1408            This is a boolean param which defaults to True. When this is set, an exception
1409            is raised even if there is a single operation failure.
1410        :return: An iterator of responses, one for each blob in order
1411        :rtype: iterator[~azure.core.pipeline.transport.HttpResponse]
1412        """
1413        reqs, options = self._generate_set_tiers_options(premium_page_blob_tier, *blobs, **kwargs)
1414
1415        return self._batch_send(*reqs, **options)
1416
1417    def get_blob_client(
1418            self, blob,  # type: Union[str, BlobProperties]
1419            snapshot=None  # type: str
1420        ):
1421        # type: (...) -> BlobClient
1422        """Get a client to interact with the specified blob.
1423
1424        The blob need not already exist.
1425
1426        :param blob:
1427            The blob with which to interact.
1428        :type blob: str or ~azure.storage.blob.BlobProperties
1429        :param str snapshot:
1430            The optional blob snapshot on which to operate. This can be the snapshot ID string
1431            or the response returned from :func:`~BlobClient.create_snapshot()`.
1432        :returns: A BlobClient.
1433        :rtype: ~azure.storage.blob.BlobClient
1434
1435        .. admonition:: Example:
1436
1437            .. literalinclude:: ../samples/blob_samples_containers.py
1438                :start-after: [START get_blob_client]
1439                :end-before: [END get_blob_client]
1440                :language: python
1441                :dedent: 8
1442                :caption: Get the blob client.
1443        """
1444        blob_name = _get_blob_name(blob)
1445        _pipeline = Pipeline(
1446            transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access
1447            policies=self._pipeline._impl_policies # pylint: disable = protected-access
1448        )
1449        return BlobClient(
1450            self.url, container_name=self.container_name, blob_name=blob_name, snapshot=snapshot,
1451            credential=self.credential, api_version=self.api_version, _configuration=self._config,
1452            _pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
1453            require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key,
1454            key_resolver_function=self.key_resolver_function)
1455