# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------

import functools
import time
from io import BytesIO
from typing import Optional, Union, IO, List, Dict, Any, Iterable, TYPE_CHECKING  # pylint: disable=unused-import

import six
from azure.core.async_paging import AsyncItemPaged

from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from .._parser import _datetime_to_str, _get_file_permission
from .._shared.parser import _str

from .._generated.aio import AzureFileStorage
from .._generated.version import VERSION
from .._generated.models import StorageErrorException, FileHTTPHeaders
from .._shared.policies_async import ExponentialRetry
from .._shared.uploads_async import upload_data_chunks, FileChunkUploader, IterStreamer
from .._shared.base_client_async import AsyncStorageAccountHostsMixin
from .._shared.request_handlers import add_metadata_headers, get_length
from .._shared.response_handlers import return_response_headers, process_storage_error
from .._deserialize import deserialize_file_properties, deserialize_file_stream
from .._file_client import ShareFileClient as ShareFileClientBase
from ._models import HandlesPaged
from ._download_async import StorageStreamDownloader

if TYPE_CHECKING:
    from datetime import datetime
    from .._models import ShareProperties, ContentSettings, FileProperties, NTFSAttributes
    from .._generated.models import HandleItem


async def _upload_file_helper(
    client,
    stream,
    size,
    metadata,
    content_settings,
    validate_content,
    timeout,
    max_concurrency,
    file_settings,
    file_attributes="none",
    file_creation_time="now",
    file_last_write_time="now",
    file_permission=None,
    file_permission_key=None,
    **kwargs
):
    # Create the (empty) file first, then upload the content in chunks.
    try:
        if size is None or size < 0:
            raise ValueError("A content size must be specified for a File.")
        response = await client.create_file(
            size, content_settings=content_settings, metadata=metadata,
            file_attributes=file_attributes,
            file_creation_time=file_creation_time,
            file_last_write_time=file_last_write_time,
            file_permission=file_permission,
            permission_key=file_permission_key,
            timeout=timeout,
            **kwargs
        )
        if size == 0:
            return response

        responses = await upload_data_chunks(
            service=client,
            uploader_class=FileChunkUploader,
            total_size=size,
            chunk_size=file_settings.max_range_size,
            stream=stream,
            max_concurrency=max_concurrency,
            validate_content=validate_content,
            timeout=timeout,
            **kwargs
        )
        # Return the headers of the most recently modified chunk upload.
        return sorted(responses, key=lambda r: r.get('last_modified'))[-1]
    except StorageErrorException as error:
        process_storage_error(error)


class ShareFileClient(AsyncStorageAccountHostsMixin, ShareFileClientBase):
    """A client to interact with a specific file, although that file may not yet exist.

    :param str account_url:
        The URI to the storage account. In order to create a client given the full URI to the
        file, use the :func:`from_file_url` classmethod.
    :param share_name:
        The name of the share for the file.
    :type share_name: str
    :param str file_path:
        The file path to the file with which to interact. If specified, this value will override
        a file value specified in the file URL.
    :param str snapshot:
        An optional file snapshot on which to operate. This can be the snapshot ID string
        or the response returned from :func:`ShareClient.create_snapshot`.
    :param credential:
        The credential with which to authenticate. This is optional if the
        account URL already has a SAS token. The value can be a SAS token string or an account
        shared access key.
    :keyword str secondary_hostname:
        The hostname of the secondary endpoint.
    :keyword loop:
        The event loop to run the asynchronous tasks.
    :keyword int max_range_size: The maximum range size used for a file upload. Defaults to 4*1024*1024.
    """

    def __init__(  # type: ignore
        self,
        account_url,  # type: str
        share_name,  # type: str
        file_path,  # type: str
        snapshot=None,  # type: Optional[Union[str, Dict[str, Any]]]
        credential=None,  # type: Optional[Any]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        kwargs["retry_policy"] = kwargs.get("retry_policy") or ExponentialRetry(**kwargs)
        loop = kwargs.pop('loop', None)
        super(ShareFileClient, self).__init__(
            account_url, share_name=share_name, file_path=file_path, snapshot=snapshot,
            credential=credential, loop=loop, **kwargs
        )
        self._client = AzureFileStorage(version=VERSION, url=self.url, pipeline=self._pipeline, loop=loop)
        self._loop = loop

    @distributed_trace_async
    async def create_file(  # type: ignore
        self,
        size,  # type: int
        file_attributes="none",  # type: Union[str, NTFSAttributes]
        file_creation_time="now",  # type: Union[str, datetime]
        file_last_write_time="now",  # type: Union[str, datetime]
        file_permission=None,  # type: Optional[str]
        permission_key=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Dict[str, Any]
        """Creates a new file.

        Note that it only initializes the file with no content.

        :param int size: Specifies the maximum size for the file,
            up to 1 TB.
        :param file_attributes:
            The file system attributes for files and directories.
            If not set, the default value would be "None" and the attributes will be set to "Archive".
            Here is an example for when the var type is str: 'Temporary|Archive'.
            file_attributes value is not case sensitive.
        :type file_attributes: str or :class:`~azure.storage.fileshare.NTFSAttributes`
        :param file_creation_time: Creation time for the file
            Default value: Now.
        :type file_creation_time: str or ~datetime.datetime
        :param file_last_write_time: Last write time for the file
            Default value: Now.
        :type file_last_write_time: str or ~datetime.datetime
        :param file_permission: If specified the permission (security
            descriptor) shall be set for the directory/file. This header can be
            used if Permission size is <= 8KB, else x-ms-file-permission-key
            header shall be used. Default value: Inherit. If SDDL is specified as
            input, it must have owner, group and dacl. Note: Only one of the
            x-ms-file-permission or x-ms-file-permission-key should be specified.
        :type file_permission: str
        :param permission_key: Key of the permission to be set for the
            directory/file. Note: Only one of the x-ms-file-permission or
            x-ms-file-permission-key should be specified.
        :type permission_key: str
        :keyword ~azure.storage.fileshare.ContentSettings content_settings:
            ContentSettings object used to set file properties. Used to set content type, encoding,
            language, disposition, md5, and cache control.
        :keyword dict(str,str) metadata:
            Name-value pairs associated with the file as metadata.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: File-updated property dict (Etag and last modified).
        :rtype: dict(str, Any)

        .. admonition:: Example:

            .. literalinclude:: ../samples/file_samples_client_async.py
                :start-after: [START create_file]
                :end-before: [END create_file]
                :language: python
                :dedent: 16
                :caption: Create a file.
        """
        content_settings = kwargs.pop('content_settings', None)
        metadata = kwargs.pop('metadata', None)
        timeout = kwargs.pop('timeout', None)
        if self.require_encryption and not self.key_encryption_key:
            raise ValueError("Encryption required but no key was provided.")

        headers = kwargs.pop("headers", {})
        headers.update(add_metadata_headers(metadata))
        file_http_headers = None
        if content_settings:
            file_http_headers = FileHTTPHeaders(
                file_cache_control=content_settings.cache_control,
                file_content_type=content_settings.content_type,
                file_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None,
                file_content_encoding=content_settings.content_encoding,
                file_content_language=content_settings.content_language,
                file_content_disposition=content_settings.content_disposition,
            )
        file_permission = _get_file_permission(file_permission, permission_key, 'Inherit')
        try:
            return await self._client.file.create(  # type: ignore
                file_content_length=size,
                metadata=metadata,
                file_attributes=_str(file_attributes),
                file_creation_time=_datetime_to_str(file_creation_time),
                file_last_write_time=_datetime_to_str(file_last_write_time),
                file_permission=file_permission,
                file_permission_key=permission_key,
                file_http_headers=file_http_headers,
                headers=headers,
                timeout=timeout,
                cls=return_response_headers,
                **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def upload_file(
        self, data,  # type: Any
        length=None,  # type: Optional[int]
        file_attributes="none",  # type: Union[str, NTFSAttributes]
        file_creation_time="now",  # type: Union[str, datetime]
        file_last_write_time="now",  # type: Union[str, datetime]
        file_permission=None,  # type: Optional[str]
        permission_key=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Dict[str, Any]
        """Uploads a new file.

        :param Any data:
            Content of the file.
        :param int length:
            Length of the file in bytes. Specify its maximum size, up to 1 TiB.
        :param file_attributes:
            The file system attributes for files and directories.
            If not set, the default value would be "None" and the attributes will be set to "Archive".
            Here is an example for when the var type is str: 'Temporary|Archive'.
            file_attributes value is not case sensitive.
        :type file_attributes: str or ~azure.storage.fileshare.NTFSAttributes
        :param file_creation_time: Creation time for the file
            Default value: Now.
        :type file_creation_time: str or ~datetime.datetime
        :param file_last_write_time: Last write time for the file
            Default value: Now.
        :type file_last_write_time: str or ~datetime.datetime
        :param file_permission: If specified the permission (security
            descriptor) shall be set for the directory/file. This header can be
            used if Permission size is <= 8KB, else x-ms-file-permission-key
            header shall be used. Default value: Inherit. If SDDL is specified as
            input, it must have owner, group and dacl. Note: Only one of the
            x-ms-file-permission or x-ms-file-permission-key should be specified.
        :type file_permission: str
        :param permission_key: Key of the permission to be set for the
            directory/file. Note: Only one of the x-ms-file-permission or
            x-ms-file-permission-key should be specified.
        :type permission_key: str
        :keyword dict(str,str) metadata:
            Name-value pairs associated with the file as metadata.
        :keyword ~azure.storage.fileshare.ContentSettings content_settings:
            ContentSettings object used to set file properties. Used to set content type, encoding,
            language, disposition, md5, and cache control.
        :keyword bool validate_content:
            If true, calculates an MD5 hash for each range of the file. The storage
            service checks the hash of the content that has arrived with the hash
            that was sent. This is primarily valuable for detecting bitflips on
            the wire if using http instead of https as https (the default) will
            already validate. Note that this MD5 hash is not stored with the
            file.
        :keyword int max_concurrency:
            Maximum number of parallel connections to use.
        :keyword str encoding:
            Defaults to UTF-8.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: File-updated property dict (Etag and last modified).
        :rtype: dict(str, Any)

        .. admonition:: Example:

            .. literalinclude:: ../samples/file_samples_client_async.py
                :start-after: [START upload_file]
                :end-before: [END upload_file]
                :language: python
                :dedent: 16
                :caption: Upload a file.
        """
        metadata = kwargs.pop('metadata', None)
        content_settings = kwargs.pop('content_settings', None)
        max_concurrency = kwargs.pop('max_concurrency', 1)
        validate_content = kwargs.pop('validate_content', False)
        timeout = kwargs.pop('timeout', None)
        encoding = kwargs.pop('encoding', 'UTF-8')
        if self.require_encryption or (self.key_encryption_key is not None):
            raise ValueError("Encryption not supported.")

        if isinstance(data, six.text_type):
            data = data.encode(encoding)
        if length is None:
            length = get_length(data)
        if isinstance(data, bytes):
            data = data[:length]

        # Normalize the input into a readable stream for chunked upload.
        if isinstance(data, bytes):
            stream = BytesIO(data)
        elif hasattr(data, "read"):
            stream = data
        elif hasattr(data, "__iter__"):
            stream = IterStreamer(data, encoding=encoding)  # type: ignore
        else:
            raise TypeError("Unsupported data type: {}".format(type(data)))
        return await _upload_file_helper(  # type: ignore
            self,
            stream,
            length,
            metadata,
            content_settings,
            validate_content,
            timeout,
            max_concurrency,
            self._config,
            file_attributes=file_attributes,
            file_creation_time=file_creation_time,
            file_last_write_time=file_last_write_time,
            file_permission=file_permission,
            file_permission_key=permission_key,
            **kwargs
        )

    @distributed_trace_async
    async def start_copy_from_url(
        self,
        source_url,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Any
        """Initiates the copying of data from a source URL into the file
        referenced by the client.

        The status of this copy operation can be found using the `get_properties`
        method.

        :param str source_url:
            Specifies the URL of the source file.
        :keyword dict(str,str) metadata:
            Name-value pairs associated with the file as metadata.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :rtype: dict(str, Any)

        .. admonition:: Example:

            .. literalinclude:: ../samples/file_samples_client_async.py
                :start-after: [START copy_file_from_url]
                :end-before: [END copy_file_from_url]
                :language: python
                :dedent: 16
                :caption: Copy a file from a URL
        """
        metadata = kwargs.pop('metadata', None)
        timeout = kwargs.pop('timeout', None)
        headers = kwargs.pop("headers", {})
        headers.update(add_metadata_headers(metadata))

        try:
            return await self._client.file.start_copy(
                source_url, timeout=timeout, metadata=metadata, headers=headers, cls=return_response_headers, **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def abort_copy(self, copy_id, **kwargs):
        # type: (Union[str, FileProperties], Any) -> None
        """Abort an ongoing copy operation.

        This will leave a destination file with zero length and full metadata.
        This will raise an error if the copy operation has already ended.

        :param copy_id:
            The copy operation to abort. This can be either an ID, or an
            instance of FileProperties.
        :type copy_id: str or ~azure.storage.fileshare.FileProperties
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :rtype: None
        """
        timeout = kwargs.pop('timeout', None)
        # Accept a FileProperties instance, a dict-like response, or a raw ID string.
        try:
            copy_id = copy_id.copy.id
        except AttributeError:
            try:
                copy_id = copy_id["copy_id"]
            except TypeError:
                pass
        try:
            await self._client.file.abort_copy(copy_id=copy_id, timeout=timeout, **kwargs)
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def download_file(
        self,
        offset=None,  # type: Optional[int]
        length=None,  # type: Optional[int]
        **kwargs
    ):
        # type: (...) -> Iterable[bytes]
        """Downloads a file to a stream with automatic chunking.

        :param int offset:
            Start of byte range to use for downloading a section of the file.
            Must be set if length is provided.
        :param int length:
            Number of bytes to read from the stream. This is optional, but
            should be supplied for optimal performance.
        :keyword int max_concurrency:
            Maximum number of parallel connections to use.
        :keyword bool validate_content:
            If true, calculates an MD5 hash for each chunk of the file. The storage
            service checks the hash of the content that has arrived with the hash
            that was sent. This is primarily valuable for detecting bitflips on
            the wire if using http instead of https as https (the default) will
            already validate. Note that this MD5 hash is not stored with the
            file. Also note that if enabled, the memory-efficient upload algorithm
            will not be used, because computing the MD5 hash requires buffering
            entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: A iterable data generator (stream)

        .. admonition:: Example:

            .. literalinclude:: ../samples/file_samples_client_async.py
                :start-after: [START download_file]
                :end-before: [END download_file]
                :language: python
                :dedent: 16
                :caption: Download a file.
        """
        if self.require_encryption or (self.key_encryption_key is not None):
            raise ValueError("Encryption not supported.")
        if length is not None and offset is None:
            raise ValueError("Offset value must not be None if length is set.")

        range_end = None
        if length is not None:
            range_end = offset + length - 1  # Service actually uses an end-range inclusive index
        downloader = StorageStreamDownloader(
            client=self._client.file,
            config=self._config,
            start_range=offset,
            end_range=range_end,
            encryption_options=None,
            name=self.file_name,
            path='/'.join(self.file_path),
            share=self.share_name,
            cls=deserialize_file_stream,
            **kwargs
        )
        await downloader._setup()  # pylint: disable=protected-access
        return downloader

    @distributed_trace_async
    async def delete_file(self, **kwargs):
        # type: (Any) -> None
        """Marks the specified file for deletion. The file is
        later deleted during garbage collection.

        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :rtype: None

        .. admonition:: Example:

            .. literalinclude:: ../samples/file_samples_client_async.py
                :start-after: [START delete_file]
                :end-before: [END delete_file]
                :language: python
                :dedent: 16
                :caption: Delete a file.
        """
        timeout = kwargs.pop('timeout', None)
        try:
            await self._client.file.delete(timeout=timeout, **kwargs)
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def get_file_properties(self, **kwargs):
        # type: (Any) -> FileProperties
        """Returns all user-defined metadata, standard HTTP properties, and
        system properties for the file.

        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: FileProperties
        :rtype: ~azure.storage.fileshare.FileProperties
        """
        timeout = kwargs.pop('timeout', None)
        try:
            file_props = await self._client.file.get_properties(
                sharesnapshot=self.snapshot, timeout=timeout, cls=deserialize_file_properties, **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)
        file_props.name = self.file_name
        file_props.share = self.share_name
        file_props.snapshot = self.snapshot
        file_props.path = "/".join(self.file_path)
        return file_props  # type: ignore

    @distributed_trace_async
    async def set_http_headers(self, content_settings,  # type: ContentSettings
                               file_attributes="preserve",  # type: Union[str, NTFSAttributes]
                               file_creation_time="preserve",  # type: Union[str, datetime]
                               file_last_write_time="preserve",  # type: Union[str, datetime]
                               file_permission=None,  # type: Optional[str]
                               permission_key=None,  # type: Optional[str]
                               **kwargs  # type: Any
                               ):
        # type: (...) -> Dict[str, Any]
        """Sets HTTP headers on the file.

        :param ~azure.storage.fileshare.ContentSettings content_settings:
            ContentSettings object used to set file properties. Used to set content type, encoding,
            language, disposition, md5, and cache control.
        :param file_attributes:
            The file system attributes for files and directories.
            If not set, indicates preservation of existing values.
            Here is an example for when the var type is str: 'Temporary|Archive'
        :type file_attributes: str or :class:`~azure.storage.fileshare.NTFSAttributes`
        :param file_creation_time: Creation time for the file
            Default value: Preserve.
        :type file_creation_time: str or ~datetime.datetime
        :param file_last_write_time: Last write time for the file
            Default value: Preserve.
        :type file_last_write_time: str or ~datetime.datetime
        :param file_permission: If specified the permission (security
            descriptor) shall be set for the directory/file. This header can be
            used if Permission size is <= 8KB, else x-ms-file-permission-key
            header shall be used. Default value: Inherit. If SDDL is specified as
            input, it must have owner, group and dacl. Note: Only one of the
            x-ms-file-permission or x-ms-file-permission-key should be specified.
        :type file_permission: str
        :param permission_key: Key of the permission to be set for the
            directory/file. Note: Only one of the x-ms-file-permission or
            x-ms-file-permission-key should be specified.
        :type permission_key: str
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: File-updated property dict (Etag and last modified).
        :rtype: dict(str, Any)
        """
        timeout = kwargs.pop('timeout', None)
        file_content_length = kwargs.pop("size", None)
        file_http_headers = FileHTTPHeaders(
            file_cache_control=content_settings.cache_control,
            file_content_type=content_settings.content_type,
            file_content_md5=bytearray(content_settings.content_md5) if content_settings.content_md5 else None,
            file_content_encoding=content_settings.content_encoding,
            file_content_language=content_settings.content_language,
            file_content_disposition=content_settings.content_disposition,
        )
        file_permission = _get_file_permission(file_permission, permission_key, 'preserve')
        try:
            return await self._client.file.set_http_headers(  # type: ignore
                file_content_length=file_content_length,
                file_http_headers=file_http_headers,
                file_attributes=_str(file_attributes),
                file_creation_time=_datetime_to_str(file_creation_time),
                file_last_write_time=_datetime_to_str(file_last_write_time),
                file_permission=file_permission,
                file_permission_key=permission_key,
                timeout=timeout,
                cls=return_response_headers,
                **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def set_file_metadata(self, metadata=None, **kwargs):  # type: ignore
        # type: (Optional[Dict[str, Any]], Any) -> Dict[str, Any]
        """Sets user-defined metadata for the specified file as one or more
        name-value pairs.

        Each call to this operation replaces all existing metadata
        attached to the file. To remove all metadata from the file,
        call this operation with no metadata dict.

        :param metadata:
            Name-value pairs associated with the file as metadata.
        :type metadata: dict(str, str)
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: File-updated property dict (Etag and last modified).
        :rtype: dict(str, Any)
        """
        timeout = kwargs.pop('timeout', None)
        headers = kwargs.pop("headers", {})
        headers.update(add_metadata_headers(metadata))  # type: ignore
        try:
            return await self._client.file.set_metadata(  # type: ignore
                timeout=timeout, cls=return_response_headers, headers=headers, metadata=metadata, **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def upload_range(  # type: ignore
        self,
        data,  # type: bytes
        offset,  # type: int
        length,  # type: int
        **kwargs
    ):
        # type: (...) -> Dict[str, Any]
        """Upload a range of bytes to a file.

        :param bytes data:
            The data to upload.
        :param int offset:
            Start of byte range to use for uploading a section of the file.
            The range can be up to 4 MB in size.
        :param int length:
            Number of bytes to use for uploading a section of the file.
            The range can be up to 4 MB in size.
        :keyword bool validate_content:
            If true, calculates an MD5 hash of the page content. The storage
            service checks the hash of the content that has arrived
            with the hash that was sent. This is primarily valuable for detecting
            bitflips on the wire if using http instead of https as https (the default)
            will already validate. Note that this MD5 hash is not stored with the
            file.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :keyword str encoding:
            Defaults to UTF-8.
        :returns: File-updated property dict (Etag and last modified).
        :rtype: Dict[str, Any]
        """
        validate_content = kwargs.pop('validate_content', False)
        timeout = kwargs.pop('timeout', None)
        encoding = kwargs.pop('encoding', 'UTF-8')
        if self.require_encryption or (self.key_encryption_key is not None):
            raise ValueError("Encryption not supported.")
        if isinstance(data, six.text_type):
            data = data.encode(encoding)
        end_range = offset + length - 1  # Reformat to an inclusive range index
        content_range = 'bytes={0}-{1}'.format(offset, end_range)
        try:
            return await self._client.file.upload_range(  # type: ignore
                range=content_range,
                content_length=length,
                optionalbody=data,
                timeout=timeout,
                validate_content=validate_content,
                cls=return_response_headers,
                **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def upload_range_from_url(self, source_url,
                                    offset,
                                    length,
                                    source_offset,
                                    **kwargs
                                    ):
        # type: (str, int, int, int, **Any) -> Dict[str, Any]
        """
        Writes the bytes from one Azure File endpoint into the specified range of another Azure File endpoint.

        :param int offset:
            Start of byte range to use for updating a section of the file.
            The range can be up to 4 MB in size.
        :param int length:
            Number of bytes to use for updating a section of the file.
            The range can be up to 4 MB in size.
        :param str source_url:
            A URL of up to 2 KB in length that specifies an Azure file or blob.
            The value should be URL-encoded as it would appear in a request URI.
            If the source is in another account, the source must either be public
            or must be authenticated via a shared access signature. If the source
            is public, no authentication is required.
            Examples:
            https://myaccount.file.core.windows.net/myshare/mydir/myfile
            https://otheraccount.file.core.windows.net/myshare/mydir/myfile?sastoken
        :param int source_offset:
            This indicates the start of the range of bytes(inclusive) that has to be taken from the copy source.
            The service will read the same number of bytes as the destination range (length-offset).
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        """
        options = self._upload_range_from_url_options(
            source_url=source_url,
            offset=offset,
            length=length,
            source_offset=source_offset,
            **kwargs
        )
        try:
            return await self._client.file.upload_range_from_url(**options)  # type: ignore
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def get_ranges(  # type: ignore
        self,
        offset=None,  # type: Optional[int]
        length=None,  # type: Optional[int]
        **kwargs
    ):
        # type: (...) -> List[Dict[str, int]]
        """Returns the list of valid ranges of a file.

        :param int offset:
            Specifies the start offset of bytes over which to get ranges.
        :param int length:
            Number of bytes to use over which to get ranges.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: A list of valid ranges.
        :rtype: List[dict[str, int]]
        """
        timeout = kwargs.pop('timeout', None)
        if self.require_encryption or (self.key_encryption_key is not None):
            raise ValueError("Unsupported method for encryption.")

        content_range = None
        if offset is not None:
            if length is not None:
                end_range = offset + length - 1  # Reformat to an inclusive range index
                content_range = "bytes={0}-{1}".format(offset, end_range)
            else:
                content_range = "bytes={0}-".format(offset)
        try:
            ranges = await self._client.file.get_range_list(
                sharesnapshot=self.snapshot, timeout=timeout, range=content_range, **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)
        return [{"start": b.start, "end": b.end} for b in ranges]

    @distributed_trace_async
    async def clear_range(  # type: ignore
        self,
        offset,  # type: int
        length,  # type: int
        **kwargs
    ):
        # type: (...) -> Dict[str, Any]
        """Clears the specified range and releases the space used in storage for
        that range.

        :param int offset:
            Start of byte range to use for clearing a section of the file.
            The range can be up to 4 MB in size.
        :param int length:
            Number of bytes to use for clearing a section of the file.
            The range can be up to 4 MB in size.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: File-updated property dict (Etag and last modified).
        :rtype: Dict[str, Any]
        """
        timeout = kwargs.pop('timeout', None)
        if self.require_encryption or (self.key_encryption_key is not None):
            raise ValueError("Unsupported method for encryption.")

        if offset is None or offset % 512 != 0:
            raise ValueError("offset must be an integer that aligns with 512 bytes file size")
        if length is None or length % 512 != 0:
            raise ValueError("length must be an integer that aligns with 512 bytes file size")
        end_range = length + offset - 1  # Reformat to an inclusive range index
        content_range = "bytes={0}-{1}".format(offset, end_range)
        try:
            return await self._client.file.upload_range(  # type: ignore
                timeout=timeout,
                cls=return_response_headers,
                content_length=0,
                file_range_write="clear",
                range=content_range,
                **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def resize_file(self, size, **kwargs):
        # type: (int, Any) -> Dict[str, Any]
        """Resizes a file to the specified size.

        :param int size:
            Size to resize file to (in bytes)
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: File-updated property dict (Etag and last modified).
        :rtype: Dict[str, Any]
        """
        timeout = kwargs.pop('timeout', None)
        try:
            return await self._client.file.set_http_headers(  # type: ignore
                file_content_length=size,
                file_attributes="preserve",
                file_creation_time="preserve",
                file_last_write_time="preserve",
                file_permission="preserve",
                cls=return_response_headers,
                timeout=timeout,
                **kwargs
            )
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace
    def list_handles(self, **kwargs):
        # type: (Any) -> AsyncItemPaged
        """Lists handles for file.

        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: An auto-paging iterable of HandleItem
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.fileshare.HandleItem]
        """
        timeout = kwargs.pop('timeout', None)
        results_per_page = kwargs.pop("results_per_page", None)
        command = functools.partial(
            self._client.file.list_handles,
            sharesnapshot=self.snapshot,
            timeout=timeout,
            **kwargs)
        return AsyncItemPaged(
            command, results_per_page=results_per_page,
            page_iterator_class=HandlesPaged)

    @distributed_trace_async
    async def close_handle(self, handle, **kwargs):
        # type: (Union[str, HandleItem], Any) -> Dict[str, int]
        """Close an open file handle.

        :param handle:
            A specific handle to close.
        :type handle: str or ~azure.storage.fileshare.Handle
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns:
            The number of handles closed (this may be 0 if the specified handle was not found)
            and the number of handles failed to close in a dict.
        :rtype: dict[str, int]
        """
        # Accept either a Handle object or a raw handle ID string.
        try:
            handle_id = handle.id  # type: ignore
        except AttributeError:
            handle_id = handle
        if handle_id == '*':
            raise ValueError("Handle ID '*' is not supported. Use 'close_all_handles' instead.")
        try:
            response = await self._client.file.force_close_handles(
                handle_id,
                marker=None,
                sharesnapshot=self.snapshot,
                cls=return_response_headers,
                **kwargs
            )
            return {
                'closed_handles_count': response.get('number_of_handles_closed', 0),
            }
        except StorageErrorException as error:
            process_storage_error(error)

    @distributed_trace_async
    async def close_all_handles(self, **kwargs):
        # type: (Any) -> Dict[str, int]
        """Close any open file handles.

        This operation will block until the service has closed all open handles.

        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns:
            The number of handles closed (this may be 0 if the specified handle was not found)
            and the number of handles failed to close in a dict.
        :rtype: dict[str, int]
        """
        timeout = kwargs.pop('timeout', None)
        start_time = time.time()

        try_close = True
        continuation_token = None
        total_closed = 0
        # Page through handles via the continuation marker until none remain,
        # shrinking the timeout by the elapsed time on each iteration.
        while try_close:
            try:
                response = await self._client.file.force_close_handles(
                    handle_id='*',
                    timeout=timeout,
                    marker=continuation_token,
                    sharesnapshot=self.snapshot,
                    cls=return_response_headers,
                    **kwargs
                )
            except StorageErrorException as error:
                process_storage_error(error)
            continuation_token = response.get('marker')
            try_close = bool(continuation_token)
            total_closed += response.get('number_of_handles_closed', 0)
            if timeout:
                timeout = max(0, timeout - (time.time() - start_time))
        return {
            'closed_handles_count': total_closed,
        }