diff --git a/sdk/storage/azure-storage-blob/CHANGELOG.md b/sdk/storage/azure-storage-blob/CHANGELOG.md index c329969f2e17..2c761707d895 100644 --- a/sdk/storage/azure-storage-blob/CHANGELOG.md +++ b/sdk/storage/azure-storage-blob/CHANGELOG.md @@ -3,6 +3,13 @@ ## 12.26.0b1 (Unreleased) ### Features Added +- Added support for service version 2025-07-05. +- Added support for OAuth authentication in `ContainerClient`'s `get_container_access_policy` +and `set_container_access_policy` APIs. +- Added support for the keyword `source_token_intent` when copying from file share to blob and +authenticated via `TokenCredential` for the following `BlobClient` APIs: `upload_blob_from_url`, +`start_copy_from_url`, `stage_block_from_url`, `upload_pages_from_url`, and `append_block_from_url`. +- Added support for `query_blob` in the asynchronous `BlobClient`. ## 12.25.1 (2025-03-27) diff --git a/sdk/storage/azure-storage-blob/assets.json b/sdk/storage/azure-storage-blob/assets.json index 5f62daf58985..1defeedba97e 100644 --- a/sdk/storage/azure-storage-blob/assets.json +++ b/sdk/storage/azure-storage-blob/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-blob", - "Tag": "python/storage/azure-storage-blob_b11831f46e" + "Tag": "python/storage/azure-storage-blob_7756dd1c10" } diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 90049ff88e32..5e75c24417aa 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -422,6 +422,15 @@ def upload_blob_from_url( :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :keyword source_token_intent: + Required when source is Azure Storage Files and using `TokenCredential` for authentication. + This is ignored for other forms of authentication. + Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are: + + backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory + ACLs are bypassed and full permissions are granted. User must also have required RBAC permission. + + :paramtype source_token_intent: Literal['backup'] :returns: Blob-updated property Dict (Etag and last modified) :rtype: Dict[str, Any] """ @@ -430,7 +439,8 @@ def upload_blob_from_url( options = _upload_blob_from_url_options( source_url=source_url, metadata=metadata, - **kwargs) + **kwargs + ) try: return cast(Dict[str, Any], self._client.block_blob.put_blob_from_url(**options)) except HttpResponseError as error: @@ -1746,6 +1756,15 @@ def start_copy_from_url( .. versionadded:: 12.9.0 + :keyword source_token_intent: + Required when source is Azure Storage Files and using `TokenCredential` for authentication. + This is ignored for other forms of authentication. + Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are: + + backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory + ACLs are bypassed and full permissions are granted. User must also have required RBAC permission. 
+ + :paramtype source_token_intent: Literal['backup'] :keyword str encryption_scope: A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption scope can be created using the Management API and referenced here by name. If a default @@ -1770,7 +1789,8 @@ def start_copy_from_url( source_url=source_url, metadata=metadata, incremental_copy=incremental_copy, - **kwargs) + **kwargs + ) try: if incremental_copy: return cast(Dict[str, Union[str, datetime]], self._client.page_blob.copy_incremental(**options)) @@ -2046,6 +2066,15 @@ def stage_block_from_url( :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :keyword source_token_intent: + Required when source is Azure Storage Files and using `TokenCredential` for authentication. + This is ignored for other forms of authentication. + Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are: + + backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory + ACLs are bypassed and full permissions are granted. User must also have required RBAC permission. + + :paramtype source_token_intent: Literal['backup'] :returns: Blob property dict. :rtype: dict[str, Any] """ @@ -2057,7 +2086,8 @@ def stage_block_from_url( source_offset=source_offset, source_length=source_length, source_content_md5=source_content_md5, - **kwargs) + **kwargs + ) try: return cast(Dict[str, Any], self._client.block_blob.stage_block_from_url(**options)) except HttpResponseError as error: @@ -2919,6 +2949,15 @@ def upload_pages_from_url( :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :keyword source_token_intent: + Required when source is Azure Storage Files and using `TokenCredential` for authentication. + This is ignored for other forms of authentication. + Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are: + + backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory + ACLs are bypassed and full permissions are granted. User must also have required RBAC permission. + + :paramtype source_token_intent: Literal['backup'] :returns: Response after uploading pages from specified URL. :rtype: Dict[str, Any] """ @@ -3211,6 +3250,15 @@ def append_block_from_url( :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :keyword source_token_intent: + Required when source is Azure Storage Files and using `TokenCredential` for authentication. + This is ignored for other forms of authentication. + Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are: + + backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory + ACLs are bypassed and full permissions are granted. User must also have required RBAC permission. + + :paramtype source_token_intent: Literal['backup'] :returns: Result after appending a new block. 
:rtype: Dict[str, Union[str, datetime, int]] """ diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py index a04f0ea02525..200b89c8ddc2 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client_helpers.py @@ -197,6 +197,7 @@ def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, A overwrite = kwargs.pop('overwrite', False) content_settings = kwargs.pop('content_settings', None) source_authorization = kwargs.pop('source_authorization', None) + source_token_intent = kwargs.pop('source_token_intent', None) if content_settings: kwargs['blob_http_headers'] = BlobHTTPHeaders( blob_cache_control=content_settings.cache_control, @@ -214,6 +215,7 @@ def _upload_blob_from_url_options(source_url: str, **kwargs: Any) -> Dict[str, A options = { 'copy_source_authorization': source_authorization, + 'file_request_intent': source_token_intent, 'content_length': 0, 'copy_source_blob_properties': kwargs.pop('include_source_blob_properties', True), 'source_content_md5': kwargs.pop('source_content_md5', None), @@ -376,7 +378,7 @@ def _quick_query_options(snapshot: Optional[str], query_expression: str, **kwarg 'timeout': kwargs.pop('timeout', None), 'cls': return_headers_and_deserialized, } - options.update(kwargs) + options.update({k: v for k, v in kwargs.items() if v is not None}) return options, delimiter def _generic_delete_blob_options(delete_snapshots: Optional[str] = None, **kwargs: Any) -> Dict[str, Any]: @@ -607,6 +609,7 @@ def _start_copy_from_url_options( # pylint:disable=too-many-statements requires_sync = kwargs.pop('requires_sync', None) encryption_scope_str = kwargs.pop('encryption_scope', None) source_authorization = kwargs.pop('source_authorization', None) + source_token_intent = kwargs.pop('source_token_intent', None) # If tags is a str, interpret that as copy_source_tags copy_source_tags = isinstance(tags, str) @@ -626,6 +629,8 @@ def _start_copy_from_url_options( # pylint:disable=too-many-statements headers['x-ms-encryption-scope'] = encryption_scope_str if source_authorization: headers['x-ms-copy-source-authorization'] = source_authorization + if source_token_intent: + headers['x-ms-file-request-intent'] = source_token_intent if copy_source_tags: headers['x-ms-copy-source-tag-option'] = tags else: @@ -635,6 +640,9 @@ def _start_copy_from_url_options( # pylint:disable=too-many-statements if source_authorization: raise ValueError( "Source authorization tokens are only supported for sync copy, please specify requires_sync=True") + if source_token_intent: + raise ValueError( + "Source token intent is only supported for sync copy, please specify requires_sync=True") if copy_source_tags: raise ValueError( "Copying source tags is only supported for sync copy, please specify requires_sync=True") @@ -729,6 +737,7 @@ def _stage_block_from_url_options( ) -> Dict[str, Any]: source_url = _encode_source_url(source_url=source_url) source_authorization = kwargs.pop('source_authorization', None) + source_token_intent = kwargs.pop('source_token_intent', None) if source_length is not None and source_offset is None: raise ValueError("Source offset value must not be None if length is set.") if source_length is not None and source_offset is not None: @@ -747,6 +756,7 @@ def _stage_block_from_url_options( encryption_algorithm=cpk.algorithm) options = { 'copy_source_authorization': 
source_authorization, + 'file_request_intent': source_token_intent, 'block_id': block_id, 'content_length': 0, 'source_url': source_url, @@ -1010,6 +1020,7 @@ def _upload_pages_from_url_options( if_sequence_number_equal_to=kwargs.pop('if_sequence_number_eq', None) ) source_authorization = kwargs.pop('source_authorization', None) + source_token_intent = kwargs.pop('source_token_intent', None) access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) source_mod_conditions = get_source_conditions(kwargs) @@ -1023,6 +1034,7 @@ def _upload_pages_from_url_options( options = { 'copy_source_authorization': source_authorization, + 'file_request_intent': source_token_intent, 'source_url': source_url, 'content_length': 0, 'source_range': source_range, @@ -1152,6 +1164,7 @@ def _append_block_from_url_options( append_position=appendpos_condition ) source_authorization = kwargs.pop('source_authorization', None) + source_token_intent = kwargs.pop('source_token_intent', None) access_conditions = get_access_conditions(kwargs.pop('lease', None)) mod_conditions = get_modify_conditions(kwargs) source_mod_conditions = get_source_conditions(kwargs) @@ -1164,6 +1177,7 @@ def _append_block_from_url_options( options = { 'copy_source_authorization': source_authorization, + 'file_request_intent': source_token_intent, 'source_url': copy_source_url, 'content_length': 0, 'source_range': source_range, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py index a429b713b744..da03ae8504c7 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_azure_blob_storage.py @@ -48,7 +48,7 @@ class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword :param base_url: Service URL. Required. Default value is "". :type base_url: str :keyword version: Specifies the version of the operation to use for this request. Default value - is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + is "2025-07-05". Note that overriding this default value may result in unsupported behavior. :paramtype version: str """ diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py index 2af7d1d282ea..c52076a5991a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_configuration.py @@ -23,12 +23,12 @@ class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attrib desired operation. Required. :type url: str :keyword version: Specifies the version of the operation to use for this request. Default value - is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + is "2025-07-05". Note that overriding this default value may result in unsupported behavior. 
:paramtype version: str """ def __init__(self, url: str, **kwargs: Any) -> None: - version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05") + version: Literal["2025-07-05"] = kwargs.pop("version", "2025-07-05") if url is None: raise ValueError("Parameter 'url' must not be None.") diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py index a066e16a64dd..7a0232de5ddc 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/_serialization.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. @@ -411,7 +411,7 @@ def from_dict( :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) @@ -1361,7 +1361,7 @@ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument # Iter and wrapped, should have found one node only (the wrap one) if len(children) != 1: raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( xml_name ) ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py index 9a06e367a4d2..672763db4f19 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_azure_blob_storage.py @@ -48,7 +48,7 @@ class AzureBlobStorage: # pylint: disable=client-accepts-api-version-keyword :param base_url: Service URL. Required. Default value is "". :type base_url: str :keyword version: Specifies the version of the operation to use for this request. Default value - is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + is "2025-07-05". Note that overriding this default value may result in unsupported behavior. :paramtype version: str """ diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py index 7448ca36e972..1652e4a58132 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/_configuration.py @@ -23,12 +23,12 @@ class AzureBlobStorageConfiguration: # pylint: disable=too-many-instance-attrib desired operation. Required. :type url: str :keyword version: Specifies the version of the operation to use for this request. Default value - is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + is "2025-07-05". 
Note that overriding this default value may result in unsupported behavior. :paramtype version: str """ def __init__(self, url: str, **kwargs: Any) -> None: - version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05") + version: Literal["2025-07-05"] = kwargs.pop("version", "2025-07-05") if url is None: raise ValueError("Parameter 'url' must not be None.") diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py index 4eb79b31c99e..86887b77c310 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_append_blob_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -78,7 +79,6 @@ async def create( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Create Append Blob operation creates a new append blob. :param content_length: The length of the request. Required. @@ -264,7 +264,6 @@ async def append_block( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version 2015-02-21 version or later. @@ -445,6 +444,7 @@ async def append_block_from_url( transactional_content_md5: Optional[bytes] = None, request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, cpk_info: Optional[_models.CpkInfo] = None, cpk_scope_info: Optional[_models.CpkScopeInfo] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, @@ -453,7 +453,6 @@ async def append_block_from_url( source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob where the contents are read from a source url. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on @@ -486,6 +485,8 @@ async def append_block_from_url( :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. Default value is None. :type copy_source_authorization: str + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo :param cpk_scope_info: Parameter group. Default value is None. 
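For illustration, a minimal sketch of driving the new keyword through the public `append_block_from_url` surface documented above. The account, container, share, and blob names are placeholders, and `DefaultAzureCredential` from `azure-identity` is an assumed credential; per the docstring, `source_token_intent` only applies when the copy source is an Azure Files URL authenticated with a `TokenCredential`:

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobClient

# Placeholder names; the caller must also hold the RBAC permission that
# "backup" intent requires, as noted in the docstring above.
blob_client = BlobClient(
    account_url="https://<account>.blob.core.windows.net",
    container_name="<container>",
    blob_name="<append-blob>",
    credential=DefaultAzureCredential(),
)
blob_client.create_append_blob()
blob_client.append_block_from_url(
    copy_source_url="https://<account>.file.core.windows.net/<share>/<file>",
    source_token_intent="backup",  # required for Azure Files sources with TokenCredential
)
```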
@@ -584,6 +585,7 @@ async def append_block_from_url( source_if_none_match=_source_if_none_match, request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, + file_request_intent=file_request_intent, comp=comp, version=self._config.version, headers=_headers, @@ -642,7 +644,6 @@ async def seal( append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version 2019-12-12 version or later. diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py index ee46d9efb883..3e3e9afc49e6 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -99,7 +99,6 @@ async def download( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> AsyncIterator[bytes]: - # pylint: disable=line-too-long """The Download operation reads or downloads a blob from the system, including its metadata and properties. You can also call Download to read a snapshot. @@ -415,7 +414,6 @@ async def get_properties( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. @@ -619,7 +617,6 @@ async def delete( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is permanently removed from the storage account. If the storage account's soft delete feature is enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible @@ -744,7 +741,6 @@ async def delete( async def undelete( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Undelete a blob that was previously soft deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -817,7 +813,6 @@ async def set_expiry( expires_on: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets the time a blob will expire and be deleted. :param expiry_options: Required. Indicates mode of the expiry time. Known values are: @@ -900,7 +895,6 @@ async def set_http_headers( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set HTTP Headers operation sets system properties on the blob. :param timeout: The timeout parameter is expressed in seconds. 
For more information, see @@ -1027,7 +1021,6 @@ async def set_immutability_policy( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Immutability Policy operation sets the immutability policy on the blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -1133,7 +1126,6 @@ async def delete_immutability_policy( version_id: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Delete Immutability Policy operation deletes the immutability policy on the blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -1219,7 +1211,6 @@ async def set_legal_hold( version_id: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Legal Hold operation sets a legal hold on the blob. :param legal_hold: Specified if a legal hold should be set on the blob. Required. @@ -1311,7 +1302,6 @@ async def set_metadata( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more name-value pairs. @@ -1450,7 +1440,6 @@ async def acquire_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -1558,7 +1547,6 @@ async def release_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -1658,7 +1646,6 @@ async def renew_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -1760,7 +1747,6 @@ async def change_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -1866,7 +1852,6 @@ async def break_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -1976,7 +1961,6 @@ async def create_snapshot( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Create Snapshot operation creates a read-only snapshot of a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -2118,7 +2102,6 @@ async def start_copy_from_url( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Start Copy From URL operation copies a blob or an internet resource to a new blob. :param copy_source: Specifies the name of the source page blob snapshot. 
This value is a URL of @@ -2287,13 +2270,13 @@ async def copy_from_url( legal_hold: Optional[bool] = None, copy_source_authorization: Optional[str] = None, copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, cpk_scope_info: Optional[_models.CpkScopeInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return a response until the copy is complete. @@ -2344,6 +2327,8 @@ async def copy_from_url( copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and "COPY". Default value is None. :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param source_modified_access_conditions: Parameter group. Default value is None. :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions @@ -2425,6 +2410,7 @@ async def copy_from_url( copy_source_authorization=copy_source_authorization, encryption_scope=_encryption_scope, copy_source_tags=copy_source_tags, + file_request_intent=file_request_intent, x_ms_requires_sync=x_ms_requires_sync, version=self._config.version, headers=_headers, @@ -2476,7 +2462,6 @@ async def abort_copy_from_url( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination blob with zero length and full metadata. @@ -2569,7 +2554,6 @@ async def set_tier( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium storage account and on a block blob in a blob storage account (locally redundant storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of @@ -2674,7 +2658,6 @@ async def set_tier( async def get_account_info( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns the sku name and account kind. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -2755,7 +2738,6 @@ async def query( query_request: Optional[_models.QueryRequest] = None, **kwargs: Any ) -> AsyncIterator[bytes]: - # pylint: disable=line-too-long """The Query operation enables users to select/project on blob data by providing simple query expressions. @@ -3005,7 +2987,6 @@ async def get_tags( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> _models.BlobTags: - # pylint: disable=line-too-long """The Get Tags operation enables users to get the tags associated with a blob. :param timeout: The timeout parameter is expressed in seconds. 
For more information, see @@ -3111,7 +3092,6 @@ async def set_tags( tags: Optional[_models.BlobTags] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Tags operation enables users to set tags on a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py index cdd31733b22f..70500f7b604c 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -87,7 +87,6 @@ async def upload( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Upload Block Blob operation updates the content of an existing block blob. Updating an existing block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put Blob; the content of the existing blob is overwritten with the content of @@ -305,6 +304,7 @@ async def put_blob_from_url( copy_source_blob_properties: Optional[bool] = None, copy_source_authorization: Optional[str] = None, copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, cpk_info: Optional[_models.CpkInfo] = None, @@ -313,7 +313,6 @@ async def put_blob_from_url( source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are not supported with Put Blob from URL; the content of an existing blob is @@ -367,6 +366,8 @@ async def put_blob_from_url( copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and "COPY". Default value is None. :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders :param lease_access_conditions: Parameter group. Default value is None. 
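The same keyword flows through the asynchronous client that this file generates for. A sketch, assuming `azure-identity` is installed and using placeholder resource names; `asyncio.run` drives the coroutine:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.storage.blob.aio import BlobClient

async def copy_from_share() -> None:
    # Placeholder account/container/blob/share names.
    async with DefaultAzureCredential() as credential:
        async with BlobClient(
            account_url="https://<account>.blob.core.windows.net",
            container_name="<container>",
            blob_name="<dest-blob>",
            credential=credential,
        ) as blob_client:
            await blob_client.upload_blob_from_url(
                source_url="https://<account>.file.core.windows.net/<share>/<file>",
                overwrite=True,
                source_token_intent="backup",
            )

asyncio.run(copy_from_share())
```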
@@ -482,6 +483,7 @@ async def put_blob_from_url( copy_source_blob_properties=copy_source_blob_properties, copy_source_authorization=copy_source_authorization, copy_source_tags=copy_source_tags, + file_request_intent=file_request_intent, blob_type=blob_type, version=self._config.version, headers=_headers, @@ -542,7 +544,6 @@ async def stage_block( cpk_scope_info: Optional[_models.CpkScopeInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob. :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the @@ -690,13 +691,13 @@ async def stage_block_from_url( timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, cpk_info: Optional[_models.CpkInfo] = None, cpk_scope_info: Optional[_models.CpkScopeInfo] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob where the contents are read from a URL. @@ -728,6 +729,8 @@ async def stage_block_from_url( :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. Default value is None. :type copy_source_authorization: str + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo :param cpk_scope_info: Parameter group. Default value is None. @@ -798,6 +801,7 @@ async def stage_block_from_url( source_if_none_match=_source_if_none_match, request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, + file_request_intent=file_request_intent, comp=comp, version=self._config.version, headers=_headers, @@ -862,7 +866,6 @@ async def commit_block_list( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Commit Block List operation writes a blob by specifying the list of block IDs that make up the blob. In order to be written as part of a blob, a block must have been successfully written to the server in a prior Put Block operation. You can call Put Block List to update a blob by @@ -1066,7 +1069,6 @@ async def get_block_list( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> _models.BlockList: - # pylint: disable=line-too-long """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. 
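`stage_block_from_url` threads `file_request_intent` the same way. A sketch of staging a ranged block from a file share and then committing it; the names and the 4 MiB source range are illustrative, and the `BlobClient` is assumed to be constructed with a `TokenCredential` as in the earlier sketches:

```python
from azure.storage.blob import BlobBlock, BlobClient

def stage_and_commit(blob_client: BlobClient) -> None:
    blob_client.stage_block_from_url(
        block_id="block-000001",  # encoded to Base64 by the client
        source_url="https://<account>.file.core.windows.net/<share>/<file>",
        source_offset=0,
        source_length=4 * 1024 * 1024,
        source_token_intent="backup",
    )
    blob_client.commit_block_list([BlobBlock(block_id="block-000001")])
```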
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py index 6ffc595933af..5a9eb167ab93 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_container_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -86,7 +86,6 @@ async def create( container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """creates a new container under the specified account. If the container with the same name already exists, the operation fails. @@ -184,7 +183,6 @@ async def get_properties( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. @@ -287,7 +285,6 @@ async def delete( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """operation marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. @@ -378,7 +375,6 @@ async def set_metadata( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """operation sets one or more user-defined name-value pairs for the specified container. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -476,7 +472,6 @@ async def get_access_policy( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> List[_models.SignedIdentifier]: - # pylint: disable=line-too-long """gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. @@ -570,7 +565,6 @@ async def set_access_policy( container_acl: Optional[List[_models.SignedIdentifier]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """sets the permissions for the specified container. The permissions indicate whether blobs in a container may be accessed publicly. @@ -680,7 +674,6 @@ async def restore( deleted_container_version: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Restores a previously-deleted container. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -763,7 +756,6 @@ async def rename( source_lease_id: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Renames an existing container. :param source_container_name: Required. Specifies the name of the container to rename. @@ -846,7 +838,6 @@ async def submit_batch( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> AsyncIterator[bytes]: - # pylint: disable=line-too-long """The Batch operation allows multiple API calls to be embedded into a single HTTP request. :param content_length: The length of the request. Required. 
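These container hunks accompany the changelog entry adding OAuth support to the access-policy APIs: a `ContainerClient` built on a `TokenCredential` can now read and write the container ACL directly. A sketch with placeholder names; the stored-access-policy id "readonly" is illustrative:

```python
from datetime import datetime, timedelta, timezone

from azure.identity import DefaultAzureCredential
from azure.storage.blob import AccessPolicy, ContainerClient, ContainerSasPermissions

container_client = ContainerClient(
    account_url="https://<account>.blob.core.windows.net",
    container_name="<container>",
    credential=DefaultAzureCredential(),  # OAuth; no account key needed
)
policy = AccessPolicy(
    permission=ContainerSasPermissions(read=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
container_client.set_container_access_policy(signed_identifiers={"readonly": policy})
acl = container_client.get_container_access_policy()
print(acl["public_access"], acl["signed_identifiers"])
```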
@@ -941,7 +932,6 @@ async def filter_blobs( include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, **kwargs: Any ) -> _models.FilterBlobSegment: - # pylint: disable=line-too-long """The Filter Blobs operation enables callers to list blobs in a container whose tags match a given search expression. Filter blobs searches within the given container. @@ -1046,7 +1036,6 @@ async def acquire_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -1147,7 +1136,6 @@ async def release_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -1240,7 +1228,6 @@ async def renew_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -1334,7 +1321,6 @@ async def break_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -1435,7 +1421,6 @@ async def change_lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -1536,7 +1521,6 @@ async def list_blob_flat_segment( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.ListBlobsFlatSegmentResponse: - # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param prefix: Filters the results to return only containers whose name begins with the @@ -1643,7 +1627,6 @@ async def list_blob_hierarchy_segment( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.ListBlobsHierarchySegmentResponse: - # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix @@ -1748,7 +1731,6 @@ async def list_blob_hierarchy_segment( async def get_account_info( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns the sku name and account kind. :param timeout: The timeout parameter is expressed in seconds. 
For more information, see diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py index 05771ac6c7d4..b1fdcf511253 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_page_blob_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -87,7 +87,6 @@ async def create( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Create operation creates a new page blob. :param content_length: The length of the request. Required. @@ -287,7 +286,6 @@ async def upload_pages( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Upload Pages operation writes a range of pages to a page blob. :param content_length: The length of the request. Required. @@ -475,7 +473,6 @@ async def clear_pages( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Clear Pages operation clears a set of pages from a page blob. :param content_length: The length of the request. Required. @@ -626,6 +623,7 @@ async def upload_pages_from_url( timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, cpk_info: Optional[_models.CpkInfo] = None, cpk_scope_info: Optional[_models.CpkScopeInfo] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, @@ -634,7 +632,6 @@ async def upload_pages_from_url( source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Upload Pages operation writes a range of pages to a page blob where the contents are read from a URL. @@ -666,6 +663,8 @@ async def upload_pages_from_url( :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. Default value is None. :type copy_source_authorization: str + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo :param cpk_scope_info: Parameter group. Default value is None. 
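For the page-blob path, the keyword is accepted by the public `upload_pages_from_url`; destination and source ranges must stay 512-byte aligned. A sketch with placeholder names and illustrative sizes:

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobClient

page_blob = BlobClient(
    account_url="https://<account>.blob.core.windows.net",
    container_name="<container>",
    blob_name="<page-blob>",
    credential=DefaultAzureCredential(),
)
page_blob.create_page_blob(size=1024 * 1024)
page_blob.upload_pages_from_url(
    source_url="https://<account>.file.core.windows.net/<share>/<file>",
    offset=0,          # destination offset, 512-byte aligned
    length=512,        # page-aligned byte count
    source_offset=0,   # must match the destination alignment
    source_token_intent="backup",
)
```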
@@ -770,6 +769,7 @@ async def upload_pages_from_url( source_if_none_match=_source_if_none_match, request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, + file_request_intent=file_request_intent, comp=comp, page_write=page_write, version=self._config.version, @@ -829,7 +829,6 @@ async def get_page_ranges( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> _models.PageList: - # pylint: disable=line-too-long """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a page blob. @@ -969,7 +968,6 @@ async def get_page_ranges_diff( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> _models.PageList: - # pylint: disable=line-too-long """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were changed between target blob and previous snapshot. @@ -1120,7 +1118,6 @@ async def resize( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Resize the Blob. :param blob_content_length: This header specifies the maximum size for the page blob, up to 1 @@ -1247,7 +1244,6 @@ async def update_sequence_number( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Update the sequence number of the blob. :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the @@ -1361,7 +1357,6 @@ async def copy_incremental( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. The snapshot is copied such that only the differential changes between the previously copied snapshot are transferred to the destination. The copied snapshots are complete copies of diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py index 3c2fc6503135..30ad827bbbc5 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_service_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -73,7 +74,6 @@ async def set_properties( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets properties for a storage account's Blob service endpoint, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -150,7 +150,6 @@ async def set_properties( async def get_properties( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.StorageServiceProperties: - # pylint: disable=line-too-long """gets the properties of a storage account's Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. 
@@ -224,7 +223,6 @@ async def get_properties( async def get_statistics( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.StorageServiceStats: - # pylint: disable=line-too-long """Retrieves statistics related to replication for the Blob service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the storage account. @@ -307,7 +305,6 @@ async def list_containers_segment( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.ListContainersSegmentResponse: - # pylint: disable=line-too-long """The List Containers Segment operation returns a list of the containers under the specified account. @@ -407,7 +404,6 @@ async def get_user_delegation_key( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.UserDelegationKey: - # pylint: disable=line-too-long """Retrieves a user delegation key for the Blob service. This is only a valid operation when using bearer token authentication. @@ -489,7 +485,6 @@ async def get_user_delegation_key( async def get_account_info( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns the sku name and account kind. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -567,7 +562,6 @@ async def submit_batch( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> AsyncIterator[bytes]: - # pylint: disable=line-too-long """The Batch operation allows multiple API calls to be embedded into a single HTTP request. :param content_length: The length of the request. Required. @@ -660,7 +654,6 @@ async def filter_blobs( include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, **kwargs: Any ) -> _models.FilterBlobSegment: - # pylint: disable=line-too-long """The Filter Blobs operation enables callers to list blobs across all containers whose tags match a given search expression. Filter blobs searches across all containers within a storage account but can be scoped within the expression to a single container. 
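One behavioral wrinkle worth noting before the generated-model changes below: as with `source_authorization`, the helper in `_blob_client_helpers.py` rejects `source_token_intent` on an asynchronous copy, so `start_copy_from_url` callers must pass `requires_sync=True`. A sketch with placeholder names:

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobClient

dest = BlobClient(
    account_url="https://<account>.blob.core.windows.net",
    container_name="<container>",
    blob_name="<dest-blob>",
    credential=DefaultAzureCredential(),
)
# Omitting requires_sync=True raises:
#   ValueError: Source token intent is only supported for sync copy, ...
dest.start_copy_from_url(
    source_url="https://<account>.file.core.windows.net/<share>/<file>",
    requires_sync=True,
    source_token_intent="backup",
)
```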
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py index bb9dc27da60f..7f56853c30a4 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/__init__.py @@ -81,6 +81,7 @@ CopyStatusType, DeleteSnapshotsOptionType, EncryptionAlgorithmType, + FileShareTokenIntent, FilterBlobsIncludeItem, GeoReplicationStatusType, LeaseDurationType, @@ -165,6 +166,7 @@ "CopyStatusType", "DeleteSnapshotsOptionType", "EncryptionAlgorithmType", + "FileShareTokenIntent", "FilterBlobsIncludeItem", "GeoReplicationStatusType", "LeaseDurationType", diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py index 12ccbf7312f9..43dbfa6bc654 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py @@ -152,6 +152,12 @@ class EncryptionAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): AES256 = "AES256" +class FileShareTokenIntent(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """FileShareTokenIntent.""" + + BACKUP = "backup" + + class FilterBlobsIncludeItem(str, Enum, metaclass=CaseInsensitiveEnumMeta): """FilterBlobsIncludeItem.""" diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py index a99691a1501b..d20e28f14e58 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_append_blob_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -72,7 +72,7 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) blob_type: Literal["AppendBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "AppendBlob")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -182,7 +182,7 @@ def build_append_block_request( comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -278,13 +278,14 @@ def build_append_block_from_url_request( source_if_none_match: Optional[str] = None, request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["appendblock"] = kwargs.pop("comp", _params.pop("comp", "appendblock")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -360,6 +361,8 @@ def build_append_block_from_url_request( _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( "copy_source_authorization", copy_source_authorization, "str" ) + if file_request_intent is not None: + _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) @@ -382,7 +385,7 @@ def build_seal_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["seal"] = kwargs.pop("comp", _params.pop("comp", "seal")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -456,7 +459,6 @@ def create( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Create Append Blob operation creates a new append blob. :param content_length: The length of the request. Required. @@ -642,7 +644,6 @@ def append_block( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. 
Append Block is supported only on version 2015-02-21 version or later. @@ -823,6 +824,7 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements transactional_content_md5: Optional[bytes] = None, request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, cpk_info: Optional[_models.CpkInfo] = None, cpk_scope_info: Optional[_models.CpkScopeInfo] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, @@ -831,7 +833,6 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Append Block operation commits a new block of data to the end of an existing append blob where the contents are read from a source url. The Append Block operation is permitted only if the blob was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on @@ -864,6 +865,8 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. Default value is None. :type copy_source_authorization: str + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo :param cpk_scope_info: Parameter group. Default value is None. @@ -962,6 +965,7 @@ def append_block_from_url( # pylint: disable=inconsistent-return-statements source_if_none_match=_source_if_none_match, request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, + file_request_intent=file_request_intent, comp=comp, version=self._config.version, headers=_headers, @@ -1020,7 +1024,6 @@ def seal( # pylint: disable=inconsistent-return-statements append_position_access_conditions: Optional[_models.AppendPositionAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version 2019-12-12 version or later. diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py index 89d32d272c27..14167fee919d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
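To ground the generated changes above, here is a hedged sketch of the public `BlobClient.append_block_from_url` call that ultimately sets the new `x-ms-file-request-intent` header. The account URL, container, share, and blob names are placeholders, not values from this PR.

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobClient

blob = BlobClient(
    account_url="https://myaccount.blob.core.windows.net",  # placeholder
    container_name="mycontainer",
    blob_name="log.txt",
    credential=DefaultAzureCredential(),
)
blob.create_append_blob()  # the destination append blob must exist first
# When the copy source is an Azure Files URL and authentication uses a
# TokenCredential, the intent must be declared; 'backup' is the only value.
blob.append_block_from_url(
    "https://myaccount.file.core.windows.net/myshare/source.txt",  # placeholder
    source_token_intent="backup",
)
```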
@@ -66,7 +66,7 @@ def build_download_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -147,7 +147,7 @@ def build_get_properties_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -215,7 +215,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -266,7 +266,7 @@ def build_undelete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -304,7 +304,7 @@ def build_set_expiry_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -355,7 +355,7 @@ def build_set_http_headers_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -426,7 +426,7 @@ def build_set_immutability_policy_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -478,7 +478,7 @@ def build_delete_immutability_policy_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["immutabilityPolicies"] = kwargs.pop("comp", _params.pop("comp", "immutabilityPolicies")) - version: Literal["2025-01-05"] = kwargs.pop("version", 
_headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -521,7 +521,7 @@ def build_set_legal_hold_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["legalhold"] = kwargs.pop("comp", _params.pop("comp", "legalhold")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -573,7 +573,7 @@ def build_set_metadata_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -641,7 +641,7 @@ def build_acquire_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -699,7 +699,7 @@ def build_release_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -754,7 +754,7 @@ def build_renew_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -810,7 +810,7 @@ def build_change_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -866,7 +866,7 @@ def build_break_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = 
_headers.pop("Accept", "application/xml") # Construct URL @@ -926,7 +926,7 @@ def build_create_snapshot_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1005,7 +1005,7 @@ def build_start_copy_from_url_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1102,13 +1102,14 @@ def build_copy_from_url_request( copy_source_authorization: Optional[str] = None, encryption_scope: Optional[str] = None, copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) x_ms_requires_sync: Literal["true"] = kwargs.pop("x_ms_requires_sync", _headers.pop("x-ms-requires-sync", "true")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1179,6 +1180,8 @@ def build_copy_from_url_request( _headers["x-ms-encryption-scope"] = _SERIALIZER.header("encryption_scope", encryption_scope, "str") if copy_source_tags is not None: _headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") + if file_request_intent is not None: + _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) @@ -1200,7 +1203,7 @@ def build_abort_copy_from_url_request( copy_action_abort_constant: Literal["abort"] = kwargs.pop( "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort") ) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1246,7 +1249,7 @@ def build_set_tier_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["tier"] = kwargs.pop("comp", _params.pop("comp", "tier")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1290,7 +1293,7 @@ def build_get_account_info_request( restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) comp: Literal["properties"] = kwargs.pop("comp", 
_params.pop("comp", "properties")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1339,7 +1342,7 @@ def build_query_request( comp: Literal["query"] = kwargs.pop("comp", _params.pop("comp", "query")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1403,7 +1406,7 @@ def build_get_tags_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1454,7 +1457,7 @@ def build_set_tags_request( comp: Literal["tags"] = kwargs.pop("comp", _params.pop("comp", "tags")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -1530,7 +1533,6 @@ def download( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> Iterator[bytes]: - # pylint: disable=line-too-long """The Download operation reads or downloads a blob from the system, including its metadata and properties. You can also call Download to read a snapshot. @@ -1846,7 +1848,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system properties for the blob. It does not return the content of the blob. @@ -2050,7 +2051,6 @@ def delete( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """If the storage account's soft delete feature is disabled then, when a blob is deleted, it is permanently removed from the storage account. If the storage account's soft delete feature is enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible @@ -2175,7 +2175,6 @@ def delete( # pylint: disable=inconsistent-return-statements def undelete( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Undelete a blob that was previously soft deleted. :param timeout: The timeout parameter is expressed in seconds. 
For more information, see @@ -2248,7 +2247,6 @@ def set_expiry( # pylint: disable=inconsistent-return-statements expires_on: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets the time a blob will expire and be deleted. :param expiry_options: Required. Indicates mode of the expiry time. Known values are: @@ -2331,7 +2329,6 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set HTTP Headers operation sets system properties on the blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -2458,7 +2455,6 @@ def set_immutability_policy( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Immutability Policy operation sets the immutability policy on the blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -2564,7 +2560,6 @@ def delete_immutability_policy( # pylint: disable=inconsistent-return-statement version_id: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Delete Immutability Policy operation deletes the immutability policy on the blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -2650,7 +2645,6 @@ def set_legal_hold( # pylint: disable=inconsistent-return-statements version_id: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Legal Hold operation sets a legal hold on the blob. :param legal_hold: Specified if a legal hold should be set on the blob. Required. @@ -2742,7 +2736,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more name-value pairs. @@ -2881,7 +2874,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -2989,7 +2981,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -3089,7 +3080,6 @@ def renew_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -3191,7 +3181,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. 
@@ -3297,7 +3286,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete operations. @@ -3407,7 +3395,6 @@ def create_snapshot( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Create Snapshot operation creates a read-only snapshot of a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -3549,7 +3536,6 @@ def start_copy_from_url( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Start Copy From URL operation copies a blob or an internet resource to a new blob. :param copy_source: Specifies the name of the source page blob snapshot. This value is a URL of @@ -3718,13 +3704,13 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements legal_hold: Optional[bool] = None, copy_source_authorization: Optional[str] = None, copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, cpk_scope_info: Optional[_models.CpkScopeInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return a response until the copy is complete. @@ -3775,6 +3761,8 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and "COPY". Default value is None. :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param source_modified_access_conditions: Parameter group. Default value is None. :type source_modified_access_conditions: ~azure.storage.blob.models.SourceModifiedAccessConditions @@ -3856,6 +3844,7 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements copy_source_authorization=copy_source_authorization, encryption_scope=_encryption_scope, copy_source_tags=copy_source_tags, + file_request_intent=file_request_intent, x_ms_requires_sync=x_ms_requires_sync, version=self._config.version, headers=_headers, @@ -3907,7 +3896,6 @@ def abort_copy_from_url( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination blob with zero length and full metadata. 
@@ -4000,7 +3988,6 @@ def set_tier( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium storage account and on a block blob in a blob storage account (locally redundant storage only). A premium page blob's tier determines the allowed size, IOPS, and bandwidth of @@ -4105,7 +4092,6 @@ def set_tier( # pylint: disable=inconsistent-return-statements def get_account_info( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns the sku name and account kind. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -4186,7 +4172,6 @@ def query( query_request: Optional[_models.QueryRequest] = None, **kwargs: Any ) -> Iterator[bytes]: - # pylint: disable=line-too-long """The Query operation enables users to select/project on blob data by providing simple query expressions. @@ -4436,7 +4421,6 @@ def get_tags( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> _models.BlobTags: - # pylint: disable=line-too-long """The Get Tags operation enables users to get the tags associated with a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -4542,7 +4526,6 @@ def set_tags( # pylint: disable=inconsistent-return-statements tags: Optional[_models.BlobTags] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Set Tags operation enables users to set tags on a blob. :param timeout: The timeout parameter is expressed in seconds. For more information, see diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py index 206ee6aa19e8..620c686da88d 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
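As a usage illustration for the synchronous copy path changed above (`build_copy_from_url_request` / `copy_from_url`), a hedged sketch through the public surface with placeholder URLs; `requires_sync=True` routes `start_copy_from_url` to the Copy From URL operation.

```python
from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobClient

dest = BlobClient(
    account_url="https://myaccount.blob.core.windows.net",  # placeholder
    container_name="mycontainer",
    blob_name="copied.txt",
    credential=DefaultAzureCredential(),
)
result = dest.start_copy_from_url(
    "https://myaccount.file.core.windows.net/myshare/source.txt",  # placeholder
    requires_sync=True,            # synchronous copy -> x-ms-requires-sync: true
    source_token_intent="backup",  # forwarded as x-ms-file-request-intent
)
print(result["copy_status"])  # a sync copy completes before the call returns
```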
@@ -79,7 +79,7 @@ def build_upload_request( blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -213,13 +213,14 @@ def build_put_blob_from_url_request( copy_source_blob_properties: Optional[bool] = None, copy_source_authorization: Optional[str] = None, copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) blob_type: Literal["BlockBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "BlockBlob")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -317,6 +318,8 @@ def build_put_blob_from_url_request( ) if copy_source_tags is not None: _headers["x-ms-copy-source-tag-option"] = _SERIALIZER.header("copy_source_tags", copy_source_tags, "str") + if file_request_intent is not None: + _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) @@ -346,7 +349,7 @@ def build_stage_block_request( comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -422,13 +425,14 @@ def build_stage_block_from_url_request( source_if_none_match: Optional[str] = None, request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["block"] = kwargs.pop("comp", _params.pop("comp", "block")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -487,6 +491,8 @@ def build_stage_block_from_url_request( _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( "copy_source_authorization", copy_source_authorization, "str" ) + if file_request_intent is not None: + _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return 
HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) @@ -529,7 +535,7 @@ def build_commit_block_list_request( comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -635,7 +641,7 @@ def build_get_block_list_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["blocklist"] = kwargs.pop("comp", _params.pop("comp", "blocklist")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -710,7 +716,6 @@ def upload( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Upload Block Blob operation updates the content of an existing block blob. Updating an existing block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put Blob; the content of the existing blob is overwritten with the content of @@ -928,6 +933,7 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements copy_source_blob_properties: Optional[bool] = None, copy_source_authorization: Optional[str] = None, copy_source_tags: Optional[Union[str, _models.BlobCopySourceTags]] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, blob_http_headers: Optional[_models.BlobHTTPHeaders] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, cpk_info: Optional[_models.CpkInfo] = None, @@ -936,7 +942,6 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are not supported with Put Blob from URL; the content of an existing blob is @@ -990,6 +995,8 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements copied or replaced with the tags specified by x-ms-tags. Known values are: "REPLACE" and "COPY". Default value is None. :type copy_source_tags: str or ~azure.storage.blob.models.BlobCopySourceTags + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param blob_http_headers: Parameter group. Default value is None. :type blob_http_headers: ~azure.storage.blob.models.BlobHTTPHeaders :param lease_access_conditions: Parameter group. Default value is None. 
@@ -1105,6 +1112,7 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements copy_source_blob_properties=copy_source_blob_properties, copy_source_authorization=copy_source_authorization, copy_source_tags=copy_source_tags, + file_request_intent=file_request_intent, blob_type=blob_type, version=self._config.version, headers=_headers, @@ -1165,7 +1173,6 @@ def stage_block( # pylint: disable=inconsistent-return-statements cpk_scope_info: Optional[_models.CpkScopeInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob. :param block_id: A valid Base64 string value that identifies the block. Prior to encoding, the @@ -1313,13 +1320,13 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, cpk_info: Optional[_models.CpkInfo] = None, cpk_scope_info: Optional[_models.CpkScopeInfo] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Stage Block operation creates a new block to be committed as part of a blob where the contents are read from a URL. @@ -1351,6 +1358,8 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. Default value is None. :type copy_source_authorization: str + :param file_request_intent: Valid value is backup. "backup" Default value is None. + :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent :param cpk_info: Parameter group. Default value is None. :type cpk_info: ~azure.storage.blob.models.CpkInfo :param cpk_scope_info: Parameter group. Default value is None. @@ -1421,6 +1430,7 @@ def stage_block_from_url( # pylint: disable=inconsistent-return-statements source_if_none_match=_source_if_none_match, request_id_parameter=request_id_parameter, copy_source_authorization=copy_source_authorization, + file_request_intent=file_request_intent, comp=comp, version=self._config.version, headers=_headers, @@ -1485,7 +1495,6 @@ def commit_block_list( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Commit Block List operation writes a blob by specifying the list of block IDs that make up the blob. In order to be written as part of a blob, a block must have been successfully written to the server in a prior Put Block operation. You can call Put Block List to update a blob by @@ -1689,7 +1698,6 @@ def get_block_list( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> _models.BlockList: - # pylint: disable=line-too-long """The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block blob. 
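For the block-blob operations above (`put_blob_from_url`, `stage_block_from_url`), a hedged stage-and-commit sketch with placeholder names; the block ID must be a valid Base64 string of consistent length.

```python
import base64

from azure.identity import DefaultAzureCredential
from azure.storage.blob import BlobBlock, BlobClient

blob = BlobClient(
    account_url="https://myaccount.blob.core.windows.net",  # placeholder
    container_name="mycontainer",
    blob_name="assembled.bin",
    credential=DefaultAzureCredential(),
)
block_id = base64.b64encode(b"block-0000").decode()
blob.stage_block_from_url(
    block_id,
    "https://myaccount.file.core.windows.net/myshare/chunk0.bin",  # placeholder
    source_token_intent="backup",  # required for file-share sources with OAuth
)
blob.commit_block_list([BlobBlock(block_id=block_id)])  # make the block visible
```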
diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py index 3593b490e3aa..368e19c3c594 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_container_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -56,7 +56,7 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -105,7 +105,7 @@ def build_get_properties_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -146,7 +146,7 @@ def build_delete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -192,7 +192,7 @@ def build_set_metadata_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -237,7 +237,7 @@ def build_get_access_policy_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -283,7 +283,7 @@ def build_set_access_policy_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: 
Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -333,7 +333,7 @@ def build_restore_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -381,7 +381,7 @@ def build_rename_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -425,7 +425,7 @@ def build_submit_batch_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -470,7 +470,7 @@ def build_filter_blobs_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -521,7 +521,7 @@ def build_acquire_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -572,7 +572,7 @@ def build_release_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -620,7 +620,7 @@ def build_renew_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = 
kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -668,7 +668,7 @@ def build_break_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -718,7 +718,7 @@ def build_change_lease_request( comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease")) restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -767,7 +767,7 @@ def build_list_blob_flat_segment_request( restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -818,7 +818,7 @@ def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -861,7 +861,7 @@ def build_get_account_info_request( restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -916,7 +916,6 @@ def create( # pylint: disable=inconsistent-return-statements container_cpk_scope_info: Optional[_models.ContainerCpkScopeInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """creates a new container under the specified account. If the container with the same name already exists, the operation fails. 
@@ -1014,7 +1013,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """returns all user-defined metadata and system properties for the specified container. The data returned does not include the container's list of blobs. @@ -1117,7 +1115,6 @@ def delete( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """operation marks the specified container for deletion. The container and any blobs contained within it are later deleted during garbage collection. @@ -1208,7 +1205,6 @@ def set_metadata( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """operation sets one or more user-defined name-value pairs for the specified container. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -1306,7 +1302,6 @@ def get_access_policy( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> List[_models.SignedIdentifier]: - # pylint: disable=line-too-long """gets the permissions for the specified container. The permissions indicate whether container data may be accessed publicly. @@ -1400,7 +1395,6 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements container_acl: Optional[List[_models.SignedIdentifier]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """sets the permissions for the specified container. The permissions indicate whether blobs in a container may be accessed publicly. @@ -1510,7 +1504,6 @@ def restore( # pylint: disable=inconsistent-return-statements deleted_container_version: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Restores a previously-deleted container. :param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -1593,7 +1586,6 @@ def rename( # pylint: disable=inconsistent-return-statements source_lease_id: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Renames an existing container. :param source_container_name: Required. Specifies the name of the container to rename. @@ -1676,7 +1668,6 @@ def submit_batch( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> Iterator[bytes]: - # pylint: disable=line-too-long """The Batch operation allows multiple API calls to be embedded into a single HTTP request. :param content_length: The length of the request. Required. @@ -1771,7 +1762,6 @@ def filter_blobs( include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, **kwargs: Any ) -> _models.FilterBlobSegment: - # pylint: disable=line-too-long """The Filter Blobs operation enables callers to list blobs in a container whose tags match a given search expression. Filter blobs searches within the given container. @@ -1876,7 +1866,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. 
@@ -1977,7 +1966,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -2070,7 +2058,6 @@ def renew_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -2164,7 +2151,6 @@ def break_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -2265,7 +2251,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] establishes and manages a lock on a container for delete operations. The lock duration can be 15 to 60 seconds, or can be infinite. @@ -2366,7 +2351,6 @@ def list_blob_flat_segment( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.ListBlobsFlatSegmentResponse: - # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param prefix: Filters the results to return only containers whose name begins with the @@ -2473,7 +2457,6 @@ def list_blob_hierarchy_segment( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.ListBlobsHierarchySegmentResponse: - # pylint: disable=line-too-long """[Update] The List Blobs operation returns a list of the blobs under the specified container. :param delimiter: When the request includes this parameter, the operation returns a BlobPrefix @@ -2578,7 +2561,6 @@ def list_blob_hierarchy_segment( def get_account_info( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns the sku name and account kind. :param timeout: The timeout parameter is expressed in seconds. For more information, see diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py index 747cfbd8292d..3312c4b11299 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_page_blob_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
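The container access-policy builders above now ride the 2025-07-05 service version that permits OAuth. Below is a hedged round-trip sketch using a `TokenCredential`; the account and container names are placeholders.

```python
from datetime import datetime, timedelta, timezone

from azure.identity import DefaultAzureCredential
from azure.storage.blob import AccessPolicy, ContainerClient, ContainerSasPermissions

container = ContainerClient(
    account_url="https://myaccount.blob.core.windows.net",  # placeholder
    container_name="mycontainer",
    credential=DefaultAzureCredential(),  # OAuth now accepted for the ACL APIs
)
policy = AccessPolicy(
    permission=ContainerSasPermissions(read=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
container.set_container_access_policy(signed_identifiers={"read-only": policy})
print(container.get_container_access_policy()["signed_identifiers"])
```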
@@ -75,7 +75,7 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) blob_type: Literal["PageBlob"] = kwargs.pop("blob_type", _headers.pop("x-ms-blob-type", "PageBlob")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -193,7 +193,7 @@ def build_upload_pages_request( comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -299,7 +299,7 @@ def build_clear_pages_request( comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) page_write: Literal["clear"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "clear")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -391,6 +391,7 @@ def build_upload_pages_from_url_request( source_if_none_match: Optional[str] = None, request_id_parameter: Optional[str] = None, copy_source_authorization: Optional[str] = None, + file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) @@ -398,7 +399,7 @@ def build_upload_pages_from_url_request( comp: Literal["page"] = kwargs.pop("comp", _params.pop("comp", "page")) page_write: Literal["update"] = kwargs.pop("page_write", _headers.pop("x-ms-page-write", "update")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -479,6 +480,8 @@ def build_upload_pages_from_url_request( _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header( "copy_source_authorization", copy_source_authorization, "str" ) + if file_request_intent is not None: + _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) @@ -505,7 +508,7 @@ def build_get_page_ranges_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["pagelist"] = kwargs.pop("comp", _params.pop("comp", "pagelist")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -573,7 +576,7 @@ def build_get_page_ranges_diff_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["pagelist"] = 
kwargs.pop("comp", _params.pop("comp", "pagelist")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -644,7 +647,7 @@ def build_resize_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -711,7 +714,7 @@ def build_update_sequence_number_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -770,7 +773,7 @@ def build_copy_incremental_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["incrementalcopy"] = kwargs.pop("comp", _params.pop("comp", "incrementalcopy")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -846,7 +849,6 @@ def create( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Create operation creates a new page blob. :param content_length: The length of the request. Required. @@ -1046,7 +1048,6 @@ def upload_pages( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Upload Pages operation writes a range of pages to a page blob. :param content_length: The length of the request. Required. @@ -1234,7 +1235,6 @@ def clear_pages( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Clear Pages operation clears a set of pages from a page blob. :param content_length: The length of the request. Required. 
@@ -1385,6 +1385,7 @@ def upload_pages_from_url(  # pylint: disable=inconsistent-return-statements
        timeout: Optional[int] = None,
        request_id_parameter: Optional[str] = None,
        copy_source_authorization: Optional[str] = None,
+       file_request_intent: Optional[Union[str, _models.FileShareTokenIntent]] = None,
        cpk_info: Optional[_models.CpkInfo] = None,
        cpk_scope_info: Optional[_models.CpkScopeInfo] = None,
        lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
@@ -1393,7 +1394,6 @@ def upload_pages_from_url(  # pylint: disable=inconsistent-return-statements
        source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
        **kwargs: Any
    ) -> None:
-       # pylint: disable=line-too-long
        """The Upload Pages operation writes a range of pages to a page blob where the contents are read
        from a URL.
@@ -1425,6 +1425,8 @@ def upload_pages_from_url(  # pylint: disable=inconsistent-return-statements
        :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
         OAuth access token to copy source. Default value is None.
        :type copy_source_authorization: str
+       :param file_request_intent: Valid value is "backup". Default value is None.
+       :type file_request_intent: str or ~azure.storage.blob.models.FileShareTokenIntent
        :param cpk_info: Parameter group. Default value is None.
        :type cpk_info: ~azure.storage.blob.models.CpkInfo
        :param cpk_scope_info: Parameter group. Default value is None.
@@ -1529,6 +1531,7 @@ def upload_pages_from_url(  # pylint: disable=inconsistent-return-statements
            source_if_none_match=_source_if_none_match,
            request_id_parameter=request_id_parameter,
            copy_source_authorization=copy_source_authorization,
+           file_request_intent=file_request_intent,
            comp=comp,
            page_write=page_write,
            version=self._config.version,
@@ -1588,7 +1591,6 @@ def get_page_ranges(
        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
        **kwargs: Any
    ) -> _models.PageList:
-       # pylint: disable=line-too-long
        """The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot
        of a page blob.
@@ -1728,7 +1730,6 @@ def get_page_ranges_diff(
        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
        **kwargs: Any
    ) -> _models.PageList:
-       # pylint: disable=line-too-long
        """The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that
        were changed between target blob and previous snapshot.
@@ -1879,7 +1880,6 @@ def resize(  # pylint: disable=inconsistent-return-statements
        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
        **kwargs: Any
    ) -> None:
-       # pylint: disable=line-too-long
        """Resize the Blob.
        :param blob_content_length: This header specifies the maximum size for the page blob, up to 1
@@ -2006,7 +2006,6 @@ def update_sequence_number(  # pylint: disable=inconsistent-return-statements
        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
        **kwargs: Any
    ) -> None:
-       # pylint: disable=line-too-long
        """Update the sequence number of the blob.
        :param sequence_number_action: Required if the x-ms-blob-sequence-number header is set for the
@@ -2120,7 +2119,6 @@ def copy_incremental(  # pylint: disable=inconsistent-return-statements
        modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None,
        **kwargs: Any
    ) -> None:
-       # pylint: disable=line-too-long
        """The Copy Incremental operation copies a snapshot of the source page blob to a destination page
        blob.
The snapshot is copied such that only the differential changes between the previously copied snapshot are transferred to the destination. The copied snapshots are complete copies of diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py index e6c164efe29a..dac984c43203 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_service_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -49,7 +49,7 @@ def build_set_properties_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -85,7 +85,7 @@ def build_get_properties_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -119,7 +119,7 @@ def build_get_statistics_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -160,7 +160,7 @@ def build_list_containers_segment_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -202,7 +202,7 @@ def build_get_user_delegation_key_request( restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service")) comp: Literal["userdelegationkey"] = kwargs.pop("comp", _params.pop("comp", "userdelegationkey")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -238,7 +238,7 
@@ def build_get_account_info_request( restype: Literal["account"] = kwargs.pop("restype", _params.pop("restype", "account")) comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -278,7 +278,7 @@ def build_submit_batch_request( comp: Literal["batch"] = kwargs.pop("comp", _params.pop("comp", "batch")) multipart_content_type: Optional[str] = kwargs.pop("multipart_content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -321,7 +321,7 @@ def build_filter_blobs_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["blobs"] = kwargs.pop("comp", _params.pop("comp", "blobs")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-07-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-07-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -381,7 +381,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets properties for a storage account's Blob service endpoint, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -458,7 +457,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements def get_properties( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.StorageServiceProperties: - # pylint: disable=line-too-long """gets the properties of a storage account's Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -532,7 +530,6 @@ def get_properties( def get_statistics( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.StorageServiceStats: - # pylint: disable=line-too-long """Retrieves statistics related to replication for the Blob service. It is only available on the secondary location endpoint when read-access geo-redundant replication is enabled for the storage account. @@ -615,7 +612,6 @@ def list_containers_segment( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.ListContainersSegmentResponse: - # pylint: disable=line-too-long """The List Containers Segment operation returns a list of the containers under the specified account. @@ -715,7 +711,6 @@ def get_user_delegation_key( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.UserDelegationKey: - # pylint: disable=line-too-long """Retrieves a user delegation key for the Blob service. This is only a valid operation when using bearer token authentication. @@ -797,7 +792,6 @@ def get_user_delegation_key( def get_account_info( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns the sku name and account kind. 
:param timeout: The timeout parameter is expressed in seconds. For more information, see @@ -875,7 +869,6 @@ def submit_batch( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> Iterator[bytes]: - # pylint: disable=line-too-long """The Batch operation allows multiple API calls to be embedded into a single HTTP request. :param content_length: The length of the request. Required. @@ -968,7 +961,6 @@ def filter_blobs( include: Optional[List[Union[str, _models.FilterBlobsIncludeItem]]] = None, **kwargs: Any ) -> _models.FilterBlobSegment: - # pylint: disable=line-too-long """The Filter Blobs operation enables callers to list blobs across all containers whose tags match a given search expression. Filter blobs searches across all containers within a storage account but can be scoped within the expression to a single container. diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_quick_query_helper.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_quick_query_helper.py index 95f8a4427bba..60bda00db5c2 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_quick_query_helper.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_quick_query_helper.py @@ -5,7 +5,10 @@ # -------------------------------------------------------------------------- from io import BytesIO -from typing import Any, Dict, Generator, IO, Iterable, Optional, Type, Union, TYPE_CHECKING +from typing import ( + Any, Dict, Generator, IO, Iterable, Optional, Type, + TYPE_CHECKING +) from ._shared.avro.avro_io import DatumReader from ._shared.avro.datafile import DataFileReader @@ -14,11 +17,11 @@ from ._models import BlobQueryError -class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes +class BlobQueryReader: # pylint: disable=too-many-instance-attributes """A streaming object to read query results.""" name: str - """The name of the blob being quered.""" + """The name of the blob being queried.""" container: str """The name of the container where the blob is.""" response_headers: Dict[str, Any] @@ -28,8 +31,7 @@ class BlobQueryReader(object): # pylint: disable=too-many-instance-attributes method will return these lines via a generator.""" def __init__( - self, - name: str = None, # type: ignore [assignment] + self, name: str = None, # type: ignore [assignment] container: str = None, # type: ignore [assignment] errors: Any = None, record_delimiter: str = '\n', @@ -50,7 +52,7 @@ def __init__( self._first_result = self._process_record(next(self._parsed_results)) self._error_cls = error_cls - def __len__(self): + def __len__(self) -> int: return self._size def _process_record(self, result: Dict[str, Any]) -> Optional[bytes]: @@ -77,21 +79,19 @@ def _iter_stream(self) -> Generator[bytes, None, None]: if processed_result is not None: yield processed_result - def readall(self) -> Union[bytes, str]: + def readall(self) -> bytes: """Return all query results. This operation is blocking until all data is downloaded. - If encoding has been configured - this will be used to decode individual - records are they are received. :returns: The query results. 
- :rtype: Union[bytes, str] + :rtype: bytes """ stream = BytesIO() self.readinto(stream) data = stream.getvalue() if self._encoding: - return data.decode(self._encoding) + return data.decode(self._encoding) # type: ignore [return-value] return data def readinto(self, stream: IO) -> None: @@ -105,29 +105,25 @@ def readinto(self, stream: IO) -> None: for record in self._iter_stream(): stream.write(record) - def records(self) -> Iterable[Union[bytes, str]]: + def records(self) -> Iterable[bytes]: """Returns a record generator for the query result. Records will be returned line by line. - If encoding has been configured - this will be used to decode individual - records are they are received. :returns: A record generator for the query result. - :rtype: Iterable[Union[bytes, str]] + :rtype: Iterable[bytes] """ delimiter = self.record_delimiter.encode('utf-8') for record_chunk in self._iter_stream(): for record in record_chunk.split(delimiter): if self._encoding: - yield record.decode(self._encoding) + yield record.decode(self._encoding) # type: ignore [misc] else: yield record -class QuickQueryStreamer(object): - """ - File-like streaming iterator. - """ +class QuickQueryStreamer: + """File-like streaming iterator.""" def __init__(self, generator): self.generator = generator @@ -183,7 +179,7 @@ def read(self, size): if relative_start < 0: raise ValueError("Buffer has dumped too much data") relative_end = relative_start + size - data = self._buf[relative_start: relative_end] + data = self._buf[relative_start:relative_end] # dump the extra data in buffer # buffer start--------------------16bytes----current read position diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py index 316e321cd8af..6ed341938224 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_serialize.py @@ -59,6 +59,7 @@ '2024-11-04', '2025-01-05', '2025-05-05', + '2025-07-05', ] diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py index 7cb074487f58..a88fe9980655 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_blob_client_async.py @@ -9,7 +9,8 @@ from datetime import datetime from functools import partial from typing import ( - Any, AnyStr, AsyncIterable, cast, Dict, IO, Iterable, List, Optional, overload, Tuple, Union, + Any, AnyStr, AsyncIterable, Callable, cast, Dict, IO, + Iterable, List, Optional, overload, Tuple, Union, TYPE_CHECKING ) from typing_extensions import Self @@ -23,6 +24,7 @@ from ._download_async import StorageStreamDownloader from ._lease_async import BlobLeaseClient from ._models import PageRangePaged +from ._quick_query_helper_async import BlobQueryReader from ._upload_helpers import ( upload_append_blob, upload_block_blob, @@ -46,6 +48,7 @@ _get_block_list_result, _get_page_ranges_options, _parse_url, + _quick_query_options, _resize_blob_options, _seal_append_blob_options, _set_blob_metadata_options, @@ -69,21 +72,27 @@ from .._encryption import StorageEncryptionMixin, _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION from .._generated.aio import AzureBlobStorage from .._generated.models import CpkInfo -from .._models import BlobType, BlobBlock, BlobProperties, PageRange +from .._models import BlobType, BlobBlock, BlobProperties, 
BlobQueryError, PageRange
from .._serialize import get_access_conditions, get_api_version, get_modify_conditions, get_version_id
from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str
from .._shared.policies_async import ExponentialRetry
from .._shared.response_handlers import process_storage_error, return_response_headers

 if TYPE_CHECKING:
+    from azure.core import MatchConditions
     from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential
     from azure.core.credentials_async import AsyncTokenCredential
     from azure.core.pipeline.policies import AsyncHTTPPolicy
+    from azure.storage.blob import CustomerProvidedEncryptionKey
     from azure.storage.blob.aio import ContainerClient
     from .._models import (
+        ArrowDialect,
         ContentSettings,
+        DelimitedJsonDialect,
+        DelimitedTextDialect,
         ImmutabilityPolicy,
         PremiumPageBlobTier,
+        QuickQueryDialect,
         SequenceNumberAction,
         StandardBlobTier
     )
@@ -412,6 +421,15 @@ async def upload_blob_from_url(
         :keyword str source_authorization:
             Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
             the prefix of the source_authorization string.
+        :keyword source_token_intent:
+            Required when source is Azure Storage Files and using `TokenCredential` for authentication.
+            This is ignored for other forms of authentication.
+            Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are:
+
+            backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory
+            ACLs are bypassed and full permissions are granted. User must also have required RBAC permission.
+
+        :paramtype source_token_intent: Literal['backup']
         :returns: Response from creating a new block blob for a given URL.
         :rtype: Dict[str, Any]
         """
@@ -420,7 +438,8 @@ async def upload_blob_from_url(
         options = _upload_blob_from_url_options(
             source_url=source_url,
             metadata=metadata,
-            **kwargs)
+            **kwargs
+        )
         try:
             return cast(Dict[str, Any], await self._client.block_blob.put_blob_from_url(**options))
         except HttpResponseError as error:
@@ -746,6 +765,133 @@ async def download_blob(
         await downloader._setup()  # pylint: disable=protected-access
         return downloader

+    @distributed_trace_async
+    async def query_blob(
+        self, query_expression: str,
+        *,
+        on_error: Optional[Callable[[BlobQueryError], None]] = None,
+        blob_format: Optional[Union["DelimitedTextDialect", "DelimitedJsonDialect", "QuickQueryDialect", str]] = None,
+        output_format: Optional[Union["DelimitedTextDialect", "DelimitedJsonDialect", "QuickQueryDialect", List["ArrowDialect"], str]] = None,  # pylint: disable=line-too-long
+        lease: Optional[Union[BlobLeaseClient, str]] = None,
+        if_modified_since: Optional[datetime] = None,
+        if_unmodified_since: Optional[datetime] = None,
+        etag: Optional[str] = None,
+        match_condition: Optional["MatchConditions"] = None,
+        if_tags_match_condition: Optional[str] = None,
+        cpk: Optional["CustomerProvidedEncryptionKey"] = None,
+        timeout: Optional[int] = None,
+        **kwargs: Any
+    ) -> BlobQueryReader:
+        """Enables users to select/project on blob or blob snapshot data by providing simple query expressions.
+        This operation returns a BlobQueryReader; call readall() or readinto() on it to retrieve the query data.
+
+        :param str query_expression:
+            Required. A query statement. For more details see
+            https://learn.microsoft.com/azure/storage/blobs/query-acceleration-sql-reference.
+        :keyword Callable[~azure.storage.blob.BlobQueryError] on_error:
+            A function to be called on any processing errors returned by the service.
+        :keyword blob_format:
+            Optional. Defines the serialization of the data currently stored in the blob. The default is to
+            treat the blob data as CSV data formatted in the default dialect. This can be overridden with
+            a custom DelimitedTextDialect, or DelimitedJsonDialect or "ParquetDialect" (passed as a string or enum).
+            These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string.
+
+            .. note::
+                "ParquetDialect" is in preview, so some features may not work as intended.
+
+        :paramtype blob_format:
+            ~azure.storage.blob.DelimitedTextDialect or
+            ~azure.storage.blob.DelimitedJsonDialect or
+            ~azure.storage.blob.QuickQueryDialect or
+            str
+        :keyword output_format:
+            Optional. Defines the output serialization for the data stream. By default the data will be returned
+            as it is represented in the blob (Parquet formats default to DelimitedTextDialect).
+            By providing an output format, the blob data will be reformatted according to that profile.
+            This value can be a DelimitedTextDialect or a DelimitedJsonDialect or ArrowDialect.
+            These dialects can be passed through their respective classes, the QuickQueryDialect enum or as a string.
+        :paramtype output_format:
+            ~azure.storage.blob.DelimitedTextDialect or
+            ~azure.storage.blob.DelimitedJsonDialect or
+            ~azure.storage.blob.QuickQueryDialect or
+            List[~azure.storage.blob.ArrowDialect] or
+            str
+        :keyword lease:
+            Required if the blob has an active lease. Value can be a BlobLeaseClient object
+            or the lease ID as a string.
+        :paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
+        :keyword ~datetime.datetime if_modified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetime will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has been modified since the specified date/time.
+        :keyword ~datetime.datetime if_unmodified_since:
+            A DateTime value. Azure expects the date value passed in to be UTC.
+            If timezone is included, any non-UTC datetime will be converted to UTC.
+            If a date is passed in without timezone info, it is assumed to be UTC.
+            Specify this header to perform the operation only if
+            the resource has not been modified since the specified date/time.
+        :keyword str etag:
+            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
+            and act according to the condition specified by the `match_condition` parameter.
+        :keyword ~azure.core.MatchConditions match_condition:
+            The match condition to use upon the etag.
+        :keyword str if_tags_match_condition:
+            Specify a SQL where clause on blob tags to operate only on blob with a matching value.
+            e.g. ``\"\\\"tagname\\\"='my tag'\"``
+
+            .. versionadded:: 12.4.0
+
+        :keyword ~azure.storage.blob.CustomerProvidedEncryptionKey cpk:
+            Encrypts the data on the service-side with the given key.
+            Use of customer-provided keys must be done over HTTPS.
+            As the encryption key itself is provided in the request,
+            a secure connection must be established to transfer the key.
+        :keyword int timeout:
+            Sets the server-side timeout for the operation in seconds. For more details see
+            https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations.
+            This value is not tracked or validated on the client. To configure client-side network timeouts
+            see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-blob
+            #other-client--per-operation-configuration>`__.
+        :returns: A streaming object (BlobQueryReader)
+        :rtype: ~azure.storage.blob.aio.BlobQueryReader
+        """
+        error_cls = kwargs.pop("error_cls", BlobQueryError)
+        encoding = kwargs.pop("encoding", None)
+        if cpk and self.scheme.lower() != 'https':
+            raise ValueError("Customer provided encryption key must be used over HTTPS.")
+        options, delimiter = _quick_query_options(
+            self.snapshot,
+            query_expression,
+            blob_format=blob_format,
+            output_format=output_format,
+            lease=lease,
+            if_modified_since=if_modified_since,
+            if_unmodified_since=if_unmodified_since,
+            etag=etag,
+            match_condition=match_condition,
+            if_tags_match_condition=if_tags_match_condition,
+            cpk=cpk,
+            timeout=timeout,
+            **kwargs
+        )
+        try:
+            headers, raw_response_body = await self._client.blob.query(**options)
+        except HttpResponseError as error:
+            process_storage_error(error)
+        blob_query_reader = BlobQueryReader(
+            name=self.blob_name,
+            container=self.container_name,
+            errors=on_error,
+            record_delimiter=delimiter,
+            encoding=encoding,
+            headers=headers,
+            response=raw_response_body,
+            error_cls=error_cls
+        )
+        await blob_query_reader._setup()  # pylint: disable=protected-access
+        return blob_query_reader
+
     @distributed_trace_async
     async def delete_blob(self, delete_snapshots: Optional[str] = None, **kwargs: Any) -> None:
         """Marks the specified blob for deletion.
@@ -1650,6 +1796,15 @@ async def start_copy_from_url(

             .. versionadded:: 12.9.0

+        :keyword source_token_intent:
+            Required when source is Azure Storage Files and using `TokenCredential` for authentication.
+            This is ignored for other forms of authentication.
+            Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are:
+
+            backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory
+            ACLs are bypassed and full permissions are granted. User must also have required RBAC permission.
+
+        :paramtype source_token_intent: Literal['backup']
         :keyword str encryption_scope:
             A predefined encryption scope used to encrypt the data on the sync copied blob. An encryption
             scope can be created using the Management API and referenced here by name. If a default
@@ -1674,7 +1829,8 @@
             source_url=source_url,
             metadata=metadata,
             incremental_copy=incremental_copy,
-            **kwargs)
+            **kwargs
+        )
         try:
             if incremental_copy:
                 return cast(Dict[str, Union[str, datetime]], await self._client.page_blob.copy_incremental(**options))
@@ -1944,6 +2100,15 @@ async def stage_block_from_url(
         :keyword str source_authorization:
             Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is
             the prefix of the source_authorization string.
+        :keyword source_token_intent:
+            Required when source is Azure Storage Files and using `TokenCredential` for authentication.
+            This is ignored for other forms of authentication.
+            Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are:
+
+            backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory
+            ACLs are bypassed and full permissions are granted. User must also have required RBAC permission.
+
+        :paramtype source_token_intent: Literal['backup']
         :returns: Blob property dict.
:rtype: Dict[str, Any] """ @@ -1955,7 +2120,8 @@ async def stage_block_from_url( source_offset=source_offset, source_length=source_length, source_content_md5=source_content_md5, - **kwargs) + **kwargs + ) try: return cast(Dict[str, Any], await self._client.block_blob.stage_block_from_url(**options)) except HttpResponseError as error: @@ -2819,6 +2985,15 @@ async def upload_pages_from_url( :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :keyword source_token_intent: + Required when source is Azure Storage Files and using `TokenCredential` for authentication. + This is ignored for other forms of authentication. + Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are: + + backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory + ACLs are bypassed and full permissions are granted. User must also have required RBAC permission. + + :paramtype source_token_intent: Literal['backup'] :returns: Response after uploading pages from specified URL. :rtype: Dict[str, Any] """ @@ -3112,6 +3287,15 @@ async def append_block_from_url( :keyword str source_authorization: Authenticate as a service principal using a client secret to access a source blob. Ensure "bearer " is the prefix of the source_authorization string. + :keyword source_token_intent: + Required when source is Azure Storage Files and using `TokenCredential` for authentication. + This is ignored for other forms of authentication. + Specifies the intent for all requests when using `TokenCredential` authentication. Possible values are: + + backup - Specifies requests are intended for backup/admin type operations, meaning that all file/directory + ACLs are bypassed and full permissions are granted. User must also have required RBAC permission. + + :paramtype source_token_intent: Literal['backup'] :returns: Result after appending a new block. :rtype: Dict[str, Union[str, datetime, int]] """ diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_quick_query_helper_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_quick_query_helper_async.py new file mode 100644 index 000000000000..3d05d2e771e9 --- /dev/null +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_quick_query_helper_async.py @@ -0,0 +1,194 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from io import BytesIO +from typing import ( + Any, AsyncGenerator, AsyncIterable, Dict, IO, Optional, Type, + TYPE_CHECKING +) + +from .._shared.avro.avro_io_async import AsyncDatumReader +from .._shared.avro.datafile_async import AsyncDataFileReader + +if TYPE_CHECKING: + from .._models import BlobQueryError + + +class BlobQueryReader: # pylint: disable=too-many-instance-attributes + """A streaming object to read query results.""" + + name: str + """The name of the blob being queried.""" + container: str + """The name of the container where the blob is.""" + response_headers: Dict[str, Any] + """The response headers of the quick query request.""" + record_delimiter: str + """The delimiter used to separate lines, or records with the data. 
The `records` + method will return these lines via a generator.""" + + def __init__( + self, name: str = None, # type: ignore [assignment] + container: str = None, # type: ignore [assignment] + errors: Any = None, + record_delimiter: str = '\n', + encoding: Optional[str] = None, + headers: Dict[str, Any] = None, # type: ignore [assignment] + response: Any = None, + error_cls: Type["BlobQueryError"] = None, # type: ignore [assignment] + ) -> None: + self.name = name + self.container = container + self.response_headers = headers + self.record_delimiter = record_delimiter + self._size = 0 + self._bytes_processed = 0 + self._errors = errors + self._encoding = encoding + self._parsed_results = AsyncDataFileReader(QuickQueryStreamer(response), AsyncDatumReader()) + self._error_cls = error_cls + + async def _setup(self): + self._parsed_results = await self._parsed_results.init() + first_result = await self._parsed_results.__anext__() + self._first_result = self._process_record(first_result) # pylint: disable=attribute-defined-outside-init + + def __len__(self) -> int: + return self._size + + def _process_record(self, result: Dict[str, Any]) -> Optional[bytes]: + self._size = result.get('totalBytes', self._size) + self._bytes_processed = result.get('bytesScanned', self._bytes_processed) + if 'data' in result: + return result.get('data') + if 'fatal' in result: + error = self._error_cls( + error=result['name'], + is_fatal=result['fatal'], + description=result['description'], + position=result['position'] + ) + if self._errors: + self._errors(error) + return None + + async def _aiter_stream(self) -> AsyncGenerator[bytes, None]: + if self._first_result is not None: + yield self._first_result + async for next_result in self._parsed_results: + processed_result = self._process_record(next_result) + if processed_result is not None: + yield processed_result + + async def readall(self) -> bytes: + """Return all query results. + + This operation is blocking until all data is downloaded. + + :returns: The query results. + :rtype: bytes + """ + stream = BytesIO() + await self.readinto(stream) + data = stream.getvalue() + if self._encoding: + return data.decode(self._encoding) # type: ignore [return-value] + return data + + async def readinto(self, stream: IO) -> None: + """Download the query result to a stream. + + :param IO stream: + The stream to download to. This can be an open file-handle, + or any writable stream. + :returns: None + """ + async for record in self._aiter_stream(): + stream.write(record) + + async def records(self) -> AsyncIterable[bytes]: + """Returns a record generator for the query result. + + Records will be returned line by line. + + :returns: A record generator for the query result. 
+ :rtype: AsyncIterable[bytes] + """ + delimiter = self.record_delimiter.encode('utf-8') + async for record_chunk in self._aiter_stream(): + for record in record_chunk.split(delimiter): + if self._encoding: + yield record.decode(self._encoding) # type: ignore [misc] + else: + yield record + + +class QuickQueryStreamer: + """File-like streaming iterator.""" + + def __init__(self, generator): + self.generator = generator + self.iterator = generator.__aiter__() + self._buf = b"" + self._point = 0 + self._download_offset = 0 + self._buf_start = 0 + self.file_length = None + + def __len__(self): + return self.file_length + + def __aiter__(self): + return self.iterator + + @staticmethod + def seekable(): + return True + + async def __anext__(self): + next_part = await self.iterator.__anext__() + self._download_offset += len(next_part) + return next_part + + def tell(self): + return self._point + + async def seek(self, offset, whence=0): + if whence == 0: + self._point = offset + elif whence == 1: + self._point += offset + else: + raise ValueError("whence must be 0 or 1") + if self._point < 0: # pylint: disable=consider-using-max-builtin + self._point = 0 + + async def read(self, size): + try: + # keep reading from the generator until the buffer of this stream has enough data to read + while self._point + size > self._download_offset: + self._buf += await self.__anext__() + except StopAsyncIteration: + self.file_length = self._download_offset + + start_point = self._point + + # EOF + self._point = min(self._point + size, self._download_offset) + + relative_start = start_point - self._buf_start + if relative_start < 0: + raise ValueError("Buffer has dumped too much data") + relative_end = relative_start + size + data = self._buf[relative_start:relative_end] + + # dump the extra data in buffer + # buffer start--------------------16bytes----current read position + dumped_size = max(relative_end - 16 - relative_start, 0) + self._buf_start += dumped_size + self._buf = self._buf[dumped_size:] + + return data diff --git a/sdk/storage/azure-storage-blob/swagger/README.md b/sdk/storage/azure-storage-blob/swagger/README.md index 739f59cae350..139692914e2a 100644 --- a/sdk/storage/azure-storage-blob/swagger/README.md +++ b/sdk/storage/azure-storage-blob/swagger/README.md @@ -16,7 +16,7 @@ autorest --v3 --python ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2025-01-05/blob.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2025-07-05/blob.json output-folder: ../azure/storage/blob/_generated namespace: azure.storage.blob no-namespace-folders: true diff --git a/sdk/storage/azure-storage-blob/tests/test_block_blob.py b/sdk/storage/azure-storage-blob/tests/test_block_blob.py index dce1b606e07d..82b97ce0f156 100644 --- a/sdk/storage/azure-storage-blob/tests/test_block_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_block_blob.py @@ -3,9 +3,11 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +import requests import tempfile -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from io import BytesIO +from typing import Any, Dict, Tuple import pytest from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceModifiedError, ResourceNotFoundError @@ -23,6 +25,7 @@ ImmutabilityPolicy, StandardBlobTier, ) +from azure.storage.blob._serialize import get_api_version from azure.storage.blob._shared.policies import StorageContentValidation from devtools_testutils import recorded_by_proxy @@ -33,6 +36,7 @@ #------------------------------------------------------------------------------ TEST_BLOB_PREFIX = 'blob' +SMALL_BLOB_SIZE = 1024 LARGE_BLOB_SIZE = 5 * 1024 + 5 TEST_ENCRYPTION_KEY = CustomerProvidedEncryptionKey(key_value=CPK_KEY_VALUE, key_hash=CPK_KEY_HASH) #------------------------------------------------------------------------------ @@ -76,6 +80,47 @@ def _create_source_blob(self, data): blob_client.upload_blob(data, overwrite=True) return blob_client + def _get_bearer_token_string(self, resource: str = "https://storage.azure.com/.default") -> str: + return "Bearer " + f"{self.get_credential(BlobServiceClient).get_token(resource).token}" + + def _build_base_file_share_headers(self, bearer_token_string: str, content_length: int = 0) -> Dict[str, Any]: + return { + 'Authorization': bearer_token_string, + 'Content-Length': str(content_length), + 'x-ms-date': datetime.now(timezone.utc).strftime('%a, %d %b %Y %H:%M:%S GMT'), + 'x-ms-version': get_api_version({}), + 'x-ms-file-request-intent': 'backup', + } + + def _create_file_share_oauth( + self, bearer_token_string: str, + storage_account_name: str, + data: bytes + ) -> Tuple[str, str]: + share_name = self.get_resource_name('utshare') + file_name = self.get_resource_name('file') + base_url = f"https://{storage_account_name}.file.core.windows.net/{share_name}" + + # Creates file share + with requests.Session() as session: + session.put( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string), + params={'restype': 'share'} + ) + + # Creates the file itself + headers = self._build_base_file_share_headers(bearer_token_string) + headers.update({'x-ms-content-length': '1024', 'x-ms-type': 'file'}) + session.put(url=base_url + "/" + file_name, headers=headers) + + # Upload the supplied data to the file + headers = self._build_base_file_share_headers(bearer_token_string, 1024) + headers.update({'x-ms-range': 'bytes=0-1023', 'x-ms-write': 'update'}) + session.put(url=base_url + "/" + file_name, headers=headers, data=data, params={'comp': 'range'}) + + return file_name, base_url + def assertBlobEqual(self, container_name, blob_name, expected_data): blob = self.bsc.get_blob_client(container_name, blob_name) actual_data = blob.download_blob() @@ -93,7 +138,7 @@ def test_upload_blob_from_url_with_oauth(self, **kwargs): source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = self._create_source_blob(data=source_blob_data) destination_blob_client = self._create_blob() - token = "Bearer {}".format(self.get_credential(BlobServiceClient).get_token("https://storage.azure.com/.default").token) + token = self._get_bearer_token_string() # Assert this operation fails without a credential with pytest.raises(HttpResponseError): @@ -103,6 +148,210 @@ def test_upload_blob_from_url_with_oauth(self, **kwargs): destination_blob_data = 
destination_blob_client.download_blob().readall() assert source_blob_data == destination_blob_data + @BlobPreparer() + @recorded_by_proxy + def test_upload_from_file_to_blob_with_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + self._setup(storage_account_name, storage_account_key) + bearer_token_string = self._get_bearer_token_string() + + # Set up source file share with random data + source_data = self.get_random_bytes(SMALL_BLOB_SIZE) + file_name, base_url = self._create_file_share_oauth( + bearer_token_string, + storage_account_name, + source_data + ) + + # Set up destination blob without data + blob_service_client = BlobServiceClient( + account_url=self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + destination_blob_client = blob_service_client.get_blob_client( + container=self.source_container_name, + blob=self.get_resource_name(TEST_BLOB_PREFIX + "1") + ) + + try: + # Act + destination_blob_client.upload_blob_from_url( + source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup' + ) + destination_blob_data = destination_blob_client.download_blob().readall() + + # Assert + assert destination_blob_data == source_data + finally: + requests.delete( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string, 0), + params={'restype': 'share'} + ) + blob_service_client.delete_container(self.source_container_name) + + @BlobPreparer() + @recorded_by_proxy + def test_stage_from_file_to_blob_with_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + self._setup(storage_account_name, storage_account_key) + bearer_token_string = self._get_bearer_token_string() + + # Set up source file share with random data + source_data = self.get_random_bytes(SMALL_BLOB_SIZE) + file_name, base_url = self._create_file_share_oauth( + bearer_token_string, + storage_account_name, + source_data + ) + + # Set up destination blob without data + blob_service_client = BlobServiceClient( + account_url=self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + destination_blob_client = blob_service_client.get_blob_client( + container=self.source_container_name, + blob=self.get_resource_name(TEST_BLOB_PREFIX + "1") + ) + + try: + # Act / Assert + block_id = '1' + destination_blob_client.stage_block_from_url( + block_id=block_id, + source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup' + ) + block_list = [BlobBlock(block_id=block_id)] + resp = destination_blob_client.commit_block_list(block_list) + assert resp is not None + + destination_blob_data = destination_blob_client.download_blob().readall() + assert destination_blob_data == source_data + finally: + requests.delete( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string, 0), + params={'restype': 'share'} + ) + blob_service_client.delete_container(self.source_container_name) + + @BlobPreparer() + @recorded_by_proxy + def test_copy_from_file_to_blob_with_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + self._setup(storage_account_name, storage_account_key) + bearer_token_string = self._get_bearer_token_string() + + # Set up source 
file share with random data + source_data = self.get_random_bytes(SMALL_BLOB_SIZE) + file_name, base_url = self._create_file_share_oauth( + bearer_token_string, + storage_account_name, + source_data + ) + + # Set up destination blob without data + blob_service_client = BlobServiceClient( + account_url=self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + destination_blob_client = blob_service_client.get_blob_client( + container=self.source_container_name, + blob=self.get_resource_name(TEST_BLOB_PREFIX + "1") + ) + + try: + # Act + with pytest.raises(ValueError): + destination_blob_client.start_copy_from_url( + source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup', + requires_sync=False + ) + destination_blob_client.start_copy_from_url( + source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup', + requires_sync=True + ) + destination_blob_data = destination_blob_client.download_blob().readall() + + # Assert + assert destination_blob_data == source_data + finally: + requests.delete( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string, 0), + params={'restype': 'share'} + ) + blob_service_client.delete_container(self.source_container_name) + + @BlobPreparer() + @recorded_by_proxy + def test_append_block_from_file_to_blob_with_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + self._setup(storage_account_name, storage_account_key) + bearer_token_string = self._get_bearer_token_string() + + # Set up source file share with random data + source_data = self.get_random_bytes(SMALL_BLOB_SIZE) + file_name, base_url = self._create_file_share_oauth( + bearer_token_string, + storage_account_name, + source_data + ) + + # Set up destination blob without data + account_url = self.account_url(storage_account_name, "blob") + blob_service_client = BlobServiceClient( + account_url=account_url, + credential=storage_account_key + ) + destination_blob_client = BlobClient( + account_url=account_url, + container_name=self.source_container_name, + blob_name=self.get_resource_name(TEST_BLOB_PREFIX + "1"), + credential=storage_account_key + ) + destination_blob_client.create_append_blob() + + try: + # Act + destination_blob_client.append_block_from_url( + copy_source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup' + ) + destination_blob_data = destination_blob_client.download_blob().readall() + + # Assert + assert destination_blob_data == source_data + finally: + requests.delete( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string, 0), + params={'restype': 'share'} + ) + blob_service_client.delete_container(self.source_container_name) + @BlobPreparer() @recorded_by_proxy def test_upload_blob_with_and_without_overwrite(self, **kwargs): diff --git a/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py index e6994eb8f217..33f87552c749 100644 --- a/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py @@ -3,10 +3,12 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +import aiohttp import tempfile import uuid -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from io import BytesIO +from typing import Any, Dict, Tuple import pytest from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceModifiedError, ResourceNotFoundError @@ -18,7 +20,9 @@ StandardBlobTier, generate_blob_sas, BlobSasPermissions, CustomerProvidedEncryptionKey, - BlobImmutabilityPolicyMode, ImmutabilityPolicy) + BlobImmutabilityPolicyMode, ImmutabilityPolicy +) +from azure.storage.blob._serialize import get_api_version from azure.storage.blob.aio import BlobClient, BlobServiceClient from azure.storage.blob._shared.policies import StorageContentValidation @@ -30,6 +34,7 @@ # ------------------------------------------------------------------------------ TEST_BLOB_PREFIX = 'blob' +SMALL_BLOB_SIZE = 1024 LARGE_BLOB_SIZE = 5 * 1024 + 5 TEST_ENCRYPTION_KEY = CustomerProvidedEncryptionKey(key_value=CPK_KEY_VALUE, key_hash=CPK_KEY_HASH) # ------------------------------------------------------------------------------ @@ -93,6 +98,48 @@ async def _create_source_blob(self, data): await blob_client.upload_blob(data, overwrite=True) return blob_client + async def _get_bearer_token_string(self, resource: str = "https://storage.azure.com/.default") -> str: + access_token = await self.get_credential(BlobServiceClient, is_async=True).get_token(resource) + return "Bearer " + access_token.token + + def _build_base_file_share_headers(self, bearer_token_string: str, content_length: int = 0) -> Dict[str, Any]: + return { + 'Authorization': bearer_token_string, + 'Content-Length': str(content_length), + 'x-ms-date': datetime.now(timezone.utc).strftime('%a, %d %b %Y %H:%M:%S GMT'), + 'x-ms-version': get_api_version({}), + 'x-ms-file-request-intent': 'backup', + } + + async def _create_file_share_oauth( + self, bearer_token_string: str, + storage_account_name: str, + data: bytes + ) -> Tuple[str, str]: + share_name = self.get_resource_name('utshare') + file_name = self.get_resource_name('file') + base_url = f"https://{storage_account_name}.file.core.windows.net/{share_name}" + + async with aiohttp.ClientSession() as session: + # Creates file share + await session.put( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string), + params={'restype': 'share'} + ) + + # Creates the file itself + headers = self._build_base_file_share_headers(bearer_token_string) + headers.update({'x-ms-content-length': '1024', 'x-ms-type': 'file'}) + await session.put(url=base_url + "/" + file_name, headers=headers) + + # Upload the supplied data to the file + headers = self._build_base_file_share_headers(bearer_token_string, 1024) + headers.update({'x-ms-range': 'bytes=0-1023', 'x-ms-write': 'update'}) + await session.put(url=base_url + "/" + file_name, headers=headers, data=data, params={'comp': 'range'}) + + return file_name, base_url + async def assertBlobEqual(self, container_name, blob_name, expected_data): blob = self.bsc.get_blob_client(container_name, blob_name) stream = await blob.download_blob() @@ -111,8 +158,7 @@ async def test_upload_blob_from_url_with_oauth(self, **kwargs): source_blob_data = self.get_random_bytes(LARGE_BLOB_SIZE) source_blob_client = await self._create_source_blob(data=source_blob_data) destination_blob_client = await self._create_blob() - access_token = await self.get_credential(BlobServiceClient, 
is_async=True).get_token("https://storage.azure.com/.default") - token = "Bearer {}".format(access_token.token) + token = await self._get_bearer_token_string() # Assert this operation fails without a credential with pytest.raises(HttpResponseError): @@ -124,6 +170,218 @@ async def test_upload_blob_from_url_with_oauth(self, **kwargs): destination_blob_data = await destination_blob.readall() assert source_blob_data == destination_blob_data + @BlobPreparer() + @recorded_by_proxy_async + async def test_upload_from_file_to_blob_with_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + await self._setup(storage_account_name, storage_account_key) + bearer_token_string = await self._get_bearer_token_string() + + # Set up source file share with random data + source_data = self.get_random_bytes(SMALL_BLOB_SIZE) + file_name, base_url = await self._create_file_share_oauth( + bearer_token_string, + storage_account_name, + source_data + ) + + # Set up destination blob without data + blob_service_client = BlobServiceClient( + account_url=self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + destination_blob_client = blob_service_client.get_blob_client( + container=self.source_container_name, + blob=self.get_resource_name(TEST_BLOB_PREFIX + "1") + ) + + try: + # Act + await destination_blob_client.upload_blob_from_url( + source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup' + ) + destination_blob = await destination_blob_client.download_blob() + destination_blob_data = await destination_blob.readall() + + # Assert + assert destination_blob_data == source_data + finally: + async with aiohttp.ClientSession() as requests: + await requests.delete( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string, 0), + params={'restype': 'share'} + ) + await blob_service_client.delete_container(self.source_container_name) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_stage_from_file_to_blob_with_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + await self._setup(storage_account_name, storage_account_key) + bearer_token_string = await self._get_bearer_token_string() + + # Set up source file share with random data + source_data = self.get_random_bytes(SMALL_BLOB_SIZE) + file_name, base_url = await self._create_file_share_oauth( + bearer_token_string, + storage_account_name, + source_data + ) + + # Set up destination blob without data + blob_service_client = BlobServiceClient( + account_url=self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + destination_blob_client = blob_service_client.get_blob_client( + container=self.source_container_name, + blob=self.get_resource_name(TEST_BLOB_PREFIX + "1") + ) + + try: + # Act / Assert + block_id = '1' + await destination_blob_client.stage_block_from_url( + block_id=block_id, + source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup' + ) + block_list = [BlobBlock(block_id=block_id)] + resp = await destination_blob_client.commit_block_list(block_list) + assert resp is not None + + destination_blob = await destination_blob_client.download_blob() + destination_blob_data = await destination_blob.readall() + assert destination_blob_data == source_data + 
finally: + async with aiohttp.ClientSession() as requests: + await requests.delete( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string, 0), + params={'restype': 'share'} + ) + await blob_service_client.delete_container(self.source_container_name) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_copy_from_file_to_blob_with_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + await self._setup(storage_account_name, storage_account_key) + bearer_token_string = await self._get_bearer_token_string() + + # Set up source file share with random data + source_data = self.get_random_bytes(SMALL_BLOB_SIZE) + file_name, base_url = await self._create_file_share_oauth( + bearer_token_string, + storage_account_name, + source_data + ) + + # Set up destination blob without data + blob_service_client = BlobServiceClient( + account_url=self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + destination_blob_client = blob_service_client.get_blob_client( + container=self.source_container_name, + blob=self.get_resource_name(TEST_BLOB_PREFIX + "1") + ) + + try: + # Act + with pytest.raises(ValueError): + await destination_blob_client.start_copy_from_url( + source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup', + requires_sync=False + ) + await destination_blob_client.start_copy_from_url( + source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup', + requires_sync=True + ) + destination_blob = await destination_blob_client.download_blob() + destination_blob_data = await destination_blob.readall() + + # Assert + assert destination_blob_data == source_data + finally: + async with aiohttp.ClientSession() as requests: + await requests.delete( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string, 0), + params={'restype': 'share'} + ) + await blob_service_client.delete_container(self.source_container_name) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_append_block_from_file_to_blob_with_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + await self._setup(storage_account_name, storage_account_key) + bearer_token_string = await self._get_bearer_token_string() + + # Set up source file share with random data + source_data = self.get_random_bytes(SMALL_BLOB_SIZE) + file_name, base_url = await self._create_file_share_oauth( + bearer_token_string, + storage_account_name, + source_data + ) + + # Set up destination blob without data + account_url = self.account_url(storage_account_name, "blob") + blob_service_client = BlobServiceClient( + account_url=account_url, + credential=storage_account_key + ) + destination_blob_client = BlobClient( + account_url=account_url, + container_name=self.source_container_name, + blob_name=self.get_resource_name(TEST_BLOB_PREFIX + "1"), + credential=storage_account_key + ) + await destination_blob_client.create_append_blob() + + try: + # Act + await destination_blob_client.append_block_from_url( + copy_source_url=base_url + "/" + file_name, + source_authorization=bearer_token_string, + source_token_intent='backup' + ) + destination_blob = await destination_blob_client.download_blob() + destination_blob_data = await destination_blob.readall() + + # Assert 
+ assert destination_blob_data == source_data + finally: + async with aiohttp.ClientSession() as requests: + await requests.delete( + url=base_url, + headers=self._build_base_file_share_headers(bearer_token_string, 0), + params={'restype': 'share'} + ) + await blob_service_client.delete_container(self.source_container_name) + @BlobPreparer() @recorded_by_proxy_async async def test_upload_blob_with_and_without_overwrite(self, **kwargs): diff --git a/sdk/storage/azure-storage-blob/tests/test_container.py b/sdk/storage/azure-storage-blob/tests/test_container.py index 8fb99e114e2b..764f7a3ef432 100644 --- a/sdk/storage/azure-storage-blob/tests/test_container.py +++ b/sdk/storage/azure-storage-blob/tests/test_container.py @@ -2721,4 +2721,33 @@ def test_storage_account_audience_container_client(self, **kwargs): # Assert response = cc.exists() - assert response is not None \ No newline at end of file + assert response is not None + + @BlobPreparer() + @recorded_by_proxy + def test_get_and_set_access_policy_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + + token_credential = self.get_credential(BlobServiceClient) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), token_credential) + container = self._create_container(bsc) + + # Act + container.set_container_access_policy(signed_identifiers={}) + + # Assert + acl = container.get_container_access_policy() + assert acl is not None + + @BlobPreparer() + @recorded_by_proxy + def test_get_account_information_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + + token_credential = self.get_credential(BlobServiceClient) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), token_credential) + container = self._create_container(bsc) + + # Act / Assert + cc_info = container.get_account_information() + assert cc_info is not None diff --git a/sdk/storage/azure-storage-blob/tests/test_container_async.py b/sdk/storage/azure-storage-blob/tests/test_container_async.py index a8eea27cabe7..608811f9ec13 100644 --- a/sdk/storage/azure-storage-blob/tests/test_container_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_container_async.py @@ -2591,4 +2591,33 @@ async def test_storage_account_audience_container_client(self, **kwargs): # Assert response = await cc.exists() - assert response is not None \ No newline at end of file + assert response is not None + + @BlobPreparer() + @recorded_by_proxy_async + async def test_get_and_set_access_policy_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + + token_credential = self.get_credential(BlobServiceClient, is_async=True) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), token_credential) + container: ContainerClient = await self._create_container(bsc) + + # Act + await container.set_container_access_policy(signed_identifiers={}) + + # Assert + acl = await container.get_container_access_policy() + assert acl is not None + + @BlobPreparer() + @recorded_by_proxy_async + async def test_get_account_information_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + + token_credential = self.get_credential(BlobServiceClient, is_async=True) + bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), token_credential) + container: ContainerClient = await self._create_container(bsc) + + # Act / Assert + cc_info = await container.get_account_information() + assert cc_info is not None diff --git 
a/sdk/storage/azure-storage-blob/tests/test_page_blob.py b/sdk/storage/azure-storage-blob/tests/test_page_blob.py index 2c76e73854e4..160bc46081a3 100644 --- a/sdk/storage/azure-storage-blob/tests/test_page_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_page_blob.py @@ -22,7 +22,8 @@ ImmutabilityPolicy, PremiumPageBlobTier, SequenceNumberAction, - generate_blob_sas) + generate_blob_sas +) from azure.storage.blob._shared.policies import StorageContentValidation from devtools_testutils import recorded_by_proxy diff --git a/sdk/storage/azure-storage-blob/tests/test_page_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_page_blob_async.py index def21b7e0d85..0b3c9d16f6b8 100644 --- a/sdk/storage/azure-storage-blob/tests/test_page_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_page_blob_async.py @@ -21,7 +21,8 @@ ImmutabilityPolicy, PremiumPageBlobTier, SequenceNumberAction, - generate_blob_sas) + generate_blob_sas +) from azure.storage.blob.aio import BlobClient, BlobServiceClient from azure.storage.blob._shared.policies import StorageContentValidation diff --git a/sdk/storage/azure-storage-blob/tests/test_quick_query.py b/sdk/storage/azure-storage-blob/tests/test_quick_query.py index b16b730d0956..fdebf9ec053d 100644 --- a/sdk/storage/azure-storage-blob/tests/test_quick_query.py +++ b/sdk/storage/azure-storage-blob/tests/test_quick_query.py @@ -1011,12 +1011,20 @@ def on_error(error): "SELECT _2 from BlobStorage WHERE _1 > 250", on_error=on_error, output_format=output_format) + expected_result = ( + b'/////3gAAAAQAAAAAAAKAAwABgAFAAgACgAAAAABBAAMAAAACAAIAAAABAAIAAAABAAAAAEAAAAU' + b'AAAAEAAUAAgABgAHAAwAAAAQABAAAAAAAAEHEAAAABwAAAAEAAAAAAAAAAMAAABhYmMACAAMAAQA' + b'CAAIAAAABAAAAAIAAAD/////cAAAABAAAAAAAAoADgAGAAUACAAKAAAAAAMEABAAAAAAAAoADAAA' + b'AAQACAAKAAAAMAAAAAQAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + b'AQAAAAAAAAAAAAAAAAAAAAAAAAD/////iAAAABQAAAAAAAAADAAWAAYABQAIAAwADAAAAAADBAAY' + b'AAAAEAAAAAAAAAAAAAoAGAAMAAQACAAKAAAAPAAAABAAAAABAAAAAAAAAAAAAAACAAAAAAAAAAAA' + b'AAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAQAAAAEAAAAAAAAAAAAAAAAAAACQAQAAAAAA' + b'AAAAAAAAAAAA' + ) query_result = base64.b64encode(resp.readall()) - # expected_result = b'/////3gAAAAQAAAAAAAKAAwABgAFAAgACgAAAAABBAAMAAAACAAIAAAABAAIAAAABAAAAAEAAAAUAAAAEAAUAAgABgAHAAwAAAAQABAAAAAAAAEHEAAAABwAAAAEAAAAAAAAAAMAAABhYmMACAAMAAQACAAIAAAABAAAAAIAAAD/////cAAAABAAAAAAAAoADgAGAAUACAAKAAAAAAMEABAAAAAAAAoADAAAAAQACAAKAAAAMAAAAAQAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAP////+IAAAAFAAAAAAAAAAMABYABgAFAAgADAAMAAAAAAMEABgAAAAQAAAAAAAAAAAACgAYAAwABAAIAAoAAAA8AAAAEAAAAAEAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAABAAAAAQAAAAAAAAAAAAAAAAAAAJABAAAAAAAAAAAAAAAAAAA=' assert len(errors) == 0 - # Skip this assert for now, requires further investigation: https://github.com/Azure/azure-sdk-for-python/issues/24690 - # assert query_result == expected_result + assert query_result == expected_result self._teardown(bsc) @BlobPreparer() diff --git a/sdk/storage/azure-storage-blob/tests/test_quick_query_async.py b/sdk/storage/azure-storage-blob/tests/test_quick_query_async.py new file mode 100644 index 000000000000..71ac1c85799e --- /dev/null +++ b/sdk/storage/azure-storage-blob/tests/test_quick_query_async.py @@ -0,0 +1,1174 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import base64 +import os + +import pytest + +from azure.storage.blob.aio import BlobServiceClient +from azure.storage.blob import ( + DelimitedJsonDialect, + DelimitedTextDialect +) +from devtools_testutils.aio import recorded_by_proxy_async +from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase +from settings.testcase import BlobPreparer + + +# ------------------------------------------------------------------------------ +from azure.storage.blob._models import ArrowDialect, ArrowType, QuickQueryDialect + +CSV_DATA = b'Service,Package,Version,RepoPath,MissingDocs\r\nApp Configuration,' \ + b'azure-data-appconfiguration,1,appconfiguration,FALSE\r\nEvent Hubs' \ + b'\r\nEvent Hubs - Azure Storage CheckpointStore,' \ + b'azure-messaging-eventhubs-checkpointstore-blob,1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,' \ + b'1.1.0-beta.1,identity,FALSE\r\nKey Vault - Certificates,azure-security-keyvault-certificates,' \ + b'4.0.0,keyvault,FALSE\r\nKey Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,' \ + b'FALSE\r\nKey Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' \ + b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\nStorage - Blobs Batch,' \ + b'azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\nStorage - Blobs Cryptography,' \ + b'azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\nStorage - File Shares,' \ + b'azure-storage-file-share,12.2.0,storage,FALSE\r\nStorage - Queues,' \ + b'azure-storage-queue,12.3.0,storage,FALSE\r\nText Analytics,' \ + b'azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\nTracing,' \ + b'azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\nService,Package,Version,RepoPath,' \ + b'MissingDocs\r\nApp Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' \ + b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' \ + b'Event Hubs - Azure Storage CheckpointStore,azure-messaging-eventhubs-checkpointstore-blob,' \ + b'1.0.1,eventhubs,FALSE\r\nIdentity,azure-identity,1.1.0-beta.1,identity,FALSE\r\n' \ + b'Key Vault - Certificates,azure-security-keyvault-certificates,4.0.0,keyvault,FALSE\r\n' \ + b'Key Vault - Keys,azure-security-keyvault-keys,4.2.0-beta.1,keyvault,FALSE\r\n' \ + b'Key Vault - Secrets,azure-security-keyvault-secrets,4.1.0,keyvault,FALSE\r\n' \ + b'Storage - Blobs,azure-storage-blob,12.4.0,storage,FALSE\r\n' \ + b'Storage - Blobs Batch,azure-storage-blob-batch,12.4.0-beta.1,storage,FALSE\r\n' \ + b'Storage - Blobs Cryptography,azure-storage-blob-cryptography,12.4.0,storage,FALSE\r\n' \ + b'Storage - File Shares,azure-storage-file-share,12.2.0,storage,FALSE\r\n' \ + b'Storage - Queues,azure-storage-queue,12.3.0,storage,FALSE\r\n' \ + b'Text Analytics,azure-ai-textanalytics,1.0.0-beta.2,textanalytics,FALSE\r\n' \ + b'Tracing,azure-core-tracing-opentelemetry,1.0.0-beta.2,core,FALSE\r\n' \ + b'Service,Package,Version,RepoPath,MissingDocs\r\n' \ + b'App Configuration,azure-data-appconfiguration,1.0.1,appconfiguration,FALSE\r\n' \ + b'Event Hubs,azure-messaging-eventhubs,5.0.1,eventhubs,FALSE\r\n' + +CONVERTED_CSV_DATA = b"Service;Package;Version;RepoPath;MissingDocs.App Configuration;azure-data-appconfiguration;" \ + b"1;appconfiguration;FALSE.Event Hubs.Event Hubs - Azure Storage CheckpointStore;azure-messaging-eventhubs-checkpointstore-blob;" \ + 
b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;FALSE.Key Vault - Certificates;" \ + b"azure-security-keyvault-certificates;'4.0.0';keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;" \ + b"'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;azure-security-keyvault-secrets;'4.1.0';keyvault;" \ + b"FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';storage;FALSE.Storage - Blobs Batch;" \ + b"azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" \ + b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" \ + b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" \ + b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" \ + b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" \ + b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" \ + b"'5.0.1';eventhubs;FALSE.Event Hubs - Azure Storage CheckpointStore;azure-messaging-eventhubs-checkpointstore-blob;" \ + b"'1.0.1';eventhubs;FALSE.Identity;azure-identity;'1.1.0-beta.1';identity;" \ + b"FALSE.Key Vault - Certificates;azure-security-keyvault-certificates;'4.0.0';" \ + b"keyvault;FALSE.Key Vault - Keys;azure-security-keyvault-keys;'4.2.0-beta.1';keyvault;FALSE.Key Vault - Secrets;" \ + b"azure-security-keyvault-secrets;'4.1.0';keyvault;FALSE.Storage - Blobs;azure-storage-blob;'12.4.0';" \ + b"storage;FALSE.Storage - Blobs Batch;azure-storage-blob-batch;'12.4.0-beta.1';storage;FALSE.Storage - Blobs Cryptography;" \ + b"azure-storage-blob-cryptography;'12.4.0';storage;FALSE.Storage - File Shares;azure-storage-file-share;" \ + b"'12.2.0';storage;FALSE.Storage - Queues;azure-storage-queue;'12.3.0';storage;FALSE.Text Analytics;" \ + b"azure-ai-textanalytics;'1.0.0-beta.2';textanalytics;FALSE.Tracing;azure-core-tracing-opentelemetry;" \ + b"'1.0.0-beta.2';core;FALSE.Service;Package;Version;RepoPath;MissingDocs.App Configuration;" \ + b"azure-data-appconfiguration;'1.0.1';appconfiguration;FALSE.Event Hubs;azure-messaging-eventhubs;" \ + b"'5.0.1';eventhubs;FALSE." 
+
+# ------------------------------------------------------------------------------
+
+
+class TestStorageQuickQuery(AsyncStorageRecordedTestCase):
+    async def _setup(self, bsc):
+        self.config = bsc._config
+        self.container_name = self.get_resource_name('utqqcontainer')
+
+        if self.is_live:
+            try:
+                await bsc.create_container(self.container_name)
+            except:
+                pass
+
+    async def _teardown(self, bsc):
+        if self.is_live:
+            try:
+                await bsc.delete_container(self.container_name)
+            except:
+                pass
+
+    # --Helpers-----------------------------------------------------------------
+
+    def _get_blob_reference(self):
+        return self.get_resource_name("csvfile")
+
+    # -- Test cases for Quick Query ----------------------------------------------
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_readall(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        errors = []
+
+        def on_error(error):
+            errors.append(error)
+
+        reader = await blob_client.query_blob("SELECT * from BlobStorage", on_error=on_error)
+        data = await reader.readall()
+        assert len(errors) == 0
+        assert len(reader) == len(CSV_DATA)
+        assert reader._size == reader._bytes_processed
+        assert data == CSV_DATA.replace(b'\r\n', b'\n')
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_iter_records(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        reader = await blob_client.query_blob("SELECT * from BlobStorage")
+        read_records = reader.records()
+
+        # Assert first line has header
+        data = await read_records.__anext__()
+        assert data == b'Service,Package,Version,RepoPath,MissingDocs'
+
+        async for record in read_records:
+            data += record
+
+        assert len(reader) == len(CSV_DATA)
+        assert reader._size == reader._bytes_processed
+        assert data == CSV_DATA.replace(b'\r\n', b'')
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_readall_with_encoding(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        errors = []
+
+        def on_error(error):
+            errors.append(error)
+
+        reader = await blob_client.query_blob("SELECT * from BlobStorage", on_error=on_error, encoding='utf-8')
+        data = await reader.readall()
+
+        assert len(errors) == 0
+        assert len(reader) == len(CSV_DATA)
+        assert reader._size == reader._bytes_processed
+        assert data == CSV_DATA.replace(b'\r\n', b'\n').decode('utf-8')
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_iter_records_with_encoding(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        reader = await blob_client.query_blob("SELECT * from BlobStorage", encoding='utf-8')
+        data = ''
+        async for record in reader.records():
+            data += record
+
+        assert len(reader) == len(CSV_DATA)
+        assert reader._size == reader._bytes_processed
+        assert data == CSV_DATA.replace(b'\r\n', b'').decode('utf-8')
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_iter_output_records_excluding_headers(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        input_format = DelimitedTextDialect(has_header=True)
+        output_format = DelimitedTextDialect(has_header=False)
+        reader = await blob_client.query_blob(
+            "SELECT * from BlobStorage",
+            blob_format=input_format,
+            output_format=output_format
+        )
+        read_records = reader.records()
+
+        # Assert first line does not include header
+        data = await read_records.__anext__()
+        assert data == b'App Configuration,azure-data-appconfiguration,1,appconfiguration,FALSE'
+
+        async for record in read_records:
+            data += record
+
+        assert len(reader) == len(CSV_DATA)
+        assert reader._size == reader._bytes_processed
+        assert data == CSV_DATA.replace(b'\r\n', b'')[44:]
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_iter_output_records_including_headers(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        input_format = DelimitedTextDialect(has_header=True)
+        reader = await blob_client.query_blob("SELECT * from BlobStorage", blob_format=input_format)
+        read_records = reader.records()
+
+        # Assert first line includes the header
+        data = await read_records.__anext__()
+        assert data == b'Service,Package,Version,RepoPath,MissingDocs'
+
+        async for record in read_records:
+            data += record
+
+        assert len(reader) == len(CSV_DATA)
+        assert reader._size == reader._bytes_processed
+        assert data == CSV_DATA.replace(b'\r\n', b'')
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_iter_records_with_progress(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        reader = await blob_client.query_blob("SELECT * from BlobStorage")
+        data = b''
+        progress = 0
+        async for record in reader.records():
+            if record:
+                data += record
+                progress += len(record) + 2
+
+        assert len(reader) == len(CSV_DATA)
+        assert reader._size == reader._bytes_processed
+        assert data == CSV_DATA.replace(b'\r\n', b'')
+        assert progress == reader._size
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_readall_with_serialization_setting(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        errors = []
+
+        def on_error(error):
+            errors.append(error)
+
+        input_format = DelimitedTextDialect(
+            delimiter=',',
+            quotechar='"',
+            lineterminator='\n',
+            escapechar='',
+            has_header=False
+        )
+        output_format = DelimitedTextDialect(
+            delimiter=';',
+            quotechar="'",
+            lineterminator='.',
+            escapechar='\\'
+        )
+        resp = await blob_client.query_blob(
+            "SELECT * from BlobStorage",
+            on_error=on_error,
+            blob_format=input_format,
+            output_format=output_format
+        )
+        query_result = await resp.readall()
+
+        assert len(errors) == 0
+        assert resp._size == len(CSV_DATA)
+        assert query_result == CONVERTED_CSV_DATA
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_iter_records_with_serialization_setting(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # upload the csv file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(CSV_DATA, overwrite=True)
+
+        input_format = DelimitedTextDialect(
+            delimiter=',',
+            quotechar='"',
+            lineterminator='\n',
+            escapechar='',
+            has_header=False
+        )
+        output_format = DelimitedTextDialect(
+            delimiter=';',
+            quotechar="'",
+            lineterminator='%',
+            escapechar='\\'
+        )
+
+        reader = await blob_client.query_blob(
+            "SELECT * from BlobStorage",
+            blob_format=input_format,
+            output_format=output_format
+        )
+        data = []
+        async for record in reader.records():
+            if record:
+                data.append(record)
+
+        assert len(reader) == len(CSV_DATA)
+        assert reader._size == reader._bytes_processed
+        assert len(data) == 33
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_readall_with_fatal_error_handler(self, **kwargs):
+
storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + await self._setup(bsc) + + data1 = b'{name: owner}' + data2 = b'{name2: owner2}' + data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data = data1 + b'\n' + data2 + b'\n' + data1 + + # upload the json file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(data, overwrite=True) + + errors = [] + + def on_error(error): + errors.append(error) + + input_format = DelimitedJsonDialect() + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='.', + escapechar='\\' + ) + resp = await blob_client.query_blob( + "SELECT * from BlobStorage", + on_error=on_error, + blob_format=input_format, + output_format=output_format + ) + query_result = await resp.readall() + + assert len(errors) == 1 + assert resp._size == 43 + assert query_result == b'' + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_iter_records_with_fatal_error_handler(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + await self._setup(bsc) + + data1 = b'{name: owner}' + data2 = b'{name2: owner2}' + data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data = data1 + b'\n' + data2 + b'\n' + data1 + + # upload the json file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(data, overwrite=True) + + errors = [] + + def on_error(error): + errors.append(error) + + input_format = DelimitedJsonDialect() + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='.', + escapechar='\\' + ) + resp = await blob_client.query_blob( + "SELECT * from BlobStorage", + on_error=on_error, + blob_format=input_format, + output_format=output_format + ) + data = [] + async for record in resp.records(): + data.append(record) + + assert len(errors) == 1 + assert resp._size == 43 + assert data == [b''] + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_readall_with_fatal_error_handler_raise(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + 
credential=storage_account_key + ) + await self._setup(bsc) + + data1 = b'{name: owner}' + data2 = b'{name2: owner2}' + data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data = data1 + b'\n' + data2 + b'\n' + data1 + + # upload the json file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(data, overwrite=True) + + errors = [] + + def on_error(error): + raise Exception(error.description) + + input_format = DelimitedJsonDialect() + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='.', + escapechar='\\' + ) + resp = await blob_client.query_blob( + "SELECT * from BlobStorage", + on_error=on_error, + blob_format=input_format, + output_format=output_format + ) + with pytest.raises(Exception): + query_result = await resp.readall() + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_iter_records_with_fatal_error_handler_raise(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + await self._setup(bsc) + + data1 = b'{name: owner}' + data2 = b'{name2: owner2}' + data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data = data1 + b'\n' + data2 + b'\n' + data1 + + # upload the json file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(data, overwrite=True) + + errors = [] + + def on_error(error): + raise Exception(error.description) + + input_format = DelimitedJsonDialect() + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='.', + escapechar='\\' + ) + resp = await blob_client.query_blob( + "SELECT * from BlobStorage", + on_error=on_error, + blob_format=input_format, + output_format=output_format + ) + + with pytest.raises(Exception): + async for record in resp.records(): + print(record) + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_readall_with_fatal_error_ignore(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + await self._setup(bsc) + + data1 = b'{name: owner}' + data2 = b'{name2: owner2}' + data = data1 + b'\n' + data2 + b'\n' + data1 + + # upload the json file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + 
await blob_client.upload_blob(data, overwrite=True) + + input_format = DelimitedJsonDialect() + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='.', + escapechar='\\' + ) + resp = await blob_client.query_blob( + "SELECT * from BlobStorage", + blob_format=input_format, + output_format=output_format + ) + query_result = await resp.readall() + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_iter_records_with_fatal_error_ignore(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + await self._setup(bsc) + + data1 = b'{name: owner}' + data2 = b'{name2: owner2}' + data3 = b'{version:0,begin:1601-01-01T00:00:00.000Z,intervalSecs:3600,status:Finalized,config:' \ + b'{version:0,configVersionEtag:0x8d75ef460eb1a12,numShards:1,recordsFormat:avro,formatSchemaVersion:3,' \ + b'shardDistFnVersion:1},chunkFilePaths:[$blobchangefeed/log/00/1601/01/01/0000/],storageDiagnostics:' \ + b'{version:0,lastModifiedTime:2019-11-01T17:53:18.861Z,' \ + b'data:{aid:d305317d-a006-0042-00dd-902bbb06fc56}}}' + data = data1 + b'\n' + data2 + b'\n' + data1 + + # upload the json file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(data, overwrite=True) + + input_format = DelimitedJsonDialect() + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='.', + escapechar='\\' + ) + resp = await blob_client.query_blob( + "SELECT * from BlobStorage", + blob_format=input_format, + output_format=output_format + ) + + async for record in resp.records(): + print(record) + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_readall_with_nonfatal_error_handler(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + await self._setup(bsc) + + # upload the csv file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(CSV_DATA, overwrite=True) + + errors = [] + + def on_error(error): + errors.append(error) + + input_format = DelimitedTextDialect( + delimiter=',', + quotechar='"', + lineterminator='\n', + escapechar='', + has_header=True + ) + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='.', + escapechar='\\', + ) + resp = await blob_client.query_blob( + "SELECT RepoPath from BlobStorage", + blob_format=input_format, + output_format=output_format, + on_error=on_error + ) + query_result = await resp.readall() + + # the error is because that line only has one column + assert len(errors) == 1 + assert resp._size == len(CSV_DATA) + assert len(query_result) > 0 + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_iter_records_with_nonfatal_error_handler(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + 
credential=storage_account_key + ) + await self._setup(bsc) + + # upload the csv file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(CSV_DATA, overwrite=True) + + errors = [] + + def on_error(error): + errors.append(error) + + input_format = DelimitedTextDialect( + delimiter=',', + quotechar='"', + lineterminator='\n', + escapechar='', + has_header=True + ) + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='%', + escapechar='\\', + ) + resp = await blob_client.query_blob( + "SELECT RepoPath from BlobStorage", + blob_format=input_format, + output_format=output_format, + on_error=on_error + ) + + data = [] + async for record in resp.records(): + data.append(record) + + # the error is because that line only has one column + assert len(errors) == 1 + assert resp._size == len(CSV_DATA) + assert len(data) == 32 + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_readall_with_nonfatal_error_ignore(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + await self._setup(bsc) + + # upload the csv file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(CSV_DATA, overwrite=True) + + input_format = DelimitedTextDialect( + delimiter=',', + quotechar='"', + lineterminator='\n', + escapechar='', + has_header=True + ) + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='.', + escapechar='\\', + ) + resp = await blob_client.query_blob( + "SELECT RepoPath from BlobStorage", + blob_format=input_format, + output_format=output_format + ) + query_result = await resp.readall() + assert resp._size == len(CSV_DATA) + assert len(query_result) > 0 + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_iter_records_with_nonfatal_error_ignore(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=storage_account_key + ) + await self._setup(bsc) + + # upload the csv file + blob_name = self._get_blob_reference() + blob_client = bsc.get_blob_client(self.container_name, blob_name) + await blob_client.upload_blob(CSV_DATA, overwrite=True) + + input_format = DelimitedTextDialect( + delimiter=',', + quotechar='"', + lineterminator='\n', + escapechar='', + has_header=True + ) + output_format = DelimitedTextDialect( + delimiter=';', + quotechar="'", + lineterminator='$', + escapechar='\\', + ) + resp = await blob_client.query_blob( + "SELECT RepoPath from BlobStorage", + blob_format=input_format, + output_format=output_format + ) + + data = [] + async for record in resp.records(): + data.append(record) + + assert resp._size == len(CSV_DATA) + assert len(data) == 32 + await self._teardown(bsc) + + @BlobPreparer() + @recorded_by_proxy_async + async def test_quick_query_readall_with_json_serialization_setting(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + # Arrange + bsc = BlobServiceClient( + 
self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        data1 = b'{\"name\": \"owner\", \"id\": 1}'
+        data2 = b'{\"name2\": \"owner2\"}'
+        data = data1 + b'\n' + data2 + b'\n' + data1
+
+        # upload the json file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(data, overwrite=True)
+
+        errors = []
+        def on_error(error):
+            errors.append(error)
+
+        input_format = DelimitedJsonDialect(delimiter='\n')
+        output_format = DelimitedJsonDialect(delimiter=';')
+
+        resp = await blob_client.query_blob(
+            "SELECT name from BlobStorage",
+            on_error=on_error,
+            blob_format=input_format,
+            output_format=output_format
+        )
+        query_result = await resp.readall()
+
+        assert len(errors) == 0
+        assert resp._size == len(data)
+        assert query_result == b'{"name":"owner"};{};{"name":"owner"};'
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_iter_records_with_json_serialization_setting(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        data1 = b'{\"name\": \"owner\", \"id\": 1}'
+        data2 = b'{\"name2\": \"owner2\"}'
+        data = data1 + b'\n' + data2 + b'\n' + data1
+
+        # upload the json file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(data, overwrite=True)
+
+        errors = []
+        def on_error(error):
+            errors.append(error)
+
+        input_format = DelimitedJsonDialect(delimiter='\n')
+        output_format = DelimitedJsonDialect(delimiter=';')
+
+        resp = await blob_client.query_blob(
+            "SELECT name from BlobStorage",
+            on_error=on_error,
+            blob_format=input_format,
+            output_format=output_format
+        )
+
+        listdata = []
+        async for record in resp.records():
+            listdata.append(record)
+
+        assert len(errors) == 0
+        assert resp._size == len(data)
+        assert listdata == [b'{"name":"owner"}', b'{}', b'{"name":"owner"}', b'']
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_with_only_input_json_serialization_setting(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        data1 = b'{\"name\": \"owner\", \"id\": 1}'
+        data2 = b'{\"name2\": \"owner2\"}'
+        data = data1 + data2 + data1
+
+        # upload the json file
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(data, overwrite=True)
+
+        errors = []
+
+        def on_error(error):
+            errors.append(error)
+
+        input_format = DelimitedJsonDialect(delimiter='\n')
+        output_format = None
+
+        resp = await blob_client.query_blob(
+            "SELECT name from BlobStorage",
+            on_error=on_error,
+            blob_format=input_format,
+            output_format=output_format
+        )
+        query_result = await resp.readall()
+
+        assert len(errors) == 0
+        assert resp._size == len(data)
+        assert query_result == b'{"name":"owner"}\n{}\n{"name":"owner"}\n'
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_output_in_arrow_format(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        data = b'100,200,300,400\n300,400,500,600\n'
+
+        # upload the csv data
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        await blob_client.upload_blob(data, overwrite=True)
+
+        errors = []
+
+        def on_error(error):
+            errors.append(error)
+
+        output_format = [ArrowDialect(ArrowType.DECIMAL, name="abc", precision=4, scale=2)]
+
+        resp = await blob_client.query_blob(
+            "SELECT _2 from BlobStorage WHERE _1 > 250",
+            on_error=on_error,
+            output_format=output_format
+        )
+        data = await resp.readall()
+        expected_result = (
+            b'/////3gAAAAQAAAAAAAKAAwABgAFAAgACgAAAAABBAAMAAAACAAIAAAABAAIAAAABAAAAAEAAAAU'
+            b'AAAAEAAUAAgABgAHAAwAAAAQABAAAAAAAAEHEAAAABwAAAAEAAAAAAAAAAMAAABhYmMACAAMAAQA'
+            b'CAAIAAAABAAAAAIAAAD/////cAAAABAAAAAAAAoADgAGAAUACAAKAAAAAAMEABAAAAAAAAoADAAA'
+            b'AAQACAAKAAAAMAAAAAQAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
+            b'AQAAAAAAAAAAAAAAAAAAAAAAAAD/////iAAAABQAAAAAAAAADAAWAAYABQAIAAwADAAAAAADBAAY'
+            b'AAAAEAAAAAAAAAAAAAoAGAAMAAQACAAKAAAAPAAAABAAAAABAAAAAAAAAAAAAAACAAAAAAAAAAAA'
+            b'AAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAQAAAAEAAAAAAAAAAAAAAAAAAACQAQAAAAAA'
+            b'AAAAAAAAAAAA'
+        )
+        query_result = base64.b64encode(data)
+
+        assert len(errors) == 0
+        assert query_result == expected_result
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_input_in_arrow_format(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+
+        # get a blob client (no upload needed; an Arrow input format is rejected client-side)
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+
+        errors = []
+
+        def on_error(error):
+            errors.append(error)
+
+        input_format = [ArrowDialect(ArrowType.DECIMAL, name="abc", precision=4, scale=2)]
+
+        with pytest.raises(ValueError):
+            await blob_client.query_blob(
+                "SELECT * from BlobStorage",
+                on_error=on_error,
+                blob_format=input_format
+            )
+
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_input_in_parquet_format(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+        expression = "select * from blobstorage where id < 1;"
+        expected_data = b"0,mdifjt55.ea3,mdifjt55.ea3\n"
+
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        parquet_path = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "./resources/parquet.parquet"))
+        with open(parquet_path, "rb") as parquet_data:
+            await blob_client.upload_blob(parquet_data, overwrite=True)
+
+        reader = await blob_client.query_blob(expression, blob_format=QuickQueryDialect.Parquet)
+        real_data = await reader.readall()
+
+        assert real_data == expected_data
+        await self._teardown(bsc)
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_quick_query_output_in_parquet_format(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        # Arrange
+        bsc = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=storage_account_key
+        )
+        await self._setup(bsc)
+        expression = "SELECT * from BlobStorage"
+
+        blob_name = self._get_blob_reference()
+        blob_client = bsc.get_blob_client(self.container_name, blob_name)
+        parquet_path = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "./resources/parquet.parquet"))
+        with open(parquet_path, "rb") as parquet_data:
+            await blob_client.upload_blob(parquet_data, overwrite=True)
+
+        with pytest.raises(ValueError):
+            await blob_client.query_blob(
+                expression,
+                blob_format="ParquetDialect",
+                output_format="ParquetDialect"
+            )
diff --git a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md
index 36e160d1ea5d..60296160a8ba 100644
--- a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md
+++ b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md
@@ -3,6 +3,14 @@
 ## 12.21.0b1 (Unreleased)
 
 ### Features Added
+- Added support for service version 2025-07-05.
+- Added support for OAuth authentication in `FileSystemClient`'s `get_file_system_access_policy`
+and `set_file_system_access_policy` APIs.
+- Added support for progress tracking to `DataLakeFileClient`'s `upload_data` and `download_file`
+APIs via a new optional callback, `progress_hook`.
+
+### Bugs Fixed
+- Fixed an issue where URL-safe encoding was inconsistent between Blob and Datalake paths.
 
 ## 12.20.0 (2025-03-27)
diff --git a/sdk/storage/azure-storage-file-datalake/assets.json b/sdk/storage/azure-storage-file-datalake/assets.json
index c0de211922c0..650376011e8c 100644
--- a/sdk/storage/azure-storage-file-datalake/assets.json
+++ b/sdk/storage/azure-storage-file-datalake/assets.json
@@ -2,5 +2,5 @@
   "AssetsRepo": "Azure/azure-sdk-assets",
   "AssetsRepoPrefixPath": "python",
   "TagPrefix": "python/storage/azure-storage-file-datalake",
-  "Tag": "python/storage/azure-storage-file-datalake_202e9f5227"
+  "Tag": "python/storage/azure-storage-file-datalake_a7ee812abc"
 }
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py
index 09f3fd7ed81a..eecd275fd815 100644
--- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py
+++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py
@@ -457,6 +457,11 @@ def upload_data(
             Defaults to 100*1024*1024, or 100MB.
         :keyword str encryption_context:
             Specifies the encryption context to set on the file.
+        :keyword progress_hook:
+            A callback to track the progress of a long-running upload. The signature is
+            function(current: int, total: int) where current is the number of bytes transferred
+            so far, and total is the total size of the upload.
+        :paramtype progress_hook: Callable[[int, int], None]
         :returns: A dictionary of response headers.
         :rtype: Dict[str, Any]
         """
@@ -683,6 +688,11 @@ def download_file(
             Maximum number of parallel connections to use when transferring the file in chunks.
             This option does not affect the underlying connection pool, and may require a separate
             configuration of the connection pool.
+ :keyword progress_hook: + A callback to track the progress of a long-running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, int], None] :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py index ae1c9c2d97cf..65729c812058 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py @@ -42,7 +42,7 @@ class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version is "filesystem". Note that overriding this default value may result in unsupported behavior. :paramtype resource: str :keyword version: Specifies the version of the operation to use for this request. Default value - is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + is "2025-05-05". Note that overriding this default value may result in unsupported behavior. :paramtype version: str """ diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py index ce7d9c28810d..cbd92ca6a0dc 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py @@ -30,13 +30,13 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst is "filesystem". Note that overriding this default value may result in unsupported behavior. :paramtype resource: str :keyword version: Specifies the version of the operation to use for this request. Default value - is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + is "2025-05-05". Note that overriding this default value may result in unsupported behavior. 
:paramtype version: str """ def __init__(self, url: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None: resource: Literal["filesystem"] = kwargs.pop("resource", "filesystem") - version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05") + version: Literal["2025-05-05"] = kwargs.pop("version", "2025-05-05") if url is None: raise ValueError("Parameter 'url' must not be None.") diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py index a066e16a64dd..f5187701d7be 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_serialization.py @@ -1,28 +1,10 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 # -------------------------------------------------------------------------- -# # Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pyright: reportUnnecessaryTypeIgnoreComment=false @@ -411,7 +393,7 @@ def from_dict( :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) @@ -1361,7 +1343,7 @@ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument # Iter and wrapped, should have found one node only (the wrap one) if len(children) != 1: raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + "Tried to deserialize an array not wrapped, and found several nodes '{}'. 
Maybe you should declare this array as wrapped?".format( xml_name ) ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py index ecfcec9b6dc3..29f7176b48c5 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py @@ -42,7 +42,7 @@ class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version is "filesystem". Note that overriding this default value may result in unsupported behavior. :paramtype resource: str :keyword version: Specifies the version of the operation to use for this request. Default value - is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + is "2025-05-05". Note that overriding this default value may result in unsupported behavior. :paramtype version: str """ diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py index 57b28d3b142c..8426a9ac3b96 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py @@ -30,13 +30,13 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst is "filesystem". Note that overriding this default value may result in unsupported behavior. :paramtype resource: str :keyword version: Specifies the version of the operation to use for this request. Default value - is "2025-01-05". Note that overriding this default value may result in unsupported behavior. + is "2025-05-05". Note that overriding this default value may result in unsupported behavior. :paramtype version: str """ def __init__(self, url: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None: resource: Literal["filesystem"] = kwargs.pop("resource", "filesystem") - version: Literal["2025-01-05"] = kwargs.pop("version", "2025-01-05") + version: Literal["2025-05-05"] = kwargs.pop("version", "2025-05-05") if url is None: raise ValueError("Parameter 'url' must not be None.") diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py index ee5629316afb..1c9d6f822826 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -71,7 +72,6 @@ async def create( properties: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create FileSystem. Create a FileSystem rooted at the specified location. 
If the FileSystem already exists, the @@ -83,7 +83,7 @@ async def create( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param properties: Optional. User-defined properties to be stored with the filesystem, in the @@ -158,12 +158,11 @@ async def set_properties( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Set FileSystem Properties. Set properties for the FileSystem. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -171,7 +170,7 @@ async def set_properties( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param properties: Optional. User-defined properties to be stored with the filesystem, in the @@ -248,7 +247,6 @@ async def set_properties( async def get_properties( self, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Get FileSystem Properties. All system and user-defined filesystem properties are specified in the response headers. @@ -259,7 +257,7 @@ async def get_properties( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -324,7 +322,6 @@ async def delete( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Delete FileSystem. Marks the FileSystem for deletion. When a FileSystem is deleted, a FileSystem with the same @@ -335,7 +332,7 @@ async def delete( directories within the filesystem, will fail with status code 404 (Not Found) while the filesystem is being deleted. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -343,7 +340,7 @@ async def delete( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param modified_access_conditions: Parameter group. Default value is None. 
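Several of the filesystem operations above (create, set/get properties, delete) honor conditional HTTP requests through the `ModifiedAccessConditions` parameter group. As a hedged illustration (not part of this diff), a caller would normally drive these through the public `FileSystemClient`, which forwards `etag`/`match_condition` keywords into that group; the account URL, file system name, and credential below are placeholders:

```python
from azure.core import MatchConditions
from azure.storage.filedatalake import FileSystemClient

fs = FileSystemClient(
    account_url="https://myaccount.dfs.core.windows.net",  # placeholder account
    file_system_name="myfilesystem",                       # placeholder name
    credential="<account-key-or-token-credential>",        # placeholder credential
)
props = fs.get_file_system_properties()
# Delete only if nothing has modified the filesystem since the etag was read.
fs.delete_file_system(etag=props.etag, match_condition=MatchConditions.IfNotModified)
```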
@@ -416,7 +413,6 @@ async def list_paths( upn: Optional[bool] = None, **kwargs: Any ) -> _models.PathList: - # pylint: disable=line-too-long """List Paths. List FileSystem paths and their properties. @@ -429,7 +425,7 @@ async def list_paths( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param continuation: Optional. When deleting a directory, the number of paths that are deleted @@ -525,7 +521,6 @@ async def list_blob_hierarchy_segment( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.ListBlobsHierarchySegmentResponse: - # pylint: disable=line-too-long """The List Blobs operation returns a list of the blobs under the specified container. :param prefix: Filters results to filesystems within the specified prefix. Default value is @@ -555,7 +550,7 @@ async def list_blob_hierarchy_segment( :type showonly: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py index d3ed5c3ca469..48a26493b58a 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -100,14 +100,13 @@ async def create( cpk_info: Optional[_models.CpkInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create File | Create Directory | Rename File | Rename Directory. Create or rename a file or directory. By default, the destination is overwritten and if the destination already exists and has a lease the lease is broken. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. To fail if the destination already exists, use a conditional request with If-None-Match: "*". :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character @@ -116,7 +115,7 @@ async def create( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param resource: Required only for Create File and Create Directory. 
The value must be "file" @@ -357,7 +356,6 @@ async def update( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> Optional[_models.SetAccessControlRecursiveResponse]: - # pylint: disable=line-too-long """Append Data | Flush Data | Set Properties | Set Access Control. Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file, @@ -365,7 +363,7 @@ async def update( can only be appended to a file. Concurrent writes to the same file using multiple clients are not supported. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param action: The action must be "append" to upload data to be appended to a file, "flush" to flush previously uploaded data to a file, "setProperties" to set the properties of a file or @@ -390,7 +388,7 @@ async def update( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param max_records: Optional. Valid for "SetAccessControlRecursive" operation. It specifies the @@ -622,13 +620,12 @@ async def lease( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Lease Path. Create and manage a lease to restrict write and delete access to the path. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param x_ms_lease_action: There are five lease actions: "acquire", "break", "change", "renew", and "release". Use "acquire" and specify the "x-ms-proposed-lease-id" and "x-ms-lease-duration" @@ -647,7 +644,7 @@ async def lease( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param x_ms_lease_break_period: The lease break period duration is optional to break a lease, @@ -762,13 +759,12 @@ async def read( cpk_info: Optional[_models.CpkInfo] = None, **kwargs: Any ) -> AsyncIterator[bytes]: - # pylint: disable=line-too-long """Read File. Read the contents of a file. For read operations, range requests are supported. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -776,7 +772,7 @@ async def read( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. 
:type timeout: int :param range: The HTTP Range request header specifies one or more byte ranges of the resource @@ -955,14 +951,13 @@ async def get_properties( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Get Properties | Get Status | Get Access Control List. Get Properties returns all system and user defined properties for a path. Get Status returns all system defined properties for a path. Get Access Control List returns the access control list for a path. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -970,7 +965,7 @@ async def get_properties( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param action: Optional. If the value is "getStatus" only the system defined properties for the @@ -1073,6 +1068,22 @@ async def get_properties( response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-context"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-context") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) if cls: return cls(pipeline_response, None, response_headers) # type: ignore @@ -1089,12 +1100,11 @@ async def delete( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Delete File | Delete Directory. Delete the file or directory. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -1102,7 +1112,7 @@ async def delete( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. 
:type timeout: int :param recursive: Required. Default value is None. @@ -1213,12 +1223,11 @@ async def set_access_control( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Set the owner, group, permissions, or access control list for a path. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param owner: Optional. The owner of the blob or directory. Default value is None. @@ -1332,7 +1341,6 @@ async def set_access_control_recursive( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.SetAccessControlRecursiveResponse: - # pylint: disable=line-too-long """Set the access control list for a path and sub-paths. :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify" @@ -1342,7 +1350,7 @@ async def set_access_control_recursive( :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param continuation: Optional. When deleting a directory, the number of paths that are deleted @@ -1452,12 +1460,11 @@ async def flush_data( cpk_info: Optional[_models.CpkInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Set the owner, group, permissions, or access control list for a path. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param position: This parameter allows the caller to upload data in parallel and control the @@ -1651,7 +1658,6 @@ async def append_data( cpk_info: Optional[_models.CpkInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Append data to the file. :param body: Initial data. Required. @@ -1667,7 +1673,7 @@ async def append_data( :type position: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush @@ -1817,7 +1823,6 @@ async def set_expiry( expires_on: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets the time a blob will expire and be deleted. :param expiry_options: Required. Indicates mode of the expiry time. Known values are: @@ -1825,7 +1830,7 @@ async def set_expiry( :type expiry_options: str or ~azure.storage.filedatalake.models.PathExpiryOptions :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character @@ -1898,12 +1903,11 @@ async def undelete( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Undelete a path that was previously soft deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param undelete_source: Only for hierarchical namespace enabled accounts. Optional. The path of diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py index 0e0243e91b90..64c5f2b9ea0c 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -67,7 +68,6 @@ def list_file_systems( timeout: Optional[int] = None, **kwargs: Any ) -> AsyncIterable["_models.FileSystem"]: - # pylint: disable=line-too-long """List FileSystems. List filesystems and their properties in given account. @@ -91,7 +91,7 @@ def list_file_systems( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :return: An iterator like instance of either FileSystem or the result of cls(response) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py index 235402a1d450..56f804b14c32 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
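The hunks below continue the `x-ms-version` bump from `2025-01-05` to `2025-05-05` across the generated request builders. Rather than overriding the generated default, callers can pin a supported service version end to end through the public client's `api_version` keyword; a minimal sketch, with placeholder account URL and credential:

```python
from azure.storage.filedatalake import DataLakeServiceClient

service = DataLakeServiceClient(
    account_url="https://myaccount.dfs.core.windows.net",  # placeholder
    credential="<account-key-or-token-credential>",        # placeholder
    api_version="2025-05-05",  # must be one of the versions listed in _serialize.py
)
```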
@@ -50,7 +51,7 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -91,7 +92,7 @@ def build_set_properties_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -129,7 +130,7 @@ def build_get_properties_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -167,7 +168,7 @@ def build_delete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -212,7 +213,7 @@ def build_list_paths_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -264,7 +265,7 @@ def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -331,7 +332,6 @@ def create( # pylint: disable=inconsistent-return-statements properties: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create FileSystem. Create a FileSystem rooted at the specified location. If the FileSystem already exists, the @@ -343,7 +343,7 @@ def create( # pylint: disable=inconsistent-return-statements :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param properties: Optional. User-defined properties to be stored with the filesystem, in the @@ -418,12 +418,11 @@ def set_properties( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Set FileSystem Properties. Set properties for the FileSystem. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -431,7 +430,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param properties: Optional. User-defined properties to be stored with the filesystem, in the @@ -508,7 +507,6 @@ def set_properties( # pylint: disable=inconsistent-return-statements def get_properties( # pylint: disable=inconsistent-return-statements self, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Get FileSystem Properties. All system and user-defined filesystem properties are specified in the response headers. @@ -519,7 +517,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -584,7 +582,6 @@ def delete( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Delete FileSystem. Marks the FileSystem for deletion. When a FileSystem is deleted, a FileSystem with the same @@ -595,7 +592,7 @@ def delete( # pylint: disable=inconsistent-return-statements directories within the filesystem, will fail with status code 404 (Not Found) while the filesystem is being deleted. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -603,7 +600,7 @@ def delete( # pylint: disable=inconsistent-return-statements :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param modified_access_conditions: Parameter group. Default value is None. @@ -676,7 +673,6 @@ def list_paths( upn: Optional[bool] = None, **kwargs: Any ) -> _models.PathList: - # pylint: disable=line-too-long """List Paths. List FileSystem paths and their properties. @@ -689,7 +685,7 @@ def list_paths( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param continuation: Optional. When deleting a directory, the number of paths that are deleted @@ -785,7 +781,6 @@ def list_blob_hierarchy_segment( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.ListBlobsHierarchySegmentResponse: - # pylint: disable=line-too-long """The List Blobs operation returns a list of the blobs under the specified container. :param prefix: Filters results to filesystems within the specified prefix. Default value is @@ -815,7 +810,7 @@ def list_blob_hierarchy_segment( :type showonly: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py index b6d6a0a9484e..4aa54b23a8cb 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
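The `list_paths` operation documented above is exposed publicly as `FileSystemClient.get_paths`, which pages through `PathProperties` items. A hedged usage sketch, assuming placeholder account, file system, and directory names:

```python
from azure.storage.filedatalake import FileSystemClient

fs = FileSystemClient(
    account_url="https://myaccount.dfs.core.windows.net",  # placeholder
    file_system_name="myfilesystem",                       # placeholder
    credential="<account-key-or-token-credential>",        # placeholder
)
# Walk a directory recursively; each item reports whether it is a directory.
for path in fs.get_paths(path="mydir", recursive=True):
    kind = "dir" if path.is_directory else f"{path.content_length} bytes"
    print(path.name, kind)
```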
@@ -84,7 +84,7 @@ def build_create_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -219,7 +219,7 @@ def build_update_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -318,7 +318,7 @@ def build_lease_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -381,7 +381,7 @@ def build_read_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -446,7 +446,7 @@ def build_get_properties_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -502,7 +502,7 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -562,7 +562,7 @@ def build_set_access_control_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) action: Literal["setAccessControl"] = kwargs.pop("action", _params.pop("action", "setAccessControl")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -623,7 +623,7 @@ def build_set_access_control_recursive_request( # pylint: disable=name-too-long action: Literal["setAccessControlRecursive"] = kwargs.pop( "action", _params.pop("action", "setAccessControlRecursive") ) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: 
Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -689,7 +689,7 @@ def build_flush_data_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) action: Literal["flush"] = kwargs.pop("action", _params.pop("action", "flush")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -785,7 +785,7 @@ def build_append_data_request( action: Literal["append"] = kwargs.pop("action", _params.pop("action", "append")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -861,7 +861,7 @@ def build_set_expiry_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -901,7 +901,7 @@ def build_undelete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -977,14 +977,13 @@ def create( # pylint: disable=inconsistent-return-statements cpk_info: Optional[_models.CpkInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create File | Create Directory | Rename File | Rename Directory. Create or rename a file or directory. By default, the destination is overwritten and if the destination already exists and has a lease the lease is broken. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. To fail if the destination already exists, use a conditional request with If-None-Match: "*". :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character @@ -993,7 +992,7 @@ def create( # pylint: disable=inconsistent-return-statements :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param resource: Required only for Create File and Create Directory. 
The value must be "file" @@ -1234,7 +1233,6 @@ def update( modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> Optional[_models.SetAccessControlRecursiveResponse]: - # pylint: disable=line-too-long """Append Data | Flush Data | Set Properties | Set Access Control. Uploads data to be appended to a file, flushes (writes) previously uploaded data to a file, @@ -1242,7 +1240,7 @@ def update( can only be appended to a file. Concurrent writes to the same file using multiple clients are not supported. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param action: The action must be "append" to upload data to be appended to a file, "flush" to flush previously uploaded data to a file, "setProperties" to set the properties of a file or @@ -1267,7 +1265,7 @@ def update( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param max_records: Optional. Valid for "SetAccessControlRecursive" operation. It specifies the @@ -1499,13 +1497,12 @@ def lease( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Lease Path. Create and manage a lease to restrict write and delete access to the path. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param x_ms_lease_action: There are five lease actions: "acquire", "break", "change", "renew", and "release". Use "acquire" and specify the "x-ms-proposed-lease-id" and "x-ms-lease-duration" @@ -1524,7 +1521,7 @@ def lease( # pylint: disable=inconsistent-return-statements :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param x_ms_lease_break_period: The lease break period duration is optional to break a lease, @@ -1639,13 +1636,12 @@ def read( cpk_info: Optional[_models.CpkInfo] = None, **kwargs: Any ) -> Iterator[bytes]: - # pylint: disable=line-too-long """Read File. Read the contents of a file. For read operations, range requests are supported. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -1653,7 +1649,7 @@ def read( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. 
:type timeout: int :param range: The HTTP Range request header specifies one or more byte ranges of the resource @@ -1832,14 +1828,13 @@ def get_properties( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Get Properties | Get Status | Get Access Control List. Get Properties returns all system and user defined properties for a path. Get Status returns all system defined properties for a path. Get Access Control List returns the access control list for a path. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -1847,7 +1842,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param action: Optional. If the value is "getStatus" only the system defined properties for the @@ -1950,6 +1945,22 @@ def get_properties( # pylint: disable=inconsistent-return-statements response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration")) response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state")) response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status")) + response_headers["x-ms-server-encrypted"] = self._deserialize( + "bool", response.headers.get("x-ms-server-encrypted") + ) + response_headers["x-ms-encryption-key-sha256"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-key-sha256") + ) + response_headers["x-ms-encryption-context"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-context") + ) + response_headers["x-ms-encryption-scope"] = self._deserialize( + "str", response.headers.get("x-ms-encryption-scope") + ) + response_headers["x-ms-creation-time"] = self._deserialize( + "rfc-1123", response.headers.get("x-ms-creation-time") + ) + response_headers["x-ms-expiry-time"] = self._deserialize("rfc-1123", response.headers.get("x-ms-expiry-time")) if cls: return cls(pipeline_response, None, response_headers) # type: ignore @@ -1966,12 +1977,11 @@ def delete( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Delete File | Delete Directory. Delete the file or directory. This operation supports conditional HTTP requests. For more information, see `Specifying Conditional Headers for Blob Service Operations - `_. + `_. :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -1979,7 +1989,7 @@ def delete( # pylint: disable=inconsistent-return-statements :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param recursive: Required. Default value is None. @@ -2090,12 +2100,11 @@ def set_access_control( # pylint: disable=inconsistent-return-statements modified_access_conditions: Optional[_models.ModifiedAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Set the owner, group, permissions, or access control list for a path. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param owner: Optional. The owner of the blob or directory. Default value is None. @@ -2209,7 +2218,6 @@ def set_access_control_recursive( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> _models.SetAccessControlRecursiveResponse: - # pylint: disable=line-too-long """Set the access control list for a path and sub-paths. :param mode: Mode "set" sets POSIX access control rights on files and directories, "modify" @@ -2219,7 +2227,7 @@ def set_access_control_recursive( :type mode: str or ~azure.storage.filedatalake.models.PathSetAccessControlRecursiveMode :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param continuation: Optional. When deleting a directory, the number of paths that are deleted @@ -2329,12 +2337,11 @@ def flush_data( # pylint: disable=inconsistent-return-statements cpk_info: Optional[_models.CpkInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Set the owner, group, permissions, or access control list for a path. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param position: This parameter allows the caller to upload data in parallel and control the @@ -2528,7 +2535,6 @@ def append_data( # pylint: disable=inconsistent-return-statements cpk_info: Optional[_models.CpkInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Append data to the file. :param body: Initial data. Required. @@ -2544,7 +2550,7 @@ def append_data( # pylint: disable=inconsistent-return-statements :type position: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param content_length: Required for "Append Data" and "Flush Data". Must be 0 for "Flush @@ -2694,7 +2700,6 @@ def set_expiry( # pylint: disable=inconsistent-return-statements expires_on: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets the time a blob will expire and be deleted. :param expiry_options: Required. 
Indicates mode of the expiry time. Known values are: @@ -2702,7 +2707,7 @@ def set_expiry( # pylint: disable=inconsistent-return-statements :type expiry_options: str or ~azure.storage.filedatalake.models.PathExpiryOptions :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character @@ -2775,12 +2780,11 @@ def undelete( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Undelete a path that was previously soft deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. :type timeout: int :param undelete_source: Only for hierarchical namespace enabled accounts. Optional. The path of diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py index f0baeb32c2d0..0312c17bbd3f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -52,7 +53,7 @@ def build_list_file_systems_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["account"] = kwargs.pop("resource", _params.pop("resource", "account")) - version: Literal["2025-01-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-01-05")) + version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -114,7 +115,6 @@ def list_file_systems( timeout: Optional[int] = None, **kwargs: Any ) -> Iterable["_models.FileSystem"]: - # pylint: disable=line-too-long """List FileSystems. List filesystems and their properties in given account. @@ -138,7 +138,7 @@ def list_file_systems( :type request_id_parameter: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations">Setting Timeouts for Blob Service Operations.`. Default value is None. 
:type timeout: int :return: An iterator like instance of either FileSystem or the result of cls(response) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client_helpers.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client_helpers.py index 645c4046b663..0ef3bad262f9 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client_helpers.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client_helpers.py @@ -46,7 +46,7 @@ def _parse_url(account_url: str) -> "ParseResult": def _format_url(scheme: str, hostname: str, file_system_name: Union[str, bytes], path_name: str, query_str: str) -> str: if isinstance(file_system_name, str): file_system_name = file_system_name.encode('UTF-8') - return f"{scheme}://{hostname}/{quote(file_system_name)}/{quote(path_name, safe='~')}{query_str}" + return f"{scheme}://{hostname}/{quote(file_system_name)}/{quote(path_name, safe='~/')}{query_str}" def _create_path_options( diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py index 72c89ca8c2f9..3b462db8a484 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_serialize.py @@ -54,6 +54,7 @@ '2024-11-04', '2025-01-05', '2025-05-05', + '2025-07-05', ] # This list must be in chronological order! diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_upload_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_upload_helper.py index c9673f90f9dc..d9215a46cc5d 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_upload_helper.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_upload_helper.py @@ -53,6 +53,7 @@ def upload_datalake_file( modified_access_conditions = kwargs.pop('modified_access_conditions', None) chunk_size = kwargs.pop('chunk_size', 100 * 1024 * 1024) encryption_context = kwargs.pop('encryption_context', None) + progress_hook = kwargs.pop('progress_hook', None) if not overwrite: # if customers didn't specify access conditions, they cannot flush data to existing file @@ -95,6 +96,7 @@ def upload_datalake_file( stream=stream, max_concurrency=max_concurrency, validate_content=validate_content, + progress_hook=progress_hook, **kwargs ) else: @@ -106,6 +108,7 @@ def upload_datalake_file( max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, + progress_hook=progress_hook, **kwargs ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index 0c56f364cadf..e48cd606de26 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -473,7 +473,12 @@ async def upload_data( Defaults to 100*1024*1024, or 100MB. :keyword str encryption_context: Specifies the encryption context to set on the file. - :return: A dictionary of response headers. + :keyword progress_hook: + A callback to track the progress of a long-running upload. 
The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the upload. + :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]] + :returns: A dictionary of response headers. + :rtype: Dict[str, Any] """ options = _upload_options( @@ -699,6 +704,11 @@ async def download_file( Maximum number of parallel connections to use when transferring the file in chunks. This option does not affect the underlying connection pool, and may require a separate configuration of the connection pool. + :keyword progress_hook: + A callback to track the progress of a long-running download. The signature is + function(current: int, total: int) where current is the number of bytes transferred + so far, and total is the total size of the download. + :paramtype progress_hook: Callable[[int, Optional[int]], Awaitable[None]] :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py index c51c9a0df3fb..ebef79ee1514 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_upload_helper.py @@ -53,6 +53,7 @@ async def upload_datalake_file( modified_access_conditions = kwargs.pop('modified_access_conditions', None) chunk_size = kwargs.pop('chunk_size', 100 * 1024 * 1024) encryption_context = kwargs.pop('encryption_context', None) + progress_hook = kwargs.pop('progress_hook', None) if not overwrite: # if customers didn't specify access conditions, they cannot flush data to existing file @@ -95,6 +96,7 @@ async def upload_datalake_file( stream=stream, max_concurrency=max_concurrency, validate_content=validate_content, + progress_hook=progress_hook, **kwargs ) else: @@ -106,6 +108,7 @@ async def upload_datalake_file( max_concurrency=max_concurrency, stream=stream, validate_content=validate_content, + progress_hook=progress_hook, **kwargs ) diff --git a/sdk/storage/azure-storage-file-datalake/swagger/README.md b/sdk/storage/azure-storage-file-datalake/swagger/README.md index 1d5a13243f99..c2e67f7507c0 100644 --- a/sdk/storage/azure-storage-file-datalake/swagger/README.md +++ b/sdk/storage/azure-storage-file-datalake/swagger/README.md @@ -16,7 +16,7 @@ autorest --v3 --python ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Azure.Storage.Files.DataLake/stable/2025-01-05/DataLakeStorage.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Azure.Storage.Files.DataLake/stable/2025-05-05/DataLakeStorage.json output-folder: ../azure/storage/filedatalake/_generated namespace: azure.storage.filedatalake no-namespace-folders: true diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file.py b/sdk/storage/azure-storage-file-datalake/tests/test_file.py index 8abd5731d0d4..e01aaa5bc854 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file.py @@ -37,7 +37,7 @@ from devtools_testutils import recorded_by_proxy from devtools_testutils.storage import
StorageRecordedTestCase from settings.testcase import DataLakePreparer -from test_helpers import MockStorageTransport +from test_helpers import MockStorageTransport, ProgressTracker # ------------------------------------------------------------------------------ @@ -1692,6 +1692,33 @@ def test_mock_transport_with_content_validation(self, **kwargs): file_data = file_client.download_file(validate_content=True).read() assert file_data == b"Hello World!" # data is fixed by mock transport + @DataLakePreparer() + @recorded_by_proxy + def test_progress_hook_upload_data(self, **kwargs): + datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") + datalake_storage_account_key = kwargs.pop("datalake_storage_account_key") + + # Arrange + self._setUp(datalake_storage_account_name, datalake_storage_account_key) + file_client = self._create_file_and_return_client( + directory=self._get_directory_reference(), + file=self._get_file_reference() + ) + data = self.get_random_bytes(8 * 1024) + progress = ProgressTracker(len(data), 1024) + + # Act + file_client.upload_data( + data, + overwrite=True, + progress_hook=progress.assert_progress, + max_concurrency=1, + chunk_size=1024 + ) + + # Assert + progress.assert_complete() + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py index c6e83956a5be..29271e90e832 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py @@ -35,7 +35,7 @@ from devtools_testutils.aio import recorded_by_proxy_async from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase from settings.testcase import DataLakePreparer -from test_helpers_async import AsyncStream, MockStorageTransport +from test_helpers_async import AsyncStream, MockStorageTransport, ProgressTracker # ------------------------------------------------------------------------------ TEST_DIRECTORY_PREFIX = 'directory' @@ -1594,6 +1594,33 @@ async def test_mock_transport_with_content_validation(self, **kwargs): file_data = await (await file_client.download_file(validate_content=True)).read() assert file_data == b"Hello Async World!" 
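The recorded tests above drive the new `progress_hook` keyword through `upload_data`. As a plain usage sketch outside the test harness (the account URL, file system, path, and key are placeholder assumptions; the callback shape follows the `(current, total)` signature documented in this diff):

```python
from typing import Optional

from azure.storage.filedatalake import DataLakeFileClient

def on_progress(current: int, total: Optional[int]) -> None:
    # current: bytes transferred so far; total: total upload size, if known.
    print(f"uploaded {current} of {total} bytes")

# Hypothetical connection details for illustration only.
file_client = DataLakeFileClient(
    account_url="https://myaccount.dfs.core.windows.net",
    file_system_name="my-filesystem",
    file_path="dir/data.bin",
    credential="<account-key>",
)
file_client.upload_data(
    b"x" * (8 * 1024),
    overwrite=True,
    chunk_size=1024,     # small chunks so the hook fires more than once
    max_concurrency=1,   # sequential chunks keep the callbacks ordered
    progress_hook=on_progress,
)
```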
# data is fixed by mock transport + @DataLakePreparer() + @recorded_by_proxy_async + async def test_progress_hook_upload_data(self, **kwargs): + datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") + datalake_storage_account_key = kwargs.pop("datalake_storage_account_key") + + # Arrange + await self._setUp(datalake_storage_account_name, datalake_storage_account_key) + file_client = await self._create_file_and_return_client( + directory=self._get_directory_reference(), + file=self._get_file_reference() + ) + data = self.get_random_bytes(8 * 1024) + progress = ProgressTracker(len(data), 1024) + + # Act + await file_client.upload_data( + data, + overwrite=True, + progress_hook=progress.assert_progress, + max_concurrency=1, + chunk_size=1024 + ) + + # Assert + progress.assert_complete() + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py index fe4da0ab2252..d37d41b6e666 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_system.py @@ -1127,6 +1127,28 @@ def test_bad_audience_service_client(self, **kwargs): fsc.exists() fsc.create_directory('testdir22') + @DataLakePreparer() + @recorded_by_proxy + def test_get_and_set_access_control_oauth(self, **kwargs): + datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") + + # Arrange + token_credential = self.get_credential(DataLakeServiceClient) + dsc = DataLakeServiceClient( + self.account_url(datalake_storage_account_name, 'dfs'), + token_credential + ) + file_system = dsc.create_file_system(self.get_resource_name(TEST_FILE_SYSTEM_PREFIX)) + directory_client = file_system._get_root_directory_client() + + # Act + acl = 'user::rwx,group::r-x,other::rwx' + directory_client.set_access_control(acl=acl) + access_control = directory_client.get_access_control() + + # Assert + assert acl == access_control['acl'] + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py index 86165ec18d0b..5185e15b4a9c 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_system_async.py @@ -1257,6 +1257,28 @@ async def test_bad_audience_service_client(self, **kwargs): await fsc.exists() await fsc.create_directory('testdir22') + @DataLakePreparer() + @recorded_by_proxy_async + async def test_get_and_set_access_control_oauth(self, **kwargs): + datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") + + # Arrange + token_credential = self.get_credential(DataLakeServiceClient, is_async=True) + dsc = DataLakeServiceClient( + self.account_url(datalake_storage_account_name, 'dfs'), + token_credential + ) + file_system = await dsc.create_file_system(self.get_resource_name(TEST_FILE_SYSTEM_PREFIX)) + directory_client = file_system._get_root_directory_client() + + # Act + acl = 'user::rwx,group::r-x,other::rwx' + await directory_client.set_access_control(acl=acl) + access_control = await directory_client.get_access_control() + + # Assert + assert acl == access_control['acl'] + # 
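The new recorded tests above exercise `set_access_control`/`get_access_control` on the file system's root directory while authenticated with a token credential. A hedged sketch of the same flow outside the test harness: the account name is a placeholder, `DefaultAzureCredential` needs an RBAC role that permits ACL operations, and the public `get_directory_client("/")` stands in for the internal `_get_root_directory_client` helper the tests call.

```python
from azure.identity import DefaultAzureCredential
from azure.storage.filedatalake import DataLakeServiceClient

service = DataLakeServiceClient(
    "https://myaccount.dfs.core.windows.net",
    credential=DefaultAzureCredential(),
)
file_system = service.create_file_system("my-filesystem")
directory_client = file_system.get_directory_client("/")  # root directory

acl = "user::rwx,group::r-x,other::rwx"
directory_client.set_access_control(acl=acl)
assert directory_client.get_access_control()["acl"] == acl
```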
------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_helpers.py b/sdk/storage/azure-storage-file-datalake/tests/test_helpers.py index adb32eb9a290..377e1081c9e4 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_helpers.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_helpers.py @@ -4,7 +4,7 @@ # license information. # -------------------------------------------------------------------------- -from typing import Any, Dict +from typing import Any, Dict, Optional from typing_extensions import Self from urllib.parse import urlparse @@ -14,6 +14,24 @@ from urllib3 import HTTPResponse +class ProgressTracker: + def __init__(self, total: int, step: int): + self.total = total + self.step = step + self.current = 0 + + def assert_progress(self, current: int, total: Optional[int]): + if self.current != self.total: + self.current += self.step + + if total: + assert self.total == total + assert self.current == current + + def assert_complete(self): + assert self.total == self.current + + class MockHttpClientResponse(Response): def __init__( self, url: str, diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_helpers_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_helpers_async.py index b0b7720bfc15..8b0185e6eb85 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_helpers_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_helpers_async.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -from typing import Any, Dict +from typing import Any, Dict, Optional from urllib.parse import urlparse from azure.core.pipeline.transport import AioHttpTransportResponse, AsyncHttpTransport @@ -11,6 +11,24 @@ from aiohttp import ClientResponse +class ProgressTracker: + def __init__(self, total: int, step: int): + self.total = total + self.step = step + self.current = 0 + + async def assert_progress(self, current: int, total: Optional[int]): + if self.current != self.total: + self.current += self.step + + if total: + assert self.total == total + assert self.current == current + + def assert_complete(self): + assert self.total == self.current + + class AsyncStream: def __init__(self, data: bytes): self._data = data diff --git a/sdk/storage/azure-storage-file-share/CHANGELOG.md b/sdk/storage/azure-storage-file-share/CHANGELOG.md index 3d51f48626e2..b452e547cc30 100644 --- a/sdk/storage/azure-storage-file-share/CHANGELOG.md +++ b/sdk/storage/azure-storage-file-share/CHANGELOG.md @@ -3,6 +3,9 @@ ## 12.22.0b1 (Unreleased) ### Features Added +- Added support for service version 2025-07-05. +- Added new `create_symlink` and `get_symlink` APIs to `ShareFileClient` to +create/get symbolic links to specified files. These APIs are only supported for the `NFS` protocol.
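As a usage sketch for the symbolic-link support announced above (the wrappers added to `ShareFileClient` later in this diff are `create_symlink` and `get_symlink`): the account, share, paths, and credential below are placeholder assumptions, and the share must use the NFS protocol.

```python
from azure.storage.fileshare import ShareFileClient

# Hypothetical NFS share and credentials for illustration only.
link_client = ShareFileClient(
    account_url="https://myaccount.file.core.windows.net",
    share_name="my-nfs-share",    # must be an NFS share
    file_path="links/data-link",  # path the symbolic link will live at
    credential="<account-key>",
)

# Point the new path at an existing file; relative and absolute targets are allowed.
created = link_client.create_symlink("../original/data.bin")
print(created["etag"])

# Read back the link's properties (the response headers for the symbolic link itself).
print(link_client.get_symlink())
```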
## 12.21.0 (2025-03-11) diff --git a/sdk/storage/azure-storage-file-share/assets.json b/sdk/storage/azure-storage-file-share/assets.json index e23532bd7b28..6d9937df9054 100644 --- a/sdk/storage/azure-storage-file-share/assets.json +++ b/sdk/storage/azure-storage-file-share/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-file-share", - "Tag": "python/storage/azure-storage-file-share_adb38dd356" + "Tag": "python/storage/azure-storage-file-share_1350cebb03" } diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py index e2fc4f119de2..3ecf241b4044 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py @@ -1737,3 +1737,84 @@ def create_hardlink( )) except HttpResponseError as error: process_storage_error(error) + + @distributed_trace + def create_symlink( + self, target: str, + *, + metadata: Optional[Dict[str, str]] = None, + file_creation_time: Optional[Union[str, datetime]] = None, + file_last_write_time: Optional[Union[str, datetime]] = None, + owner: Optional[str] = None, + group: Optional[str] = None, + lease: Optional[Union[ShareLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: + """NFS only. Creates a symbolic link to the specified file. + + :param str target: + Specifies the file path the symbolic link will point to. The file path can be either relative or absolute. + :keyword dict[str, str] metadata: + Name-value pairs associated with the file as metadata. + :keyword file_creation_time: Creation time for the file. + :paramtype file_creation_time: str or ~datetime.datetime + :keyword file_last_write_time: Last write time for the file. + :paramtype file_last_write_time: str or ~datetime.datetime + :keyword str owner: The owner of the file. + :keyword str group: The owning group of the file. + :keyword lease: + Required if the file has an active lease. Value can be a ShareLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.fileshare.ShareLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-share#other-client--per-operation-configuration>`__. + :returns: File-updated property dict (ETag and last modified). + :rtype: dict[str, Any] + """ + try: + return cast(Dict[str, Any], self._client.file.create_symbolic_link( + link_text=target, + metadata=metadata, + file_creation_time=file_creation_time, + file_last_write_time=file_last_write_time, + owner=owner, + group=group, + lease_access_conditions=lease, + timeout=timeout, + cls=return_response_headers, + **kwargs + )) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace + def get_symlink( + self, + *, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: + """NFS only. Gets the symbolic link for the file client. + + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations.
+ This value is not tracked or validated on the client. To configure client-side network timeouts + see `here <https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/storage/azure-storage-file-share#other-client--per-operation-configuration>`__. + :returns: File-updated property dict (ETag and last modified). + :rtype: dict[str, Any] + """ + try: + return cast(Dict[str, Any], self._client.file.get_symbolic_link( + timeout=timeout, + cls=return_response_headers, + **kwargs + )) + except HttpResponseError as error: + process_storage_error(error) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py index a066e16a64dd..7a0232de5ddc 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/_serialization.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. @@ -411,7 +411,7 @@ def from_dict( :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) @@ -1361,7 +1361,7 @@ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument # Iter and wrapped, should have found one node only (the wrap one) if len(children) != 1: raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( xml_name ) ) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py index 83a849858ad9..e7da0373f8d8 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -82,12 +82,11 @@ async def create( file_mode: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a new directory under the specified share or parent directory. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`<a href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.</a>`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object.
Default value is @@ -222,7 +221,6 @@ async def create( async def get_properties( self, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns all system properties for the specified directory, and can also be used to check the existence of a directory. The data returned does not include the files in the directory or any subdirectories. @@ -232,7 +230,7 @@ async def get_properties( :type sharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -315,13 +313,12 @@ async def get_properties( @distributed_trace_async async def delete(self, timeout: Optional[int] = None, **kwargs: Any) -> None: - # pylint: disable=line-too-long """Removes the specified empty directory. Note that the directory must be empty before it can be deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -390,12 +387,11 @@ async def set_properties( file_mode: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets properties on the directory. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param file_permission: If specified the permission (security descriptor) shall be set for the @@ -527,12 +523,11 @@ async def set_properties( async def set_metadata( self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Updates user defined metadata for the specified directory. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -607,7 +602,6 @@ async def list_files_and_directories_segment( include_extended_info: Optional[bool] = None, **kwargs: Any ) -> _models.ListFilesAndDirectoriesSegmentResponse: - # pylint: disable=line-too-long """Returns a list of files or directories under the specified share or directory. It lists the contents only for a single level of the directory hierarchy. @@ -628,7 +622,7 @@ async def list_files_and_directories_segment( :type maxresults: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. 
:type timeout: int :param include: Include this parameter to specify one or more datasets to include in the @@ -709,7 +703,6 @@ async def list_handles( recursive: Optional[bool] = None, **kwargs: Any ) -> _models.ListHandlesResponse: - # pylint: disable=line-too-long """Lists handles for directory. :param marker: A string value that identifies the portion of the list to be returned with the @@ -723,7 +716,7 @@ async def list_handles( :type maxresults: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -801,7 +794,6 @@ async def force_close_handles( recursive: Optional[bool] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Closes all handles open for given directory. :param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk @@ -809,7 +801,7 @@ async def force_close_handles( :type handle_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param marker: A string value that identifies the portion of the list to be returned with the @@ -900,7 +892,6 @@ async def rename( copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Renames a directory. :param rename_source: Required. Specifies the URI-style path of the source file, up to 2 KB in @@ -908,7 +899,7 @@ async def rename( :type rename_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param replace_if_exists: Optional. A boolean value for if the destination file already exists, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py index 93f91f246035..e6b6e33af658 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -100,14 +100,13 @@ async def create( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a new file or replaces a file. Note it only initializes the file with no content. :param file_content_length: Specifies the maximum size for the file, up to 4 TB. Required. 
:type file_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -281,12 +280,11 @@ async def download( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> AsyncIterator[bytes]: - # pylint: disable=line-too-long """Reads or downloads a file from the system, including its metadata and properties. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param range: Return file data only from the specified byte range. Default value is None. @@ -429,7 +427,6 @@ async def get_properties( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns all user-defined metadata, standard HTTP properties, and system properties for the file. It does not return the content of the file. @@ -438,7 +435,7 @@ async def get_properties( :type sharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -553,12 +550,11 @@ async def delete( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """removes the file from the storage account. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -636,12 +632,11 @@ async def set_http_headers( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets HTTP headers on the file. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param file_content_length: Resizes a file to the specified size. If the specified byte value @@ -809,12 +804,11 @@ async def set_metadata( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Updates user-defined metadata for the specified file. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. 
Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -891,13 +885,12 @@ async def acquire_lease( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease File operation establishes and manages a lock on a file for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a @@ -977,7 +970,6 @@ async def acquire_lease( async def release_lease( self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease File operation establishes and manages a lock on a file for write and delete operations. @@ -985,7 +977,7 @@ async def release_lease( :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character @@ -1060,7 +1052,6 @@ async def change_lease( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease File operation establishes and manages a lock on a file for write and delete operations. @@ -1068,7 +1059,7 @@ async def change_lease( :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns @@ -1148,13 +1139,12 @@ async def break_lease( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease File operation establishes and manages a lock on a file for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character @@ -1242,7 +1232,6 @@ async def upload_range( optionalbody: Optional[IO[bytes]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Upload a range of bytes to a file. :param range: Specifies the range of bytes to be written. Both the start and end of the range @@ -1257,7 +1246,7 @@ async def upload_range( :type content_length: int :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param file_range_write: Specify one of the following options: - Update: Writes the bytes @@ -1379,7 +1368,6 @@ async def upload_range_from_url( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Upload a range of bytes to a file where the contents are read from a URL. :param range: Writes data to the specified byte range in the file. Required. @@ -1398,7 +1386,7 @@ async def upload_range_from_url( :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param source_range: Bytes of source data in the specified range. Default value is None. @@ -1510,7 +1498,6 @@ async def get_range_list( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> _models.ShareFileRangeList: - # pylint: disable=line-too-long """Returns the list of valid ranges for a file. :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -1521,7 +1508,7 @@ async def get_range_list( :type prevsharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param range: Specifies the range of bytes over which to list ranges, inclusively. Default @@ -1620,7 +1607,6 @@ async def start_copy( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Copies a blob or file to a destination file within the storage account. :param copy_source: Specifies the URL of the source file or blob, up to 2 KB in length. To copy @@ -1633,7 +1619,7 @@ async def start_copy( :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -1778,7 +1764,6 @@ async def abort_copy( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Aborts a pending Copy File operation, and leaves a destination file with zero length and full metadata. @@ -1787,7 +1772,7 @@ async def abort_copy( :type copy_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. 
@@ -1861,7 +1846,6 @@ async def list_handles( sharesnapshot: Optional[str] = None, **kwargs: Any ) -> _models.ListHandlesResponse: - # pylint: disable=line-too-long """Lists handles for file. :param marker: A string value that identifies the portion of the list to be returned with the @@ -1875,7 +1859,7 @@ async def list_handles( :type maxresults: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -1948,7 +1932,6 @@ async def force_close_handles( sharesnapshot: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Closes all handles open for given file. :param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk @@ -1956,7 +1939,7 @@ async def force_close_handles( :type handle_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param marker: A string value that identifies the portion of the list to be returned with the @@ -2044,7 +2027,6 @@ async def rename( file_http_headers: Optional[_models.FileHTTPHeaders] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Renames a file. :param rename_source: Required. Specifies the URI-style path of the source file, up to 2 KB in @@ -2052,7 +2034,7 @@ async def rename( :type rename_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param replace_if_exists: Optional. A boolean value for if the destination file already exists, @@ -2217,7 +2199,6 @@ async def create_symbolic_link( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a symbolic link. :param link_text: NFS only. Required. The path to the original file, the symbolic link is @@ -2227,7 +2208,7 @@ async def create_symbolic_link( :type link_text: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -2339,12 +2320,11 @@ async def get_symbolic_link( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """get_symbolic_link. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. 
:type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -2420,7 +2400,6 @@ async def create_hard_link( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a hard link. :param target_file: NFS only. Required. Specifies the path of the target file to which the link @@ -2429,7 +2408,7 @@ async def create_hard_link( :type target_file: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py index 4814e22192fe..a128805b4b54 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_service_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -62,7 +63,6 @@ def __init__(self, *args, **kwargs) -> None: async def set_properties( self, storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets properties for a storage account's File service endpoint, including properties for Storage Analytics metrics and CORS (Cross-Origin Resource Sharing) rules. @@ -70,7 +70,7 @@ async def set_properties( :type storage_service_properties: ~azure.storage.fileshare.models.StorageServiceProperties :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -130,13 +130,12 @@ async def set_properties( @distributed_trace_async async def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _models.StorageServiceProperties: - # pylint: disable=line-too-long """Gets the properties of a storage account's File service, including properties for Storage Analytics metrics and CORS (Cross-Origin Resource Sharing) rules. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: StorageServiceProperties or the result of cls(response) @@ -203,7 +202,6 @@ async def list_shares_segment( timeout: Optional[int] = None, **kwargs: Any ) -> _models.ListSharesResponse: - # pylint: disable=line-too-long """The List Shares Segment operation returns a list of the shares and share snapshots under the specified account. 
@@ -224,7 +222,7 @@ async def list_shares_segment( :type include: list[str or ~azure.storage.fileshare.models.ListSharesIncludeType] :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: ListSharesResponse or the result of cls(response) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py index 373424ef3a61..5e46d6e1ad9c 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_share_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -91,13 +91,12 @@ async def create( share_provisioned_bandwidth_mibps: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a new share under the specified account. If the share with the same name already exists, the operation fails. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -219,7 +218,6 @@ async def get_properties( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns all user-defined metadata and system properties for the specified share or share snapshot. The data returned does not include the share's list of files. @@ -228,7 +226,7 @@ async def get_properties( :type sharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -354,7 +352,6 @@ async def delete( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Operation marks the specified share or share snapshot for deletion. The share or share snapshot and any files contained within it are later deleted during garbage collection. @@ -363,7 +360,7 @@ async def delete( :type sharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. 
:type timeout: int :param delete_snapshots: Specifies the option include to delete the base share and all of its @@ -443,13 +440,12 @@ async def acquire_lease( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a @@ -539,7 +535,6 @@ async def release_lease( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. @@ -547,7 +542,7 @@ async def release_lease( :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -628,7 +623,6 @@ async def change_lease( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. @@ -636,7 +630,7 @@ async def change_lease( :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns @@ -722,7 +716,6 @@ async def renew_lease( request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. @@ -730,7 +723,7 @@ async def renew_lease( :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -812,13 +805,12 @@ async def break_lease( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before @@ -910,12 +902,11 @@ async def break_lease( async def create_snapshot( self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a read-only snapshot of a share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -985,14 +976,13 @@ async def create_permission( content_type: str = "application/json", **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create a permission (a security descriptor). :param share_permission: A permission (a security descriptor) at the share level. Required. :type share_permission: ~azure.storage.fileshare.models.SharePermission :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. @@ -1012,14 +1002,13 @@ async def create_permission( content_type: str = "application/json", **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create a permission (a security descriptor). :param share_permission: A permission (a security descriptor) at the share level. Required. :type share_permission: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :keyword content_type: Body Parameter content-type. Content type parameter for binary body. @@ -1034,7 +1023,6 @@ async def create_permission( async def create_permission( self, share_permission: Union[_models.SharePermission, IO[bytes]], timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create a permission (a security descriptor). :param share_permission: A permission (a security descriptor) at the share level. Is either a @@ -1042,7 +1030,7 @@ async def create_permission( :type share_permission: ~azure.storage.fileshare.models.SharePermission or IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -1119,7 +1107,6 @@ async def get_permission( timeout: Optional[int] = None, **kwargs: Any ) -> _models.SharePermission: - # pylint: disable=line-too-long """Returns the permission (security descriptor) for a given key. 
:param file_permission_key: Key of the permission to be set for the directory/file. Required. @@ -1133,7 +1120,7 @@ async def get_permission( :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: SharePermission or the result of cls(response) @@ -1209,12 +1196,11 @@ async def set_properties( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets properties for the specified share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param quota: Specifies the maximum size of the share, in gigabytes. Default value is None. @@ -1347,12 +1333,11 @@ async def set_metadata( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets one or more user-defined name-value pairs for the specified share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -1426,12 +1411,11 @@ async def get_access_policy( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> List[_models.SignedIdentifier]: - # pylint: disable=line-too-long """Returns information about stored access policies specified on the share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -1506,12 +1490,11 @@ async def set_access_policy( share_acl: Optional[List[_models.SignedIdentifier]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets a stored access policy for use with shared access signatures. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -1593,12 +1576,11 @@ async def get_statistics( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> _models.ShareStats: - # pylint: disable=line-too-long """Retrieves statistics related to the share. :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -1674,12 +1656,11 @@ async def restore( deleted_share_version: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Restores a previously deleted Share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py index 18e4eabd4fe4..a7dfa30a9b4b 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_directory_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -594,12 +594,11 @@ def create( # pylint: disable=inconsistent-return-statements file_mode: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a new directory under the specified share or parent directory. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -734,7 +733,6 @@ def create( # pylint: disable=inconsistent-return-statements def get_properties( # pylint: disable=inconsistent-return-statements self, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns all system properties for the specified directory, and can also be used to check the existence of a directory. The data returned does not include the files in the directory or any subdirectories. @@ -744,7 +742,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements :type sharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -829,13 +827,12 @@ def get_properties( # pylint: disable=inconsistent-return-statements def delete( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Removes the specified empty directory. 
Note that the directory must be empty before it can be deleted. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -904,12 +901,11 @@ def set_properties( # pylint: disable=inconsistent-return-statements file_mode: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets properties on the directory. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param file_permission: If specified the permission (security descriptor) shall be set for the @@ -1041,12 +1037,11 @@ def set_properties( # pylint: disable=inconsistent-return-statements def set_metadata( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Updates user defined metadata for the specified directory. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -1121,7 +1116,6 @@ def list_files_and_directories_segment( include_extended_info: Optional[bool] = None, **kwargs: Any ) -> _models.ListFilesAndDirectoriesSegmentResponse: - # pylint: disable=line-too-long """Returns a list of files or directories under the specified share or directory. It lists the contents only for a single level of the directory hierarchy. @@ -1142,7 +1136,7 @@ def list_files_and_directories_segment( :type maxresults: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param include: Include this parameter to specify one or more datasets to include in the @@ -1223,7 +1217,6 @@ def list_handles( recursive: Optional[bool] = None, **kwargs: Any ) -> _models.ListHandlesResponse: - # pylint: disable=line-too-long """Lists handles for directory. :param marker: A string value that identifies the portion of the list to be returned with the @@ -1237,7 +1230,7 @@ def list_handles( :type maxresults: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. 
:type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -1315,7 +1308,6 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements recursive: Optional[bool] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Closes all handles open for given directory. :param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk @@ -1323,7 +1315,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements :type handle_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param marker: A string value that identifies the portion of the list to be returned with the @@ -1414,7 +1406,6 @@ def rename( # pylint: disable=inconsistent-return-statements copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Renames a directory. :param rename_source: Required. Specifies the URI-style path of the source file, up to 2 KB in @@ -1422,7 +1413,7 @@ def rename( # pylint: disable=inconsistent-return-statements :type rename_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param replace_if_exists: Optional. A boolean value for if the destination file already exists, diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py index d67f90d900f2..28202b426cfd 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -1337,14 +1337,13 @@ def create( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a new file or replaces a file. Note it only initializes the file with no content. :param file_content_length: Specifies the maximum size for the file, up to 4 TB. Required. :type file_content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. 
Default value is @@ -1518,12 +1517,11 @@ def download( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> Iterator[bytes]: - # pylint: disable=line-too-long """Reads or downloads a file from the system, including its metadata and properties. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param range: Return file data only from the specified byte range. Default value is None. @@ -1666,7 +1664,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns all user-defined metadata, standard HTTP properties, and system properties for the file. It does not return the content of the file. @@ -1675,7 +1672,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements :type sharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -1790,12 +1787,11 @@ def delete( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """removes the file from the storage account. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -1873,12 +1869,11 @@ def set_http_headers( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets HTTP headers on the file. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param file_content_length: Resizes a file to the specified size. If the specified byte value @@ -2046,12 +2041,11 @@ def set_metadata( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Updates user-defined metadata for the specified file. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. 
Default value is @@ -2128,13 +2122,12 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease File operation establishes and manages a lock on a file for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a @@ -2214,7 +2207,6 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements def release_lease( # pylint: disable=inconsistent-return-statements self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease File operation establishes and manages a lock on a file for write and delete operations. @@ -2222,7 +2214,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character @@ -2297,7 +2289,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease File operation establishes and manages a lock on a file for write and delete operations. @@ -2305,7 +2296,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns @@ -2385,13 +2376,12 @@ def break_lease( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """[Update] The Lease File operation establishes and manages a lock on a file for write and delete operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character @@ -2479,7 +2469,6 @@ def upload_range( # pylint: disable=inconsistent-return-statements optionalbody: Optional[IO[bytes]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Upload a range of bytes to a file. :param range: Specifies the range of bytes to be written. 
Both the start and end of the range @@ -2494,7 +2483,7 @@ def upload_range( # pylint: disable=inconsistent-return-statements :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param file_range_write: Specify one of the following options: - Update: Writes the bytes @@ -2616,7 +2605,6 @@ def upload_range_from_url( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Upload a range of bytes to a file where the contents are read from a URL. :param range: Writes data to the specified byte range in the file. Required. @@ -2635,7 +2623,7 @@ def upload_range_from_url( # pylint: disable=inconsistent-return-statements :type content_length: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param source_range: Bytes of source data in the specified range. Default value is None. @@ -2747,7 +2735,6 @@ def get_range_list( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> _models.ShareFileRangeList: - # pylint: disable=line-too-long """Returns the list of valid ranges for a file. :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -2758,7 +2745,7 @@ def get_range_list( :type prevsharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param range: Specifies the range of bytes over which to list ranges, inclusively. Default @@ -2857,7 +2844,6 @@ def start_copy( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Copies a blob or file to a destination file within the storage account. :param copy_source: Specifies the URL of the source file or blob, up to 2 KB in length. To copy @@ -2870,7 +2856,7 @@ def start_copy( # pylint: disable=inconsistent-return-statements :type copy_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -3015,7 +3001,6 @@ def abort_copy( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Aborts a pending Copy File operation, and leaves a destination file with zero length and full metadata. 
@@ -3024,7 +3009,7 @@ def abort_copy( # pylint: disable=inconsistent-return-statements :type copy_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -3098,7 +3083,6 @@ def list_handles( sharesnapshot: Optional[str] = None, **kwargs: Any ) -> _models.ListHandlesResponse: - # pylint: disable=line-too-long """Lists handles for file. :param marker: A string value that identifies the portion of the list to be returned with the @@ -3112,7 +3096,7 @@ def list_handles( :type maxresults: int :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -3185,7 +3169,6 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements sharesnapshot: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Closes all handles open for given file. :param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk @@ -3193,7 +3176,7 @@ def force_close_handles( # pylint: disable=inconsistent-return-statements :type handle_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param marker: A string value that identifies the portion of the list to be returned with the @@ -3281,7 +3264,6 @@ def rename( # pylint: disable=inconsistent-return-statements file_http_headers: Optional[_models.FileHTTPHeaders] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Renames a file. :param rename_source: Required. Specifies the URI-style path of the source file, up to 2 KB in @@ -3289,7 +3271,7 @@ def rename( # pylint: disable=inconsistent-return-statements :type rename_source: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param replace_if_exists: Optional. A boolean value for if the destination file already exists, @@ -3454,7 +3436,6 @@ def create_symbolic_link( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a symbolic link. :param link_text: NFS only. Required. The path to the original file, the symbolic link is @@ -3464,7 +3445,7 @@ def create_symbolic_link( # pylint: disable=inconsistent-return-statements :type link_text: str :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -3576,12 +3557,11 @@ def get_symbolic_link( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """get_symbolic_link. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -3657,7 +3637,6 @@ def create_hard_link( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a hard link. :param target_file: NFS only. Required. Specifies the path of the target file to which the link @@ -3666,7 +3645,7 @@ def create_hard_link( # pylint: disable=inconsistent-return-statements :type target_file: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py index 57e5b2462f94..6654ac5d495c 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_service_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -188,7 +189,6 @@ def __init__(self, *args, **kwargs): def set_properties( # pylint: disable=inconsistent-return-statements self, storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets properties for a storage account's File service endpoint, including properties for Storage Analytics metrics and CORS (Cross-Origin Resource Sharing) rules. @@ -196,7 +196,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements :type storage_service_properties: ~azure.storage.fileshare.models.StorageServiceProperties :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. 
:type timeout: int :return: None or the result of cls(response) @@ -256,13 +256,12 @@ def set_properties( # pylint: disable=inconsistent-return-statements @distributed_trace def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _models.StorageServiceProperties: - # pylint: disable=line-too-long """Gets the properties of a storage account's File service, including properties for Storage Analytics metrics and CORS (Cross-Origin Resource Sharing) rules. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: StorageServiceProperties or the result of cls(response) @@ -329,7 +328,6 @@ def list_shares_segment( timeout: Optional[int] = None, **kwargs: Any ) -> _models.ListSharesResponse: - # pylint: disable=line-too-long """The List Shares Segment operation returns a list of the shares and share snapshots under the specified account. @@ -350,7 +348,7 @@ def list_shares_segment( :type include: list[str or ~azure.storage.fileshare.models.ListSharesIncludeType] :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: ListSharesResponse or the result of cls(response) diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py index 920a64e28520..d5b0e4bce723 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_share_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -921,13 +921,12 @@ def create( # pylint: disable=inconsistent-return-statements share_provisioned_bandwidth_mibps: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a new share under the specified account. If the share with the same name already exists, the operation fails. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -1049,7 +1048,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Returns all user-defined metadata and system properties for the specified share or share snapshot. The data returned does not include the share's list of files. 
@@ -1058,7 +1056,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements :type sharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -1184,7 +1182,6 @@ def delete( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Operation marks the specified share or share snapshot for deletion. The share or share snapshot and any files contained within it are later deleted during garbage collection. @@ -1193,7 +1190,7 @@ def delete( # pylint: disable=inconsistent-return-statements :type sharesnapshot: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param delete_snapshots: Specifies the option include to delete the base share and all of its @@ -1273,13 +1270,12 @@ def acquire_lease( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a @@ -1369,7 +1365,6 @@ def release_lease( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. @@ -1377,7 +1372,7 @@ def release_lease( # pylint: disable=inconsistent-return-statements :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -1458,7 +1453,6 @@ def change_lease( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. @@ -1466,7 +1460,7 @@ def change_lease( # pylint: disable=inconsistent-return-statements :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns @@ -1552,7 +1546,6 @@ def renew_lease( # pylint: disable=inconsistent-return-statements request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. @@ -1560,7 +1553,7 @@ def renew_lease( # pylint: disable=inconsistent-return-statements :type lease_id: str :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present, @@ -1642,13 +1635,12 @@ def break_lease( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot for set and delete share operations. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param break_period: For a break operation, proposed duration the lease should continue before @@ -1740,12 +1732,11 @@ def break_lease( # pylint: disable=inconsistent-return-statements def create_snapshot( # pylint: disable=inconsistent-return-statements self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Creates a read-only snapshot of a share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -1815,14 +1806,13 @@ def create_permission( content_type: str = "application/json", **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create a permission (a security descriptor). :param share_permission: A permission (a security descriptor) at the share level. Required. :type share_permission: ~azure.storage.fileshare.models.SharePermission :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
@@ -1842,14 +1832,13 @@ def create_permission( content_type: str = "application/json", **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create a permission (a security descriptor). :param share_permission: A permission (a security descriptor) at the share level. Required. :type share_permission: IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :keyword content_type: Body Parameter content-type. Content type parameter for binary body. @@ -1864,7 +1853,6 @@ def create_permission( def create_permission( # pylint: disable=inconsistent-return-statements self, share_permission: Union[_models.SharePermission, IO[bytes]], timeout: Optional[int] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Create a permission (a security descriptor). :param share_permission: A permission (a security descriptor) at the share level. Is either a @@ -1872,7 +1860,7 @@ def create_permission( # pylint: disable=inconsistent-return-statements :type share_permission: ~azure.storage.fileshare.models.SharePermission or IO[bytes] :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: None or the result of cls(response) @@ -1949,7 +1937,6 @@ def get_permission( timeout: Optional[int] = None, **kwargs: Any ) -> _models.SharePermission: - # pylint: disable=line-too-long """Returns the permission (security descriptor) for a given key. :param file_permission_key: Key of the permission to be set for the directory/file. Required. @@ -1963,7 +1950,7 @@ def get_permission( :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :return: SharePermission or the result of cls(response) @@ -2039,12 +2026,11 @@ def set_properties( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets properties for the specified share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param quota: Specifies the maximum size of the share, in gigabytes. Default value is None. @@ -2177,12 +2163,11 @@ def set_metadata( # pylint: disable=inconsistent-return-statements lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets one or more user-defined name-value pairs for the specified share. :param timeout: The timeout parameter is expressed in seconds. 
For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param metadata: A name-value pair to associate with a file storage object. Default value is @@ -2256,12 +2241,11 @@ def get_access_policy( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> List[_models.SignedIdentifier]: - # pylint: disable=line-too-long """Returns information about stored access policies specified on the share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -2336,12 +2320,11 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements share_acl: Optional[List[_models.SignedIdentifier]] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Sets a stored access policy for use with shared access signatures. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -2423,12 +2406,11 @@ def get_statistics( lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, **kwargs: Any ) -> _models.ShareStats: - # pylint: disable=line-too-long """Retrieves statistics related to the share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. :type timeout: int :param lease_access_conditions: Parameter group. Default value is None. @@ -2504,12 +2486,11 @@ def restore( # pylint: disable=inconsistent-return-statements deleted_share_version: Optional[str] = None, **kwargs: Any ) -> None: - # pylint: disable=line-too-long """Restores a previously deleted Share. :param timeout: The timeout parameter is expressed in seconds. For more information, see :code:`Setting + href="https://learn.microsoft.com/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations">Setting Timeouts for File Service Operations.`. Default value is None. 
:type timeout: int :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py index 4f98931590cb..a936badfe37a 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_serialize.py @@ -48,6 +48,7 @@ '2024-11-04', '2025-01-05', '2025-05-05', + '2025-07-05', ] diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py index 6272949bf9c4..6b4ddc0cf8af 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py @@ -1738,3 +1738,84 @@ async def create_hardlink( )) except HttpResponseError as error: process_storage_error(error) + + @distributed_trace_async + async def create_symlink( + self, target: str, + *, + metadata: Optional[Dict[str, str]] = None, + file_creation_time: Optional[Union[str, datetime]] = None, + file_last_write_time: Optional[Union[str, datetime]] = None, + owner: Optional[str] = None, + group: Optional[str] = None, + lease: Optional[Union[ShareLeaseClient, str]] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: + """NFS only. Creates a symbolic link to the specified file. + + :param str target: + Specifies the file path the symbolic link will point to. The file path can be either relative or absolute. + :keyword dict[str, str] metadata: + Name-value pairs associated with the file as metadata. + :keyword file_creation_time: Creation time for the file. + :paramtype file_creation_time: str or ~datetime.datetime + :keyword file_last_write_time: Last write time for the file. + :paramtype file_last_write_time: str or ~datetime.datetime + :keyword str owner: The owner of the file. + :keyword str group: The owning group of the file. + :keyword lease: + Required if the file has an active lease. Value can be a ShareLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.fileshare.ShareLeaseClient or str + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: File-updated property dict (ETag and last modified). + :rtype: dict[str, Any] + """ + try: + return cast(Dict[str, Any], await self._client.file.create_symbolic_link( + link_text=target, + metadata=metadata, + file_creation_time=file_creation_time, + file_last_write_time=file_last_write_time, + owner=owner, + group=group, + lease_access_conditions=lease, + timeout=timeout, + cls=return_response_headers, + **kwargs + )) + except HttpResponseError as error: + process_storage_error(error) + + @distributed_trace_async + async def get_symlink( + self, + *, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: + """NFS only. Gets the symbolic link for the file client. + + :keyword int timeout: + Sets the server-side timeout for the operation in seconds.
For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `__. + :returns: File-updated property dict (ETag and last modified). + :rtype: dict[str, Any] + """ + try: + return cast(Dict[str, Any], await self._client.file.get_symbolic_link( + timeout=timeout, + cls=return_response_headers, + **kwargs + )) + except HttpResponseError as error: + process_storage_error(error) diff --git a/sdk/storage/azure-storage-file-share/tests/test_nfs.py b/sdk/storage/azure-storage-file-share/tests/test_nfs.py index f47679a6cc97..55fa460202c1 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_nfs.py +++ b/sdk/storage/azure-storage-file-share/tests/test_nfs.py @@ -5,6 +5,7 @@ # -------------------------------------------------------------------------- import pytest from typing import Any, Dict, Optional, Union +from urllib.parse import unquote from azure.core.exceptions import ( ClientAuthenticationError, @@ -272,3 +273,69 @@ def test_create_hardlink_error(self, **kwargs: Any): hard_link_file_client.create_hardlink(target=f"{directory_name}/{source_file_name}") assert 'ParentNotFound' in e.value.args[0] + + @FileSharePreparer() + @recorded_by_proxy + def test_create_and_get_symlink(self, **kwargs): + premium_storage_file_account_name = kwargs.pop("premium_storage_file_account_name") + + self._setup(premium_storage_file_account_name) + + share_client = self.fsc.get_share_client(self.share_name) + directory_name = self._get_directory_name() + directory_client = share_client.create_directory(directory_name) + source_file_name = self._get_file_name('file1') + source_file_client = directory_client.get_file_client(source_file_name) + source_file_client.create_file(size=1024) + symbolic_link_file_name = self._get_file_name('file2') + symbolic_link_file_client = directory_client.get_file_client(symbolic_link_file_name) + metadata = {"test1": "foo", "test2": "bar"} + owner, group = "345", "123" + target = f"{directory_name}/{source_file_name}" + + resp = symbolic_link_file_client.create_symlink( + target=target, + metadata=metadata, + owner=owner, + group=group + ) + assert resp is not None + assert resp['file_file_type'] == 'SymLink' + assert resp['owner'] == owner + assert resp['group'] == group + assert resp['file_creation_time'] is not None + assert resp['file_last_write_time'] is not None + assert resp['file_id'] is not None + assert resp['file_parent_id'] is not None + assert 'file_attributes' not in resp + assert 'file_permission_key' not in resp + + resp = symbolic_link_file_client.get_symlink() + assert resp is not None + assert resp['etag'] is not None + assert resp['last_modified'] is not None + assert unquote(resp['link_text']) == target + + @FileSharePreparer() + @recorded_by_proxy + def test_create_and_get_symlink_error(self, **kwargs): + premium_storage_file_account_name = kwargs.pop("premium_storage_file_account_name") + + self._setup(premium_storage_file_account_name) + + share_client = self.fsc.get_share_client(self.share_name) + directory_name = self._get_directory_name() + directory_client = share_client.get_directory_client(directory_name) + source_file_name = self._get_file_name('file1') + source_file_client = directory_client.get_file_client(source_file_name) + symbolic_link_file_name = self._get_file_name('file2') + symbolic_link_file_client = directory_client.get_file_client(symbolic_link_file_name) + target = 
f"{directory_name}/{source_file_name}" + + with pytest.raises(ResourceNotFoundError) as e: + symbolic_link_file_client.create_symlink(target=target) + assert 'ParentNotFound' in e.value.args[0] + + with pytest.raises(ResourceNotFoundError) as e: + symbolic_link_file_client.get_symlink() + assert 'ParentNotFound' in e.value.args[0] diff --git a/sdk/storage/azure-storage-file-share/tests/test_nfs_async.py b/sdk/storage/azure-storage-file-share/tests/test_nfs_async.py index 07d8c36c88b5..4d68bac26e4f 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_nfs_async.py +++ b/sdk/storage/azure-storage-file-share/tests/test_nfs_async.py @@ -5,6 +5,7 @@ # -------------------------------------------------------------------------- import pytest from typing import Any, Dict, Optional, Union +from urllib.parse import unquote from azure.core.exceptions import ( ClientAuthenticationError, @@ -284,3 +285,69 @@ async def test_create_hardlink_error(self, **kwargs: Any): await hard_link_file_client.create_hardlink(target=f"{directory_name}/{source_file_name}") assert 'ParentNotFound' in e.value.args[0] + + @FileSharePreparer() + @recorded_by_proxy_async + async def test_create_and_get_symlink(self, **kwargs): + premium_storage_file_account_name = kwargs.pop("premium_storage_file_account_name") + + await self._setup(premium_storage_file_account_name) + + share_client = self.fsc.get_share_client(self.share_name) + directory_name = self._get_directory_name() + directory_client = await share_client.create_directory(directory_name) + source_file_name = self._get_file_name('file1') + source_file_client = directory_client.get_file_client(source_file_name) + await source_file_client.create_file(size=1024) + symbolic_link_file_name = self._get_file_name('file2') + symbolic_link_file_client = directory_client.get_file_client(symbolic_link_file_name) + metadata = {"test1": "foo", "test2": "bar"} + owner, group = "345", "123" + target = f"{directory_name}/{source_file_name}" + + resp = await symbolic_link_file_client.create_symlink( + target=target, + metadata=metadata, + owner=owner, + group=group + ) + assert resp is not None + assert resp['file_file_type'] == 'SymLink' + assert resp['owner'] == owner + assert resp['group'] == group + assert resp['file_creation_time'] is not None + assert resp['file_last_write_time'] is not None + assert resp['file_id'] is not None + assert resp['file_parent_id'] is not None + assert 'file_attributes' not in resp + assert 'file_permission_key' not in resp + + resp = await symbolic_link_file_client.get_symlink() + assert resp is not None + assert resp['etag'] is not None + assert resp['last_modified'] is not None + assert unquote(resp['link_text']) == target + + @FileSharePreparer() + @recorded_by_proxy_async + async def test_create_and_get_symlink_error(self, **kwargs): + premium_storage_file_account_name = kwargs.pop("premium_storage_file_account_name") + + await self._setup(premium_storage_file_account_name) + + share_client = self.fsc.get_share_client(self.share_name) + directory_name = self._get_directory_name() + directory_client = share_client.get_directory_client(directory_name) + source_file_name = self._get_file_name('file1') + source_file_client = directory_client.get_file_client(source_file_name) + symbolic_link_file_name = self._get_file_name('file2') + symbolic_link_file_client = directory_client.get_file_client(symbolic_link_file_name) + target = f"{directory_name}/{source_file_name}" + + with pytest.raises(ResourceNotFoundError) as e: + await 
symbolic_link_file_client.create_symlink(target=target) + assert 'ParentNotFound' in e.value.args[0] + + with pytest.raises(ResourceNotFoundError) as e: + await symbolic_link_file_client.get_symlink() + assert 'ParentNotFound' in e.value.args[0] diff --git a/sdk/storage/azure-storage-queue/CHANGELOG.md b/sdk/storage/azure-storage-queue/CHANGELOG.md index 138c589cb226..24ef078bd6c7 100644 --- a/sdk/storage/azure-storage-queue/CHANGELOG.md +++ b/sdk/storage/azure-storage-queue/CHANGELOG.md @@ -1,8 +1,15 @@ # Release History -## 12.13.0b1 (Unreleased) +## 12.13.0b1 (2025-04-08) ### Features Added +- Added support for service version 2025-07-05. +- Added support for OAuth authentication in `QueueClient`'s `get_queue_access_policy` +and `set_queue_access_policy` APIs. + +### Other Changes +- Migrated any previously documented `kwarg` arguments to be named keywords. +Some public types may have been adjusted if they were previously erroneous or incomplete. ## 12.12.0 (2024-09-17) diff --git a/sdk/storage/azure-storage-queue/assets.json b/sdk/storage/azure-storage-queue/assets.json index 9433ba2740aa..f14811cd867b 100644 --- a/sdk/storage/azure-storage-queue/assets.json +++ b/sdk/storage/azure-storage-queue/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-queue", - "Tag": "python/storage/azure-storage-queue_8161cc758c" + "Tag": "python/storage/azure-storage-queue_e30eb0be6d" } diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/__init__.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/__init__.py index afd183c536ce..a743737977f3 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/__init__.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/__init__.py @@ -5,12 +5,18 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._azure_queue_storage import AzureQueueStorage +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._azure_queue_storage import AzureQueueStorage # type: ignore try: from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk @@ -18,6 +24,6 @@ __all__ = [ "AzureQueueStorage", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_configuration.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_configuration.py index c6edbf39a908..4d3d299adffa 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_configuration.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_configuration.py @@ -13,7 +13,7 @@ VERSION = "unknown" -class AzureQueueStorageConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long +class AzureQueueStorageConfiguration: # pylint: disable=too-many-instance-attributes """Configuration for AzureQueueStorage. 
Note that all parameters used to create this instance are saved as instance diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_serialization.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_serialization.py index 8139854b97bb..7a0232de5ddc 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_serialization.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_serialization.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines # -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. @@ -24,7 +25,6 @@ # # -------------------------------------------------------------------------- -# pylint: skip-file # pyright: reportUnnecessaryTypeIgnoreComment=false from base64 import b64decode, b64encode @@ -48,11 +48,8 @@ IO, Mapping, Callable, - TypeVar, MutableMapping, - Type, List, - Mapping, ) try: @@ -62,13 +59,13 @@ import xml.etree.ElementTree as ET import isodate # type: ignore +from typing_extensions import Self from azure.core.exceptions import DeserializationError, SerializationError from azure.core.serialization import NULL as CoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") -ModelType = TypeVar("ModelType", bound="Model") JSON = MutableMapping[str, Any] @@ -91,6 +88,8 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: :param data: Input, could be bytes or stream (will be decoded with UTF8) or text :type data: str or bytes or IO :param str content_type: The content type. + :return: The deserialized data. + :rtype: object """ if hasattr(data, "read"): # Assume a stream @@ -112,7 +111,7 @@ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: try: return json.loads(data_as_str) except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) + raise DeserializationError("JSON is invalid: {}".format(err), err) from err elif "xml" in (content_type or []): try: @@ -155,6 +154,11 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], Use bytes and headers to NOT use any requests/aiohttp or whatever specific implementation. Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object """ # Try to use content-type from headers if available content_type = None @@ -179,80 +183,31 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], except NameError: _long_type = int - -class UTC(datetime.tzinfo): - """Time Zone info for handling UTC""" - - def utcoffset(self, dt): - """UTF offset for UTC is 0.""" - return datetime.timedelta(0) - - def tzname(self, dt): - """Timestamp representation.""" - return "Z" - - def dst(self, dt): - """No daylight saving for UTC.""" - return datetime.timedelta(hours=1) - - -try: - from datetime import timezone as _FixedOffset # type: ignore -except ImportError: # Python 2.7 - - class _FixedOffset(datetime.tzinfo): # type: ignore - """Fixed offset in minutes east from UTC. 
- Copy/pasted from Python doc - :param datetime.timedelta offset: offset in timedelta format - """ - - def __init__(self, offset): - self.__offset = offset - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return str(self.__offset.total_seconds() / 3600) - - def __repr__(self): - return "".format(self.tzname(None)) - - def dst(self, dt): - return datetime.timedelta(0) - - def __getinitargs__(self): - return (self.__offset,) - - -try: - from datetime import timezone - - TZ_UTC = timezone.utc -except ImportError: - TZ_UTC = UTC() # type: ignore +TZ_UTC = datetime.timezone.utc _FLATTEN = re.compile(r"(? None: self.additional_properties: Optional[Dict[str, Any]] = {} - for k in kwargs: + for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) elif k in self._validation and self._validation[k].get("readonly", False): @@ -300,13 +262,23 @@ def __init__(self, **kwargs: Any) -> None: setattr(self, k, kwargs[k]) def __eq__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return False def __ne__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ return not self.__eq__(other) def __str__(self) -> str: @@ -326,7 +298,11 @@ def is_xml_model(cls) -> bool: @classmethod def _create_xml_node(cls): - """Create XML node.""" + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ try: xml_map = cls._xml_map # type: ignore except AttributeError: @@ -346,7 +322,9 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) def as_dict( self, @@ -380,12 +358,15 @@ def my_key_transformer(key, attr_desc, value): If you want XML serialization, you can pass the kwargs is_xml=True. + :param bool keep_readonly: If you want to serialize the readonly attributes :param function key_transformer: A key transformer function. :returns: A dict JSON compatible object :rtype: dict """ serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) @classmethod def _infer_class_models(cls): @@ -395,30 +376,31 @@ def _infer_class_models(cls): client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} if cls.__name__ not in client_models: raise ValueError("Not Autorest generated code") - except Exception: + except Exception: # pylint: disable=broad-exception-caught # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. 
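Reviewer note: the hunk above drops the Python 2 `UTC`/`_FixedOffset` shims in favor of the standard library. For reference, the replacement behavior comes entirely from `datetime.timezone`; a quick stdlib-only illustration (not part of the diff):

```python
import datetime

TZ_UTC = datetime.timezone.utc  # what the removed UTC()/_FixedOffset shims provided
aware = datetime.datetime(2025, 4, 8, 12, 0, tzinfo=TZ_UTC)

# Fixed offsets are plain timezone instances now, e.g. +05:30:
ist = datetime.timezone(datetime.timedelta(hours=5, minutes=30))
print(aware.astimezone(ist).isoformat())  # 2025-04-08T17:30:00+05:30
```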
client_models = {cls.__name__: cls} return client_models @classmethod - def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def from_dict( - cls: Type[ModelType], + cls, data: Any, key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, content_type: Optional[str] = None, - ) -> ModelType: + ) -> Self: """Parse a dict using given key extractor return a model. By default consider key @@ -426,9 +408,11 @@ def from_dict( and last_rest_key_case_insensitive_extractor) :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) deserializer.key_extractors = ( # type: ignore @@ -448,21 +432,25 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access return result @classmethod def _classify(cls, response, objects): """Check the class _subtype_map for any child classes. We want to ignore any inherited _subtype_maps. - Remove the polymorphic key from the initial data. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class """ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): subtype_value = None if not isinstance(response, ET.Element): rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] - subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) else: subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) if subtype_value: @@ -501,11 +489,13 @@ def _decode_attribute_map_key(key): inside the received data. 
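Reviewer note: the switch above from a module-level `ModelType = TypeVar(..., bound="Model")` to `typing_extensions.Self` is worth calling out: `Self` binds to whichever subclass the classmethod is invoked on, with no shared TypeVar. A minimal typing sketch (the `QueueMessage` subclass is hypothetical):

```python
from typing import Any, Optional

from typing_extensions import Self

class Model:
    @classmethod
    def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
        ...  # parse `data` and return an instance of cls

class QueueMessage(Model):
    pass

# Type checkers now infer QueueMessage here, not Model:
msg: QueueMessage = QueueMessage.deserialize("{}")
```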
:param str key: A key string from the generated code + :returns: The decoded key + :rtype: str """ return key.replace("\\.", ".") -class Serializer(object): +class Serializer: # pylint: disable=too-many-public-methods """Request object model serializer.""" basic_types = {str: "str", int: "int", bool: "bool", float: "float"} @@ -540,7 +530,7 @@ class Serializer(object): "multiple": lambda x, y: x % y != 0, } - def __init__(self, classes: Optional[Mapping[str, type]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.serialize_type = { "iso-8601": Serializer.serialize_iso, "rfc-1123": Serializer.serialize_rfc, @@ -560,13 +550,16 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None): self.key_transformer = full_restapi_key_transformer self.client_side_validation = True - def _serialize(self, target_obj, data_type=None, **kwargs): + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, target_obj, data_type=None, **kwargs + ): """Serialize data into a string according to type. - :param target_obj: The data to be serialized. + :param object target_obj: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str, dict - :raises: SerializationError if serialization fails. + :raises SerializationError: if serialization fails. + :returns: The serialized data. """ key_transformer = kwargs.get("key_transformer", self.key_transformer) keep_readonly = kwargs.get("keep_readonly", False) @@ -592,12 +585,14 @@ def _serialize(self, target_obj, data_type=None, **kwargs): serialized = {} if is_xml_model_serialization: - serialized = target_obj._create_xml_node() + serialized = target_obj._create_xml_node() # pylint: disable=protected-access try: - attributes = target_obj._attribute_map + attributes = target_obj._attribute_map # pylint: disable=protected-access for attr, attr_desc in attributes.items(): attr_name = attr - if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): continue if attr_name == "additional_properties" and attr_desc["key"] == "": @@ -633,7 +628,8 @@ def _serialize(self, target_obj, data_type=None, **kwargs): if isinstance(new_attr, list): serialized.extend(new_attr) # type: ignore elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. if "name" not in getattr(orig_attr, "_xml_map", {}): splitted_tag = new_attr.tag.split("}") if len(splitted_tag) == 2: # Namespace @@ -664,17 +660,17 @@ def _serialize(self, target_obj, data_type=None, **kwargs): except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) raise SerializationError(msg) from err - else: - return serialized + return serialized def body(self, data, data_type, **kwargs): """Serialize data intended for a request body. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict - :raises: SerializationError if serialization fails. 
- :raises: ValueError if data is None + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized request body """ # Just in case this is a dict @@ -703,7 +699,7 @@ def body(self, data, data_type, **kwargs): attribute_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor, ] - data = deserializer._deserialize(data_type, data) + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access except DeserializationError as err: raise SerializationError("Unable to build a model: " + str(err)) from err @@ -712,11 +708,13 @@ def body(self, data, data_type, **kwargs): def url(self, name, data, data_type, **kwargs): """Serialize data intended for a URL path. - :param data: The data to be serialized. + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :returns: The serialized URL path + :raises TypeError: if serialization fails. + :raises ValueError: if data is None """ try: output = self.serialize_data(data, data_type, **kwargs) @@ -728,21 +726,20 @@ def url(self, name, data, data_type, **kwargs): output = output.replace("{", quote("{")).replace("}", quote("}")) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return output + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output def query(self, name, data, data_type, **kwargs): """Serialize data intended for a URL query. - :param data: The data to be serialized. + :param str name: The name of the query parameter. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :keyword bool skip_quote: Whether to skip quote the serialized result. - Defaults to False. :rtype: str, list - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter """ try: # Treat the list aside, since we don't want to encode the div separator @@ -759,19 +756,20 @@ def query(self, name, data, data_type, **kwargs): output = str(output) else: output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def header(self, name, data, data_type, **kwargs): """Serialize data intended for a request header. - :param data: The data to be serialized. + :param str name: The name of the header. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header """ try: if data_type in ["[str]"]: @@ -780,21 +778,20 @@ def header(self, name, data, data_type, **kwargs): output = self.serialize_data(data, data_type, **kwargs) if data_type == "bool": output = json.dumps(output) - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) def serialize_data(self, data, data_type, **kwargs): """Serialize generic data according to supplied data type. - :param data: The data to be serialized. + :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :param bool required: Whether it's essential that the data not be - empty or None - :raises: AttributeError if required data is None. - :raises: ValueError if data is None - :raises: SerializationError if serialization fails. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list """ if data is None: raise ValueError("No value for given attribute") @@ -805,7 +802,7 @@ def serialize_data(self, data, data_type, **kwargs): if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) - elif data_type in self.serialize_type: + if data_type in self.serialize_type: return self.serialize_type[data_type](data, **kwargs) # If dependencies is empty, try with current data class @@ -821,11 +818,10 @@ def serialize_data(self, data, data_type, **kwargs): except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." raise SerializationError(msg.format(data, data_type)) from err - else: - return self._serialize(data, **kwargs) + return self._serialize(data, **kwargs) @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) if custom_serializer: return custom_serializer @@ -841,23 +837,26 @@ def serialize_basic(cls, data, data_type, **kwargs): - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - is_xml bool : If set, use xml_basic_types_serializers - :param data: Object to be serialized. + :param obj data: Object to be serialized. :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec + return eval(data_type)(data) # nosec # pylint: disable=eval-used @classmethod def serialize_unicode(cls, data): """Special handling for serializing unicode strings in Py2. Encode to UTF-8 if unicode, otherwise handle as a str. - :param data: Object to be serialized. + :param str data: Object to be serialized. 
:rtype: str + :return: serialized object """ try: # If I received an enum, return its value return data.value @@ -871,8 +870,7 @@ def serialize_unicode(cls, data): return data except NameError: return str(data) - else: - return str(data) + return str(data) def serialize_iter(self, data, iter_type, div=None, **kwargs): """Serialize iterable. @@ -882,15 +880,13 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): serialization_ctxt['type'] should be same as data_type. - is_xml bool : If set, serialize as XML - :param list attr: Object to be serialized. + :param list data: Object to be serialized. :param str iter_type: Type of object in the iterable. - :param bool required: Whether the objects in the iterable must - not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. - :keyword bool do_quote: Whether to quote the serialized result of each iterable element. Defaults to False. :rtype: list, str + :return: serialized iterable """ if isinstance(data, str): raise SerializationError("Refuse str type as a valid iter type.") @@ -945,9 +941,8 @@ def serialize_dict(self, attr, dict_type, **kwargs): :param dict attr: Object to be serialized. :param str dict_type: Type of object in the dictionary. - :param bool required: Whether the objects in the dictionary must - not be None or empty. :rtype: dict + :return: serialized dictionary """ serialization_ctxt = kwargs.get("serialization_ctxt", {}) serialized = {} @@ -971,7 +966,7 @@ def serialize_dict(self, attr, dict_type, **kwargs): return serialized - def serialize_object(self, attr, **kwargs): + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Serialize a generic object. This will be handled as a dictionary. If object passed in is not a basic type (str, int, float, dict, list) it will simply be @@ -979,6 +974,7 @@ def serialize_object(self, attr, **kwargs): :param dict attr: Object to be serialized. :rtype: dict or str + :return: serialized object """ if attr is None: return None @@ -1003,7 +999,7 @@ def serialize_object(self, attr, **kwargs): return self.serialize_decimal(attr) # If it's a model or I know this dependency, serialize as a Model - elif obj_type in self.dependencies.values() or isinstance(attr, Model): + if obj_type in self.dependencies.values() or isinstance(attr, Model): return self._serialize(attr) if obj_type == dict: @@ -1034,56 +1030,61 @@ def serialize_enum(attr, enum_obj=None): try: enum_obj(result) # type: ignore return result - except ValueError: + except ValueError as exc: for enum_value in enum_obj: # type: ignore if enum_value.value.lower() == str(attr).lower(): return enum_value.value error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) + raise SerializationError(error.format(attr, enum_obj)) from exc @staticmethod - def serialize_bytearray(attr, **kwargs): + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument """Serialize bytearray into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. :rtype: str + :return: serialized base64 """ return b64encode(attr).decode() @staticmethod - def serialize_base64(attr, **kwargs): + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument """Serialize str into base-64 string. - :param attr: Object to be serialized. + :param str attr: Object to be serialized. 
:rtype: str + :return: serialized base64 """ encoded = b64encode(attr).decode("ascii") return encoded.strip("=").replace("+", "-").replace("/", "_") @staticmethod - def serialize_decimal(attr, **kwargs): + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument """Serialize Decimal object to float. - :param attr: Object to be serialized. + :param decimal attr: Object to be serialized. :rtype: float + :return: serialized decimal """ return float(attr) @staticmethod - def serialize_long(attr, **kwargs): + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument """Serialize long (Py2) or int (Py3). - :param attr: Object to be serialized. + :param int attr: Object to be serialized. :rtype: int/long + :return: serialized long """ return _long_type(attr) @staticmethod - def serialize_date(attr, **kwargs): + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument """Serialize Date object into ISO-8601 formatted string. :param Date attr: Object to be serialized. :rtype: str + :return: serialized date """ if isinstance(attr, str): attr = isodate.parse_date(attr) @@ -1091,11 +1092,12 @@ def serialize_date(attr, **kwargs): return t @staticmethod - def serialize_time(attr, **kwargs): + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument """Serialize Time object into ISO-8601 formatted string. :param datetime.time attr: Object to be serialized. :rtype: str + :return: serialized time """ if isinstance(attr, str): attr = isodate.parse_time(attr) @@ -1105,30 +1107,32 @@ def serialize_time(attr, **kwargs): return t @staticmethod - def serialize_duration(attr, **kwargs): + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument """Serialize TimeDelta object into ISO-8601 formatted string. :param TimeDelta attr: Object to be serialized. :rtype: str + :return: serialized duration """ if isinstance(attr, str): attr = isodate.parse_duration(attr) return isodate.duration_isoformat(attr) @staticmethod - def serialize_rfc(attr, **kwargs): + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into RFC-1123 formatted string. :param Datetime attr: Object to be serialized. :rtype: str - :raises: TypeError if format invalid. + :raises TypeError: if format invalid. + :return: serialized rfc """ try: if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() - except AttributeError: - raise TypeError("RFC1123 object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( Serializer.days[utc.tm_wday], @@ -1141,12 +1145,13 @@ def serialize_rfc(attr, **kwargs): ) @staticmethod - def serialize_iso(attr, **kwargs): + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str - :raises: SerializationError if format invalid. + :raises SerializationError: if format invalid. + :return: serialized iso """ if isinstance(attr, str): attr = isodate.parse_datetime(attr) @@ -1172,13 +1177,14 @@ def serialize_iso(attr, **kwargs): raise TypeError(msg) from err @staticmethod - def serialize_unix(attr, **kwargs): + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument """Serialize Datetime object into IntTime format. This is represented as seconds. 
:param Datetime attr: Object to be serialized. :rtype: int - :raises: SerializationError if format invalid + :raises SerializationError: if format invalid + :return: serialied unix """ if isinstance(attr, int): return attr @@ -1186,11 +1192,11 @@ def serialize_unix(attr, **kwargs): if not attr.tzinfo: _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") return int(calendar.timegm(attr.utctimetuple())) - except AttributeError: - raise TypeError("Unix time object must be valid Datetime object.") + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc -def rest_key_extractor(attr, attr_desc, data): +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument key = attr_desc["key"] working_data = data @@ -1211,7 +1217,9 @@ def rest_key_extractor(attr, attr_desc, data): return working_data.get(key) -def rest_key_case_insensitive_extractor(attr, attr_desc, data): +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): key = attr_desc["key"] working_data = data @@ -1232,17 +1240,29 @@ def rest_key_case_insensitive_extractor(attr, attr_desc, data): return attribute_key_case_insensitive_extractor(key, None, working_data) -def last_rest_key_extractor(attr, attr_desc, data): - """Extract the attribute in "data" based on the last part of the JSON path key.""" +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) return attribute_key_extractor(dict_keys[-1], None, data) -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument """Extract the attribute in "data" based on the last part of the JSON path key. This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute """ key = attr_desc["key"] dict_keys = _FLATTEN.split(key) @@ -1279,7 +1299,7 @@ def _extract_name_from_internal_type(internal_type): return xml_name -def xml_key_extractor(attr, attr_desc, data): +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements if isinstance(data, dict): return None @@ -1331,22 +1351,21 @@ def xml_key_extractor(attr, attr_desc, data): if is_iter_type: if is_wrapped: return None # is_wrapped no node, we want None - else: - return [] # not wrapped, assume empty list + return [] # not wrapped, assume empty list return None # Assume it's not there, maybe an optional node. # If is_iter_type and not wrapped, return all found children if is_iter_type: if not is_wrapped: return children - else: # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. 
Maybe you should declare this array as wrapped?".format( - xml_name - ) + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name ) - return list(children[0]) # Might be empty list and that's ok. + ) + return list(children[0]) # Might be empty list and that's ok. # Here it's not a itertype, we should have found one element only or empty if len(children) > 1: @@ -1354,7 +1373,7 @@ def xml_key_extractor(attr, attr_desc, data): return children[0] -class Deserializer(object): +class Deserializer: """Response object model deserializer. :param dict classes: Class type dictionary for deserializing complex types. @@ -1363,9 +1382,9 @@ class Deserializer(object): basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - def __init__(self, classes: Optional[Mapping[str, type]] = None): + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: self.deserialize_type = { "iso-8601": Deserializer.deserialize_iso, "rfc-1123": Deserializer.deserialize_rfc, @@ -1401,27 +1420,29 @@ def __call__(self, target_obj, response_data, content_type=None): :param str target_obj: Target data type to deserialize to. :param requests.Response response_data: REST response object. :param str content_type: Swagger "produces" if available. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. + :rtype: object """ data = self._unpack_content(response_data, content_type) return self._deserialize(target_obj, data) - def _deserialize(self, target_obj, data): + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements """Call the deserializer on a model. Data needs to be already deserialized as JSON or XML ElementTree :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. + :rtype: object """ # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] try: - for attr, mapconfig in data._attribute_map.items(): + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access if attr in constants: continue value = getattr(data, attr) @@ -1440,13 +1461,13 @@ def _deserialize(self, target_obj, data): if isinstance(response, str): return self.deserialize_data(data, response) - elif isinstance(response, type) and issubclass(response, Enum): + if isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) if data is None or data is CoreNull: return data try: - attributes = response._attribute_map # type: ignore + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
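Reviewer note: the deserializer changes above keep the public entry points intact. A hedged sketch of how they are driven, mirroring the `_infer_class_models` model-discovery trick and the `failsafe_deserialize` path shown in this diff; `StorageError` is assumed to be one of the generated queue models:

```python
from azure.storage.queue._generated import models as _models
from azure.storage.queue._generated._serialization import Deserializer

# Same model-discovery pattern used by Model._infer_class_models above.
client_models = {k: v for k, v in vars(_models).items() if isinstance(v, type)}
deserializer = Deserializer(client_models)

# failsafe_deserialize swallows errors (the bare-except path above) and
# returns None instead of raising on a bad payload:
result = deserializer.failsafe_deserialize(
    "StorageError", b"{not-json", content_type="application/json"
)
print(result)  # None
```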
@@ -1476,9 +1497,8 @@ def _deserialize(self, target_obj, data): except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name # type: ignore raise DeserializationError(msg) from err - else: - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) def _build_additional_properties(self, attribute_map, data): if not self.additional_properties_detection: @@ -1505,6 +1525,8 @@ def _classify_target(self, target, data): :param str target: The target object type to deserialize to. :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple """ if target is None: return None, None @@ -1516,7 +1538,7 @@ def _classify_target(self, target, data): return target, target try: - target = target._classify(data, self.dependencies) # type: ignore + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ # type: ignore @@ -1531,10 +1553,12 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): :param str target_obj: The target object type to deserialize to. :param str/dict data: The response data to deserialize. :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object """ try: return self(target_obj, data, content_type=content_type) - except: + except: # pylint: disable=bare-except _LOGGER.debug( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) @@ -1552,10 +1576,12 @@ def _unpack_content(raw_data, content_type=None): If raw_data is something else, bypass all logic and return it directly. - :param raw_data: Data to be processed. - :param content_type: How to parse if raw_data is a string/bytes. + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. """ # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) @@ -1579,24 +1605,35 @@ def _unpack_content(raw_data, content_type=None): def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. - :param response: The response model class. - :param d_attrs: The deserialized response attributes. + :param Response response: The response model class. + :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. 
""" if callable(response): subtype = getattr(response, "_subtype_map", {}) try: - readonly = [k for k, v in response._validation.items() if v.get("readonly")] - const = [k for k, v in response._validation.items() if v.get("constant")] + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: setattr(response_obj, attr, attrs.get(attr)) if additional_properties: - response_obj.additional_properties = additional_properties + response_obj.additional_properties = additional_properties # type: ignore return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) + raise DeserializationError(msg + str(err)) from err else: try: for attr, value in attrs.items(): @@ -1605,15 +1642,16 @@ def _instantiate_model(self, response, attrs, additional_properties=None): except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) + raise DeserializationError(msg) from exp - def deserialize_data(self, data, data_type): + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. + :rtype: object """ if data is None: return data @@ -1627,7 +1665,11 @@ def deserialize_data(self, data, data_type): if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data - is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) @@ -1647,14 +1689,14 @@ def deserialize_data(self, data, data_type): msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) raise DeserializationError(msg) from err - else: - return self._deserialize(obj_type, data) + return self._deserialize(obj_type, data) def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. :rtype: list """ if attr is None: @@ -1671,6 +1713,7 @@ def deserialize_dict(self, attr, dict_type): :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. 
:rtype: dict """ if isinstance(attr, list): @@ -1681,13 +1724,14 @@ def deserialize_dict(self, attr, dict_type): attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - def deserialize_object(self, attr, **kwargs): + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. :rtype: dict - :raises: TypeError if non-builtin datatype encountered. + :raises TypeError: if non-builtin datatype encountered. """ if attr is None: return None @@ -1720,11 +1764,10 @@ def deserialize_object(self, attr, **kwargs): pass return deserialized - else: - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) - def deserialize_basic(self, attr, data_type): + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements """Deserialize basic builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as @@ -1732,8 +1775,9 @@ def deserialize_basic(self, attr, data_type): :param str attr: response string to be deserialized. :param str data_type: deserialization data type. + :return: Deserialized basic type. :rtype: str, int, float or bool - :raises: TypeError if string format is not valid. + :raises TypeError: if string format is not valid. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text @@ -1743,24 +1787,23 @@ def deserialize_basic(self, attr, data_type): if data_type == "str": # None or '', node is empty string. return "" - else: - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None if data_type == "bool": if attr in [True, False, 1, 0]: return bool(attr) - elif isinstance(attr, str): + if isinstance(attr, str): if attr.lower() in ["true", "1"]: return True - elif attr.lower() in ["false", "0"]: + if attr.lower() in ["false", "0"]: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec + return eval(data_type)(attr) # nosec # pylint: disable=eval-used @staticmethod def deserialize_unicode(data): @@ -1768,6 +1811,7 @@ def deserialize_unicode(data): as a string. :param str data: response string to be deserialized. + :return: Deserialized string. :rtype: str or unicode """ # We might be here because we have an enum modeled as string, @@ -1781,8 +1825,7 @@ def deserialize_unicode(data): return data except NameError: return str(data) - else: - return str(data) + return str(data) @staticmethod def deserialize_enum(data, enum_obj): @@ -1794,6 +1837,7 @@ def deserialize_enum(data, enum_obj): :param str data: Response string to be deserialized. If this value is None or invalid it will be returned as-is. :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. :rtype: Enum """ if isinstance(data, enum_obj) or data is None: @@ -1804,9 +1848,9 @@ def deserialize_enum(data, enum_obj): # Workaround. We might consider remove it in the future. 
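Reviewer note: as the `deserialize_basic` hunk above spells out, boolean parsing accepts the string forms '1'/'0'/'true'/'false', an empty value deserializes to "" only for `str`, and the remaining builtins fall through to the `eval`-based conversion. A small self-contained check (not part of the diff):

```python
from azure.storage.queue._generated._serialization import Deserializer

d = Deserializer()  # the classes mapping is optional, per __init__ above
assert d.deserialize_basic("true", "bool") is True
assert d.deserialize_basic("0", "bool") is False
assert d.deserialize_basic("42", "int") == 42
assert d.deserialize_basic("", "str") == ""  # empty node -> empty string
```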
try: return list(enum_obj.__members__.values())[data] - except IndexError: + except IndexError as exc: error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) + raise DeserializationError(error.format(data, enum_obj)) from exc try: return enum_obj(str(data)) except ValueError: @@ -1822,8 +1866,9 @@ def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. + :return: Deserialized bytearray :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1834,8 +1879,9 @@ def deserialize_base64(attr): """Deserialize base64 encoded string into string. :param str attr: response string to be deserialized. + :return: Deserialized base64 string :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1849,8 +1895,9 @@ def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. - :rtype: Decimal - :raises: DeserializationError if string format invalid. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal """ if isinstance(attr, ET.Element): attr = attr.text @@ -1865,8 +1912,9 @@ def deserialize_long(attr): """Deserialize string into long (Py2) or int (Py3). :param str attr: response string to be deserialized. + :return: Deserialized int :rtype: long or int - :raises: ValueError if string format invalid. + :raises ValueError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1877,8 +1925,9 @@ def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. + :return: Deserialized duration :rtype: TimeDelta - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1887,16 +1936,16 @@ def deserialize_duration(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." raise DeserializationError(msg) from err - else: - return duration + return duration @staticmethod def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. + :return: Deserialized date :rtype: Date - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1910,8 +1959,9 @@ def deserialize_time(attr): """Deserialize ISO-8601 formatted string into time object. :param str attr: response string to be deserialized. + :return: Deserialized time :rtype: datetime.time - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1924,31 +1974,32 @@ def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. 
""" if isinstance(attr, ET.Element): attr = attr.text try: parsed_date = email.utils.parsedate_tz(attr) # type: ignore date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) ) if not date_obj.tzinfo: date_obj = date_obj.astimezone(tz=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1976,8 +2027,7 @@ def deserialize_iso(attr): except (ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj @staticmethod def deserialize_unix(attr): @@ -1985,8 +2035,9 @@ def deserialize_unix(attr): This is represented as seconds. :param int attr: Object to be serialized. + :return: Deserialized datetime :rtype: Datetime - :raises: DeserializationError if format invalid + :raises DeserializationError: if format invalid """ if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore @@ -1996,5 +2047,4 @@ def deserialize_unix(attr): except ValueError as err: msg = "Cannot deserialize to unix datetime object." raise DeserializationError(msg) from err - else: - return date_obj + return date_obj diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/__init__.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/__init__.py index afd183c536ce..a743737977f3 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/__init__.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/__init__.py @@ -5,12 +5,18 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._azure_queue_storage import AzureQueueStorage +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._azure_queue_storage import AzureQueueStorage # type: ignore try: from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk @@ -18,6 +24,6 @@ __all__ = [ "AzureQueueStorage", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_configuration.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_configuration.py index e2fb1add7c77..079c42687e92 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_configuration.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_configuration.py @@ -13,7 +13,7 @@ VERSION = "unknown" -class AzureQueueStorageConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long +class AzureQueueStorageConfiguration: # pylint: disable=too-many-instance-attributes """Configuration for AzureQueueStorage. Note that all parameters used to create this instance are saved as instance diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/__init__.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/__init__.py index 86281334bc6d..e53a2d5483fd 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/__init__.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/__init__.py @@ -5,14 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
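Reviewer note: stepping back from the generated layer, the queue CHANGELOG earlier in this diff calls out OAuth support for the access-policy APIs. A hedged sketch of what that enables, using the public client; the account URL, queue name, and identifier key are placeholders:

```python
from datetime import datetime, timedelta, timezone

from azure.identity import DefaultAzureCredential
from azure.storage.queue import AccessPolicy, QueueClient, QueueSasPermissions

queue = QueueClient(
    account_url="https://<account>.queue.core.windows.net",
    queue_name="<queue>",
    credential=DefaultAzureCredential(),  # token credential, not an account key
)

policy = AccessPolicy(
    permission=QueueSasPermissions(read=True),
    start=datetime.now(timezone.utc),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)
# Both calls below previously required shared-key auth; per the CHANGELOG
# they now also accept OAuth (token) credentials.
queue.set_queue_access_policy(signed_identifiers={"read-only": policy})
print(queue.get_queue_access_policy())
```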
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._service_operations import ServiceOperations -from ._queue_operations import QueueOperations -from ._messages_operations import MessagesOperations -from ._message_id_operations import MessageIdOperations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._service_operations import ServiceOperations # type: ignore +from ._queue_operations import QueueOperations # type: ignore +from ._messages_operations import MessagesOperations # type: ignore +from ._message_id_operations import MessageIdOperations # type: ignore from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ @@ -21,5 +27,5 @@ "MessagesOperations", "MessageIdOperations", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py index b516d3dba419..af54dd8e0a66 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,8 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Optional, Type, TypeVar +from typing import Any, Callable, Dict, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -23,12 +24,14 @@ from azure.core.utils import case_insensitive_dict from ... 
import models as _models +from ..._serialization import Deserializer, Serializer from ...operations._message_id_operations import build_delete_request, build_update_request +from .._configuration import AzureQueueStorageConfiguration if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -47,13 +50,13 @@ class MessageIdOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureQueueStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def update( # pylint: disable=inconsistent-return-statements + async def update( self, pop_receipt: str, visibilitytimeout: int, @@ -91,7 +94,7 @@ async def update( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -149,7 +152,7 @@ async def update( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements + async def delete( self, pop_receipt: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: """The Delete operation deletes the specified message. @@ -169,7 +172,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py index 2a54dae5143f..9c71b83f7fa7 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_messages_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,8 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Literal, Optional, Type, TypeVar +from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -23,17 +24,19 @@ from azure.core.utils import case_insensitive_dict from ... import models as _models +from ..._serialization import Deserializer, Serializer from ...operations._messages_operations import ( build_clear_request, build_dequeue_request, build_enqueue_request, build_peek_request, ) +from .._configuration import AzureQueueStorageConfiguration if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -52,10 +55,10 @@ class MessagesOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureQueueStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async async def dequeue( @@ -91,7 +94,7 @@ async def dequeue( :rtype: list[~azure.storage.queue.models.DequeuedMessageItem] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -141,7 +144,7 @@ async def dequeue( return deserialized # type: ignore @distributed_trace_async - async def clear( # pylint: disable=inconsistent-return-statements + async def clear( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: """The Clear operation deletes all messages from the specified queue. 
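The same two mechanical changes recur in every operation group touched by this diff: `error_map` loses its `MutableMapping[int, Type[HttpResponseError]]` parameterization (so `typing.Type` becomes an unused import and is dropped), and the operation-group `__init__` attributes gain explicit types. The relaxed annotation is safe because the value-type parameterization was informational only; a minimal sketch using azure-core's public `map_error` helper (the call below stubs `response` purely for illustration):

from azure.core.exceptions import (
    ClientAuthenticationError,
    ResourceExistsError,
    ResourceNotFoundError,
    map_error,
)

# A plain dict satisfies the new bare `MutableMapping` annotation; the old
# `[int, Type[HttpResponseError]]` parameters added no checking that the
# generated code relied on.
error_map = {
    401: ClientAuthenticationError,
    404: ResourceNotFoundError,
    409: ResourceExistsError,
}

# map_error raises the exception class mapped to the status code, if any.
try:
    map_error(status_code=404, response=None, error_map=error_map)
except ResourceNotFoundError:
    print("404 mapped to ResourceNotFoundError")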
@@ -158,7 +161,7 @@ async def clear( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -244,7 +247,7 @@ async def enqueue( :rtype: list[~azure.storage.queue.models.EnqueuedMessage] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -326,7 +329,7 @@ async def peek( :rtype: list[~azure.storage.queue.models.PeekedMessageItem] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py index 14be9c32f3f5..d693114f576a 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,8 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Literal, Optional, Type, TypeVar +from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -23,6 +24,7 @@ from azure.core.utils import case_insensitive_dict from ... 
import models as _models +from ..._serialization import Deserializer, Serializer from ...operations._queue_operations import ( build_create_request, build_delete_request, @@ -31,11 +33,12 @@ build_set_access_policy_request, build_set_metadata_request, ) +from .._configuration import AzureQueueStorageConfiguration if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -54,13 +57,13 @@ class QueueOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureQueueStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def create( # pylint: disable=inconsistent-return-statements + async def create( self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, @@ -87,7 +90,7 @@ async def create( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -132,7 +135,7 @@ async def create( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def delete( # pylint: disable=inconsistent-return-statements + async def delete( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: """operation permanently deletes the specified queue. @@ -149,7 +152,7 @@ async def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -193,7 +196,7 @@ async def delete( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def get_properties( # pylint: disable=inconsistent-return-statements + async def get_properties( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any ) -> None: """Retrieves user-defined metadata and queue properties on the specified queue. 
Metadata is @@ -211,7 +214,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -261,7 +264,7 @@ async def get_properties( # pylint: disable=inconsistent-return-statements return cls(pipeline_response, None, response_headers) # type: ignore @distributed_trace_async - async def set_metadata( # pylint: disable=inconsistent-return-statements + async def set_metadata( self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, @@ -289,7 +292,7 @@ async def set_metadata( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -354,7 +357,7 @@ async def get_access_policy( :rtype: list[~azure.storage.queue.models.SignedIdentifier] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -404,7 +407,7 @@ async def get_access_policy( return deserialized # type: ignore @distributed_trace_async - async def set_access_policy( # pylint: disable=inconsistent-return-statements + async def set_access_policy( self, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, @@ -427,7 +430,7 @@ async def set_access_policy( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py index d5425a9a8b3d..33a396b72f33 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_service_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,8 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Literal, Optional, Type, TypeVar +from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar +from azure.core import AsyncPipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -23,17 +24,19 @@ from azure.core.utils import case_insensitive_dict from ... 
import models as _models +from ..._serialization import Deserializer, Serializer from ...operations._service_operations import ( build_get_properties_request, build_get_statistics_request, build_list_queues_segment_request, build_set_properties_request, ) +from .._configuration import AzureQueueStorageConfiguration if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -52,13 +55,13 @@ class ServiceOperations: def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureQueueStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def set_properties( # pylint: disable=inconsistent-return-statements + async def set_properties( self, storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, @@ -82,7 +85,7 @@ async def set_properties( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -152,7 +155,7 @@ async def get_properties( :rtype: ~azure.storage.queue.models.StorageServiceProperties :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -222,7 +225,7 @@ async def get_statistics( :rtype: ~azure.storage.queue.models.StorageServiceStats :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -318,7 +321,7 @@ async def list_queues_segment( :rtype: ~azure.storage.queue.models.ListQueuesSegmentResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/__init__.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/__init__.py index 974cf9c1005d..2c14c2aa0b3f 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/__init__.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/__init__.py @@ -5,28 +5,39 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._models_py3 import AccessPolicy -from ._models_py3 import CorsRule -from ._models_py3 import DequeuedMessageItem -from ._models_py3 import EnqueuedMessage -from ._models_py3 import GeoReplication -from ._models_py3 import ListQueuesSegmentResponse -from ._models_py3 import Logging -from ._models_py3 import Metrics -from ._models_py3 import PeekedMessageItem -from ._models_py3 import QueueItem -from ._models_py3 import QueueMessage -from ._models_py3 import RetentionPolicy -from ._models_py3 import SignedIdentifier -from ._models_py3 import StorageError -from ._models_py3 import StorageServiceProperties -from ._models_py3 import StorageServiceStats +from typing import TYPE_CHECKING -from ._azure_queue_storage_enums import GeoReplicationStatusType -from ._azure_queue_storage_enums import StorageErrorCode +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models_py3 import ( # type: ignore + AccessPolicy, + CorsRule, + DequeuedMessageItem, + EnqueuedMessage, + GeoReplication, + ListQueuesSegmentResponse, + Logging, + Metrics, + PeekedMessageItem, + QueueItem, + QueueMessage, + RetentionPolicy, + SignedIdentifier, + StorageError, + StorageServiceProperties, + StorageServiceStats, +) + +from ._azure_queue_storage_enums import ( # type: ignore + GeoReplicationStatusType, + StorageErrorCode, +) from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ @@ -49,5 +60,5 @@ "GeoReplicationStatusType", "StorageErrorCode", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py index f323591f01f7..5f0c99db5395 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py @@ -1,5 +1,4 @@ # coding=utf-8 -# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. @@ -13,7 +12,6 @@ from .. import _serialization if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from .. import models as _models diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/__init__.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/__init__.py index 86281334bc6d..e53a2d5483fd 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/__init__.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/__init__.py @@ -5,14 +5,20 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._service_operations import ServiceOperations -from ._queue_operations import QueueOperations -from ._messages_operations import MessagesOperations -from ._message_id_operations import MessageIdOperations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._service_operations import ServiceOperations # type: ignore +from ._queue_operations import QueueOperations # type: ignore +from ._messages_operations import MessagesOperations # type: ignore +from ._message_id_operations import MessageIdOperations # type: ignore from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ @@ -21,5 +27,5 @@ "MessagesOperations", "MessageIdOperations", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py index c4e742f6bd5e..bb59d48f2e39 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_message_id_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,8 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, Literal, Optional, Type, TypeVar +from typing import Any, Callable, Dict, Literal, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -23,12 +24,13 @@ from azure.core.utils import case_insensitive_dict from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import AzureQueueStorageConfiguration +from .._serialization import Deserializer, Serializer if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -130,10 +132,10 @@ class MessageIdOperations: def __init__(self, *args, **kwargs): input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureQueueStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def update( # pylint: disable=inconsistent-return-statements @@ -174,7 +176,7 @@ def update( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -252,7 +254,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py index 6d2d0e66b18c..7475f9e1cb20 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,8 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Literal, Optional, Type, TypeVar +from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -23,12 +24,13 @@ from azure.core.utils import case_insensitive_dict from .. 
import models as _models -from .._serialization import Serializer +from .._configuration import AzureQueueStorageConfiguration +from .._serialization import Deserializer, Serializer if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -207,10 +209,10 @@ class MessagesOperations: def __init__(self, *args, **kwargs): input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureQueueStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def dequeue( @@ -246,7 +248,7 @@ def dequeue( :rtype: list[~azure.storage.queue.models.DequeuedMessageItem] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -313,7 +315,7 @@ def clear( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -399,7 +401,7 @@ def enqueue( :rtype: list[~azure.storage.queue.models.EnqueuedMessage] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -481,7 +483,7 @@ def peek( :rtype: list[~azure.storage.queue.models.PeekedMessageItem] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py index 53024475e6fc..351b13775fc1 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,8 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Literal, Optional, Type, TypeVar +from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -23,12 +24,13 @@ from azure.core.utils import case_insensitive_dict from .. import models as _models -from .._serialization import Serializer +from .._configuration import AzureQueueStorageConfiguration +from .._serialization import Deserializer, Serializer if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -260,10 +262,10 @@ class QueueOperations: def __init__(self, *args, **kwargs): input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureQueueStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def create( # pylint: disable=inconsistent-return-statements @@ -293,7 +295,7 @@ def create( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -355,7 +357,7 @@ def delete( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -417,7 +419,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -495,7 +497,7 @@ def set_metadata( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -560,7 +562,7 @@ def get_access_policy( :rtype: list[~azure.storage.queue.models.SignedIdentifier] :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -633,7 +635,7 @@ def 
set_access_policy( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py index 6bbde7d8d8e3..0369181fef37 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_service_operations.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -7,8 +7,9 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys -from typing import Any, Callable, Dict, List, Literal, Optional, Type, TypeVar +from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -23,12 +24,13 @@ from azure.core.utils import case_insensitive_dict from .. import models as _models -from .._serialization import Serializer +from .._configuration import AzureQueueStorageConfiguration +from .._serialization import Deserializer, Serializer if sys.version_info >= (3, 9): from collections.abc import MutableMapping else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports + from typing import MutableMapping # type: ignore T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -203,10 +205,10 @@ class ServiceOperations: def __init__(self, *args, **kwargs): input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: AzureQueueStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def set_properties( # pylint: disable=inconsistent-return-statements @@ -233,7 +235,7 @@ def set_properties( # pylint: disable=inconsistent-return-statements :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -303,7 +305,7 @@ def get_properties( :rtype: ~azure.storage.queue.models.StorageServiceProperties :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, 
Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -373,7 +375,7 @@ def get_statistics( :rtype: ~azure.storage.queue.models.StorageServiceStats :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -469,7 +471,7 @@ def list_queues_segment( :rtype: ~azure.storage.queue.models.ListQueuesSegmentResponse :raises ~azure.core.exceptions.HttpResponseError: """ - error_map: MutableMapping[int, Type[HttpResponseError]] = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py index 09f2f7211201..750d2f2ddf7b 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py @@ -3,7 +3,6 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only import functools import warnings @@ -33,6 +32,12 @@ if TYPE_CHECKING: from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from ._message_encoding import ( + BinaryBase64DecodePolicy, + BinaryBase64EncodePolicy, + TextBase64DecodePolicy, + TextBase64EncodePolicy + ) from ._models import QueueProperties @@ -70,9 +75,11 @@ class QueueClient(StorageAccountHostsMixin, StorageEncryptionMixin): :keyword message_encode_policy: The encoding policy to use on outgoing messages. Default is not to encode messages. Other options include :class:`TextBase64EncodePolicy`, :class:`BinaryBase64EncodePolicy` or `None`. + :paramtype message_encode_policy: BinaryBase64EncodePolicy or TextBase64EncodePolicy or None :keyword message_decode_policy: The decoding policy to use on incoming messages. Default value is not to decode messages. Other options include :class:`TextBase64DecodePolicy`, :class:`BinaryBase64DecodePolicy` or `None`. + :paramtype message_decode_policy: BinaryBase64DecodePolicy or TextBase64DecodePolicy or None :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://<account>.queue.core.windows.net.
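The `__init__` hunk below promotes the keywords documented above from `kwargs.pop(...)` lookups to explicit keyword-only parameters. A minimal construction sketch (the account URL, queue name, and SAS token are placeholders for illustration):

from azure.storage.queue import QueueClient, TextBase64DecodePolicy, TextBase64EncodePolicy

queue = QueueClient(
    account_url="https://<account>.queue.core.windows.net",
    queue_name="myqueue",
    credential="<sas-token>",  # placeholder; a TokenCredential also works
    # Both policies are now explicit keyword-only parameters rather than kwargs lookups.
    message_encode_policy=TextBase64EncodePolicy(),
    message_decode_policy=TextBase64DecodePolicy(),
)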
@@ -90,16 +97,29 @@ def __init__( self, account_url: str, queue_name: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + message_encode_policy: Optional[Union["BinaryBase64EncodePolicy", "TextBase64EncodePolicy"]] = None, + message_decode_policy: Optional[Union["BinaryBase64DecodePolicy", "TextBase64DecodePolicy"]] = None, + audience: Optional[str] = None, **kwargs: Any ) -> None: parsed_url, sas_token = _parse_url(account_url=account_url, queue_name=queue_name, credential=credential) self.queue_name = queue_name self._query_str, credential = self._format_query_string(sas_token, credential) - super(QueueClient, self).__init__(parsed_url, service='queue', credential=credential, **kwargs) - self._message_encode_policy = kwargs.get('message_encode_policy', None) or NoEncodePolicy() - self._message_decode_policy = kwargs.get('message_decode_policy', None) or NoDecodePolicy() + super(QueueClient, self).__init__( + parsed_url, + service='queue', + credential=credential, + secondary_hostname=secondary_hostname, + audience=audience, + **kwargs + ) + self._message_encode_policy = message_encode_policy or NoEncodePolicy() + self._message_decode_policy = message_decode_policy or NoDecodePolicy() self._client = AzureQueueStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client._config.version = get_api_version(api_version) # type: ignore [assignment] self._configure_encryption(kwargs) def _format_url(self, hostname: str) -> str: @@ -110,16 +130,18 @@ def _format_url(self, hostname: str) -> str: :returns: The formatted endpoint URL according to the specified location mode hostname. :rtype: str """ - return _format_url( - queue_name=self.queue_name, - hostname=hostname, - scheme=self.scheme, - query_str=self._query_str) + return _format_url(queue_name=self.queue_name, hostname=hostname, scheme=self.scheme, query_str=self._query_str) @classmethod def from_queue_url( cls, queue_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + message_encode_policy: Optional[Union["BinaryBase64EncodePolicy", "TextBase64EncodePolicy"]] = None, + message_decode_policy: Optional[Union["BinaryBase64DecodePolicy", "TextBase64DecodePolicy"]] = None, + audience: Optional[str] = None, **kwargs: Any ) -> Self: """A client to interact with a specific Queue. @@ -139,6 +161,19 @@ def from_queue_url( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials.TokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword message_encode_policy: The encoding policy to use on outgoing messages. + Default is not to encode messages. Other options include :class:`TextBase64EncodePolicy`, + :class:`BinaryBase64EncodePolicy` or `None`. 
+ :paramtype message_encode_policy: BinaryBase64EncodePolicy or TextBase64EncodePolicy or None + :keyword message_decode_policy: The decoding policy to use on incoming messages. + Default value is not to decode messages. Other options include :class:`TextBase64DecodePolicy`, + :class:`BinaryBase64DecodePolicy` or `None`. + :paramtype message_decode_policy: BinaryBase64DecodePolicy or TextBase64DecodePolicy or None :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://<account>.queue.core.windows.net. @@ -146,13 +181,29 @@ def from_queue_url( :rtype: ~azure.storage.queue.QueueClient """ account_url, queue_name = _from_queue_url(queue_url=queue_url) - return cls(account_url, queue_name=queue_name, credential=credential, **kwargs) + return cls( + account_url, + queue_name=queue_name, + credential=credential, + api_version=api_version, + secondary_hostname=secondary_hostname, + message_encode_policy=message_encode_policy, + message_decode_policy=message_decode_policy, + audience=audience, + **kwargs + ) @classmethod def from_connection_string( cls, conn_str: str, queue_name: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + message_encode_policy: Optional[Union["BinaryBase64EncodePolicy", "TextBase64EncodePolicy"]] = None, + message_decode_policy: Optional[Union["BinaryBase64DecodePolicy", "TextBase64DecodePolicy"]] = None, + audience: Optional[str] = None, **kwargs: Any ) -> Self: """Create QueueClient from a Connection String. @@ -175,6 +226,19 @@ def from_connection_string( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials.TokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword message_encode_policy: The encoding policy to use on outgoing messages. + Default is not to encode messages. Other options include :class:`TextBase64EncodePolicy`, + :class:`BinaryBase64EncodePolicy` or `None`. + :paramtype message_encode_policy: BinaryBase64EncodePolicy or TextBase64EncodePolicy or None + :keyword message_decode_policy: The decoding policy to use on incoming messages. + Default value is not to decode messages. Other options include :class:`TextBase64DecodePolicy`, + :class:`BinaryBase64DecodePolicy` or `None`. + :paramtype message_decode_policy: BinaryBase64DecodePolicy or TextBase64DecodePolicy or None :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://<account>.queue.core.windows.net. @@ -190,16 +254,24 @@ def from_connection_string( :dedent: 8 :caption: Create the queue client from connection string.
""" - account_url, secondary, credential = parse_connection_str( - conn_str, credential, 'queue') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary - return cls(account_url, queue_name=queue_name, credential=credential, **kwargs) + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'queue') + return cls( + account_url, + queue_name=queue_name, + credential=credential, + api_version=api_version, + secondary_hostname=secondary_hostname or secondary, + message_encode_policy=message_encode_policy, + message_decode_policy=message_decode_policy, + audience=audience, + **kwargs + ) @distributed_trace def create_queue( self, *, metadata: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Creates a new queue in the storage account. @@ -207,7 +279,7 @@ def create_queue( If a queue with the same name already exists, the operation fails with a `ResourceExistsError`. - :keyword Dict[str,str] metadata: + :keyword Dict[str, str] metadata: A dict containing name-value pairs to associate with the queue as metadata. Note that metadata names preserve the case with which they were created, but are case-insensitive when set or read. @@ -231,7 +303,6 @@ def create_queue( :caption: Create a queue. """ headers = kwargs.pop('headers', {}) - timeout = kwargs.pop('timeout', None) headers.update(add_metadata_headers(metadata)) try: return self._client.queue.create( @@ -239,12 +310,13 @@ def create_queue( timeout=timeout, headers=headers, cls=deserialize_queue_creation, - **kwargs) + **kwargs + ) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def delete_queue(self, **kwargs: Any) -> None: + def delete_queue(self, *, timeout: Optional[int] = None, **kwargs: Any) -> None: """Deletes the specified queue and any messages it contains. When a queue is successfully deleted, it is immediately marked for deletion @@ -272,14 +344,13 @@ def delete_queue(self, **kwargs: Any) -> None: :dedent: 12 :caption: Delete a queue. """ - timeout = kwargs.pop('timeout', None) try: self._client.queue.delete(timeout=timeout, **kwargs) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def get_queue_properties(self, **kwargs: Any) -> "QueueProperties": + def get_queue_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) -> "QueueProperties": """Returns all user-defined metadata for the specified queue. The data returned does not include the queue's list of messages. @@ -298,12 +369,12 @@ def get_queue_properties(self, **kwargs: Any) -> "QueueProperties": :dedent: 12 :caption: Get the properties on the queue. """ - timeout = kwargs.pop('timeout', None) try: response = cast("QueueProperties", self._client.queue.get_properties( timeout=timeout, cls=deserialize_queue_properties, - **kwargs)) + **kwargs + )) except HttpResponseError as error: process_storage_error(error) response.name = self.queue_name @@ -312,16 +383,17 @@ def get_queue_properties(self, **kwargs: Any) -> "QueueProperties": @distributed_trace def set_queue_metadata( self, metadata: Optional[Dict[str, str]] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Sets user-defined metadata on the specified queue. Metadata is associated with the queue as name-value pairs. - :param Optional[Dict[str, Any]] metadata: + :param Optional[Dict[str, str]] metadata: A dict containing name-value pairs to associate with the queue as metadata. 
- :type metadata: Optional[Dict[str, str]] :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-queue-service-operations. @@ -340,7 +412,6 @@ def set_queue_metadata( :dedent: 12 :caption: Set metadata on the queue. """ - timeout = kwargs.pop('timeout', None) headers = kwargs.pop('headers', {}) headers.update(add_metadata_headers(metadata)) try: @@ -348,12 +419,13 @@ def set_queue_metadata( timeout=timeout, headers=headers, cls=return_response_headers, - **kwargs) + **kwargs + ) except HttpResponseError as error: process_storage_error(error) @distributed_trace - def get_queue_access_policy(self, **kwargs: Any) -> Dict[str, AccessPolicy]: + def get_queue_access_policy(self, *, timeout: Optional[int] = None, **kwargs: Any) -> Dict[str, AccessPolicy]: """Returns details about any stored access policies specified on the queue that may be used with Shared Access Signatures. @@ -366,12 +438,12 @@ def get_queue_access_policy(self, **kwargs: Any) -> Dict[str, AccessPolicy]: :return: A dictionary of access policies associated with the queue. :rtype: Dict[str, ~azure.storage.queue.AccessPolicy] """ - timeout = kwargs.pop('timeout', None) try: _, identifiers = cast(Tuple[Dict, List], self._client.queue.get_access_policy( timeout=timeout, cls=return_headers_and_deserialized, - **kwargs)) + **kwargs + )) except HttpResponseError as error: process_storage_error(error) return {s.id: s.access_policy or AccessPolicy() for s in identifiers} @@ -379,6 +451,8 @@ def get_queue_access_policy(self, **kwargs: Any) -> Dict[str, AccessPolicy]: @distributed_trace def set_queue_access_policy( self, signed_identifiers: Dict[str, AccessPolicy], + *, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Sets stored access policies for the queue that may be used with Shared @@ -416,11 +490,11 @@ def set_queue_access_policy( :dedent: 12 :caption: Set an access policy on the queue. """ - timeout = kwargs.pop('timeout', None) if len(signed_identifiers) > 15: raise ValueError( - 'Too many access policies provided. The server does not support setting ' - 'more than 15 access policies on a single resource.') + "Too many access policies provided. The server does not support setting " + "more than 15 access policies on a single resource." + ) identifiers = [] for key, value in signed_identifiers.items(): if value: @@ -428,10 +502,7 @@ def set_queue_access_policy( value.expiry = serialize_iso(value.expiry) identifiers.append(SignedIdentifier(id=key, access_policy=value)) try: - self._client.queue.set_access_policy( - queue_acl=identifiers or None, - timeout=timeout, - **kwargs) + self._client.queue.set_access_policy(queue_acl=identifiers or None, timeout=timeout, **kwargs) except HttpResponseError as error: process_storage_error(error) @@ -441,6 +512,7 @@ def send_message( *, visibility_timeout: Optional[int] = None, time_to_live: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> QueueMessage: """Adds a new message to the back of the message queue. @@ -479,7 +551,7 @@ def send_message( #other-client--per-operation-configuration>`__. :return: A :class:`~azure.storage.queue.QueueMessage` object. - This object is also populated with the content although it is not + This object is also populated with the content, although it is not returned from the service. :rtype: ~azure.storage.queue.QueueMessage @@ -492,20 +564,21 @@ def send_message( :dedent: 12 :caption: Send messages. 
""" - timeout = kwargs.pop('timeout', None) if self.key_encryption_key: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) try: self._message_encode_policy.configure( require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, resolver=self.key_resolver_function, - encryption_version=self.encryption_version) + encryption_version=self.encryption_version + ) except TypeError: warnings.warn( "TypeError when calling message_encode_policy.configure. \ @@ -516,7 +589,8 @@ def send_message( self._message_encode_policy.configure( require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - resolver=self.key_resolver_function) + resolver=self.key_resolver_function + ) encoded_content = self._message_encode_policy(content) new_message = GenQueueMessage(message_text=encoded_content) @@ -526,14 +600,15 @@ def send_message( visibilitytimeout=visibility_timeout, message_time_to_live=time_to_live, timeout=timeout, - **kwargs) + **kwargs + ) queue_message = QueueMessage( content=content, id=enqueued[0].message_id, inserted_on=enqueued[0].insertion_time, expires_on=enqueued[0].expiration_time, - pop_receipt = enqueued[0].pop_receipt, - next_visible_on = enqueued[0].time_next_visible + pop_receipt=enqueued[0].pop_receipt, + next_visible_on=enqueued[0].time_next_visible ) return queue_message except HttpResponseError as error: @@ -543,6 +618,7 @@ def send_message( def receive_message( self, *, visibility_timeout: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Optional[QueueMessage]: """Removes one message from the front of the queue. @@ -582,13 +658,13 @@ def receive_message( :dedent: 12 :caption: Receive one message from the queue. """ - timeout = kwargs.pop('timeout', None) if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) self._message_decode_policy.configure( require_encryption=self.require_encryption, @@ -603,8 +679,7 @@ def receive_message( cls=self._message_decode_policy, **kwargs ) - wrapped_message = QueueMessage._from_generated( # pylint: disable=protected-access - message[0]) if message != [] else None + wrapped_message = QueueMessage._from_generated(message[0]) if message != [] else None # pylint: disable=protected-access return wrapped_message except HttpResponseError as error: process_storage_error(error) @@ -615,6 +690,7 @@ def receive_messages( messages_per_page: Optional[int] = None, visibility_timeout: Optional[int] = None, max_messages: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> ItemPaged[QueueMessage]: """Removes one or more messages from the front of the queue. @@ -675,13 +751,13 @@ def receive_messages( :dedent: 12 :caption: Receive messages from the queue. 
""" - timeout = kwargs.pop('timeout', None) if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) self._message_decode_policy.configure( require_encryption=self.require_encryption, @@ -699,8 +775,12 @@ def receive_messages( if max_messages is not None and messages_per_page is not None: if max_messages < messages_per_page: raise ValueError("max_messages must be greater or equal to messages_per_page") - return ItemPaged(command, results_per_page=messages_per_page, - page_iterator_class=MessagesPaged, max_messages=max_messages) + return ItemPaged( + command, + results_per_page=messages_per_page, + page_iterator_class=MessagesPaged, + max_messages=max_messages + ) except HttpResponseError as error: process_storage_error(error) @@ -711,6 +791,7 @@ def update_message( content: Optional[object] = None, *, visibility_timeout: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> QueueMessage: """Updates the visibility timeout of a message. You can also use this @@ -763,13 +844,13 @@ def update_message( :dedent: 12 :caption: Update a message. """ - timeout = kwargs.pop('timeout', None) if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) if isinstance(message, QueueMessage): message_id = message.id @@ -794,7 +875,8 @@ def update_message( self.require_encryption, self.key_encryption_key, self.key_resolver_function, - encryption_version=self.encryption_version) + encryption_version=self.encryption_version + ) except TypeError: warnings.warn( "TypeError when calling message_encode_policy.configure. 
\ @@ -805,7 +887,8 @@ def update_message( self._message_encode_policy.configure( self.require_encryption, self.key_encryption_key, - self.key_resolver_function) + self.key_resolver_function + ) encoded_message_text = self._message_encode_policy(message_text) updated = GenQueueMessage(message_text=encoded_message_text) else: @@ -818,15 +901,16 @@ def update_message( pop_receipt=receipt, cls=return_response_headers, queue_message_id=message_id, - **kwargs)) + **kwargs + )) new_message = QueueMessage( content=message_text, id=message_id, inserted_on=inserted_on, dequeue_count=dequeue_count, expires_on=expires_on, - pop_receipt = response['popreceipt'], - next_visible_on = response['time_next_visible'] + pop_receipt=response['popreceipt'], + next_visible_on=response['time_next_visible'] ) return new_message except HttpResponseError as error: @@ -835,6 +919,8 @@ def update_message( @distributed_trace def peek_messages( self, max_messages: Optional[int] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> List[QueueMessage]: """Retrieves one or more messages from the front of the queue, but does @@ -879,13 +965,13 @@ def peek_messages( if max_messages and not 1 <= max_messages <= 32: raise ValueError("Number of messages to peek should be between 1 and 32") - timeout = kwargs.pop('timeout', None) if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) self._message_decode_policy.configure( require_encryption=self.require_encryption, @@ -897,7 +983,8 @@ def peek_messages( number_of_messages=max_messages, timeout=timeout, cls=self._message_decode_policy, - **kwargs) + **kwargs + ) wrapped_messages = [] for peeked in messages: wrapped_messages.append(QueueMessage._from_generated(peeked)) # pylint: disable=protected-access @@ -906,7 +993,7 @@ def peek_messages( process_storage_error(error) @distributed_trace - def clear_messages(self, **kwargs: Any) -> None: + def clear_messages(self, *, timeout: Optional[int] = None, **kwargs: Any) -> None: """Deletes all messages from the specified queue. :keyword int timeout: @@ -925,7 +1012,6 @@ def clear_messages(self, **kwargs: Any) -> None: :dedent: 12 :caption: Clears all messages. """ - timeout = kwargs.pop('timeout', None) try: self._client.messages.clear(timeout=timeout, **kwargs) except HttpResponseError as error: @@ -935,6 +1021,8 @@ def clear_messages(self, **kwargs: Any) -> None: def delete_message( self, message: Union[str, QueueMessage], pop_receipt: Optional[str] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Deletes the specified message. @@ -971,8 +1059,6 @@ def delete_message( :dedent: 12 :caption: Delete a message. """ - timeout = kwargs.pop('timeout', None) - receipt: Optional[str] if isinstance(message, QueueMessage): message_id = message.id diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py index 923f7a799038..7df26b0a0587 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py @@ -3,7 +3,6 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only import functools from typing import ( @@ -97,13 +96,24 @@ class QueueServiceClient(StorageAccountHostsMixin, StorageEncryptionMixin): def __init__( self, account_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, **kwargs: Any ) -> None: parsed_url, sas_token = _parse_url(account_url=account_url, credential=credential) self._query_str, credential = self._format_query_string(sas_token, credential) - super(QueueServiceClient, self).__init__(parsed_url, service='queue', credential=credential, **kwargs) + super(QueueServiceClient, self).__init__( + parsed_url, + service='queue', + credential=credential, + secondary_hostname=secondary_hostname, + audience=audience, + **kwargs + ) self._client = AzureQueueStorage(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client._config.version = get_api_version(api_version) # type: ignore [assignment] self._configure_encryption(kwargs) def _format_url(self, hostname: str) -> str: @@ -120,6 +130,10 @@ def _format_url(self, hostname: str) -> str: def from_connection_string( cls, conn_str: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, **kwargs: Any ) -> Self: """Create QueueServiceClient from a Connection String. @@ -140,6 +154,11 @@ def from_connection_string( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials.TokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + :keyword str secondary_hostname: + The hostname of the secondary endpoint. :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://<account>.queue.core.windows.net. @@ -155,14 +174,18 @@ def from_connection_string( :dedent: 8 :caption: Creating the QueueServiceClient with a connection string. """ - account_url, secondary, credential = parse_connection_str( - conn_str, credential, 'queue') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary - return cls(account_url, credential=credential, **kwargs) + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'queue') + return cls( + account_url, + credential=credential, + api_version=api_version, + secondary_hostname=secondary_hostname or secondary, + audience=audience, + **kwargs + ) @distributed_trace - def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: + def get_service_stats(self, *, timeout: Optional[int] = None, **kwargs: Any) -> Dict[str, Any]: """Retrieves statistics related to replication for the Queue service. 
It is only available when read-access geo-redundant replication is enabled for @@ -186,7 +209,6 @@ def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: :return: The queue service stats. :rtype: Dict[str, Any] """ - timeout = kwargs.pop('timeout', None) try: stats = self._client.service.get_statistics( timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs) @@ -195,7 +217,7 @@ def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: process_storage_error(error) @distributed_trace - def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]: + def get_service_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) -> Dict[str, Any]: """Gets the properties of a storage account's Queue service, including Azure Storage Analytics. @@ -214,7 +236,6 @@ def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]: :dedent: 8 :caption: Getting queue service properties. """ - timeout = kwargs.pop('timeout', None) try: service_props = self._client.service.get_properties(timeout=timeout, **kwargs) return service_properties_deserialize(service_props) @@ -227,6 +248,8 @@ def set_service_properties( hour_metrics: Optional["Metrics"] = None, minute_metrics: Optional["Metrics"] = None, cors: Optional[List[CorsRule]] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Sets the properties of a storage account's Queue service, including @@ -263,7 +286,6 @@ def set_service_properties( :dedent: 8 :caption: Setting queue service properties. """ - timeout = kwargs.pop('timeout', None) props = StorageServiceProperties( logging=analytics_logging, hour_metrics=hour_metrics, @@ -279,6 +301,9 @@ def set_service_properties( def list_queues( self, name_starts_with: Optional[str] = None, include_metadata: Optional[bool] = False, + *, + results_per_page: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> ItemPaged["QueueProperties"]: """Returns a generator to list the queues under the specified account. @@ -314,17 +339,18 @@ def list_queues( :dedent: 12 :caption: List queues in the service. """ - results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) include = ['metadata'] if include_metadata else None command = functools.partial( self._client.service.list_queues_segment, prefix=name_starts_with, include=include, timeout=timeout, - **kwargs) + **kwargs + ) return ItemPaged( - command, prefix=name_starts_with, results_per_page=results_per_page, + command, + prefix=name_starts_with, + results_per_page=results_per_page, page_iterator_class=QueuePropertiesPaged ) @@ -332,6 +358,8 @@ def list_queues( def create_queue( self, name: str, metadata: Optional[Dict[str, str]] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> QueueClient: """Creates a new queue under the specified account. @@ -358,16 +386,16 @@ def create_queue( :dedent: 8 :caption: Create a queue in the service. """ - timeout = kwargs.pop('timeout', None) queue = self.get_queue_client(name) kwargs.setdefault('merge_span', True) - queue.create_queue( - metadata=metadata, timeout=timeout, **kwargs) + queue.create_queue(metadata=metadata, timeout=timeout, **kwargs) return queue @distributed_trace def delete_queue( self, queue: Union["QueueProperties", str], + *, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Deletes the specified queue and any messages it contains. @@ -397,15 +425,11 @@ def delete_queue( :dedent: 12 :caption: Delete a queue in the service. 
""" - timeout = kwargs.pop('timeout', None) queue_client = self.get_queue_client(queue) kwargs.setdefault('merge_span', True) queue_client.delete_queue(timeout=timeout, **kwargs) - def get_queue_client( - self, queue: Union["QueueProperties", str], - **kwargs: Any - ) -> QueueClient: + def get_queue_client(self, queue: Union["QueueProperties", str], **kwargs: Any) -> QueueClient: """Get a client to interact with the specified queue. The queue need not already exist. @@ -432,8 +456,8 @@ def get_queue_client( queue_name = queue _pipeline = Pipeline( - transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore # pylint: disable=protected-access + transport=TransportWrapper(self._pipeline._transport), # pylint: disable=protected-access + policies=self._pipeline._impl_policies # type: ignore # pylint: disable=protected-access ) return QueueClient( diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py index 21199c3d5434..ad090b548469 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_serialize.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -from typing import Any, Dict +from typing import Optional _SUPPORTED_API_VERSIONS = [ '2019-02-02', @@ -28,11 +28,14 @@ '2023-11-03', '2024-05-04', '2024-08-04', + '2024-11-04', + '2025-01-05', + '2025-05-05', + '2025-07-05', ] -def get_api_version(kwargs: Dict[str, Any]) -> str: - api_version = kwargs.get('api_version', None) +def get_api_version(api_version: Optional[str]) -> str: if api_version and api_version not in _SUPPORTED_API_VERSIONS: versions = '\n'.join(_SUPPORTED_API_VERSIONS) raise ValueError(f"Unsupported API version '{api_version}'. Please select from:\n{versions}") diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py index c213a9bff30b..764bf6b0e8d6 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py @@ -3,7 +3,6 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only import functools import warnings @@ -39,6 +38,12 @@ if TYPE_CHECKING: from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential from azure.core.credentials_async import AsyncTokenCredential + from .._message_encoding import ( + BinaryBase64DecodePolicy, + BinaryBase64EncodePolicy, + TextBase64DecodePolicy, + TextBase64EncodePolicy + ) from .._models import QueueProperties @@ -76,9 +81,11 @@ class QueueClient( # type: ignore [misc] :keyword message_encode_policy: The encoding policy to use on outgoing messages. Default is not to encode messages. Other options include :class:`TextBase64EncodePolicy`, :class:`BinaryBase64EncodePolicy` or `None`. + :paramtype message_encode_policy: BinaryBase64EncodePolicy or TextBase64EncodePolicy or None :keyword message_decode_policy: The decoding policy to use on incoming messages. 
Default value is not to decode messages. Other options include :class:`TextBase64DecodePolicy`, :class:`BinaryBase64DecodePolicy` or `None`. + :paramtype message_decode_policy: BinaryBase64DecodePolicy or TextBase64DecodePolicy or None :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://<account>.queue.core.windows.net. @@ -104,6 +111,12 @@ def __init__( self, account_url: str, queue_name: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + message_encode_policy: Optional[Union["BinaryBase64EncodePolicy", "TextBase64EncodePolicy"]] = None, + message_decode_policy: Optional[Union["BinaryBase64DecodePolicy", "TextBase64DecodePolicy"]] = None, + audience: Optional[str] = None, **kwargs: Any ) -> None: kwargs["retry_policy"] = kwargs.get("retry_policy") or ExponentialRetry(**kwargs) @@ -111,12 +124,19 @@ def __init__( parsed_url, sas_token = _parse_url(account_url=account_url, queue_name=queue_name, credential=credential) self.queue_name = queue_name self._query_str, credential = self._format_query_string(sas_token, credential) - super(QueueClient, self).__init__(parsed_url, service='queue', credential=credential, **kwargs) + super(QueueClient, self).__init__( + parsed_url, + service='queue', + credential=credential, + secondary_hostname=secondary_hostname, + audience=audience, + **kwargs + ) - self._message_encode_policy = kwargs.get('message_encode_policy', None) or NoEncodePolicy() - self._message_decode_policy = kwargs.get('message_decode_policy', None) or NoDecodePolicy() + self._message_encode_policy = message_encode_policy or NoEncodePolicy() + self._message_decode_policy = message_decode_policy or NoDecodePolicy() self._client = AzureQueueStorage(self.url, base_url=self.url, pipeline=self._pipeline, loop=loop) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client._config.version = get_api_version(api_version) # type: ignore [assignment] self._loop = loop self._configure_encryption(kwargs) @@ -132,12 +152,19 @@ def _format_url(self, hostname: str) -> str: queue_name=self.queue_name, hostname=hostname, scheme=self.scheme, - query_str=self._query_str) + query_str=self._query_str + ) @classmethod def from_queue_url( cls, queue_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + message_encode_policy: Optional[Union["BinaryBase64EncodePolicy", "TextBase64EncodePolicy"]] = None, + message_decode_policy: Optional[Union["BinaryBase64DecodePolicy", "TextBase64DecodePolicy"]] = None, + audience: Optional[str] = None, **kwargs: Any ) -> Self: """A client to interact with a specific Queue. @@ -157,6 +184,19 @@ def from_queue_url( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials_async.AsyncTokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. 
Setting to an older version may result in reduced feature compatibility. + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword message_encode_policy: The encoding policy to use on outgoing messages. + Default is not to encode messages. Other options include :class:`TextBase64EncodePolicy`, + :class:`BinaryBase64EncodePolicy` or `None`. + :paramtype message_encode_policy: BinaryBase64EncodePolicy or TextBase64EncodePolicy or None + :keyword message_decode_policy: The decoding policy to use on incoming messages. + Default value is not to decode messages. Other options include :class:`TextBase64DecodePolicy`, + :class:`BinaryBase64DecodePolicy` or `None`. + :paramtype message_decode_policy: BinaryBase64DecodePolicy or TextBase64DecodePolicy or None :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. The value could be https://storage.azure.com/ (default) or https://<account>.queue.core.windows.net. @@ -164,13 +204,29 @@ :rtype: ~azure.storage.queue.QueueClient """ account_url, queue_name = _from_queue_url(queue_url=queue_url) - return cls(account_url, queue_name=queue_name, credential=credential, **kwargs) + return cls( + account_url, + queue_name=queue_name, + credential=credential, + api_version=api_version, + secondary_hostname=secondary_hostname, + message_encode_policy=message_encode_policy, + message_decode_policy=message_decode_policy, + audience=audience, + **kwargs + ) @classmethod def from_connection_string( cls, conn_str: str, queue_name: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + message_encode_policy: Optional[Union["BinaryBase64EncodePolicy", "TextBase64EncodePolicy"]] = None, + message_decode_policy: Optional[Union["BinaryBase64DecodePolicy", "TextBase64DecodePolicy"]] = None, + audience: Optional[str] = None, **kwargs: Any ) -> Self: """Create QueueClient from a Connection String. @@ -193,6 +249,19 @@ def from_connection_string( ~azure.core.credentials.AzureSasCredential or ~azure.core.credentials_async.AsyncTokenCredential or str or dict[str, str] or None + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword message_encode_policy: The encoding policy to use on outgoing messages. + Default is not to encode messages. Other options include :class:`TextBase64EncodePolicy`, + :class:`BinaryBase64EncodePolicy` or `None`. + :paramtype message_encode_policy: BinaryBase64EncodePolicy or TextBase64EncodePolicy or None + :keyword message_decode_policy: The decoding policy to use on incoming messages. + Default value is not to decode messages. Other options include :class:`TextBase64DecodePolicy`, + :class:`BinaryBase64DecodePolicy` or `None`. + :paramtype message_decode_policy: BinaryBase64DecodePolicy or TextBase64DecodePolicy or None :keyword str audience: The audience to use when requesting tokens for Azure Active Directory authentication. Only has an effect when credential is of type TokenCredential. 
The value could be https://storage.azure.com/ (default) or https://<account>.queue.core.windows.net. @@ -208,16 +277,24 @@ :dedent: 8 :caption: Create the queue client from connection string. """ - account_url, secondary, credential = parse_connection_str( - conn_str, credential, 'queue') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary - return cls(account_url, queue_name=queue_name, credential=credential, **kwargs) + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'queue') + return cls( + account_url, + queue_name=queue_name, + credential=credential, + api_version=api_version, + secondary_hostname=secondary_hostname or secondary, + message_encode_policy=message_encode_policy, + message_decode_policy=message_decode_policy, + audience=audience, + **kwargs + ) @distributed_trace_async async def create_queue( self, *, metadata: Optional[Dict[str, str]] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Creates a new queue in the storage account. @@ -225,7 +302,7 @@ async def create_queue( If a queue with the same name already exists, the operation fails with a `ResourceExistsError`. - :keyword dict(str,str) metadata: + :keyword Dict[str, str] metadata: A dict containing name-value pairs to associate with the queue as metadata. Note that metadata names preserve the case with which they were created, but are case-insensitive when set or read. @@ -248,18 +325,21 @@ :dedent: 12 :caption: Create a queue. """ - timeout = kwargs.pop('timeout', None) headers = kwargs.pop("headers", {}) headers.update(add_metadata_headers(metadata)) try: return await self._client.queue.create( - metadata=metadata, timeout=timeout, headers=headers, cls=deserialize_queue_creation, **kwargs + metadata=metadata, + timeout=timeout, + headers=headers, + cls=deserialize_queue_creation, + **kwargs ) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def delete_queue(self, **kwargs: Any) -> None: + async def delete_queue(self, *, timeout: Optional[int] = None, **kwargs: Any) -> None: """Deletes the specified queue and any messages it contains. When a queue is successfully deleted, it is immediately marked for deletion @@ -287,14 +367,13 @@ async def delete_queue(self, **kwargs: Any) -> None: :dedent: 16 :caption: Delete a queue. """ - timeout = kwargs.pop('timeout', None) try: await self._client.queue.delete(timeout=timeout, **kwargs) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def get_queue_properties(self, **kwargs: Any) -> "QueueProperties": + async def get_queue_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) -> "QueueProperties": """Returns all user-defined metadata for the specified queue. The data returned does not include the queue's list of messages. @@ -313,10 +392,11 @@ async def get_queue_properties(self, **kwargs: Any) -> "QueueProperties": :dedent: 16 :caption: Get the properties on the queue. 
""" - timeout = kwargs.pop('timeout', None) try: response = cast("QueueProperties", await (self._client.queue.get_properties( - timeout=timeout, cls=deserialize_queue_properties, **kwargs + timeout=timeout, + cls=deserialize_queue_properties, + **kwargs ))) except HttpResponseError as error: process_storage_error(error) @@ -326,13 +406,15 @@ async def get_queue_properties(self, **kwargs: Any) -> "QueueProperties": @distributed_trace_async async def set_queue_metadata( self, metadata: Optional[Dict[str, str]] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: """Sets user-defined metadata on the specified queue. Metadata is associated with the queue as name-value pairs. - :param Optional[Dict[str, Any]] metadata: + :param Optional[Dict[str, str]] metadata: A dict containing name-value pairs to associate with the queue as metadata. :keyword int timeout: @@ -353,18 +435,20 @@ async def set_queue_metadata( :dedent: 16 :caption: Set metadata on the queue. """ - timeout = kwargs.pop('timeout', None) headers = kwargs.pop("headers", {}) headers.update(add_metadata_headers(metadata)) try: return await self._client.queue.set_metadata( - timeout=timeout, headers=headers, cls=return_response_headers, **kwargs + timeout=timeout, + headers=headers, + cls=return_response_headers, + **kwargs ) except HttpResponseError as error: process_storage_error(error) @distributed_trace_async - async def get_queue_access_policy(self, **kwargs: Any) -> Dict[str, AccessPolicy]: + async def get_queue_access_policy(self, *, timeout: Optional[int] = None, **kwargs: Any) -> Dict[str, AccessPolicy]: """Returns details about any stored access policies specified on the queue that may be used with Shared Access Signatures. @@ -377,10 +461,11 @@ async def get_queue_access_policy(self, **kwargs: Any) -> Dict[str, AccessPolicy :return: A dictionary of access policies associated with the queue. :rtype: dict(str, ~azure.storage.queue.AccessPolicy) """ - timeout = kwargs.pop('timeout', None) try: _, identifiers = cast(Tuple[Dict, List], await self._client.queue.get_access_policy( - timeout=timeout, cls=return_headers_and_deserialized, **kwargs + timeout=timeout, + cls=return_headers_and_deserialized, + **kwargs )) except HttpResponseError as error: process_storage_error(error) @@ -389,6 +474,8 @@ async def get_queue_access_policy(self, **kwargs: Any) -> Dict[str, AccessPolicy @distributed_trace_async async def set_queue_access_policy( self, signed_identifiers: Dict[str, AccessPolicy], + *, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Sets stored access policies for the queue that may be used with Shared @@ -426,7 +513,6 @@ async def set_queue_access_policy( :dedent: 16 :caption: Set an access policy on the queue. """ - timeout = kwargs.pop('timeout', None) if len(signed_identifiers) > 15: raise ValueError( "Too many access policies provided. The server does not support setting " @@ -449,6 +535,7 @@ async def send_message( *, visibility_timeout: Optional[int] = None, time_to_live: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> "QueueMessage": """Adds a new message to the back of the message queue. @@ -487,7 +574,7 @@ async def send_message( #other-client--per-operation-configuration>`__. :return: A :class:`~azure.storage.queue.QueueMessage` object. - This object is also populated with the content although it is not + This object is also populated with the content, although it is not returned from the service. 
:rtype: ~azure.storage.queue.QueueMessage @@ -500,20 +587,21 @@ async def send_message( :dedent: 16 :caption: Send messages. """ - timeout = kwargs.pop('timeout', None) if self.key_encryption_key: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) try: self._message_encode_policy.configure( require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, resolver=self.key_resolver_function, - encryption_version=self.encryption_version) + encryption_version=self.encryption_version + ) except TypeError: warnings.warn( "TypeError when calling message_encode_policy.configure. \ @@ -524,7 +612,8 @@ async def send_message( self._message_encode_policy.configure( require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, - resolver=self.key_resolver_function) + resolver=self.key_resolver_function + ) encoded_content = self._message_encode_policy(content) new_message = GenQueueMessage(message_text=encoded_content) @@ -541,8 +630,8 @@ async def send_message( id=enqueued[0].message_id, inserted_on=enqueued[0].insertion_time, expires_on=enqueued[0].expiration_time, - pop_receipt = enqueued[0].pop_receipt, - next_visible_on = enqueued[0].time_next_visible + pop_receipt=enqueued[0].pop_receipt, + next_visible_on=enqueued[0].time_next_visible ) return queue_message except HttpResponseError as error: @@ -552,6 +641,7 @@ async def send_message( async def receive_message( self, *, visibility_timeout: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> Optional[QueueMessage]: """Removes one message from the front of the queue. @@ -591,13 +681,13 @@ async def receive_message( :dedent: 12 :caption: Receive one message from the queue. """ - timeout = kwargs.pop('timeout', None) if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) self._message_decode_policy.configure( require_encryption=self.require_encryption, @@ -612,8 +702,7 @@ async def receive_message( cls=self._message_decode_policy, **kwargs ) - wrapped_message = QueueMessage._from_generated( # pylint: disable=protected-access - message[0]) if message != [] else None + wrapped_message = QueueMessage._from_generated(message[0]) if message != [] else None # pylint: disable=protected-access return wrapped_message except HttpResponseError as error: process_storage_error(error) @@ -624,6 +713,7 @@ def receive_messages( messages_per_page: Optional[int] = None, visibility_timeout: Optional[int] = None, max_messages: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> AsyncItemPaged[QueueMessage]: """Removes one or more messages from the front of the queue. @@ -674,13 +764,13 @@ def receive_messages( :dedent: 16 :caption: Receive messages from the queue. 
""" - timeout = kwargs.pop('timeout', None) if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) self._message_decode_policy.configure( require_encryption=self.require_encryption, @@ -698,8 +788,12 @@ def receive_messages( if max_messages is not None and messages_per_page is not None: if max_messages < messages_per_page: raise ValueError("max_messages must be greater or equal to messages_per_page") - return AsyncItemPaged(command, results_per_page=messages_per_page, - page_iterator_class=MessagesPaged, max_messages=max_messages) + return AsyncItemPaged( + command, + results_per_page=messages_per_page, + page_iterator_class=MessagesPaged, + max_messages=max_messages + ) except HttpResponseError as error: process_storage_error(error) @@ -710,6 +804,7 @@ async def update_message( content: Optional[object] = None, *, visibility_timeout: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> QueueMessage: """Updates the visibility timeout of a message. You can also use this @@ -762,13 +857,13 @@ async def update_message( :dedent: 16 :caption: Update a message. """ - timeout = kwargs.pop('timeout', None) if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) if isinstance(message, QueueMessage): message_id = message.id @@ -827,8 +922,8 @@ async def update_message( inserted_on=inserted_on, dequeue_count=dequeue_count, expires_on=expires_on, - pop_receipt = response['popreceipt'], - next_visible_on = response['time_next_visible'] + pop_receipt=response['popreceipt'], + next_visible_on=response['time_next_visible'] ) return new_message except HttpResponseError as error: @@ -837,6 +932,8 @@ async def update_message( @distributed_trace_async async def peek_messages( self, max_messages: Optional[int] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> List[QueueMessage]: """Retrieves one or more messages from the front of the queue, but does @@ -881,13 +978,13 @@ async def peek_messages( if max_messages and not 1 <= max_messages <= 32: raise ValueError("Number of messages to peek should be between 1 and 32") - timeout = kwargs.pop('timeout', None) if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, - kwargs) + kwargs + ) self._message_decode_policy.configure( require_encryption=self.require_encryption, @@ -896,7 +993,10 @@ async def peek_messages( ) try: messages = await self._client.messages.peek( - number_of_messages=max_messages, timeout=timeout, cls=self._message_decode_policy, **kwargs + number_of_messages=max_messages, + timeout=timeout, + cls=self._message_decode_policy, + **kwargs ) wrapped_messages = [] for peeked in messages: @@ -906,7 +1006,7 @@ async def peek_messages( process_storage_error(error) @distributed_trace_async - async def clear_messages(self, **kwargs: Any) -> None: + async def clear_messages(self, *, timeout: Optional[int] = None, **kwargs: Any) -> None: """Deletes all messages from the specified queue. :keyword int timeout: @@ -925,7 +1025,6 @@ async def clear_messages(self, **kwargs: Any) -> None: :dedent: 16 :caption: Clears all messages. 
""" - timeout = kwargs.pop('timeout', None) try: await self._client.messages.clear(timeout=timeout, **kwargs) except HttpResponseError as error: @@ -935,6 +1034,8 @@ async def clear_messages(self, **kwargs: Any) -> None: async def delete_message( self, message: Union[str, QueueMessage], pop_receipt: Optional[str] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Deletes the specified message. @@ -971,8 +1072,6 @@ async def delete_message( :dedent: 16 :caption: Delete a message. """ - timeout = kwargs.pop('timeout', None) - receipt: Optional[str] if isinstance(message, QueueMessage): message_id = message.id diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py index 7999b47b3962..af5cd1a9dcda 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py @@ -3,7 +3,6 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- -# pylint: disable=docstring-keyword-should-match-keyword-only import functools from typing import ( @@ -96,15 +95,26 @@ class QueueServiceClient( # type: ignore [misc] def __init__( self, account_url: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, **kwargs: Any ) -> None: kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs) loop = kwargs.pop('loop', None) parsed_url, sas_token = _parse_url(account_url=account_url, credential=credential) self._query_str, credential = self._format_query_string(sas_token, credential) - super(QueueServiceClient, self).__init__(parsed_url, service='queue', credential=credential, **kwargs) + super(QueueServiceClient, self).__init__( + parsed_url, + service='queue', + credential=credential, + secondary_hostname=secondary_hostname, + audience=audience, + **kwargs + ) self._client = AzureQueueStorage(self.url, base_url=self.url, pipeline=self._pipeline, loop=loop) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._client._config.version = get_api_version(api_version) # type: ignore [assignment] self._loop = loop self._configure_encryption(kwargs) @@ -122,6 +132,10 @@ def _format_url(self, hostname: str) -> str: def from_connection_string( cls, conn_str: str, credential: Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"]] = None, # pylint: disable=line-too-long + *, + api_version: Optional[str] = None, + secondary_hostname: Optional[str] = None, + audience: Optional[str] = None, **kwargs: Any ) -> Self: """Create QueueServiceClient from a Connection String. @@ -139,6 +153,14 @@ def from_connection_string( should be the storage account key. :type credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]] + :keyword str api_version: + The Storage API version to use for requests. Default value is the most recent service version that is + compatible with the current SDK. 
Setting to an older version may result in reduced feature compatibility. + :keyword str secondary_hostname: + The hostname of the secondary endpoint. + :keyword str audience: The audience to use when requesting tokens for Azure Active Directory + authentication. Only has an effect when credential is of type TokenCredential. The value could be + https://storage.azure.com/ (default) or https://<account>.queue.core.windows.net. :returns: A Queue service client. :rtype: ~azure.storage.queue.aio.QueueServiceClient @@ -151,14 +173,18 @@ :dedent: 8 :caption: Creating the QueueServiceClient with a connection string. """ - account_url, secondary, credential = parse_connection_str( - conn_str, credential, 'queue') - if 'secondary_hostname' not in kwargs: - kwargs['secondary_hostname'] = secondary - return cls(account_url, credential=credential, **kwargs) + account_url, secondary, credential = parse_connection_str(conn_str, credential, 'queue') + return cls( + account_url, + credential=credential, + api_version=api_version, + secondary_hostname=secondary_hostname or secondary, + audience=audience, + **kwargs + ) @distributed_trace_async - async def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: + async def get_service_stats(self, *, timeout: Optional[int] = None, **kwargs: Any) -> Dict[str, Any]: """Retrieves statistics related to replication for the Queue service. It is only available when read-access geo-redundant replication is enabled for @@ -182,7 +208,6 @@ async def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: :return: The queue service stats. :rtype: Dict[str, Any] """ - timeout = kwargs.pop('timeout', None) try: stats = await self._client.service.get_statistics( timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs) @@ -191,7 +216,7 @@ async def get_service_stats(self, **kwargs: Any) -> Dict[str, Any]: process_storage_error(error) @distributed_trace_async - async def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]: + async def get_service_properties(self, *, timeout: Optional[int] = None, **kwargs: Any) -> Dict[str, Any]: """Gets the properties of a storage account's Queue service, including Azure Storage Analytics. @@ -210,7 +235,6 @@ async def get_service_properties(self, **kwargs: Any) -> Dict[str, Any]: :dedent: 12 :caption: Getting queue service properties. """ - timeout = kwargs.pop('timeout', None) try: service_props = await self._client.service.get_properties(timeout=timeout, **kwargs) return service_properties_deserialize(service_props) @@ -223,6 +247,8 @@ async def set_service_properties( hour_metrics: Optional["Metrics"] = None, minute_metrics: Optional["Metrics"] = None, cors: Optional[List[CorsRule]] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Sets the properties of a storage account's Queue service, including @@ -259,7 +285,6 @@ async def set_service_properties( :dedent: 12 :caption: Setting queue service properties. """ - timeout = kwargs.pop('timeout', None) props = StorageServiceProperties( logging=analytics_logging, hour_metrics=hour_metrics, @@ -275,6 +300,9 @@ def list_queues( self, name_starts_with: Optional[str] = None, include_metadata: Optional[bool] = False, + *, + results_per_page: Optional[int] = None, + timeout: Optional[int] = None, **kwargs: Any ) -> AsyncItemPaged: """Returns a generator to list the queues under the specified account. @@ -310,17 +338,18 @@ def list_queues( :dedent: 16 :caption: List queues in the service. 
""" - results_per_page = kwargs.pop('results_per_page', None) - timeout = kwargs.pop('timeout', None) include = ['metadata'] if include_metadata else None command = functools.partial( self._client.service.list_queues_segment, prefix=name_starts_with, include=include, timeout=timeout, - **kwargs) + **kwargs + ) return AsyncItemPaged( - command, prefix=name_starts_with, results_per_page=results_per_page, + command, + prefix=name_starts_with, + results_per_page=results_per_page, page_iterator_class=QueuePropertiesPaged ) @@ -328,6 +357,8 @@ def list_queues( async def create_queue( self, name: str, metadata: Optional[Dict[str, str]] = None, + *, + timeout: Optional[int] = None, **kwargs: Any ) -> QueueClient: """Creates a new queue under the specified account. @@ -354,16 +385,16 @@ async def create_queue( :dedent: 12 :caption: Create a queue in the service. """ - timeout = kwargs.pop('timeout', None) queue = self.get_queue_client(name) kwargs.setdefault('merge_span', True) - await queue.create_queue( - metadata=metadata, timeout=timeout, **kwargs) + await queue.create_queue(metadata=metadata, timeout=timeout, **kwargs) return queue @distributed_trace_async async def delete_queue( self, queue: Union["QueueProperties", str], + *, + timeout: Optional[int] = None, **kwargs: Any ) -> None: """Deletes the specified queue and any messages it contains. @@ -393,15 +424,11 @@ async def delete_queue( :dedent: 16 :caption: Delete a queue in the service. """ - timeout = kwargs.pop('timeout', None) queue_client = self.get_queue_client(queue) kwargs.setdefault('merge_span', True) await queue_client.delete_queue(timeout=timeout, **kwargs) - def get_queue_client( - self, queue: Union["QueueProperties", str], - **kwargs: Any - ) -> QueueClient: + def get_queue_client(self, queue: Union["QueueProperties", str], **kwargs: Any) -> QueueClient: """Get a client to interact with the specified queue. The queue need not already exist. 
@@ -428,8 +455,8 @@ def get_queue_client( queue_name = queue _pipeline = AsyncPipeline( - transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access - policies=self._pipeline._impl_policies # type: ignore # pylint: disable=protected-access + transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable=protected-access + policies=self._pipeline._impl_policies # type: ignore # pylint: disable=protected-access ) return QueueClient( diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_client.py b/sdk/storage/azure-storage-queue/tests/test_queue_client.py index 5c9f0d3f82e2..e237c6fb766f 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_client.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_client.py @@ -599,6 +599,21 @@ def test_closing_pipeline_client_simple(self, **kwargs): self.account_url(storage_account_name, "queue"), credential=storage_account_key, queue_name='queue') service.close() + @QueuePreparer() + @recorded_by_proxy + def test_get_and_set_queue_access_policy_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + self.setUp() + + # Arrange + service_client = QueueServiceClient(self.account_url(storage_account_name, "queue"), self.token_credential) + queue_client = service_client.get_queue_client(self.get_resource_name("pyqueuesync")) + queue_client.create_queue() + + # Act / Assert + queue_client.set_queue_access_policy(signed_identifiers={}) + acl = queue_client.get_queue_access_policy() + assert acl is not None # ------------------------------------------------------------------------------ if __name__ == '__main__': diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py index a46a62218107..074fc0712d07 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py @@ -562,6 +562,21 @@ async def test_closing_pipeline_client_simple(self, **kwargs): self.account_url(storage_account_name, "queue"), credential=storage_account_key, queue_name='queue') await service.close() + @QueuePreparer() + @recorded_by_proxy_async + async def test_get_and_set_queue_access_policy_oauth(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + self.setUp() + + # Arrange + service_client = QueueServiceClient(self.account_url(storage_account_name, "queue"), self.token_credential) + queue_client = service_client.get_queue_client(self.get_resource_name("pyqueueasync")) + await queue_client.create_queue() + + # Act / Assert + await queue_client.set_queue_access_policy(signed_identifiers={}) + acl = await queue_client.get_queue_access_policy() + assert acl is not None # ------------------------------------------------------------------------------ if __name__ == '__main__':
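The two new tests above exercise `set_queue_access_policy` and `get_queue_access_policy` end to end over OAuth. A minimal sketch of the same flow outside the test harness, assuming `azure-identity` is installed and the identity holds the required RBAC role on the account; the account URL and queue name are placeholders:

from azure.identity import DefaultAzureCredential
from azure.storage.queue import QueueServiceClient

# OAuth (token credential) authentication; placeholder account URL.
service = QueueServiceClient(
    "https://myaccount.queue.core.windows.net",
    credential=DefaultAzureCredential()
)
queue = service.get_queue_client("myqueue")
queue.create_queue()

# An empty mapping clears any stored access policies on the queue.
queue.set_queue_access_policy(signed_identifiers={})
print(queue.get_queue_access_policy())  # expected to be empty until policies are set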
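More broadly, this change promotes `timeout`, `api_version`, `secondary_hostname`, and `results_per_page` from `**kwargs` lookups to declared keyword-only parameters, and `get_api_version` now receives the version string directly rather than a kwargs dict. A usage sketch of the updated synchronous surface; the connection string, account key, and name prefix are placeholders:

from azure.storage.queue import QueueServiceClient

# Placeholder connection string.
conn_str = (
    "DefaultEndpointsProtocol=https;AccountName=myaccount;"
    "AccountKey=<key>;EndpointSuffix=core.windows.net"
)

# api_version is validated against _SUPPORTED_API_VERSIONS, whose newest
# entry after this change is 2025-07-05.
service = QueueServiceClient.from_connection_string(conn_str, api_version="2025-07-05")

# results_per_page and timeout are now keyword-only instead of being
# popped from **kwargs inside the method body.
for queue in service.list_queues(name_starts_with="py", results_per_page=5, timeout=10):
    print(queue.name)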
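On the asynchronous `QueueClient`, `message_encode_policy` and `message_decode_policy` are likewise now declared keyword-only parameters rather than `kwargs` lookups. A sketch assuming Base64 text encoding in both directions; the account URL, queue name, and SAS token are placeholders, and the queue is assumed to already exist:

import asyncio

from azure.storage.queue import TextBase64DecodePolicy, TextBase64EncodePolicy
from azure.storage.queue.aio import QueueClient

async def main():
    queue = QueueClient(
        "https://myaccount.queue.core.windows.net",
        queue_name="myqueue",
        credential="<sas-token>",  # placeholder SAS token
        message_encode_policy=TextBase64EncodePolicy(),
        message_decode_policy=TextBase64DecodePolicy(),
    )
    async with queue:
        # timeout is keyword-only on the message operations as well.
        await queue.send_message("hello", time_to_live=3600, timeout=10)
        message = await queue.receive_message(visibility_timeout=30)
        print(message.content if message else "queue is empty")

asyncio.run(main())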