diff --git a/sdk/storage/azure-storage-blob/CHANGELOG.md b/sdk/storage/azure-storage-blob/CHANGELOG.md index 9bead9a7c824..558142d2830c 100644 --- a/sdk/storage/azure-storage-blob/CHANGELOG.md +++ b/sdk/storage/azure-storage-blob/CHANGELOG.md @@ -1,8 +1,21 @@ # Release History -## 12.30.0b1 (Unreleased) +## 12.30.0b1 (2026-03-30) ### Features Added +- Added support for service version 2026-06-06. +- Added support for connection strings and `account_url`s to accept URLs with `-ipv6` and `-dualstack` suffixes +for `BlobServiceClient`, `ContainerClient`, and `BlobClient`. +- Added support for `create` permission in `BlobSasPermissions` for `stage_block`, +`stage_block_from_url`, and `commit_block_list`. +- Added support for a new `Smart` access tier to `StandardBlobTier` used in `BlobClient.set_standard_blob_tier`, +which is optimized to automatically determine the most cost-effective access tier with no performance impact. +When set, `BlobProperties.smart_access_tier` will reveal the service's current access +tier choice between `Hot`, `Cool`, and `Archive`. +- Added support for `is_directory` keyword in `generate_blob_sas` that generates directory-level SAS for blobs. + +### Other Changes +- Consolidated the behavior of `max_concurrency=None` by defaulting to the shared `DEFAULT_MAX_CONCURRENCY` constant. 
## 12.29.0b1 (2026-01-27) diff --git a/sdk/storage/azure-storage-blob/assets.json b/sdk/storage/azure-storage-blob/assets.json index d95af10042c5..f7f288f51754 100644 --- a/sdk/storage/azure-storage-blob/assets.json +++ b/sdk/storage/azure-storage-blob/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-blob", - "Tag": "python/storage/azure-storage-blob_bd8f6233a4" + "Tag": "python/storage/azure-storage-blob_a6eb270886" } diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py index 54abb33c80a1..6c5fdd76c1a5 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_blob_client.py @@ -1932,16 +1932,18 @@ def acquire_lease(self, lease_duration: int =-1, lease_id: Optional[str] = None, def set_standard_blob_tier(self, standard_blob_tier: Union[str, "StandardBlobTier"], **kwargs: Any) -> None: """This operation sets the tier on a block blob. - A block blob's tier determines Hot/Cool/Archive storage type. + A block blob's tier determines Hot/Cool/Archive/Smart storage type. This operation does not update the blob's ETag. :param standard_blob_tier: Indicates the tier to be set on the blob. Options include 'Hot', 'Cool', - 'Archive'. The hot tier is optimized for storing data that is accessed + 'Archive', 'Smart'. The hot tier is optimized for storing data that is accessed frequently. The cool storage tier is optimized for storing data that is infrequently accessed and stored for at least a month. The archive tier is optimized for storing data that is rarely accessed and stored - for at least six months with flexible latency requirements. + for at least six months with flexible latency requirements. 
The smart + tier is optimized automatically to determine the most cost-effective access + tier with no performance impact. :type standard_blob_tier: str or ~azure.storage.blob.StandardBlobTier :keyword ~azure.storage.blob.RehydratePriority rehydrate_priority: Indicates the priority with which to rehydrate an archived blob diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_deserialize.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_deserialize.py index 19ec4c07e338..f986fbde0e7e 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_deserialize.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_deserialize.py @@ -174,6 +174,7 @@ def get_blob_properties_from_generated_code(generated: "BlobItemInternal") -> Bl blob.deleted_time = generated.properties.deleted_time blob.remaining_retention_days = generated.properties.remaining_retention_days blob.blob_tier = generated.properties.access_tier # type: ignore [assignment] + blob.smart_access_tier = generated.properties.smart_access_tier blob.rehydrate_priority = generated.properties.rehydrate_priority blob.blob_tier_inferred = generated.properties.access_tier_inferred blob.archive_status = generated.properties.archive_status diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py index 58ac1c5b1b2b..a2f50ebc91ec 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_download.py @@ -873,7 +873,7 @@ def content_as_bytes(self, max_concurrency=None): This method is deprecated, use func:`readall` instead. - :param int max_concurrency: + :param Optional[int] max_concurrency: The number of parallel connections with which to download. :return: The contents of the file as bytes. :rtype: bytes @@ -896,7 +896,7 @@ def content_as_text(self, max_concurrency=None, encoding="UTF-8"): This method is deprecated, use func:`readall` instead. 
- :param int max_concurrency: + :param Optional[int] max_concurrency: The number of parallel connections with which to download. :param str encoding: Test encoding to decode the downloaded bytes. Default is UTF-8. @@ -924,7 +924,7 @@ def download_to_stream(self, stream, max_concurrency=None): The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. - :param int max_concurrency: + :param Optional[int] max_concurrency: The number of parallel connections with which to download. :return: The properties of the downloaded blob. :rtype: Any diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py index 5645f6b8cdb3..bfa47cb66d34 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_blob_operations.py @@ -582,6 +582,9 @@ async def get_properties( response_headers["x-ms-access-tier-change-time"] = self._deserialize( "rfc-1123", response.headers.get("x-ms-access-tier-change-time") ) + response_headers["x-ms-smart-access-tier"] = self._deserialize( + "str", response.headers.get("x-ms-smart-access-tier") + ) response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) response_headers["x-ms-is-current-version"] = self._deserialize( "bool", response.headers.get("x-ms-is-current-version") @@ -2177,8 +2180,8 @@ async def start_copy_from_url( # pylint: disable=too-many-locals information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". 
Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived blob. Known values are: "High" and "Standard". Default value is None. @@ -2356,8 +2359,8 @@ async def copy_from_url( # pylint: disable=too-many-locals information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -2624,8 +2627,8 @@ async def set_tier( update the blob's ETag. :param tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6", "P10", - "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and "Cold". - Required. + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Cold", and + "Smart". Required. :type tier: str or ~azure.storage.blob.models.AccessTierRequired :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. 
For more information on working with blob snapshots, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py index 6356da0264ec..cd5bfa6bff2a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/aio/operations/_block_blob_operations.py @@ -110,8 +110,8 @@ async def upload( # pylint: disable=too-many-locals information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -343,8 +343,8 @@ async def put_blob_from_url( # pylint: disable=too-many-locals information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. 
:type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -930,8 +930,8 @@ async def commit_block_list( # pylint: disable=too-many-locals information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. 
Default diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py index 471d5924df20..8938f0fd9e97 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_azure_blob_storage_enums.py @@ -29,6 +29,7 @@ class AccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): ARCHIVE = "Archive" PREMIUM = "Premium" COLD = "Cold" + SMART = "Smart" class AccessTierOptional(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -49,6 +50,7 @@ class AccessTierOptional(str, Enum, metaclass=CaseInsensitiveEnumMeta): COOL = "Cool" ARCHIVE = "Archive" COLD = "Cold" + SMART = "Smart" class AccessTierRequired(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -69,6 +71,7 @@ class AccessTierRequired(str, Enum, metaclass=CaseInsensitiveEnumMeta): COOL = "Cool" ARCHIVE = "Archive" COLD = "Cold" + SMART = "Smart" class AccountKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -87,6 +90,7 @@ class ArchiveStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): REHYDRATE_PENDING_TO_HOT = "rehydrate-pending-to-hot" REHYDRATE_PENDING_TO_COOL = "rehydrate-pending-to-cool" REHYDRATE_PENDING_TO_COLD = "rehydrate-pending-to-cold" + REHYDRATE_PENDING_TO_SMART = "rehydrate-pending-to-smart" class BlobCopySourceTags(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py index e3cb9c5b99eb..3534891fba4e 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models_py3.py @@ -606,13 +606,16 @@ class BlobPropertiesInternal(_serialization.Model): :ivar 
remaining_retention_days: :vartype remaining_retention_days: int :ivar access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", - "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", and "Cold". + "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", "Cold", and "Smart". :vartype access_tier: str or ~azure.storage.blob.models.AccessTier :ivar access_tier_inferred: :vartype access_tier_inferred: bool :ivar archive_status: Known values are: "rehydrate-pending-to-hot", - "rehydrate-pending-to-cool", and "rehydrate-pending-to-cold". + "rehydrate-pending-to-cool", "rehydrate-pending-to-cold", and "rehydrate-pending-to-smart". :vartype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :ivar smart_access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", + "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", "Cold", and "Smart". + :vartype smart_access_tier: str or ~azure.storage.blob.models.AccessTier :ivar customer_provided_key_sha256: :vartype customer_provided_key_sha256: str :ivar encryption_scope: The name of the encryption scope under which the blob is encrypted. 
@@ -674,6 +677,7 @@ class BlobPropertiesInternal(_serialization.Model): "access_tier": {"key": "AccessTier", "type": "str"}, "access_tier_inferred": {"key": "AccessTierInferred", "type": "bool"}, "archive_status": {"key": "ArchiveStatus", "type": "str"}, + "smart_access_tier": {"key": "SmartAccessTier", "type": "str"}, "customer_provided_key_sha256": {"key": "CustomerProvidedKeySha256", "type": "str"}, "encryption_scope": {"key": "EncryptionScope", "type": "str"}, "access_tier_change_time": {"key": "AccessTierChangeTime", "type": "rfc-1123"}, @@ -720,6 +724,7 @@ def __init__( # pylint: disable=too-many-locals access_tier: Optional[Union[str, "_models.AccessTier"]] = None, access_tier_inferred: Optional[bool] = None, archive_status: Optional[Union[str, "_models.ArchiveStatus"]] = None, + smart_access_tier: Optional[Union[str, "_models.AccessTier"]] = None, customer_provided_key_sha256: Optional[str] = None, encryption_scope: Optional[str] = None, access_tier_change_time: Optional[datetime.datetime] = None, @@ -788,13 +793,16 @@ def __init__( # pylint: disable=too-many-locals :keyword remaining_retention_days: :paramtype remaining_retention_days: int :keyword access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", - "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", and "Cold". + "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", "Cold", and "Smart". :paramtype access_tier: str or ~azure.storage.blob.models.AccessTier :keyword access_tier_inferred: :paramtype access_tier_inferred: bool :keyword archive_status: Known values are: "rehydrate-pending-to-hot", - "rehydrate-pending-to-cool", and "rehydrate-pending-to-cold". + "rehydrate-pending-to-cool", "rehydrate-pending-to-cold", and "rehydrate-pending-to-smart". 
:paramtype archive_status: str or ~azure.storage.blob.models.ArchiveStatus + :keyword smart_access_tier: Known values are: "P4", "P6", "P10", "P15", "P20", "P30", "P40", + "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Premium", "Cold", and "Smart". + :paramtype smart_access_tier: str or ~azure.storage.blob.models.AccessTier :keyword customer_provided_key_sha256: :paramtype customer_provided_key_sha256: str :keyword encryption_scope: The name of the encryption scope under which the blob is encrypted. @@ -851,6 +859,7 @@ def __init__( # pylint: disable=too-many-locals self.access_tier = access_tier self.access_tier_inferred = access_tier_inferred self.archive_status = archive_status + self.smart_access_tier = smart_access_tier self.customer_provided_key_sha256 = customer_provided_key_sha256 self.encryption_scope = encryption_scope self.access_tier_change_time = access_tier_change_time diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py index cb8e18ed1e0d..b2db8100471a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_blob_operations.py @@ -2052,6 +2052,9 @@ def get_properties( # pylint: disable=inconsistent-return-statements response_headers["x-ms-access-tier-change-time"] = self._deserialize( "rfc-1123", response.headers.get("x-ms-access-tier-change-time") ) + response_headers["x-ms-smart-access-tier"] = self._deserialize( + "str", response.headers.get("x-ms-smart-access-tier") + ) response_headers["x-ms-version-id"] = self._deserialize("str", response.headers.get("x-ms-version-id")) response_headers["x-ms-is-current-version"] = self._deserialize( "bool", response.headers.get("x-ms-is-current-version") @@ -3647,8 +3650,8 @@ def start_copy_from_url( # pylint: 
disable=inconsistent-return-statements,too-m information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param rehydrate_priority: Optional: Indicates the priority with which to rehydrate an archived blob. Known values are: "High" and "Standard". Default value is None. @@ -3826,8 +3829,8 @@ def copy_from_url( # pylint: disable=inconsistent-return-statements,too-many-lo information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -4094,8 +4097,8 @@ def set_tier( # pylint: disable=inconsistent-return-statements update the blob's ETag. :param tier: Indicates the tier to be set on the blob. Known values are: "P4", "P6", "P10", - "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and "Cold". - Required. + "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Cold", and + "Smart". Required. 
:type tier: str or ~azure.storage.blob.models.AccessTierRequired :param snapshot: The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py index f4a3ca253280..a80c4bb9852b 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_generated/operations/_block_blob_operations.py @@ -769,8 +769,8 @@ def upload( # pylint: disable=inconsistent-return-statements,too-many-locals information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -1002,8 +1002,8 @@ def put_blob_from_url( # pylint: disable=inconsistent-return-statements,too-man information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". 
Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -1589,8 +1589,8 @@ def commit_block_list( # pylint: disable=inconsistent-return-statements,too-man information. Default value is None. :type metadata: dict[str, str] :param tier: Optional. Indicates the tier to be set on the blob. Known values are: "P4", "P6", - "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", and - "Cold". Default value is None. + "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", + "Cold", and "Smart". Default value is None. :type tier: str or ~azure.storage.blob.models.AccessTierOptional :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py index b849e8948808..e66cf49ba25a 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_models.py @@ -89,6 +89,7 @@ class StandardBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): COOL = 'Cool' #: Cool COLD = 'Cold' #: Cold HOT = 'Hot' #: Hot + SMART = 'Smart' #: Smart class PremiumPageBlobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -1392,7 +1393,11 @@ class BlobProperties(DictMixin): is optimized for storing data that is infrequently accessed and stored for at least a month. The archive tier is optimized for storing data that is rarely accessed and stored for at least six months - with flexible latency requirements.""" + with flexible latency requirements. 
The smart tier is optimized + automatically to determine the most cost-effective access tier + with no performance impact.""" + smart_access_tier: Optional[str] + """Smart access tier of blob.""" rehydrate_priority: Optional[str] """Indicates the priority with which to rehydrate an archived blob""" blob_tier_change_time: Optional["datetime"] @@ -1459,6 +1464,7 @@ def __init__(self, **kwargs: Any) -> None: self.content_settings = ContentSettings(**kwargs) self.lease = LeaseProperties(**kwargs) self.blob_tier = kwargs.get('x-ms-access-tier') + self.smart_access_tier = kwargs.get('x-ms-smart-access-tier') self.rehydrate_priority = kwargs.get('x-ms-rehydrate-priority') self.blob_tier_change_time = kwargs.get('x-ms-access-tier-change-time') self.blob_tier_inferred = kwargs.get('x-ms-access-tier-inferred') diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client.py index 5441488d86a9..8fd641acd2c2 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared/base_client.py @@ -73,6 +73,38 @@ "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, } +_SECONDARY_SUFFIX = "-secondary" +_KNOWN_FEATURE_SUFFIXES = {"-ipv6", "-dualstack"} + + +def _construct_endpoints(netloc: str, account_part: str) -> Tuple[str, str, str]: + """ + Construct primary and secondary hostnames from a storage account URL's netloc. + + :param str netloc: The network location in a URL. + :param str account_part: The account part after parsing the URL. + :return: The account name, primary hostname, and secondary hostname. 
+ :rtype: Tuple[str, str, str] + """ + domain_suffix = netloc[len(account_part):] + secondary_idx = account_part.find(_SECONDARY_SUFFIX) + + # Case where customer provides secondary URL + if secondary_idx >= 0: + account_name = account_part[:secondary_idx] + primary_hostname = secondary_hostname = f"{account_part}{domain_suffix}" + else: + feature_suffix = "" + account_name = account_part + for suffix in _KNOWN_FEATURE_SUFFIXES: + if account_name.endswith(suffix): + feature_suffix = suffix + account_name = account_name[: -len(suffix)] + break + primary_hostname = f"{account_part}{domain_suffix}" + secondary_hostname = f"{account_name}{_SECONDARY_SUFFIX}{feature_suffix}{domain_suffix}" + + return account_name, primary_hostname, secondary_hostname class StorageAccountHostsMixin(object): @@ -115,21 +147,26 @@ def __init__( self._is_localhost = True self.account_name = parsed_url.path.strip("/") + secondary_hostname = "" + if len(account) > 1: + self.account_name, primary_hostname, secondary_hostname = _construct_endpoints( + parsed_url.netloc, account[0] + ) + else: + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/") + self.credential = _format_shared_key_credential(self.account_name, credential) if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"): raise ValueError("Token credential is only supported with HTTPS.") - secondary_hostname = "" if hasattr(self.credential, "account_name"): + if not self.account_name: + secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" self.account_name = self.credential.account_name - secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" if not self._hosts: - if len(account) > 1: - secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary") if kwargs.get("secondary_hostname"): secondary_hostname = kwargs["secondary_hostname"] - primary_hostname = (parsed_url.netloc + 
parsed_url.path).rstrip("/") self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname} self._sdk_moniker = f"storage-{service}/{VERSION}" diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py index 5298d40eaf34..866b6ea51ef2 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/_shared_access_signature.py @@ -26,19 +26,19 @@ class BlobQueryStringConstants(object): class BlobSharedAccessSignature(SharedAccessSignature): - ''' + """ Provides a factory for creating blob and container access signature tokens with a common account name and account key. Users can either use the factory or can construct the appropriate service and use the - generate_*_shared_access_signature method directly. - ''' + generate_*_sas method directly. + """ def __init__( self, account_name: str, account_key: Optional[str] = None, user_delegation_key: Optional[UserDelegationKey] = None ) -> None: - ''' + """ :param str account_name: The storage account name used to generate the shared access signatures. :param Optional[str] account_key: @@ -47,7 +47,7 @@ def __init__( Instead of an account key, the user could pass in a user delegation key. A user delegation key can be obtained from the service by authenticating with an AAD identity; this can be accomplished by calling get_user_delegation_key on any Blob service object. 
- ''' + """ super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION) self.user_delegation_key = user_delegation_key @@ -70,10 +70,11 @@ def generate_blob( user_delegation_oid: Optional[str] = None, request_headers: Optional[Dict[str, str]] = None, request_query_params: Optional[Dict[str, str]] = None, + is_directory: Optional[bool] = None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: - ''' + """ Generates a shared access signature for the blob or one of its snapshots. Use the returned signature with the sas_token parameter of any BlobService. @@ -149,13 +150,17 @@ def generate_blob( :param Dict[str, str] request_query_params: Specifies a set of query parameters and their corresponding values that must be present in the request when using this SAS. + :param Optional[bool] is_directory: + Specifies whether the `blob_name` is a virtual directory. If set, the `blob_name` is treated + to be a virtual directory name for a Directory SAS. When set, do not prefix or suffix the `blob_name` + with `/`. If not set, the `blob_name` is assumed to be a blob name for a Blob SAS. :param sts_hook: For debugging purposes only. If provided, the hook is called with the string to sign that was used to generate the SAS. :type sts_hook: Optional[Callable[[str], None]] :return: A Shared Access Signature (sas) token. 
:rtype: str - ''' + """ resource_path = container_name + '/' + blob_name sas = _BlobSharedAccessHelper() @@ -165,7 +170,7 @@ def generate_blob( resource = 'bs' if snapshot else 'b' resource = 'bv' if version_id else resource - resource = 'd' if kwargs.pop("is_directory", None) else resource + resource = 'd' if is_directory else resource sas.add_resource(resource) sas.add_timestamp(snapshot or version_id) @@ -173,6 +178,10 @@ def generate_blob( content_encoding, content_language, content_type) sas.add_encryption_scope(**kwargs) + + if is_directory: + sas.add_directory_depth(blob_name, kwargs.pop('sdd', None)) + sas.add_info_for_hns_account(**kwargs) sas.add_resource_signature( self.account_name, @@ -207,7 +216,7 @@ def generate_container( sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: - ''' + """ Generates a shared access signature for the container. Use the returned signature with the sas_token parameter of any BlobService. @@ -279,7 +288,7 @@ def generate_container( :type sts_hook: Optional[Callable[[str], None]] :return: A Shared Access Signature (sas) token. 
:rtype: str - ''' + """ sas = _BlobSharedAccessHelper() sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version) sas.add_id(policy_id) @@ -310,8 +319,17 @@ class _BlobSharedAccessHelper(_SharedAccessHelper): def add_timestamp(self, timestamp): self._add_query(BlobQueryStringConstants.SIGNED_TIMESTAMP, timestamp) + def add_directory_depth(self, blob_name, sdd): + # sdd may be provided from Datalake + # If not provided, it will be manually computed from blob_name + if sdd is None: + if blob_name in ["", "/"]: + sdd = 0 + else: + sdd = len(blob_name.strip("/").split("/")) + self._add_query(QueryStringConstants.SIGNED_DIRECTORY_DEPTH, str(sdd)) + def add_info_for_hns_account(self, **kwargs): - self._add_query(QueryStringConstants.SIGNED_DIRECTORY_DEPTH, kwargs.pop('sdd', None)) self._add_query(QueryStringConstants.SIGNED_AUTHORIZED_OID, kwargs.pop('preauthorized_agent_object_id', None)) self._add_query(QueryStringConstants.SIGNED_UNAUTHORIZED_OID, kwargs.pop('agent_object_id', None)) self._add_query(QueryStringConstants.SIGNED_CORRELATION_ID, kwargs.pop('correlation_id', None)) @@ -650,6 +668,7 @@ def generate_blob_sas( user_delegation_oid: Optional[str] = None, request_headers: Optional[Dict[str, str]] = None, request_query_params: Optional[Dict[str, str]] = None, + is_directory: Optional[bool] = None, sts_hook: Optional[Callable[[str], None]] = None, **kwargs: Any ) -> str: @@ -746,6 +765,10 @@ def generate_blob_sas( :keyword Dict[str, str] request_query_params: If specified, both the correct query parameter(s) and corresponding values must be present, or the request will fail. + :keyword Optional[bool] is_directory: + Specifies whether the `blob_name` is a virtual directory. If set, the `blob_name` is treated + to be a virtual directory name for a Directory SAS. When set, do not prefix or suffix the `blob_name` + with `/`. If not set, the `blob_name` is assumed to be a blob name for a Blob SAS. :keyword sts_hook: For debugging purposes only. 
If provided, the hook is called with the string to sign that was used to generate the SAS. @@ -782,6 +805,7 @@ def generate_blob_sas( user_delegation_oid=user_delegation_oid, request_headers=request_headers, request_query_params=request_query_params, + is_directory=is_directory, sts_hook=sts_hook, **kwargs ) diff --git a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py index 842b6dbc4320..30cbb0c68fbf 100644 --- a/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py +++ b/sdk/storage/azure-storage-blob/azure/storage/blob/aio/_download_async.py @@ -816,7 +816,7 @@ async def content_as_bytes(self, max_concurrency=None): This method is deprecated, use func:`readall` instead. - :param int max_concurrency: + :param Optional[int] max_concurrency: The number of parallel connections with which to download. :return: The contents of the file as bytes. :rtype: bytes @@ -839,7 +839,7 @@ async def content_as_text(self, max_concurrency=None, encoding="UTF-8"): This method is deprecated, use func:`readall` instead. - :param int max_concurrency: + :param Optional[int] max_concurrency: The number of parallel connections with which to download. :param str encoding: Test encoding to decode the downloaded bytes. Default is UTF-8. @@ -867,7 +867,7 @@ async def download_to_stream(self, stream, max_concurrency=None): The stream to download to. This can be an open file-handle, or any writable stream. The stream must be seekable if the download uses more than one parallel connection. - :param int max_concurrency: + :param Optional[int] max_concurrency: The number of parallel connections with which to download. :return: The properties of the downloaded blob. 
:rtype: Any diff --git a/sdk/storage/azure-storage-blob/swagger/README.md b/sdk/storage/azure-storage-blob/swagger/README.md index a01f9a614515..dd8889803d24 100644 --- a/sdk/storage/azure-storage-blob/swagger/README.md +++ b/sdk/storage/azure-storage-blob/swagger/README.md @@ -16,7 +16,7 @@ autorest --v3 --python ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2026-04-06/blob.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2026-06-06/blob.json output-folder: ../azure/storage/blob/_generated namespace: azure.storage.blob no-namespace-folders: true diff --git a/sdk/storage/azure-storage-blob/tests/conftest.py b/sdk/storage/azure-storage-blob/tests/conftest.py index 5925851aebb9..28fec90b76af 100644 --- a/sdk/storage/azure-storage-blob/tests/conftest.py +++ b/sdk/storage/azure-storage-blob/tests/conftest.py @@ -31,3 +31,7 @@ def add_sanitizers(test_proxy): add_general_regex_sanitizer(regex=r'"EncryptionLibrary": "Python .*?"', value='"EncryptionLibrary": "Python x.x.x"') add_uri_regex_sanitizer(regex=r"\.preprod\.", value=".") + add_uri_regex_sanitizer( + regex=r"(?<=[?&]sktid=)[^&#]+", + value="00000000-0000-0000-0000-000000000000", + ) diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_client.py b/sdk/storage/azure-storage-blob/tests/test_blob_client.py index 5ad959336629..58bf7eaaf217 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_client.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_client.py @@ -15,6 +15,7 @@ BlobServiceClient, ContainerClient, generate_account_sas, + LocationMode, ResourceTypes, VERSION, ) @@ -37,13 +38,22 @@ class TestStorageClient(StorageRecordedTestCase): # --Helpers----------------------------------------------------------------- - def validate_standard_account_endpoints(self, service, url_type, name, 
storage_account_key): + def validate_standard_account_endpoints(self, service, url_type, account_name, storage_account_key): assert service is not None - assert service.account_name == name - assert service.credential.account_name == name + assert service.account_name == account_name + assert service.credential.account_name == account_name assert service.credential.account_key == storage_account_key.secret - assert '{}.{}.core.windows.net'.format(name, url_type) in service.url - assert '{}-secondary.{}.core.windows.net'.format(name, url_type) in service.secondary_endpoint + assert f"{account_name}.{url_type}.core.windows.net" in service.url + assert f"{account_name}-secondary.{url_type}.core.windows.net" in service.secondary_endpoint + + def validate_ipv6_account_endpoints(self, service, account_name, account_key, primary_endpoint, secondary_endpoint): + assert service is not None + assert service.scheme == "https" + assert service.account_name == account_name + assert service.credential.account_name == account_name + assert service.credential.account_key == account_key + assert service._hosts[LocationMode.PRIMARY] == primary_endpoint + assert service._hosts[LocationMode.SECONDARY] == secondary_endpoint def generate_fake_sas_token(self): fake_key = "a" * 30 + "b" * 30 @@ -299,6 +309,102 @@ def test_create_service_with_socket_timeout(self, **kwargs): assert service._client._client._pipeline._transport.connection_config.timeout == 22 assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] + @pytest.mark.parametrize( + "account_url, expected_primary, expected_secondary", [ + ( + "https://myaccount.blob.core.windows.net/", + "myaccount.blob.core.windows.net", + "myaccount-secondary.blob.core.windows.net", + ), + ( + "https://myaccount-secondary.blob.core.windows.net/", + "myaccount-secondary.blob.core.windows.net", + "myaccount-secondary.blob.core.windows.net", + ), + ( + 
"https://myaccount-dualstack.blob.core.windows.net/", + "myaccount-dualstack.blob.core.windows.net", + "myaccount-secondary-dualstack.blob.core.windows.net", + ), + ( + "https://myaccount-ipv6.blob.core.windows.net/", + "myaccount-ipv6.blob.core.windows.net", + "myaccount-secondary-ipv6.blob.core.windows.net", + ), + ( + "https://myaccount-secondary-dualstack.blob.core.windows.net/", + "myaccount-secondary-dualstack.blob.core.windows.net", + "myaccount-secondary-dualstack.blob.core.windows.net", + ), + ( + "https://myaccount-secondary-ipv6.blob.core.windows.net/", + "myaccount-secondary-ipv6.blob.core.windows.net", + "myaccount-secondary-ipv6.blob.core.windows.net", + ), + ] + ) + @BlobPreparer() + def test_create_service_ipv6(self, account_url, expected_primary, expected_secondary, **kwargs): + storage_account_name = "myaccount" + storage_account_key = kwargs.pop("storage_account_key") + + container_name, blob_name = "foo", "bar" + + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=storage_account_key.secret, + container_name=container_name, + blob_name=blob_name + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + conn_str = ( + "DefaultEndpointsProtocol=https;" + f"AccountName={storage_account_name};" + f"AccountKey={storage_account_key.secret};" + f"BlobEndpoint={account_url};" + ) + service = service_type.from_connection_string( + conn_str, + credential=storage_account_key.secret, + container_name=container_name, + blob_name=blob_name + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + service = BlobClient.from_blob_url( + blob_url=f"{account_url}/{container_name}/{blob_name}-secondary", + credential=storage_account_key.secret + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, 
storage_account_key.secret, expected_primary, expected_secondary + ) + + @BlobPreparer() + def test_create_service_ipv6_custom_domain(self): + token_credential = self.get_credential(BlobServiceClient) + + hostname= "github.com" + account_url = f"https://{hostname}" + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=token_credential, + container_name="foo", + blob_name="bar" + ) + assert service is not None + assert service.scheme == "https" + assert service.account_name is None + assert service.credential is not None + assert service._hosts[LocationMode.PRIMARY] == hostname + assert service._hosts[LocationMode.SECONDARY] == "" + # --Connection String Test Cases -------------------------------------------- @BlobPreparer() diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_client_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_client_async.py index 3873f8d00b27..61766a33895d 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_client_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_client_async.py @@ -12,6 +12,7 @@ from azure.storage.blob import ( AccountSasPermissions, generate_account_sas, + LocationMode, ResourceTypes, VERSION, ) @@ -47,6 +48,15 @@ def validate_standard_account_endpoints(self, service, url_type, account_name, a assert '{}.{}.core.windows.net'.format(account_name, url_type) in service.url assert '{}-secondary.{}.core.windows.net'.format(account_name, url_type) in service.secondary_endpoint + def validate_ipv6_account_endpoints(self, service, account_name, account_key, primary_endpoint, secondary_endpoint): + assert service is not None + assert service.scheme == "https" + assert service.account_name == account_name + assert service.credential.account_name == account_name + assert service.credential.account_key == account_key + assert service._hosts[LocationMode.PRIMARY] == primary_endpoint + assert service._hosts[LocationMode.SECONDARY] == secondary_endpoint + def 
generate_fake_sas_token(self): fake_key = "a" * 30 + "b" * 30 @@ -287,6 +297,102 @@ def test_create_service_with_socket_timeout(self, **kwargs): assert service._client._client._pipeline._transport.connection_config.timeout == 22 assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] + @pytest.mark.parametrize( + "account_url, expected_primary, expected_secondary", [ + ( + "https://myaccount.blob.core.windows.net/", + "myaccount.blob.core.windows.net", + "myaccount-secondary.blob.core.windows.net", + ), + ( + "https://myaccount-secondary.blob.core.windows.net/", + "myaccount-secondary.blob.core.windows.net", + "myaccount-secondary.blob.core.windows.net", + ), + ( + "https://myaccount-dualstack.blob.core.windows.net/", + "myaccount-dualstack.blob.core.windows.net", + "myaccount-secondary-dualstack.blob.core.windows.net", + ), + ( + "https://myaccount-ipv6.blob.core.windows.net/", + "myaccount-ipv6.blob.core.windows.net", + "myaccount-secondary-ipv6.blob.core.windows.net", + ), + ( + "https://myaccount-secondary-dualstack.blob.core.windows.net/", + "myaccount-secondary-dualstack.blob.core.windows.net", + "myaccount-secondary-dualstack.blob.core.windows.net", + ), + ( + "https://myaccount-secondary-ipv6.blob.core.windows.net/", + "myaccount-secondary-ipv6.blob.core.windows.net", + "myaccount-secondary-ipv6.blob.core.windows.net", + ), + ] + ) + @BlobPreparer() + def test_create_service_ipv6(self, account_url, expected_primary, expected_secondary, **kwargs): + storage_account_name = "myaccount" + storage_account_key = kwargs.pop("storage_account_key") + + container_name, blob_name = "foo", "bar" + + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=storage_account_key.secret, + container_name=container_name, + blob_name=blob_name + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + 
conn_str = ( + "DefaultEndpointsProtocol=https;" + f"AccountName={storage_account_name};" + f"AccountKey={storage_account_key.secret};" + f"BlobEndpoint={account_url};" + ) + service = service_type.from_connection_string( + conn_str, + credential=storage_account_key.secret, + container_name=container_name, + blob_name=blob_name + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + service = BlobClient.from_blob_url( + blob_url=f"{account_url}/{container_name}/{blob_name}-secondary", + credential=storage_account_key.secret + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + @BlobPreparer() + def test_create_service_ipv6_custom_domain(self): + token_credential = self.get_credential(BlobServiceClient, is_async=True) + + hostname = "github.com" + account_url = f"https://{hostname}" + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=token_credential, + container_name="foo", + blob_name="bar" + ) + assert service is not None + assert service.scheme == "https" + assert service.account_name is None + assert service.credential is not None + assert service._hosts[LocationMode.PRIMARY] == hostname + assert service._hosts[LocationMode.SECONDARY] == "" + # --Connection String Test Cases -------------------------------------------- @BlobPreparer() def test_create_service_with_connection_string_key(self, **kwargs): diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_storage_account.py b/sdk/storage/azure-storage-blob/tests/test_blob_storage_account.py index 1f93d40bcdf9..9c2b87345c53 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_storage_account.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_storage_account.py @@ -50,7 +50,7 @@ def test_standard_blob_tier_set_tier_api(self, **kwargs): bsc = 
BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key.secret) self._setup(bsc) - tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot] + tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot, StandardBlobTier.SMART] for tier in tiers: blob_name = self.get_resource_name(tier.value) diff --git a/sdk/storage/azure-storage-blob/tests/test_blob_storage_account_async.py b/sdk/storage/azure-storage-blob/tests/test_blob_storage_account_async.py index bb5638227afd..2ca5484aaac7 100644 --- a/sdk/storage/azure-storage-blob/tests/test_blob_storage_account_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_blob_storage_account_async.py @@ -51,7 +51,7 @@ async def test_standard_blob_tier_set_tier_api(self, **kwargs): bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key.secret) await self._setup(bsc) - tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot] + tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot, StandardBlobTier.SMART] for tier in tiers: blob_name = self.get_resource_name(tier.value) diff --git a/sdk/storage/azure-storage-blob/tests/test_block_blob.py b/sdk/storage/azure-storage-blob/tests/test_block_blob.py index c01b3afd88ca..8110ac6f8acf 100644 --- a/sdk/storage/azure-storage-blob/tests/test_block_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_block_blob.py @@ -2001,4 +2001,112 @@ def test_put_block_blob_with_none_concurrency(self, **kwargs): content = blob.download_blob().readall() assert data == content + @BlobPreparer() + @recorded_by_proxy + def test_stage_block_from_url_uds(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + variables = kwargs.pop("variables", {}) + + token_credential = self.get_credential(BlobServiceClient) + service = BlobServiceClient(self.account_url(storage_account_name, "blob"), 
credential=token_credential) + container_name, blob_name = self.get_resource_name('oauthcontainer'), self.get_resource_name('oauthblob') + container = service.create_container(container_name) + blob = container.get_blob_client(blob_name) + + start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) + expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) + dst_sas = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + blob.blob_name, + user_delegation_key=user_delegation_key, + permission=BlobSasPermissions(create=True), + expiry=expiry, + ) + dst_blob = BlobClient.from_blob_url(f"{blob.url}?{dst_sas}") + + src_blob_name = self.get_resource_name('oauthblob2') + src_sas = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + src_blob_name, + user_delegation_key=user_delegation_key, + permission=BlobSasPermissions(read=True, add=True, create=True, write=True, delete=True), + expiry=expiry, + ) + data = b"abc123" + src_blob = BlobClient.from_blob_url(f"{container.url}/{src_blob_name}?{src_sas}") + src_blob.upload_blob(data) + + dst_blob.stage_block_from_url('1', src_blob.url) + + return variables + + @BlobPreparer() + @recorded_by_proxy + def test_commit_block_list_uds(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + variables = kwargs.pop("variables", {}) + + token_credential = self.get_credential(BlobServiceClient) + service = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=token_credential) + container_name, blob_name = self.get_resource_name('oauthcontainer'), self.get_resource_name('oauthblob') + container = service.create_container(container_name) + blob = container.get_blob_client(blob_name) + + start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) + expiry = 
self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) + sas = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + blob.blob_name, + user_delegation_key=user_delegation_key, + permission=BlobSasPermissions(create=True), + expiry=expiry, + ) + + identity_blob = BlobClient.from_blob_url(f"{blob.url}?{sas}") + identity_blob.stage_block('1', b'AAA') + identity_blob.stage_block('2', b'BBB') + identity_blob.stage_block('3', b'CCC') + block_list = [BlobBlock(block_id='3'), BlobBlock(block_id='2'), BlobBlock(block_id='1')] + identity_blob.commit_block_list(block_list=block_list) + + return variables + + @BlobPreparer() + @recorded_by_proxy + def test_smart_access_tier(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + self._setup(storage_account_name, storage_account_key) + + data = b"abc123" * 4 + blob1 = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) + blob1.upload_blob(data, standard_blob_tier=StandardBlobTier.SMART, overwrite=True) + props = blob1.get_blob_properties() + assert props.blob_tier == StandardBlobTier.SMART + assert props.smart_access_tier is not None + + blob2 = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) + blob2.upload_blob(data, standard_blob_tier=StandardBlobTier.COOL, overwrite=True) + props = blob2.get_blob_properties() + assert props.blob_tier == StandardBlobTier.COOL + assert props.smart_access_tier is None + + blob2.set_standard_blob_tier(standard_blob_tier=StandardBlobTier.SMART) + props = blob2.get_blob_properties() + assert props.blob_tier == StandardBlobTier.SMART + assert props.smart_access_tier is not None + + for blob in self.bsc.get_container_client(self.container_name).list_blobs(): + assert blob.blob_tier == StandardBlobTier.SMART 
+ assert blob.smart_access_tier is not None + #------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py index af2e4ec14b64..100a53a70e08 100644 --- a/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_block_blob_async.py @@ -2119,4 +2119,119 @@ async def test_put_block_blob_with_none_concurrency(self, **kwargs): content = await (await blob.download_blob()).readall() assert data == content + @BlobPreparer() + @recorded_by_proxy_async + async def test_stage_block_from_url_uds(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + variables = kwargs.pop("variables", {}) + + token_credential = self.get_credential(BlobServiceClient, is_async=True) + service = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=token_credential + ) + container_name, blob_name = self.get_resource_name('oauthcontainer'), self.get_resource_name('oauthblob') + container = await service.create_container(container_name) + blob = container.get_blob_client(blob_name) + + start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) + expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = await service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry) + dst_sas = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + blob.blob_name, + user_delegation_key=user_delegation_key, + permission=BlobSasPermissions(create=True), + expiry=expiry, + ) + dst_blob = BlobClient.from_blob_url(f"{blob.url}?{dst_sas}") + + src_blob_name = self.get_resource_name('oauthblob2') + src_sas = self.generate_sas( + generate_blob_sas, + blob.account_name, + blob.container_name, + src_blob_name, + 
user_delegation_key=user_delegation_key,
+            permission=BlobSasPermissions(read=True, add=True, create=True, write=True, delete=True),
+            expiry=expiry,
+        )
+        data = b"abc123"
+        src_blob = BlobClient.from_blob_url(f"{container.url}/{src_blob_name}?{src_sas}")
+        await src_blob.upload_blob(data)
+
+        await dst_blob.stage_block_from_url('1', src_blob.url)
+
+        return variables
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_commit_block_list_uds(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        variables = kwargs.pop("variables", {})
+
+        token_credential = self.get_credential(BlobServiceClient, is_async=True)
+        service = BlobServiceClient(
+            self.account_url(storage_account_name, "blob"),
+            credential=token_credential
+        )
+        container_name, blob_name = self.get_resource_name('oauthcontainer'), self.get_resource_name('oauthblob')
+        container = await service.create_container(container_name)
+        blob = container.get_blob_client(blob_name)
+
+        start = self.get_datetime_variable(variables, 'start', datetime.utcnow())
+        expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1))
+        user_delegation_key = await service.get_user_delegation_key(key_start_time=start, key_expiry_time=expiry)
+        sas = self.generate_sas(
+            generate_blob_sas,
+            blob.account_name,
+            blob.container_name,
+            blob.blob_name,
+            user_delegation_key=user_delegation_key,
+            permission=BlobSasPermissions(create=True),
+            expiry=expiry,
+        )
+
+        identity_blob = BlobClient.from_blob_url(f"{blob.url}?{sas}")
+        await identity_blob.stage_block('1', b'AAA')
+        await identity_blob.stage_block('2', b'BBB')
+        await identity_blob.stage_block('3', b'CCC')
+        block_list = [BlobBlock(block_id='3'), BlobBlock(block_id='2'), BlobBlock(block_id='1')]
+        await identity_blob.commit_block_list(block_list=block_list)
+
+        return variables
+
+    @BlobPreparer()
+    @recorded_by_proxy_async
+    async def test_smart_access_tier(self, **kwargs):
+        storage_account_name = 
kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + await self._setup(storage_account_name, storage_account_key) + + data = b"abc123" * 4 + blob1 = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) + await blob1.upload_blob(data, standard_blob_tier=StandardBlobTier.SMART, overwrite=True) + props = await blob1.get_blob_properties() + assert props.blob_tier == StandardBlobTier.SMART + assert props.smart_access_tier is not None + + blob2 = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) + await blob2.upload_blob(data, standard_blob_tier=StandardBlobTier.COOL, overwrite=True) + props = await blob2.get_blob_properties() + assert props.blob_tier == StandardBlobTier.COOL + assert props.smart_access_tier is None + + await blob2.set_standard_blob_tier(standard_blob_tier=StandardBlobTier.SMART) + props = await blob2.get_blob_properties() + assert props.blob_tier == StandardBlobTier.SMART + assert props.smart_access_tier is not None + + cc = self.bsc.get_container_client(self.container_name) + async for blob in cc.list_blobs(): + assert blob.blob_tier == StandardBlobTier.SMART + assert blob.smart_access_tier is not None + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob.py b/sdk/storage/azure-storage-blob/tests/test_common_blob.py index fa5aa739fa7e..652a5cb5a0df 100644 --- a/sdk/storage/azure-storage-blob/tests/test_common_blob.py +++ b/sdk/storage/azure-storage-blob/tests/test_common_blob.py @@ -3811,4 +3811,107 @@ def test_blob_cross_tenant_delegation_sas(self, **kwargs): content = identity_blob.download_blob().readall() assert content == data - # ------------------------------------------------------------------------------ \ No newline at end of file + @BlobPreparer() + @recorded_by_proxy + def test_smart_rehydrate(self, **kwargs): + storage_account_name = 
kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + self._setup(storage_account_name, storage_account_key) + blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) + blob.upload_blob(b"abc123", overwrite=True) + blob.set_standard_blob_tier(standard_blob_tier=StandardBlobTier.ARCHIVE) + blob.set_standard_blob_tier( + standard_blob_tier=StandardBlobTier.SMART, + rehydrate_priority=RehydratePriority.HIGH + ) + + props = blob.get_blob_properties() + assert props is not None + assert props.archive_status == "rehydrate-pending-to-smart" + + @BlobPreparer() + @recorded_by_proxy + def test_blob_fns_directory(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + variables = kwargs.pop("variables", {}) + + token_credential = self.get_credential(BlobServiceClient) + service = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=token_credential + ) + container_name = self.get_resource_name("directorysascontainer") + + try: + service.create_container(container_name) + + start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) + expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = service.get_user_delegation_key(start, expiry) + + for blob_name in ["foo", "foo/bar", "foo/bar/hello"]: + token = self.generate_sas( + generate_blob_sas, + account_name=storage_account_name, + container_name=container_name, + blob_name=blob_name, + user_delegation_key=user_delegation_key, + permission=BlobSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True), + expiry=expiry, + is_directory=True, + ) + + exact_blob = service.get_blob_client(container_name, blob_name) + BlobClient.from_blob_url(exact_blob.url, credential=token).upload_blob(b"data", overwrite=True) + + # Blob whose name has the SAS directory name as a prefix should also succeed + child_blob = 
service.get_blob_client(container_name, blob_name + "/test") + BlobClient.from_blob_url(child_blob.url, credential=token).upload_blob(b"data", overwrite=True) + finally: + service.delete_container(container_name) + + return variables + + @BlobPreparer() + @recorded_by_proxy + def test_blob_fns_directory_fail(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + variables = kwargs.pop("variables", {}) + + token_credential = self.get_credential(BlobServiceClient) + service = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=token_credential + ) + container_name = self.get_resource_name("directorysascontainer") + + try: + service.create_container(container_name) + + start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) + expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = service.get_user_delegation_key(start, expiry) + + blob_name = "foo/bar/baz/" + token = self.generate_sas( + generate_blob_sas, + account_name=storage_account_name, + container_name=container_name, + blob_name=blob_name, + user_delegation_key=user_delegation_key, + permission=BlobSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True), + expiry=expiry, + is_directory=True, + ) + + non_prefix_blob = service.get_blob_client(container_name, "foo/bar") + non_prefix_blob_with_sas = BlobClient.from_blob_url(non_prefix_blob.url, credential=token) + with pytest.raises(HttpResponseError): + non_prefix_blob_with_sas.upload_blob(b"data", overwrite=True) + finally: + service.delete_container(container_name) + + return variables + + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py index 8ab6bd6c32b6..feca11c74ee2 100644 --- 
a/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py +++ b/sdk/storage/azure-storage-blob/tests/test_common_blob_async.py @@ -3749,4 +3749,109 @@ async def test_blob_cross_tenant_delegation_sas(self, **kwargs): content = await (await identity_blob.download_blob()).readall() assert content == data + @BlobPreparer() + @recorded_by_proxy_async + async def test_smart_rehydrate(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + await self._setup(storage_account_name, storage_account_key) + blob = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) + await blob.upload_blob(b"abc123", overwrite=True) + await blob.set_standard_blob_tier(standard_blob_tier=StandardBlobTier.ARCHIVE) + await blob.set_standard_blob_tier( + standard_blob_tier=StandardBlobTier.SMART, + rehydrate_priority=RehydratePriority.HIGH + ) + + props = await blob.get_blob_properties() + assert props is not None + assert props.archive_status == "rehydrate-pending-to-smart" + + @BlobPreparer() + @recorded_by_proxy_async + async def test_blob_fns_directory(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + variables = kwargs.pop("variables", {}) + + token_credential = self.get_credential(BlobServiceClient, is_async=True) + service = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=token_credential + ) + container_name = self.get_resource_name("directorysascontainer") + + try: + await service.create_container(container_name) + + start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) + expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = await service.get_user_delegation_key(start, expiry) + + for blob_name in ["foo", "foo/bar", "foo/bar/hello"]: + token = self.generate_sas( + generate_blob_sas, + account_name=storage_account_name, + 
container_name=container_name, + blob_name=blob_name, + user_delegation_key=user_delegation_key, + permission=BlobSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True), + expiry=expiry, + is_directory=True, + ) + + exact_blob = service.get_blob_client(container_name, blob_name) + await BlobClient.from_blob_url( + exact_blob.url, credential=token).upload_blob(b"data", overwrite=True) + + # Blob whose name has the SAS directory name as a prefix should also succeed + child_blob = service.get_blob_client(container_name, blob_name + "/test") + await BlobClient.from_blob_url( + child_blob.url, credential=token).upload_blob(b"data", overwrite=True) + finally: + await service.delete_container(container_name) + + return variables + + @BlobPreparer() + @recorded_by_proxy_async + async def test_blob_fns_directory_fail(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + variables = kwargs.pop("variables", {}) + + token_credential = self.get_credential(BlobServiceClient, is_async=True) + service = BlobServiceClient( + self.account_url(storage_account_name, "blob"), + credential=token_credential + ) + container_name = self.get_resource_name("directorysascontainer") + + try: + await service.create_container(container_name) + + start = self.get_datetime_variable(variables, 'start', datetime.utcnow()) + expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1)) + user_delegation_key = await service.get_user_delegation_key(start, expiry) + + blob_name = "foo/bar/baz/" + token = self.generate_sas( + generate_blob_sas, + account_name=storage_account_name, + container_name=container_name, + blob_name=blob_name, + user_delegation_key=user_delegation_key, + permission=BlobSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True), + expiry=expiry, + is_directory=True, + ) + + non_prefix_blob = service.get_blob_client(container_name, "foo/bar") + 
non_prefix_blob_with_sas = BlobClient.from_blob_url(non_prefix_blob.url, credential=token) + with pytest.raises(HttpResponseError): + await non_prefix_blob_with_sas.upload_blob(b"data", overwrite=True) + finally: + await service.delete_container(container_name) + + return variables + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md index e0f8965fbe2e..84023d4cb254 100644 --- a/sdk/storage/azure-storage-file-datalake/CHANGELOG.md +++ b/sdk/storage/azure-storage-file-datalake/CHANGELOG.md @@ -1,8 +1,16 @@ # Release History -## 12.25.0b1 (Unreleased) +## 12.25.0b1 (2026-03-30) ### Features Added +- Added support for service version 2026-06-06. +- Added support for connection strings and `account_url`s to accept URLs with `-ipv6` and `-dualstack` suffixes +for `DataLakeServiceClient`, `FileSystemClient`, `DataLakeDirectoryClient`, and `DataLakeFileClient`. +- Added support for `DataLakeDirectoryClient` and `DataLakeFileClient`'s `set_tags` and `get_tags` APIs +to conditionally set and get tags associated with a directory or file client, respectively. + +### Other Changes +- Consolidated the behavior of `max_concurrency=None` by defaulting to the shared `DEFAULT_MAX_CONCURRENCY` constant. 
## 12.24.0b1 (2026-01-27) diff --git a/sdk/storage/azure-storage-file-datalake/assets.json b/sdk/storage/azure-storage-file-datalake/assets.json index 15743cc2805e..3d5483d66f97 100644 --- a/sdk/storage/azure-storage-file-datalake/assets.json +++ b/sdk/storage/azure-storage-file-datalake/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-file-datalake", - "Tag": "python/storage/azure-storage-file-datalake_3d29de0db8" + "Tag": "python/storage/azure-storage-file-datalake_c0870501f2" } diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py index b30a532af67e..a8cc3d21f39f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py @@ -116,8 +116,13 @@ def __init__( # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" - self._client = AzureDataLakeStorageRESTAPI(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._api_version = get_api_version(kwargs) + self._client = AzureDataLakeStorageRESTAPI( + self.url, + version=self._api_version, + base_url=self.url, + pipeline=self._pipeline + ) def __enter__(self) -> Self: self._client.__enter__() diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py index 8a5f6d643152..2178be109500 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py +++ 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py @@ -157,11 +157,11 @@ def close(self) -> None: def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI: client = AzureDataLakeStorageRESTAPI( url, + version=self._api_version, base_url=url, file_system=self.file_system_name, pipeline=self._pipeline ) - client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client def _format_url(self, hostname: str) -> str: @@ -968,6 +968,7 @@ def _undelete_path( ) path_client = AzureDataLakeStorageRESTAPI( url, + version=self._api_version, filesystem=self.file_system_name, path=deleted_path_name, pipeline=pipeline diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py index 6304ac3e09a4..ccbd94a2d0f6 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_azure_data_lake_storage_restapi.py @@ -32,6 +32,8 @@ class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param base_url: Service URL. Required. Default value is "". :type base_url: str :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies @@ -41,16 +43,13 @@ class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version :keyword resource: The value must be "filesystem" for all filesystem operations. Default value is "filesystem". 
Note that overriding this default value may result in unsupported behavior. :paramtype resource: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str """ def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", x_ms_lease_duration: Optional[int] = None, **kwargs: Any + self, url: str, version: str, base_url: str = "", x_ms_lease_duration: Optional[int] = None, **kwargs: Any ) -> None: self._config = AzureDataLakeStorageRESTAPIConfiguration( - url=url, x_ms_lease_duration=x_ms_lease_duration, **kwargs + url=url, version=version, x_ms_lease_duration=x_ms_lease_duration, **kwargs ) _policies = kwargs.pop("policies", None) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py index 0f8ea82cd0da..a1a941bed54f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/_configuration.py @@ -22,6 +22,8 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies the duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 for infinite lease. Default value is None. 
@@ -29,22 +31,20 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst :keyword resource: The value must be "filesystem" for all filesystem operations. Default value is "filesystem". Note that overriding this default value may result in unsupported behavior. :paramtype resource: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str """ - def __init__(self, url: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None: + def __init__(self, url: str, version: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None: resource: Literal["filesystem"] = kwargs.pop("resource", "filesystem") - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") if url is None: raise ValueError("Parameter 'url' must not be None.") + if version is None: + raise ValueError("Parameter 'version' must not be None.") self.url = url + self.version = version self.x_ms_lease_duration = x_ms_lease_duration self.resource = resource - self.version = version kwargs.setdefault("sdk_moniker", "azuredatalakestoragerestapi/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py index 60f2078853b1..b15761157c8b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_azure_data_lake_storage_restapi.py @@ -32,6 +32,8 @@ class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version :param url: The 
URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param base_url: Service URL. Required. Default value is "". :type base_url: str :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies @@ -41,16 +43,13 @@ class AzureDataLakeStorageRESTAPI: # pylint: disable=client-accepts-api-version :keyword resource: The value must be "filesystem" for all filesystem operations. Default value is "filesystem". Note that overriding this default value may result in unsupported behavior. :paramtype resource: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. - :paramtype version: str """ def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, url: str, base_url: str = "", x_ms_lease_duration: Optional[int] = None, **kwargs: Any + self, url: str, version: str, base_url: str = "", x_ms_lease_duration: Optional[int] = None, **kwargs: Any ) -> None: self._config = AzureDataLakeStorageRESTAPIConfiguration( - url=url, x_ms_lease_duration=x_ms_lease_duration, **kwargs + url=url, version=version, x_ms_lease_duration=x_ms_lease_duration, **kwargs ) _policies = kwargs.pop("policies", None) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py index 95fdeeffc2bc..944d6e735243 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_configuration.py @@ -22,6 +22,8 @@ class 
AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst :param url: The URL of the service account, container, or blob that is the target of the desired operation. Required. :type url: str + :param version: Specifies the version of the operation to use for this request. Required. + :type version: str :param x_ms_lease_duration: The lease duration is required to acquire a lease, and specifies the duration of the lease in seconds. The lease duration must be between 15 and 60 seconds or -1 for infinite lease. Default value is None. @@ -29,22 +31,20 @@ class AzureDataLakeStorageRESTAPIConfiguration: # pylint: disable=too-many-inst :keyword resource: The value must be "filesystem" for all filesystem operations. Default value is "filesystem". Note that overriding this default value may result in unsupported behavior. :paramtype resource: str - :keyword version: Specifies the version of the operation to use for this request. Default value - is "2026-02-06". Note that overriding this default value may result in unsupported behavior. 
- :paramtype version: str """ - def __init__(self, url: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None: + def __init__(self, url: str, version: str, x_ms_lease_duration: Optional[int] = None, **kwargs: Any) -> None: resource: Literal["filesystem"] = kwargs.pop("resource", "filesystem") - version: Literal["2026-02-06"] = kwargs.pop("version", "2026-02-06") if url is None: raise ValueError("Parameter 'url' must not be None.") + if version is None: + raise ValueError("Parameter 'version' must not be None.") self.url = url + self.version = version self.x_ms_lease_duration = x_ms_lease_duration self.resource = resource - self.version = version kwargs.setdefault("sdk_moniker", "azuredatalakestoragerestapi/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py index 3dc6fe583f50..88edb5316012 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_file_system_operations.py @@ -110,11 +110,11 @@ async def create( _request = build_create_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, properties=properties, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -208,13 +208,13 @@ async def set_properties( _request = build_set_properties_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, properties=properties, if_modified_since=_if_modified_since, 
if_unmodified_since=_if_unmodified_since, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -281,10 +281,10 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -375,12 +375,12 @@ async def delete( _request = build_delete_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -485,6 +485,7 @@ async def list_paths( _request = build_list_paths_request( url=self._config.url, recursive=recursive, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, continuation=continuation, @@ -493,7 +494,6 @@ async def list_paths( upn=upn, begin_from=begin_from, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -599,6 +599,7 @@ async def list_blob_hierarchy_segment( _request = build_list_blob_hierarchy_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, delimiter=delimiter, marker=marker, @@ -609,7 +610,6 @@ async def list_blob_hierarchy_segment( request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py index bb1ce5ce7e4e..79f220bed0bd 100644 --- 
a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_path_operations.py @@ -254,6 +254,7 @@ async def create( # pylint: disable=too-many-locals _request = build_create_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, resource=resource, @@ -289,7 +290,6 @@ async def create( # pylint: disable=too-many-locals expiry_options=expiry_options, expires_on=expires_on, encryption_context=encryption_context, - version=self._config.version, headers=_headers, params=_params, ) @@ -522,6 +522,7 @@ async def update( # pylint: disable=too-many-locals url=self._config.url, action=action, mode=mode, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, max_records=max_records, @@ -550,7 +551,6 @@ async def update( # pylint: disable=too-many-locals structured_body_type=structured_body_type, structured_content_length=structured_content_length, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -638,7 +638,7 @@ async def lease( the current lease ID in "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to change the lease ID of an active lease. Use "renew" and specify the "x-ms-lease-id" to renew an existing lease. Use "release" and specify the "x-ms-lease-id" to release a lease. Known values - are: "acquire", "break", "change", "renew", "release", and "break". Required. + are: "acquire", "break", "change", "renew", and "release". Required. :type x_ms_lease_action: str or ~azure.storage.filedatalake.models.PathLeaseAction :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. 
Default @@ -694,6 +694,7 @@ async def lease( _request = build_lease_request( url=self._config.url, x_ms_lease_action=x_ms_lease_action, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, x_ms_lease_break_period=x_ms_lease_break_period, @@ -704,7 +705,6 @@ async def lease( if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, x_ms_lease_duration=self._config.x_ms_lease_duration, - version=self._config.version, headers=_headers, params=_params, ) @@ -835,6 +835,7 @@ async def read( _request = build_read_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, range=range, @@ -847,7 +848,6 @@ async def read( encryption_key=_encryption_key, encryption_key_sha256=_encryption_key_sha256, encryption_algorithm=_encryption_algorithm, # type: ignore - version=self._config.version, headers=_headers, params=_params, ) @@ -1025,6 +1025,7 @@ async def get_properties( _request = build_get_properties_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, action=action, @@ -1034,7 +1035,6 @@ async def get_properties( if_none_match=_if_none_match, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, - version=self._config.version, headers=_headers, params=_params, ) @@ -1176,6 +1176,7 @@ async def delete( _request = build_delete_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, recursive=recursive, @@ -1186,7 +1187,6 @@ async def delete( if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, paginated=paginated, - version=self._config.version, headers=_headers, params=_params, ) @@ -1300,6 +1300,7 @@ async def set_access_control( _request = build_set_access_control_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, 
owner=owner, @@ -1312,7 +1313,6 @@ async def set_access_control( if_unmodified_since=_if_unmodified_since, request_id_parameter=request_id_parameter, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1418,6 +1418,7 @@ async def set_access_control_recursive( _request = build_set_access_control_recursive_request( url=self._config.url, mode=mode, + version=self._config.version, timeout=timeout, continuation=continuation, force_flag=force_flag, @@ -1425,7 +1426,6 @@ async def set_access_control_recursive( acl=acl, request_id_parameter=request_id_parameter, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1595,6 +1595,7 @@ async def flush_data( # pylint: disable=too-many-locals _request = build_flush_data_request( url=self._config.url, + version=self._config.version, timeout=timeout, position=position, retain_uncommitted_data=retain_uncommitted_data, @@ -1619,7 +1620,6 @@ async def flush_data( # pylint: disable=too-many-locals encryption_key_sha256=_encryption_key_sha256, encryption_algorithm=_encryption_algorithm, # type: ignore action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -1774,6 +1774,7 @@ async def append_data( # pylint: disable=too-many-locals _request = build_append_data_request( url=self._config.url, + version=self._config.version, position=position, timeout=timeout, content_length=content_length, @@ -1792,7 +1793,6 @@ async def append_data( # pylint: disable=too-many-locals structured_content_length=structured_content_length, action=action, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1886,11 +1886,11 @@ async def set_expiry( _request = build_set_expiry_request( url=self._config.url, expiry_options=expiry_options, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, expires_on=expires_on, comp=comp, - version=self._config.version, 
headers=_headers, params=_params, ) @@ -1966,11 +1966,11 @@ async def undelete( _request = build_undelete_request( url=self._config.url, + version=self._config.version, timeout=timeout, undelete_source=undelete_source, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py index 83b9459d6ede..1f97459e2812 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/operations/_service_operations.py @@ -113,13 +113,13 @@ def prepare_request(next_link=None): _request = build_list_file_systems_request( url=self._config.url, + version=self._config.version, prefix=prefix, continuation=continuation, max_results=max_results, request_id_parameter=request_id_parameter, timeout=timeout, resource=resource, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py index c9bb43b5e4a0..f9fe6b949142 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/models/_azure_data_lake_storage_restapi_enums.py @@ -55,7 +55,6 @@ class PathLeaseAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): CHANGE = "change" RENEW = "renew" RELEASE = "release" - BREAK_ENUM = "break" class 
PathRenameMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py index a6bd831c6b1f..abc58c7a7ba4 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_file_system_operations.py @@ -38,6 +38,7 @@ def build_create_request( url: str, *, + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, properties: Optional[str] = None, @@ -47,7 +48,6 @@ def build_create_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -77,6 +77,7 @@ def build_create_request( def build_set_properties_request( url: str, *, + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, properties: Optional[str] = None, @@ -88,7 +89,6 @@ def build_set_properties_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -120,13 +120,12 @@ def build_set_properties_request( def build_get_properties_request( - url: str, *, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any + url: str, *, version: str, request_id_parameter: 
Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -154,6 +153,7 @@ def build_get_properties_request( def build_delete_request( url: str, *, + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, if_modified_since: Optional[datetime.datetime] = None, @@ -164,7 +164,6 @@ def build_delete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -197,6 +196,7 @@ def build_list_paths_request( url: str, *, recursive: bool, + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, continuation: Optional[str] = None, @@ -210,7 +210,6 @@ def build_list_paths_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["filesystem"] = kwargs.pop("resource", _params.pop("resource", "filesystem")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -249,6 +248,7 @@ def build_list_paths_request( def build_list_blob_hierarchy_segment_request( # pylint: disable=name-too-long url: str, *, + version: str, prefix: Optional[str] = None, delimiter: Optional[str] = None, marker: Optional[str] = None, @@ -264,7 +264,6 @@ def build_list_blob_hierarchy_segment_request( # 
pylint: disable=name-too-long restype: Literal["container"] = kwargs.pop("restype", _params.pop("restype", "container")) comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/xml") # Construct URL @@ -373,11 +372,11 @@ def create( # pylint: disable=inconsistent-return-statements _request = build_create_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, properties=properties, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -471,13 +470,13 @@ def set_properties( # pylint: disable=inconsistent-return-statements _request = build_set_properties_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, properties=properties, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -544,10 +543,10 @@ def get_properties( # pylint: disable=inconsistent-return-statements _request = build_get_properties_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -638,12 +637,12 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -748,6 +747,7 @@ def list_paths( _request = build_list_paths_request( 
url=self._config.url, recursive=recursive, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, continuation=continuation, @@ -756,7 +756,6 @@ def list_paths( upn=upn, begin_from=begin_from, resource=self._config.resource, - version=self._config.version, headers=_headers, params=_params, ) @@ -862,6 +861,7 @@ def list_blob_hierarchy_segment( _request = build_list_blob_hierarchy_segment_request( url=self._config.url, + version=self._config.version, prefix=prefix, delimiter=delimiter, marker=marker, @@ -872,7 +872,6 @@ def list_blob_hierarchy_segment( request_id_parameter=request_id_parameter, restype=restype, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py index ccff697bed0e..40a2b8902804 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_path_operations.py @@ -40,6 +40,7 @@ def build_create_request( # pylint: disable=too-many-locals,too-many-statements,too-many-branches url: str, *, + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, resource: Optional[Union[str, _models.PathResourceType]] = None, @@ -80,7 +81,6 @@ def build_create_request( # pylint: disable=too-many-locals,too-many-statements _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -182,6 +182,7 @@ def build_update_request( # pylint: 
disable=too-many-locals,too-many-statements action: Union[str, _models.PathUpdateAction], mode: Union[str, _models.PathSetAccessControlRecursiveMode], content: IO[bytes], + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, max_records: Optional[int] = None, @@ -215,7 +216,6 @@ def build_update_request( # pylint: disable=too-many-locals,too-many-statements _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -299,6 +299,7 @@ def build_lease_request( url: str, *, x_ms_lease_action: Union[str, _models.PathLeaseAction], + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, x_ms_lease_break_period: Optional[int] = None, @@ -314,7 +315,6 @@ def build_lease_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -360,6 +360,7 @@ def build_lease_request( def build_read_request( url: str, *, + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, range: Optional[str] = None, @@ -377,7 +378,6 @@ def build_read_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -428,6 +428,7 @@ def build_read_request( def build_get_properties_request( url: str, *, + version: str, request_id_parameter: 
Optional[str] = None, timeout: Optional[int] = None, action: Optional[Union[str, _models.PathGetPropertiesAction]] = None, @@ -442,7 +443,6 @@ def build_get_properties_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -483,6 +483,7 @@ def build_get_properties_request( def build_delete_request( url: str, *, + version: str, request_id_parameter: Optional[str] = None, timeout: Optional[int] = None, recursive: Optional[bool] = None, @@ -498,7 +499,6 @@ def build_delete_request( _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -541,6 +541,7 @@ def build_delete_request( def build_set_access_control_request( url: str, *, + version: str, timeout: Optional[int] = None, lease_id: Optional[str] = None, owner: Optional[str] = None, @@ -558,7 +559,6 @@ def build_set_access_control_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) action: Literal["setAccessControl"] = kwargs.pop("action", _params.pop("action", "setAccessControl")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -605,6 +605,7 @@ def build_set_access_control_recursive_request( # pylint: disable=name-too-long url: str, *, mode: Union[str, _models.PathSetAccessControlRecursiveMode], + version: str, timeout: Optional[int] = None, continuation: Optional[str] = None, force_flag: Optional[bool] = None, @@ -619,7 +620,6 @@ def build_set_access_control_recursive_request( # 
pylint: disable=name-too-long action: Literal["setAccessControlRecursive"] = kwargs.pop( "action", _params.pop("action", "setAccessControlRecursive") ) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -656,6 +656,7 @@ def build_set_access_control_recursive_request( # pylint: disable=name-too-long def build_flush_data_request( # pylint: disable=too-many-locals url: str, *, + version: str, timeout: Optional[int] = None, position: Optional[int] = None, retain_uncommitted_data: Optional[bool] = None, @@ -685,7 +686,6 @@ def build_flush_data_request( # pylint: disable=too-many-locals _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) action: Literal["flush"] = kwargs.pop("action", _params.pop("action", "flush")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -758,6 +758,7 @@ def build_append_data_request( # pylint: disable=too-many-locals url: str, *, content: IO[bytes], + version: str, position: Optional[int] = None, timeout: Optional[int] = None, content_length: Optional[int] = None, @@ -781,7 +782,6 @@ def build_append_data_request( # pylint: disable=too-many-locals action: Literal["append"] = kwargs.pop("action", _params.pop("action", "append")) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -848,6 +848,7 @@ def build_set_expiry_request( url: str, *, expiry_options: Union[str, _models.PathExpiryOptions], + version: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, expires_on: Optional[str] = None, @@ -857,7 +858,6 @@ def build_set_expiry_request( _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["expiry"] = kwargs.pop("comp", _params.pop("comp", "expiry")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -888,6 +888,7 @@ def build_set_expiry_request( def build_undelete_request( url: str, *, + version: str, timeout: Optional[int] = None, undelete_source: Optional[str] = None, request_id_parameter: Optional[str] = None, @@ -897,7 +898,6 @@ def build_undelete_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -1131,6 +1131,7 @@ def create( # pylint: disable=inconsistent-return-statements,too-many-locals _request = build_create_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, resource=resource, @@ -1166,7 +1167,6 @@ def create( # pylint: disable=inconsistent-return-statements,too-many-locals expiry_options=expiry_options, expires_on=expires_on, encryption_context=encryption_context, - version=self._config.version, headers=_headers, params=_params, ) @@ -1399,6 +1399,7 @@ def update( # pylint: disable=too-many-locals url=self._config.url, action=action, mode=mode, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, max_records=max_records, @@ -1427,7 +1428,6 @@ def update( # pylint: disable=too-many-locals structured_body_type=structured_body_type, structured_content_length=structured_content_length, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -1515,7 +1515,7 @@ def lease( # pylint: disable=inconsistent-return-statements the 
current lease ID in "x-ms-lease-id" and the new lease ID in "x-ms-proposed-lease-id" to change the lease ID of an active lease. Use "renew" and specify the "x-ms-lease-id" to renew an existing lease. Use "release" and specify the "x-ms-lease-id" to release a lease. Known values - are: "acquire", "break", "change", "renew", "release", and "break". Required. + are: "acquire", "break", "change", "renew", and "release". Required. :type x_ms_lease_action: str or ~azure.storage.filedatalake.models.PathLeaseAction :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. Default @@ -1571,6 +1571,7 @@ def lease( # pylint: disable=inconsistent-return-statements _request = build_lease_request( url=self._config.url, x_ms_lease_action=x_ms_lease_action, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, x_ms_lease_break_period=x_ms_lease_break_period, @@ -1581,7 +1582,6 @@ def lease( # pylint: disable=inconsistent-return-statements if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, x_ms_lease_duration=self._config.x_ms_lease_duration, - version=self._config.version, headers=_headers, params=_params, ) @@ -1712,6 +1712,7 @@ def read( _request = build_read_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, range=range, @@ -1724,7 +1725,6 @@ def read( encryption_key=_encryption_key, encryption_key_sha256=_encryption_key_sha256, encryption_algorithm=_encryption_algorithm, # type: ignore - version=self._config.version, headers=_headers, params=_params, ) @@ -1902,6 +1902,7 @@ def get_properties( # pylint: disable=inconsistent-return-statements _request = build_get_properties_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, action=action, @@ -1911,7 
+1912,6 @@ def get_properties( # pylint: disable=inconsistent-return-statements if_none_match=_if_none_match, if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, - version=self._config.version, headers=_headers, params=_params, ) @@ -2053,6 +2053,7 @@ def delete( # pylint: disable=inconsistent-return-statements _request = build_delete_request( url=self._config.url, + version=self._config.version, request_id_parameter=request_id_parameter, timeout=timeout, recursive=recursive, @@ -2063,7 +2064,6 @@ def delete( # pylint: disable=inconsistent-return-statements if_modified_since=_if_modified_since, if_unmodified_since=_if_unmodified_since, paginated=paginated, - version=self._config.version, headers=_headers, params=_params, ) @@ -2177,6 +2177,7 @@ def set_access_control( # pylint: disable=inconsistent-return-statements _request = build_set_access_control_request( url=self._config.url, + version=self._config.version, timeout=timeout, lease_id=_lease_id, owner=owner, @@ -2189,7 +2190,6 @@ def set_access_control( # pylint: disable=inconsistent-return-statements if_unmodified_since=_if_unmodified_since, request_id_parameter=request_id_parameter, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2295,6 +2295,7 @@ def set_access_control_recursive( _request = build_set_access_control_recursive_request( url=self._config.url, mode=mode, + version=self._config.version, timeout=timeout, continuation=continuation, force_flag=force_flag, @@ -2302,7 +2303,6 @@ def set_access_control_recursive( acl=acl, request_id_parameter=request_id_parameter, action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2472,6 +2472,7 @@ def flush_data( # pylint: disable=inconsistent-return-statements,too-many-local _request = build_flush_data_request( url=self._config.url, + version=self._config.version, timeout=timeout, position=position, retain_uncommitted_data=retain_uncommitted_data, @@ -2496,7 +2497,6 
@@ def flush_data( # pylint: disable=inconsistent-return-statements,too-many-local encryption_key_sha256=_encryption_key_sha256, encryption_algorithm=_encryption_algorithm, # type: ignore action=action, - version=self._config.version, headers=_headers, params=_params, ) @@ -2651,6 +2651,7 @@ def append_data( # pylint: disable=inconsistent-return-statements,too-many-loca _request = build_append_data_request( url=self._config.url, + version=self._config.version, position=position, timeout=timeout, content_length=content_length, @@ -2669,7 +2670,6 @@ def append_data( # pylint: disable=inconsistent-return-statements,too-many-loca structured_content_length=structured_content_length, action=action, content_type=content_type, - version=self._config.version, content=_content, headers=_headers, params=_params, @@ -2763,11 +2763,11 @@ def set_expiry( # pylint: disable=inconsistent-return-statements _request = build_set_expiry_request( url=self._config.url, expiry_options=expiry_options, + version=self._config.version, timeout=timeout, request_id_parameter=request_id_parameter, expires_on=expires_on, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) @@ -2843,11 +2843,11 @@ def undelete( # pylint: disable=inconsistent-return-statements _request = build_undelete_request( url=self._config.url, + version=self._config.version, timeout=timeout, undelete_source=undelete_source, request_id_parameter=request_id_parameter, comp=comp, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py index 49f604f39e94..d0f5d710b7ac 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py +++ 
b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/operations/_service_operations.py @@ -38,6 +38,7 @@ def build_list_file_systems_request( url: str, *, + version: str, prefix: Optional[str] = None, continuation: Optional[str] = None, max_results: Optional[int] = None, @@ -49,7 +50,6 @@ def build_list_file_systems_request( _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) resource: Literal["account"] = kwargs.pop("resource", _params.pop("resource", "account")) - version: Literal["2026-02-06"] = kwargs.pop("version", _headers.pop("x-ms-version", "2026-02-06")) accept = _headers.pop("Accept", "application/json") # Construct URL @@ -160,13 +160,13 @@ def prepare_request(next_link=None): _request = build_list_file_systems_request( url=self._config.url, + version=self._config.version, prefix=prefix, continuation=continuation, max_results=max_results, request_id_parameter=request_id_parameter, timeout=timeout, resource=resource, - version=self._config.version, headers=_headers, params=_params, ) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py index ee3ee10f5080..0a5660a8be26 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_models.py @@ -252,6 +252,8 @@ class FileSystemSasPermissions: Append data to a file in the directory. :keyword bool create: Write a new file, snapshot a file, or copy a file to a new file. + :keyword bool tags: + Indicates that reading and writing Tags are permitted. :keyword bool move: Move any file in the directory to a new location. 
Note the move operation can optionally be restricted to the child file or directory owner or the parent directory owner if the said parameter is included in the token @@ -278,6 +280,8 @@ class FileSystemSasPermissions: """Append data to a file in the directory.""" create: Optional[bool] = None """Write a new file, snapshot a file, or copy a file to a new file.""" + tags: Optional[bool] = None + """Indicates that reading and writing Tags are permitted.""" move: Optional[bool] = None """Move any file in the directory to a new location. Note the move operation can optionally be restricted to the child file or directory owner or the parent directory owner if the said parameter is included in the token @@ -304,6 +308,7 @@ def __init__( self.list = list self.add = kwargs.pop('add', None) self.create = kwargs.pop('create', None) + self.tags = kwargs.pop('tags', None) self.move = kwargs.pop('move', None) self.execute = kwargs.pop('execute', None) self.manage_ownership = kwargs.pop('manage_ownership', None) @@ -314,6 +319,7 @@ def __init__( ('w' if self.write else '') + ('d' if self.delete else '') + ('l' if self.list else '') + + ('t' if self.tags else '') + ('m' if self.move else '') + ('e' if self.execute else '') + ('o' if self.manage_ownership else '') + @@ -341,13 +347,14 @@ def from_string(cls, permission: str) -> Self: p_write = 'w' in permission p_delete = 'd' in permission p_list = 'l' in permission + p_tags = 't' in permission p_move = 'm' in permission p_execute = 'e' in permission p_manage_ownership = 'o' in permission p_manage_access_control = 'p' in permission - parsed = cls(read=p_read, write=p_write, delete=p_delete, - list=p_list, add=p_add, create=p_create, move=p_move, + parsed = cls(read=p_read, write=p_write, delete=p_delete, list=p_list, + tags=p_tags, add=p_add, create=p_create, move=p_move, execute=p_execute, manage_ownership=p_manage_ownership, manage_access_control=p_manage_access_control) return parsed @@ -369,6 +376,8 @@ class 
DirectorySasPermissions: Append data to a file in the directory. :keyword bool list: List any files in the directory. Implies Execute. + :keyword bool tags: + Indicates that reading and writing Tags are permitted. :keyword bool move: Move any file in the directory to a new location. Note the move operation can optionally be restricted to the child file or directory owner or the parent directory owner if the said parameter is included in the token @@ -395,6 +404,8 @@ class DirectorySasPermissions: """Append data to a file in the directory.""" list: Optional[bool] = False """List any files in the directory. Implies Execute.""" + tags: Optional[bool] = None + """Indicates that reading and writing Tags are permitted.""" move: Optional[bool] = False """Move any file in the directory to a new location. Note the move operation can optionally be restricted to the child file or directory owner or the parent directory owner if the said parameter is included in the token @@ -421,6 +432,7 @@ def __init__( self.delete = delete self.add = kwargs.pop('add', None) self.list = kwargs.pop('list', None) + self.tags = kwargs.pop('tags', None) self.move = kwargs.pop('move', None) self.execute = kwargs.pop('execute', None) self.manage_ownership = kwargs.pop('manage_ownership', None) @@ -431,6 +443,7 @@ def __init__( ('w' if self.write else '') + ('d' if self.delete else '') + ('l' if self.list else '') + + ('t' if self.tags else '') + ('m' if self.move else '') + ('e' if self.execute else '') + ('o' if self.manage_ownership else '') + @@ -458,13 +471,14 @@ def from_string(cls, permission: str) -> Self: p_write = 'w' in permission p_delete = 'd' in permission p_list = 'l' in permission + p_tags = 't' in permission p_move = 'm' in permission p_execute = 'e' in permission p_manage_ownership = 'o' in permission p_manage_access_control = 'p' in permission - parsed = cls(read=p_read, create=p_create, write=p_write, delete=p_delete, add=p_add, - list=p_list, move=p_move, execute=p_execute, 
manage_ownership=p_manage_ownership, + parsed = cls(read=p_read, create=p_create, write=p_write, delete=p_delete, add=p_add, list=p_list, + tags=p_tags, move=p_move, execute=p_execute, manage_ownership=p_manage_ownership, manage_access_control=p_manage_access_control) return parsed @@ -483,6 +497,8 @@ class FileSasPermissions: Delete the file. :keyword bool add: Append data to the file. + :keyword bool tags: + Indicates that reading and writing Tags are permitted. :keyword bool move: Move any file in the directory to a new location. Note the move operation can optionally be restricted to the child file or directory owner or the parent directory owner if the said parameter is included in the token @@ -507,6 +523,8 @@ class FileSasPermissions: """Delete the file.""" add: Optional[bool] = None """Append data to the file.""" + tags: Optional[bool] = None + """Indicates that reading and writing Tags are permitted.""" move: Optional[bool] = None """Move any file in the directory to a new location. 
Note the move operation can optionally be restricted to the child file or directory owner or the parent directory owner if the said parameter is included in the token @@ -532,6 +550,7 @@ def __init__( self.write = write self.delete = delete self.add = kwargs.pop('add', None) + self.tags = kwargs.pop('tags', None) self.move = kwargs.pop('move', None) self.execute = kwargs.pop('execute', None) self.manage_ownership = kwargs.pop('manage_ownership', None) @@ -541,6 +560,7 @@ def __init__( ('c' if self.create else '') + ('w' if self.write else '') + ('d' if self.delete else '') + + ('t' if self.tags else '') + ('m' if self.move else '') + ('e' if self.execute else '') + ('o' if self.manage_ownership else '') + @@ -567,13 +587,14 @@ def from_string(cls, permission: str) -> Self: p_create = 'c' in permission p_write = 'w' in permission p_delete = 'd' in permission + p_tags = 't' in permission p_move = 'm' in permission p_execute = 'e' in permission p_manage_ownership = 'o' in permission p_manage_access_control = 'p' in permission parsed = cls(read=p_read, create=p_create, write=p_write, delete=p_delete, add=p_add, - move=p_move, execute=p_execute, manage_ownership=p_manage_ownership, + tags=p_tags, move=p_move, execute=p_execute, manage_ownership=p_manage_ownership, manage_access_control=p_manage_access_control) return parsed diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py index 7d74cf5e0016..54e1ecf13492 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py @@ -163,12 +163,12 @@ def close(self) -> None: def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI: client = AzureDataLakeStorageRESTAPI( url, + version=self._api_version, base_url=url, file_system=self.file_system_name, 
path=self.path_name, pipeline=self._pipeline ) - client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client def _format_url(self, hostname: str) -> str: @@ -962,3 +962,102 @@ def acquire_lease( lease = DataLakeLeaseClient(self, lease_id=lease_id) lease.acquire(lease_duration=lease_duration, **kwargs) return lease + + @distributed_trace + def set_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: + """ + The Set Tags operation enables users to set tags on a path. + Each call to this operation replaces all existing tags attached to the path. + To remove all tags from the path, call this operation with no tags set. + + :param tags: + Name-value pairs associated with the path as tag. Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + :type tags: Dict[str, str] + :keyword str version_id: + The version id parameter is an opaque DateTime value that, when present, + specifies the version of the path to add tags to. + :keyword bool validate_content: + If true, calculates an MD5 hash of the tags content. The storage service checks the + hash of the content that has arrived with the hash that was sent. This is primarily + valuable for detecting bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the path. + :keyword str if_tags_match_condition: + Specify a SQL where clause on path tags to operate only on destination path with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + :keyword lease: + Required if the path has an active lease. 
Value can be a DataLakeLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timesouts + see `here `_. + :return: Path-updated property dict (Etag and last modified) + :rtype: Dict[str, Any] + """ + return self._blob_client.set_blob_tags(tags, **kwargs) + + @distributed_trace + def get_tags(self, **kwargs: Any) -> Dict[str, str]: + """The Get Tags operation enables users to get tags on a path. + + :keyword Optional[str] version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the path to add tags to. 
+ :keyword str if_tags_match_condition: + Specify a SQL where clause on path tags to operate only on destination path with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + :keyword lease: + Required if the path has an active lease. Value can be a DataLakeLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `_. + :return: Key value pairs of path tags.
+ :rtype: Dict[str, str] + """ + return self._blob_client.get_blob_tags(**kwargs) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.pyi b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.pyi index 3207de0d220b..b91eff42ff5f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.pyi +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.pyi @@ -233,3 +233,33 @@ class PathClient(StorageAccountHostsMixin): timeout: Optional[int] = None, **kwargs: Any ) -> DataLakeLeaseClient: ... + @distributed_trace + def set_tags( + self, + tags: Optional[Dict[str, str]] = None, + *, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[DataLakeLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: ... + @distributed_trace + def get_tags( + self, + *, + version_id: Optional[str] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[DataLakeLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, str]: ... 
diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client.py index 5441488d86a9..57095ca402aa 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_shared/base_client.py @@ -73,6 +73,38 @@ "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, } +_SECONDARY_SUFFIX = "-secondary" +_KNOWN_FEATURE_SUFFIXES = {"-ipv6", "-dualstack"} + + +def _construct_endpoints(netloc: str, account_part: str) -> Tuple[str, str, str]: + """ + Construct primary and secondary hostnames from a storage account URL's netloc. + + :param str netloc: The network location in a URL. + :param str account_part: The account part after parsing the URL. + :return: The account name, primary hostname, and secondary hostname. 
+ :rtype: Tuple[str, str, str] + """ + domain_suffix = netloc[len(account_part):] + secondary_idx = account_part.find(_SECONDARY_SUFFIX) + + # Case where customer provides secondary URL + if secondary_idx >= 0: + account_name = account_part[:secondary_idx] + primary_hostname = secondary_hostname = f"{account_part}{domain_suffix}" + else: + feature_suffix = "" + account_name = account_part + for suffix in _KNOWN_FEATURE_SUFFIXES: + if account_name.endswith(suffix): + feature_suffix = suffix + account_name = account_name[: -len(suffix)] + break + primary_hostname = f"{account_part}{domain_suffix}" + secondary_hostname = f"{account_name}{_SECONDARY_SUFFIX}{feature_suffix}{domain_suffix}" + + return account_name, primary_hostname, secondary_hostname class StorageAccountHostsMixin(object): @@ -115,21 +147,28 @@ def __init__( self._is_localhost = True self.account_name = parsed_url.path.strip("/") + secondary_hostname = "" + if len(account) > 1: + self.account_name, primary_hostname, secondary_hostname = _construct_endpoints( + parsed_url.netloc, account[0] + ) + else: + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/") + self.credential = _format_shared_key_credential(self.account_name, credential) if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"): raise ValueError("Token credential is only supported with HTTPS.") - secondary_hostname = "" if hasattr(self.credential, "account_name"): + if not self.account_name: + secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" self.account_name = self.credential.account_name - secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" if not self._hosts: - if len(account) > 1: - secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary") if kwargs.get("secondary_hostname"): secondary_hostname = kwargs["secondary_hostname"] - primary_hostname = (parsed_url.netloc + 
parsed_url.path).rstrip("/") + if not primary_hostname: + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/") self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname} self._sdk_moniker = f"storage-{service}/{VERSION}" diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py index 5ff3b28659a3..673782455886 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py @@ -121,8 +121,13 @@ def __init__( # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" - self._client = AzureDataLakeStorageRESTAPI(self.url, base_url=self.url, pipeline=self._pipeline) - self._client._config.version = get_api_version(kwargs) # type: ignore [assignment] + self._api_version = get_api_version(kwargs) + self._client = AzureDataLakeStorageRESTAPI( + self.url, + version=self._api_version, + base_url=self.url, + pipeline=self._pipeline + ) self._loop = kwargs.get('loop', None) async def __aenter__(self) -> Self: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py index 735a6d410132..64355b81c310 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py @@ -165,11 +165,11 @@ async def close(self) -> None: # type: ignore def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI: client = AzureDataLakeStorageRESTAPI( 
url, + version=self._api_version, base_url=url, file_system=self.file_system_name, pipeline=self._pipeline ) - client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client def _format_url(self, hostname: str) -> str: @@ -972,6 +972,7 @@ async def _undelete_path( ) path_client = AzureDataLakeStorageRESTAPI( url, + version=self._api_version, filesystem=self.file_system_name, path=deleted_path_name, pipeline=pipeline diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py index 27affaff74f3..210be85bbb2c 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py @@ -166,12 +166,12 @@ async def close(self) -> None: # type: ignore def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI: client = AzureDataLakeStorageRESTAPI( url, + version=self._api_version, base_url=url, file_system=self.file_system_name, path=self.path_name, pipeline=self._pipeline ) - client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access return client def _format_url(self, hostname: str) -> str: @@ -960,3 +960,102 @@ async def acquire_lease( lease = DataLakeLeaseClient(self, lease_id=lease_id) await lease.acquire(lease_duration=lease_duration, **kwargs) return lease + + @distributed_trace_async + async def set_tags(self, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> Dict[str, Any]: + """ + The Set Tags operation enables users to set tags on a path. + Each call to this operation replaces all existing tags attached to the path. + To remove all tags from the path, call this operation with no tags set. + + :param tags: + Name-value pairs associated with the path as tags. 
Tags are case-sensitive. + The tag set may contain at most 10 tags. Tag keys must be between 1 and 128 characters, + and tag values must be between 0 and 256 characters. + Valid tag key and value characters include: lowercase and uppercase letters, digits (0-9), + space (' '), plus (+), minus (-), period (.), solidus (/), colon (:), equals (=), underscore (_) + :type tags: Dict[str, str] + :keyword str version_id: + The version id parameter is an opaque DateTime value that, when present, + specifies the version of the path to add tags to. + :keyword bool validate_content: + If true, calculates an MD5 hash of the tags content. The storage service checks the + hash of the content that has arrived with the hash that was sent. This is primarily + valuable for detecting bitflips on the wire if using http instead of https, as https (the default), + will already validate. Note that this MD5 hash is not stored with the path. + :keyword str if_tags_match_condition: + Specify a SQL where clause on path tags to operate only on destination path with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + :keyword lease: + Required if the path has an active lease. Value can be a DataLakeLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.filedatalake.aio.DataLakeLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. + :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. 
+ Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `_. + :return: Path-updated property dict (Etag and last modified) + :rtype: Dict[str, Any] + """ + return await self._blob_client.set_blob_tags(tags, **kwargs) + + @distributed_trace_async + async def get_tags(self, **kwargs: Any) -> Dict[str, str]: + """The Get Tags operation enables users to get tags on a path. + + :keyword Optional[str] version_id: + The version id parameter is an opaque DateTime + value that, when present, specifies the version of the path to add tags to. + :keyword str if_tags_match_condition: + Specify a SQL where clause on path tags to operate only on destination path with a matching value. + eg. ``\"\\\"tagname\\\"='my tag'\"`` + :keyword lease: + Required if the path has an active lease. Value can be a DataLakeLeaseClient object + or the lease ID as a string. + :paramtype lease: ~azure.storage.filedatalake.aio.DataLakeLeaseClient or str + :keyword ~datetime.datetime if_modified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only + if the resource has been modified since the specified time. 
+ :keyword ~datetime.datetime if_unmodified_since: + A DateTime value. Azure expects the date value passed in to be UTC. + If timezone is included, any non-UTC datetimes will be converted to UTC. + If a date is passed in without timezone info, it is assumed to be UTC. + Specify this header to perform the operation only if + the resource has not been modified since the specified date/time. + :keyword str etag: + An ETag value, or the wildcard character (*). Used to check if the resource has changed, + and act according to the condition specified by the `match_condition` parameter. + :keyword ~azure.core.MatchConditions match_condition: + The match condition to use upon the etag. + :keyword int timeout: + Sets the server-side timeout for the operation in seconds. For more details see + https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-blob-service-operations. + This value is not tracked or validated on the client. To configure client-side network timeouts + see `here `_. + :return: Key value pairs of path tags. + :rtype: Dict[str, str] + """ + return await self._blob_client.get_blob_tags(**kwargs) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.pyi b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.pyi index 1f2b2a9b2e29..28084c7fc960 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.pyi +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.pyi @@ -238,3 +238,33 @@ class PathClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin): # ty timeout: Optional[int] = None, **kwargs: Any ) -> DataLakeLeaseClient: ... 
+ @distributed_trace_async + async def set_tags( + self, + tags: Optional[Dict[str, str]] = None, + *, + version_id: Optional[str] = None, + validate_content: Optional[bool] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[DataLakeLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, Any]: ... + @distributed_trace_async + async def get_tags( + self, + *, + version_id: Optional[str] = None, + if_tags_match_condition: Optional[str] = None, + lease: Optional[Union[DataLakeLeaseClient, str]] = None, + if_modified_since: Optional[datetime] = None, + if_unmodified_since: Optional[datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + timeout: Optional[int] = None, + **kwargs: Any + ) -> Dict[str, str]: ... 
diff --git a/sdk/storage/azure-storage-file-datalake/swagger/README.md b/sdk/storage/azure-storage-file-datalake/swagger/README.md index 006da7eae4d3..21777a2b10f6 100644 --- a/sdk/storage/azure-storage-file-datalake/swagger/README.md +++ b/sdk/storage/azure-storage-file-datalake/swagger/README.md @@ -16,7 +16,7 @@ autorest --v3 --python ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Azure.Storage.Files.DataLake/stable/2026-02-06/DataLakeStorage.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Azure.Storage.Files.DataLake/stable/2026-06-06/DataLakeStorage.json output-folder: ../azure/storage/filedatalake/_generated namespace: azure.storage.filedatalake no-namespace-folders: true diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file.py b/sdk/storage/azure-storage-file-datalake/tests/test_file.py index f36f6a57852f..54b2533ee935 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file.py @@ -4,6 +4,7 @@ # license information. 
# -------------------------------------------------------------------------- import tempfile +import time import unittest from datetime import datetime, timedelta from math import ceil @@ -1875,6 +1876,59 @@ def callback(request): props = identity_file.get_file_properties(raw_request_hook=callback) assert props is not None + @DataLakePreparer() + @recorded_by_proxy + def test_data_lake_tags(self, **kwargs): + datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") + datalake_storage_account_key = kwargs.pop("datalake_storage_account_key") + + self._setUp(datalake_storage_account_name, datalake_storage_account_key) + directory_name = self._get_directory_reference() + self._create_directory_and_return_client(directory_name) + file_name = self._get_file_reference() + file_client = self.dsc.get_file_client(self.file_system_name, directory_name + '/' + file_name) + first_resp = file_client.create_file() + + early = file_client.get_file_properties().last_modified + first_tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} + second_tags = {"tag4": "fourthtag", "tag5": "fifthtag", "tag6": "sixthtag"} + + if self.is_live: + time.sleep(10) + + with pytest.raises(ResourceModifiedError): + file_client.set_tags(first_tags, if_modified_since=early) + with pytest.raises(ResourceModifiedError): + file_client.get_tags(if_modified_since=early) + with pytest.raises(ResourceModifiedError): + file_client.set_tags(first_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfModified) + + file_client.set_tags(first_tags, if_unmodified_since=early) + tags = file_client.get_tags(if_unmodified_since=early) + assert tags == first_tags + + file_client.set_tags(second_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfNotModified) + tags = file_client.get_tags(etag=first_resp['etag'], match_condition=MatchConditions.IfNotModified) + assert tags == second_tags + + data = b"abc123" + file_client.upload_data(data, length=len(data), 
overwrite=True) + + with pytest.raises(ResourceModifiedError): + file_client.set_tags(first_tags, if_unmodified_since=early) + with pytest.raises(ResourceModifiedError): + file_client.get_tags(if_unmodified_since=early) + with pytest.raises(ResourceModifiedError): + file_client.set_tags(first_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfNotModified) + + file_client.set_tags(first_tags, if_modified_since=early) + tags = file_client.get_tags(if_modified_since=early) + assert tags == first_tags + + file_client.set_tags(second_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfModified) + tags = file_client.get_tags(etag=first_resp['etag'], match_condition=MatchConditions.IfModified) + assert tags == second_tags + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py index aaae375b0dc6..cd00a5d081d1 100644 --- a/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py +++ b/sdk/storage/azure-storage-file-datalake/tests/test_file_async.py @@ -5,6 +5,7 @@ # -------------------------------------------------------------------------- import asyncio import tempfile +import time import unittest from datetime import datetime, timedelta from math import ceil @@ -1779,6 +1780,59 @@ def callback(request): props = identity_file.get_file_properties(raw_request_hook=callback) assert props is not None + @DataLakePreparer() + @recorded_by_proxy_async + async def test_data_lake_tags(self, **kwargs): + datalake_storage_account_name = kwargs.pop("datalake_storage_account_name") + datalake_storage_account_key = kwargs.pop("datalake_storage_account_key") + + await self._setUp(datalake_storage_account_name, datalake_storage_account_key) + directory_name = self._get_directory_reference() + await 
self._create_directory_and_return_client(directory_name) + file_name = self._get_file_reference() + file_client = self.dsc.get_file_client(self.file_system_name, directory_name + '/' + file_name) + first_resp = await file_client.create_file() + + early = (await file_client.get_file_properties()).last_modified + first_tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} + second_tags = {"tag4": "fourthtag", "tag5": "fifthtag", "tag6": "sixthtag"} + + if self.is_live: + time.sleep(10) + + with pytest.raises(ResourceModifiedError): + await file_client.set_tags(first_tags, if_modified_since=early) + with pytest.raises(ResourceModifiedError): + await file_client.get_tags(if_modified_since=early) + with pytest.raises(ResourceModifiedError): + await file_client.set_tags(first_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfModified) + + await file_client.set_tags(first_tags, if_unmodified_since=early) + tags = await file_client.get_tags(if_unmodified_since=early) + assert tags == first_tags + + await file_client.set_tags(second_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfNotModified) + tags = await file_client.get_tags(etag=first_resp['etag'], match_condition=MatchConditions.IfNotModified) + assert tags == second_tags + + data = b"abc123" + await file_client.upload_data(data, length=len(data), overwrite=True) + + with pytest.raises(ResourceModifiedError): + await file_client.set_tags(first_tags, if_unmodified_since=early) + with pytest.raises(ResourceModifiedError): + await file_client.get_tags(if_unmodified_since=early) + with pytest.raises(ResourceModifiedError): + await file_client.set_tags(first_tags, etag=first_resp['etag'], match_condition=MatchConditions.IfNotModified) + + await file_client.set_tags(first_tags, if_modified_since=early) + tags = await file_client.get_tags(if_modified_since=early) + assert tags == first_tags + + await file_client.set_tags(second_tags, etag=first_resp['etag'], 
match_condition=MatchConditions.IfModified) + tags = await file_client.get_tags(etag=first_resp['etag'], match_condition=MatchConditions.IfModified) + assert tags == second_tags + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-share/CHANGELOG.md b/sdk/storage/azure-storage-file-share/CHANGELOG.md index 1d22c6452247..59a3eabdd064 100644 --- a/sdk/storage/azure-storage-file-share/CHANGELOG.md +++ b/sdk/storage/azure-storage-file-share/CHANGELOG.md @@ -1,8 +1,18 @@ # Release History -## 12.26.0b1 (Unreleased) +## 12.26.0b1 (2026-03-30) ### Features Added +- Added support for service version 2026-06-06. +- Added support for the keyword `file_property_semantics` in `ShareClient`'s `create_directory` and `DirectoryClient`'s +`create_directory` APIs, which specifies permissions to be configured upon directory creation. +- Added support for the keyword `data` to `FileClient`'s `create_file` API, which specifies the +optional initial data to be uploaded (up to 4MB). +- Added support for connection strings and `account_url`s to accept URLs with `-ipv6` and `-dualstack` suffixes +for `ShareClient`, `ShareDirectoryClient`, and `ShareFileClient`. + +### Other Changes +- Consolidated the behavior of `max_concurrency=None` by defaulting to the shared `DEFAULT_MAX_CONCURRENCY` constant. 
## 12.25.0b1 (2026-01-27) diff --git a/sdk/storage/azure-storage-file-share/assets.json b/sdk/storage/azure-storage-file-share/assets.json index 8abfb36115df..f8436a861d3c 100644 --- a/sdk/storage/azure-storage-file-share/assets.json +++ b/sdk/storage/azure-storage-file-share/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/storage/azure-storage-file-share", - "Tag": "python/storage/azure-storage-file-share_1fb2aaa99c" + "Tag": "python/storage/azure-storage-file-share_4afd6de033" } diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py index 223b7fbf6360..5191ff9392ab 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.py @@ -346,6 +346,16 @@ def create_directory(self, **kwargs: Any) -> Dict[str, Any]: NFS only. The owning group of the directory. :keyword str file_mode: NFS only. The file mode of the directory. + :keyword file_property_semantics: + SMB only. Specifies permissions to be configured. Default value is None. + If not specified or None is passed, New will be the default. Possible values are: + + New - forcefully add the ARCHIVE attribute flag and alter the permissions specified in + x-ms-file-permission to inherit missing permissions from the parent. + + Restore - apply changes without further modification. + + :paramtype file_property_semantics: Optional[Literal["New", "Restore"]] :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. 
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.pyi index ba76bd85b530..2f5e9f679c58 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_directory_client.pyi @@ -121,6 +121,7 @@ class ShareDirectoryClient(StorageAccountHostsMixin): owner: Optional[str] = None, group: Optional[str] = None, file_mode: Optional[str] = None, + file_property_semantics: Optional[Literal["New", "Restore"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: ... diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py index fd432be34b30..78bfa03a6945 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.py @@ -437,6 +437,18 @@ def create_file( NFS only. The owning group of the file. :keyword str file_mode: NFS only. The file mode of the file. + :keyword file_property_semantics: + SMB only. Specifies permissions to be configured. Default value is None. + If not specified or None is passed, New will be the default. Possible values are: + + New - forcefully add the ARCHIVE attribute flag and alter the permissions specified in + x-ms-file-permission to inherit missing permissions from the parent. + + Restore - apply changes without further modification. + + :paramtype file_property_semantics: Optional[Literal["New", "Restore"]] + :keyword data: Optional initial data to upload, up to 4MB. + :paramtype data: bytes :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. 
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.pyi index 7136bf292c2a..22d645d975cc 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_file_client.pyi @@ -132,6 +132,8 @@ class ShareFileClient(StorageAccountHostsMixin): owner: Optional[str] = None, group: Optional[str] = None, file_mode: Optional[str] = None, + file_property_semantics: Optional[Literal["New", "Restore"]] = None, + data: Optional[bytes] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: ... diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py index 76311af25265..1f6bc63e53c0 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/aio/operations/_file_operations.py @@ -95,12 +95,14 @@ async def create( # pylint: disable=too-many-locals content_md5: Optional[bytes] = None, file_property_semantics: Optional[Union[str, _models.FilePropertySemantics]] = None, content_length: Optional[int] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, file_http_headers: Optional[_models.FileHTTPHeaders] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, optionalbody: Optional[IO[bytes]] = None, **kwargs: Any ) -> None: - """Creates a new file or replaces a file. Note it only initializes the file with no content. + """Creates a new file or replaces a file. Can also initialize the file with content. :param file_content_length: Specifies the maximum size for the file, up to 4 TB. 
Required. :type file_content_length: int @@ -167,6 +169,13 @@ async def create( # pylint: disable=too-many-locals When the x-ms-write header is set to clear, the value of this header must be set to zero. Default value is None. :type content_length: int + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int :param file_http_headers: Parameter group. Default value is None. :type file_http_headers: ~azure.storage.fileshare.models.FileHTTPHeaders :param lease_access_conditions: Parameter group. Default value is None. @@ -240,6 +249,8 @@ async def create( # pylint: disable=too-many-locals content_md5=content_md5, file_property_semantics=file_property_semantics, content_length=content_length, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, file_type_constant=file_type_constant, @@ -297,6 +308,9 @@ async def create( # pylint: disable=too-many-locals response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type")) response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: return cls(pipeline_response, None, response_headers) # type: ignore diff --git 
a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py index 176dfe9bda33..22ca6948e8b4 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_generated/operations/_file_operations.py @@ -64,6 +64,8 @@ def build_create_request( # pylint: disable=too-many-locals,too-many-statements content_md5: Optional[bytes] = None, file_property_semantics: Optional[Union[str, _models.FilePropertySemantics]] = None, content_length: Optional[int] = None, + structured_body_type: Optional[str] = None, + structured_content_length: Optional[int] = None, content: Optional[IO[bytes]] = None, allow_trailing_dot: Optional[bool] = None, file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None, @@ -146,6 +148,12 @@ def build_create_request( # pylint: disable=too-many-locals,too-many-statements ) if content_length is not None: _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int") + if structured_body_type is not None: + _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str") + if structured_content_length is not None: + _headers["x-ms-structured-content-length"] = _SERIALIZER.header( + "structured_content_length", structured_content_length, "int" + ) if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -1347,12 +1355,14 @@ def create( # pylint: disable=inconsistent-return-statements,too-many-locals content_md5: Optional[bytes] = None, file_property_semantics: Optional[Union[str, _models.FilePropertySemantics]] = None, content_length: Optional[int] = None, + structured_body_type: 
Optional[str] = None, + structured_content_length: Optional[int] = None, file_http_headers: Optional[_models.FileHTTPHeaders] = None, lease_access_conditions: Optional[_models.LeaseAccessConditions] = None, optionalbody: Optional[IO[bytes]] = None, **kwargs: Any ) -> None: - """Creates a new file or replaces a file. Note it only initializes the file with no content. + """Creates a new file or replaces a file. Can also initialize the file with content. :param file_content_length: Specifies the maximum size for the file, up to 4 TB. Required. :type file_content_length: int @@ -1419,6 +1429,13 @@ def create( # pylint: disable=inconsistent-return-statements,too-many-locals When the x-ms-write header is set to clear, the value of this header must be set to zero. Default value is None. :type content_length: int + :param structured_body_type: Required if the request body is a structured message. Specifies + the message schema version and properties. Default value is None. + :type structured_body_type: str + :param structured_content_length: Required if the request body is a structured message. + Specifies the length of the blob/file content inside the message body. Will always be smaller + than Content-Length. Default value is None. + :type structured_content_length: int :param file_http_headers: Parameter group. Default value is None. :type file_http_headers: ~azure.storage.fileshare.models.FileHTTPHeaders :param lease_access_conditions: Parameter group. Default value is None. 
@@ -1492,6 +1509,8 @@ def create( # pylint: disable=inconsistent-return-statements,too-many-locals content_md5=content_md5, file_property_semantics=file_property_semantics, content_length=content_length, + structured_body_type=structured_body_type, + structured_content_length=structured_content_length, allow_trailing_dot=self._config.allow_trailing_dot, file_request_intent=self._config.file_request_intent, file_type_constant=file_type_constant, @@ -1549,6 +1568,9 @@ def create( # pylint: disable=inconsistent-return-statements,too-many-locals response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type")) response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5")) response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["x-ms-structured-body"] = self._deserialize( + "str", response.headers.get("x-ms-structured-body") + ) if cls: return cls(pipeline_response, None, response_headers) # type: ignore diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py index 1330b66fcf9e..f5815256d02a 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.py @@ -982,6 +982,16 @@ def create_directory(self, directory_name: str, **kwargs: Any) -> ShareDirectory NFS only. The owning group of the directory. :keyword str file_mode: NFS only. The file mode of the directory. + :keyword file_property_semantics: + SMB only. Specifies permissions to be configured. Default value is None. + If not specified or None is passed, New will be the default. 
Possible values are: + + New - forcefully add the ARCHIVE attribute flag and alter the permissions specified in + x-ms-file-permission to inherit missing permissions from the parent. + + Restore - apply changes without further modification. + + :paramtype file_property_semantics: Optional[Literal["New", "Restore"]] :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.pyi index ae8d73ad95e1..a5c105bae781 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_share_client.pyi @@ -227,6 +227,7 @@ class ShareClient(StorageAccountHostsMixin): owner: Optional[str] = None, group: Optional[str] = None, file_mode: Optional[str] = None, + file_property_semantics: Optional[Literal["New", "Restore"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> ShareDirectoryClient: ... 
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/base_client.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/base_client.py index 5441488d86a9..57095ca402aa 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/base_client.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/_shared/base_client.py @@ -73,6 +73,38 @@ "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, } +_SECONDARY_SUFFIX = "-secondary" +_KNOWN_FEATURE_SUFFIXES = {"-ipv6", "-dualstack"} + + +def _construct_endpoints(netloc: str, account_part: str) -> Tuple[str, str, str]: + """ + Construct primary and secondary hostnames from a storage account URL's netloc. + + :param str netloc: The network location in a URL. + :param str account_part: The account part after parsing the URL. + :return: The account name, primary hostname, and secondary hostname. 
+ :rtype: Tuple[str, str, str] + """ + domain_suffix = netloc[len(account_part):] + secondary_idx = account_part.find(_SECONDARY_SUFFIX) + + # Case where customer provides secondary URL + if secondary_idx >= 0: + account_name = account_part[:secondary_idx] + primary_hostname = secondary_hostname = f"{account_part}{domain_suffix}" + else: + feature_suffix = "" + account_name = account_part + for suffix in _KNOWN_FEATURE_SUFFIXES: + if account_name.endswith(suffix): + feature_suffix = suffix + account_name = account_name[: -len(suffix)] + break + primary_hostname = f"{account_part}{domain_suffix}" + secondary_hostname = f"{account_name}{_SECONDARY_SUFFIX}{feature_suffix}{domain_suffix}" + + return account_name, primary_hostname, secondary_hostname class StorageAccountHostsMixin(object): @@ -115,21 +147,28 @@ def __init__( self._is_localhost = True self.account_name = parsed_url.path.strip("/") + secondary_hostname = "" + if len(account) > 1: + self.account_name, primary_hostname, secondary_hostname = _construct_endpoints( + parsed_url.netloc, account[0] + ) + else: + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/") + self.credential = _format_shared_key_credential(self.account_name, credential) if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"): raise ValueError("Token credential is only supported with HTTPS.") - secondary_hostname = "" if hasattr(self.credential, "account_name"): + if not self.account_name: + secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" self.account_name = self.credential.account_name - secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" if not self._hosts: - if len(account) > 1: - secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary") if kwargs.get("secondary_hostname"): secondary_hostname = kwargs["secondary_hostname"] - primary_hostname = (parsed_url.netloc + 
parsed_url.path).rstrip("/") + if not primary_hostname: + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/") self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname} self._sdk_moniker = f"storage-{service}/{VERSION}" diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py index 4fc04bb91d26..b4fb7ff46333 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.py @@ -352,6 +352,16 @@ async def create_directory(self, **kwargs: Any) -> Dict[str, Any]: NFS only. The owning group of the directory. :keyword str file_mode: NFS only. The file mode of the directory. + :keyword file_property_semantics: + SMB only. Specifies permissions to be configured. Default value is None. + If not specified or None is passed, New will be the default. Possible values are: + + New - forcefully add the ARCHIVE attribute flag and alter the permissions specified in + x-ms-file-permission to inherit missing permissions from the parent. + + Restore - apply changes without further modification. + + :paramtype file_property_semantics: Optional[Literal["New", "Restore"]] :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. 
diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.pyi index 70e0078868e0..ee32af55d216 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_directory_client_async.pyi @@ -126,6 +126,7 @@ class ShareDirectoryClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMix owner: Optional[str] = None, group: Optional[str] = None, file_mode: Optional[str] = None, + file_property_semantics: Optional[Literal["New", "Restore"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: ... diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py index 1ea648202ecc..45b1a96fefb9 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.py @@ -432,6 +432,18 @@ async def create_file( NFS only. The owning group of the file. :keyword str file_mode: NFS only. The file mode of the file. + :keyword file_property_semantics: + SMB only. Specifies permissions to be configured. Default value is None. + If not specified or None is passed, New will be the default. Possible values are: + + New - forcefully add the ARCHIVE attribute flag and alter the permissions specified in + x-ms-file-permission to inherit missing permissions from the parent. + + Restore - apply changes without further modification. + + :paramtype file_property_semantics: Optional[Literal["New", "Restore"]] + :keyword data: Optional initial data to upload, up to 4MB. + :paramtype data: bytes :keyword int timeout: Sets the server-side timeout for the operation in seconds. 
For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.pyi index 45639c916d5d..3f70def69364 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_file_client_async.pyi @@ -133,6 +133,8 @@ class ShareFileClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin): owner: Optional[str] = None, group: Optional[str] = None, file_mode: Optional[str] = None, + file_property_semantics: Optional[Literal["New", "Restore"]] = None, + data: Optional[bytes] = None, timeout: Optional[int] = None, **kwargs: Any ) -> Dict[str, Any]: ... diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py index 2d2cdaf0a3f0..8f6be194dc5c 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.py @@ -978,6 +978,16 @@ async def create_directory(self, directory_name: str, **kwargs: Any) -> ShareDir NFS only. The owning group of the directory. :keyword str file_mode: NFS only. The file mode of the directory. + :keyword file_property_semantics: + SMB only. Specifies permissions to be configured. Default value is None. + If not specified or None is passed, New will be the default. Possible values are: + + New - forcefully add the ARCHIVE attribute flag and alter the permissions specified in + x-ms-file-permission to inherit missing permissions from the parent. + + Restore - apply changes without further modification. 
+ + :paramtype file_property_semantics: Optional[Literal["New", "Restore"]] :keyword int timeout: Sets the server-side timeout for the operation in seconds. For more details see https://learn.microsoft.com/rest/api/storageservices/setting-timeouts-for-file-service-operations. diff --git a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.pyi b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.pyi index 98c078e74da8..e03fec9e8d7e 100644 --- a/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.pyi +++ b/sdk/storage/azure-storage-file-share/azure/storage/fileshare/aio/_share_client_async.pyi @@ -232,6 +232,7 @@ class ShareClient(AsyncStorageAccountHostsMixin, StorageAccountHostsMixin): # t owner: Optional[str] = None, group: Optional[str] = None, file_mode: Optional[str] = None, + file_property_semantics: Optional[Literal["New", "Restore"]] = None, timeout: Optional[int] = None, **kwargs: Any ) -> ShareDirectoryClient: ... 
diff --git a/sdk/storage/azure-storage-file-share/swagger/README.md b/sdk/storage/azure-storage-file-share/swagger/README.md index 6910c5517eca..b0a4352a5550 100644 --- a/sdk/storage/azure-storage-file-share/swagger/README.md +++ b/sdk/storage/azure-storage-file-share/swagger/README.md @@ -16,7 +16,7 @@ autorest --v3 --python ### Settings ``` yaml -input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.FileStorage/stable/2026-04-06/file.json +input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.FileStorage/stable/2026-06-06/file.json output-folder: ../azure/storage/fileshare/_generated namespace: azure.storage.fileshare no-namespace-folders: true diff --git a/sdk/storage/azure-storage-file-share/tests/conftest.py b/sdk/storage/azure-storage-file-share/tests/conftest.py index 66cd7dd21ae2..cb5378cb35f9 100644 --- a/sdk/storage/azure-storage-file-share/tests/conftest.py +++ b/sdk/storage/azure-storage-file-share/tests/conftest.py @@ -15,6 +15,7 @@ add_header_regex_sanitizer, add_oauth_response_sanitizer, add_uri_string_sanitizer, + add_uri_regex_sanitizer, test_proxy ) @@ -33,3 +34,7 @@ def add_sanitizers(test_proxy): add_body_regex_sanitizer(regex=r".*?", value="0.0.0.0:0") add_uri_string_sanitizer(target=".preprod.", value=".") + add_uri_regex_sanitizer( + regex=r"(?<=[?&]sktid=)[^&#]+", + value="00000000-0000-0000-0000-000000000000", + ) diff --git a/sdk/storage/azure-storage-file-share/tests/test_directory.py b/sdk/storage/azure-storage-file-share/tests/test_directory.py index d5e59e77beef..118d9f04f002 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_directory.py +++ b/sdk/storage/azure-storage-file-share/tests/test_directory.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- +import os import unittest from datetime import datetime, timedelta, timezone @@ -1477,6 +1478,29 @@ def test_file_permission_format_directory(self, **kwargs): new_directory_client.delete_directory() + @FileSharePreparer() + @recorded_by_proxy + def test_create_directory_semantics(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + self._setup(storage_account_name, storage_account_key) + share_client = self.fsc.get_share_client(self.share_name) + + directory = share_client.create_directory('dir1', file_property_semantics=None) + props = directory.get_directory_properties() + assert props is not None + + directory = share_client.create_directory('dir2', file_property_semantics='New') + props = directory.get_directory_properties() + assert props is not None + + directory = share_client.create_directory( + 'dir3', file_property_semantics='Restore', file_permission=TEST_FILE_PERMISSIONS + ) + props = directory.get_directory_properties() + assert props is not None + # ------------------------------------------------------------------------------ if __name__ == '__main__': unittest.main() diff --git a/sdk/storage/azure-storage-file-share/tests/test_directory_async.py b/sdk/storage/azure-storage-file-share/tests/test_directory_async.py index aab82f8447bd..23a0f3845df2 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_directory_async.py +++ b/sdk/storage/azure-storage-file-share/tests/test_directory_async.py @@ -5,8 +5,9 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -import unittest import asyncio +import os +import unittest from datetime import datetime, timedelta, timezone import pytest @@ -1580,4 +1581,27 @@ async def test_file_permission_format_directory(self, **kwargs): await new_directory_client.delete_directory() + @FileSharePreparer() + @recorded_by_proxy_async + async def test_create_directory_semantics(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + await self._setup(storage_account_name, storage_account_key) + share_client = self.fsc.get_share_client(self.share_name) + + directory = await share_client.create_directory('dir1', file_property_semantics=None) + props = await directory.get_directory_properties() + assert props is not None + + directory = await share_client.create_directory('dir2', file_property_semantics='New') + props = await directory.get_directory_properties() + assert props is not None + + directory = await share_client.create_directory( + 'dir3', file_property_semantics='Restore', file_permission=TEST_FILE_PERMISSIONS + ) + props = await directory.get_directory_properties() + assert props is not None + # ------------------------------------------------------------------------------ diff --git a/sdk/storage/azure-storage-file-share/tests/test_file.py b/sdk/storage/azure-storage-file-share/tests/test_file.py index 72104b1b7efd..0c03d1fccc29 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_file.py +++ b/sdk/storage/azure-storage-file-share/tests/test_file.py @@ -4060,4 +4060,64 @@ def test_download_file_with_none_max_concurrency(self, **kwargs): assert content == data + @FileSharePreparer() + @recorded_by_proxy + def test_create_file_semantics(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + self._setup(storage_account_name, 
storage_account_key)
+        storage_account_key = storage_account_key.secret
+        file_name = self._get_file_reference()
+
+        file1 = ShareFileClient(
+            self.account_url(storage_account_name, "file"),
+            share_name=self.share_name,
+            file_path=file_name + "file1",
+            credential=storage_account_key
+        )
+        file1.create_file(1024, file_property_semantics=None)
+        props = file1.get_file_properties()
+        assert props is not None
+
+        file2 = ShareFileClient(
+            self.account_url(storage_account_name, "file"),
+            share_name=self.share_name,
+            file_path=file_name + "file2",
+            credential=storage_account_key
+        )
+        file2.create_file(1024, file_property_semantics="New")
+        props = file2.get_file_properties()
+        assert props is not None
+
+        file3 = ShareFileClient(
+            self.account_url(storage_account_name, "file"),
+            share_name=self.share_name,
+            file_path=file_name + "file3",
+            credential=storage_account_key
+        )
+        file3.create_file(1024, file_property_semantics="Restore", file_permission=TEST_FILE_PERMISSIONS)
+        props = file3.get_file_properties()
+        assert props is not None
+
+    @FileSharePreparer()
+    @recorded_by_proxy
+    def test_create_file_with_data(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        self._setup(storage_account_name, storage_account_key)
+        file_name = self._get_file_reference()
+        file_client = ShareFileClient(
+            self.account_url(storage_account_name, "file"),
+            share_name=self.share_name,
+            file_path=file_name + "file",
+            credential=storage_account_key.secret
+        )
+        size = 1024
+        data = b"abc" * size
+        file_client.create_file(len(data), data=data)
+        downloaded_data = file_client.download_file().readall()
+        assert downloaded_data == data
+
 # ------------------------------------------------------------------------------
diff --git a/sdk/storage/azure-storage-file-share/tests/test_file_async.py b/sdk/storage/azure-storage-file-share/tests/test_file_async.py
index 0fd265019a4b..73d9780c8f56 100644
--- a/sdk/storage/azure-storage-file-share/tests/test_file_async.py
+++ b/sdk/storage/azure-storage-file-share/tests/test_file_async.py
@@ -3,7 +3,6 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-
 import asyncio
 import base64
 import os
@@ -4188,3 +4187,66 @@ async def test_download_file_with_none_max_concurrency(self, **kwargs):
         content = await (await file_client.download_file(max_concurrency=None)).readall()
         assert content == data
+
+    @FileSharePreparer()
+    @recorded_by_proxy_async
+    async def test_create_file_semantics(self, **kwargs):
+        storage_account_name = kwargs.pop("storage_account_name")
+        storage_account_key = kwargs.pop("storage_account_key")
+
+        self._setup(storage_account_name, storage_account_key)
+        await self._setup_share(storage_account_name, storage_account_key)
+        storage_account_key = storage_account_key.secret
+        file_name = self._get_file_reference()
+
+        file1 = ShareFileClient(
+            self.account_url(storage_account_name, "file"),
+            share_name=self.share_name,
+            file_path=file_name + "file1",
+            credential=storage_account_key
+        )
+        await file1.create_file(1024, file_property_semantics=None)
+        props = await file1.get_file_properties()
+        assert props is not None
+
+        file2 = ShareFileClient(
+            self.account_url(storage_account_name, "file"),
+            share_name=self.share_name,
+            file_path=file_name + "file2",
+            credential=storage_account_key
+        )
+        await file2.create_file(1024, file_property_semantics="New")
+        props = await file2.get_file_properties()
+        assert props is not None
+
+        file3 = ShareFileClient(
+            self.account_url(storage_account_name, "file"),
+            share_name=self.share_name,
+            file_path=file_name + "file3",
+            credential=storage_account_key
+        )
+        await file3.create_file(1024, file_property_semantics="Restore", file_permission=TEST_FILE_PERMISSIONS)
+        props = await file3.get_file_properties()
+        assert props is not None
+
+    
@FileSharePreparer() + @recorded_by_proxy_async + async def test_create_file_with_data(self, **kwargs): + storage_account_name = kwargs.pop("storage_account_name") + storage_account_key = kwargs.pop("storage_account_key") + + self._setup(storage_account_name, storage_account_key) + await self._setup_share(storage_account_name, storage_account_key) + + file_name = self._get_file_reference() + file_client = ShareFileClient( + self.account_url(storage_account_name, "file"), + share_name=self.share_name, + file_path=file_name + "file", + credential=storage_account_key.secret + ) + size = 1024 + data = b"abc" * size + await file_client.create_file(len(data), data=data) + downloaded_data = await (await file_client.download_file()).readall() + assert downloaded_data == data diff --git a/sdk/storage/azure-storage-file-share/tests/test_file_client.py b/sdk/storage/azure-storage-file-share/tests/test_file_client.py index df8a607f27f8..101a96270c96 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_file_client.py +++ b/sdk/storage/azure-storage-file-share/tests/test_file_client.py @@ -8,7 +8,14 @@ import pytest from azure.core.exceptions import AzureError -from azure.storage.fileshare import ShareClient, ShareDirectoryClient, ShareFileClient, ShareServiceClient, VERSION +from azure.storage.fileshare import ( + LocationMode, + ShareClient, + ShareDirectoryClient, + ShareFileClient, + ShareServiceClient, + VERSION +) from devtools_testutils import recorded_by_proxy from devtools_testutils.storage import StorageRecordedTestCase @@ -50,6 +57,15 @@ def validate_standard_account_endpoints(self, service, service_type, protocol='h assert service.primary_endpoint.startswith('{}://{}.{}.core.windows.net/'.format(protocol, self.account_name, service_type)) is True assert service.secondary_endpoint.startswith('{}://{}-secondary.{}.core.windows.net/'.format(protocol, self.account_name, service_type)) is True + def validate_ipv6_account_endpoints(self, service, account_name, 
account_key, primary_endpoint, secondary_endpoint): + assert service is not None + assert service.scheme == "https" + assert service.account_name == account_name + assert service.credential.account_name == account_name + assert service.credential.account_key == account_key + assert service._hosts[LocationMode.PRIMARY] == primary_endpoint + assert service._hosts[LocationMode.SECONDARY] == secondary_endpoint + # --Direct Parameters Test Cases -------------------------------------------- @FileSharePreparer() def test_create_service_with_key(self, **kwargs): @@ -176,6 +192,106 @@ def test_create_service_with_socket_timeout(self, **kwargs): assert service._client._client._pipeline._transport.connection_config.timeout == 22 assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] + @pytest.mark.parametrize( + "account_url, expected_primary, expected_secondary", [ + ( + "https://myaccount.file.core.windows.net/", + "myaccount.file.core.windows.net", + "myaccount-secondary.file.core.windows.net", + ), + ( + "https://myaccount-secondary.file.core.windows.net/", + "myaccount-secondary.file.core.windows.net", + "myaccount-secondary.file.core.windows.net", + ), + ( + "https://myaccount-dualstack.file.core.windows.net/", + "myaccount-dualstack.file.core.windows.net", + "myaccount-secondary-dualstack.file.core.windows.net", + ), + ( + "https://myaccount-ipv6.file.core.windows.net/", + "myaccount-ipv6.file.core.windows.net", + "myaccount-secondary-ipv6.file.core.windows.net", + ), + ( + "https://myaccount-secondary-dualstack.file.core.windows.net/", + "myaccount-secondary-dualstack.file.core.windows.net", + "myaccount-secondary-dualstack.file.core.windows.net", + ), + ( + "https://myaccount-secondary-ipv6.file.core.windows.net/", + "myaccount-secondary-ipv6.file.core.windows.net", + "myaccount-secondary-ipv6.file.core.windows.net", + ), + ] + ) + @FileSharePreparer() + def test_create_service_ipv6(self, account_url, 
expected_primary, expected_secondary, **kwargs): + storage_account_name = "myaccount" + storage_account_key = kwargs.pop("storage_account_key") + + share_name, directory_path, file_path = "foo", "bar", "baz" + + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=storage_account_key.secret, + share_name=share_name, + directory_path=directory_path, + file_path=file_path + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + conn_str = ( + "DefaultEndpointsProtocol=https;" + f"AccountName={storage_account_name};" + f"AccountKey={storage_account_key.secret};" + f"FileEndpoint={account_url};" + ) + service = service_type.from_connection_string( + conn_str, + credential=storage_account_key.secret, + share_name=share_name, + directory_path=directory_path, + file_path=file_path + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + service = ShareFileClient.from_file_url( + file_url=f"{account_url}/{share_name}/{directory_path}/{file_path}-secondary", + credential=storage_account_key.secret + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + @FileSharePreparer() + def test_create_service_ipv6_custom_domain(self): + token_credential = self.get_credential(ShareServiceClient) + + hostname= "github.com" + account_url = f"https://{hostname}" + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=token_credential, + share_name="foo", + directory_path="bar", + file_path="baz", + token_intent="backup" + ) + assert service is not None + assert service.scheme == "https" + assert service.account_name is None + assert service.credential is not None + assert service._hosts[LocationMode.PRIMARY] == hostname + assert 
service._hosts[LocationMode.SECONDARY] == "" + # --Connection String Test Cases -------------------------------------------- @FileSharePreparer() @@ -503,4 +619,3 @@ def test_closing_pipeline_client_simple(self, **kwargs): service = client( self.account_url(storage_account_name, "file"), credential=self.account_key.secret, share_name='foo', directory_path='bar', file_path='baz') service.close() - diff --git a/sdk/storage/azure-storage-file-share/tests/test_file_client_async.py b/sdk/storage/azure-storage-file-share/tests/test_file_client_async.py index bd2dac98405b..a39b81ba430e 100644 --- a/sdk/storage/azure-storage-file-share/tests/test_file_client_async.py +++ b/sdk/storage/azure-storage-file-share/tests/test_file_client_async.py @@ -7,7 +7,7 @@ import platform import pytest -from azure.storage.fileshare import VERSION +from azure.storage.fileshare import LocationMode, VERSION from azure.storage.fileshare.aio import ShareClient, ShareDirectoryClient, ShareFileClient, ShareServiceClient from devtools_testutils.aio import recorded_by_proxy_async @@ -51,6 +51,15 @@ def validate_standard_account_endpoints(self, service, service_type, protocol='h assert service.primary_endpoint.startswith('{}://{}.{}.core.windows.net/'.format(protocol, self.account_name, service_type)) is True assert service.secondary_endpoint.startswith('{}://{}-secondary.{}.core.windows.net/'.format(protocol, self.account_name, service_type)) is True + def validate_ipv6_account_endpoints(self, service, account_name, account_key, primary_endpoint, secondary_endpoint): + assert service is not None + assert service.scheme == "https" + assert service.account_name == account_name + assert service.credential.account_name == account_name + assert service.credential.account_key == account_key + assert service._hosts[LocationMode.PRIMARY] == primary_endpoint + assert service._hosts[LocationMode.SECONDARY] == secondary_endpoint + # --Direct Parameters Test Cases -------------------------------------------- 
@FileSharePreparer() async def test_create_service_with_key(self, **kwargs): @@ -176,6 +185,106 @@ async def test_create_service_with_socket_timeout(self, **kwargs): assert service._client._client._pipeline._transport.connection_config.timeout == 22 assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] + @pytest.mark.parametrize( + "account_url, expected_primary, expected_secondary", [ + ( + "https://myaccount.file.core.windows.net/", + "myaccount.file.core.windows.net", + "myaccount-secondary.file.core.windows.net", + ), + ( + "https://myaccount-secondary.file.core.windows.net/", + "myaccount-secondary.file.core.windows.net", + "myaccount-secondary.file.core.windows.net", + ), + ( + "https://myaccount-dualstack.file.core.windows.net/", + "myaccount-dualstack.file.core.windows.net", + "myaccount-secondary-dualstack.file.core.windows.net", + ), + ( + "https://myaccount-ipv6.file.core.windows.net/", + "myaccount-ipv6.file.core.windows.net", + "myaccount-secondary-ipv6.file.core.windows.net", + ), + ( + "https://myaccount-secondary-dualstack.file.core.windows.net/", + "myaccount-secondary-dualstack.file.core.windows.net", + "myaccount-secondary-dualstack.file.core.windows.net", + ), + ( + "https://myaccount-secondary-ipv6.file.core.windows.net/", + "myaccount-secondary-ipv6.file.core.windows.net", + "myaccount-secondary-ipv6.file.core.windows.net", + ), + ] + ) + @FileSharePreparer() + def test_create_service_ipv6(self, account_url, expected_primary, expected_secondary, **kwargs): + storage_account_name = "myaccount" + storage_account_key = kwargs.pop("storage_account_key") + + share_name, directory_path, file_path = "foo", "bar", "baz" + + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=storage_account_key.secret, + share_name=share_name, + directory_path=directory_path, + file_path=file_path + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, 
storage_account_key.secret, expected_primary, expected_secondary + ) + + conn_str = ( + "DefaultEndpointsProtocol=https;" + f"AccountName={storage_account_name};" + f"AccountKey={storage_account_key.secret};" + f"FileEndpoint={account_url};" + ) + service = service_type.from_connection_string( + conn_str, + credential=storage_account_key.secret, + share_name=share_name, + directory_path=directory_path, + file_path=file_path + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + service = ShareFileClient.from_file_url( + file_url=f"{account_url}/{share_name}/{directory_path}/{file_path}-secondary", + credential=storage_account_key.secret + ) + self.validate_ipv6_account_endpoints( + service, storage_account_name, storage_account_key.secret, expected_primary, expected_secondary + ) + + @FileSharePreparer() + def test_create_service_ipv6_custom_domain(self): + token_credential = self.get_credential(ShareServiceClient, is_async=True) + + hostname = "github.com" + account_url = f"https://{hostname}" + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=token_credential, + share_name="foo", + directory_path="bar", + file_path="baz", + token_intent="backup" + ) + assert service is not None + assert service.scheme == "https" + assert service.account_name is None + assert service.credential is not None + assert service._hosts[LocationMode.PRIMARY] == hostname + assert service._hosts[LocationMode.SECONDARY] == "" + # --Connection String Test Cases -------------------------------------------- @FileSharePreparer() @@ -456,5 +565,3 @@ async def test_closing_pipeline_client_simple(self, **kwargs): service = client( self.account_url(storage_account_name, "file"), credential=self.account_key.secret, share_name='foo', directory_path='bar', file_path='baz') await service.close() - - diff --git a/sdk/storage/azure-storage-queue/CHANGELOG.md 
b/sdk/storage/azure-storage-queue/CHANGELOG.md index 33d2e06983f1..f48f6c62d0ea 100644 --- a/sdk/storage/azure-storage-queue/CHANGELOG.md +++ b/sdk/storage/azure-storage-queue/CHANGELOG.md @@ -1,8 +1,11 @@ # Release History -## 12.17.0b1 (Unreleased) +## 12.17.0b1 (2026-03-30) ### Features Added +- Added support for service version 2026-06-06. +- Added support for connection strings and `account_url`s to accept URLs with `-ipv6` and `-dualstack` suffixes +for `QueueServiceClient` and `QueueClient`. ## 12.16.0b1 (2026-01-27) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_encryption.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_encryption.py index 2153d1da1da6..5d9fcb187987 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_encryption.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_encryption.py @@ -41,7 +41,11 @@ _ENCRYPTION_PROTOCOL_V1 = "1.0" _ENCRYPTION_PROTOCOL_V2 = "2.0" _ENCRYPTION_PROTOCOL_V2_1 = "2.1" -_VALID_ENCRYPTION_PROTOCOLS = [_ENCRYPTION_PROTOCOL_V1, _ENCRYPTION_PROTOCOL_V2, _ENCRYPTION_PROTOCOL_V2_1] +_VALID_ENCRYPTION_PROTOCOLS = [ + _ENCRYPTION_PROTOCOL_V1, + _ENCRYPTION_PROTOCOL_V2, + _ENCRYPTION_PROTOCOL_V2_1, +] _ENCRYPTION_V2_PROTOCOLS = [_ENCRYPTION_PROTOCOL_V2, _ENCRYPTION_PROTOCOL_V2_1] _GCM_REGION_DATA_LENGTH = 4 * 1024 * 1024 _GCM_NONCE_LENGTH = 12 @@ -308,7 +312,10 @@ def is_encryption_v2(encryption_data: Optional[_EncryptionData]) -> bool: def modify_user_agent_for_encryption( - user_agent: str, moniker: str, encryption_version: str, request_options: Dict[str, Any] + user_agent: str, + moniker: str, + encryption_version: str, + request_options: Dict[str, Any], ) -> None: """ Modifies the request options to contain a user agent string updated with encryption information. 
@@ -360,7 +367,10 @@ def get_adjusted_upload_size(length: int, encryption_version: str) -> int: def get_adjusted_download_range_and_offset( - start: int, end: int, length: Optional[int], encryption_data: Optional[_EncryptionData] + start: int, + end: int, + length: Optional[int], + encryption_data: Optional[_EncryptionData], ) -> Tuple[Tuple[int, int], Tuple[int, int]]: """ Gets the new download range and offsets into the decrypted data for @@ -579,11 +589,17 @@ def _dict_to_encryption_data(encryption_data_dict: Dict[str, Any]) -> _Encryptio if "EncryptedRegionInfo" in encryption_data_dict: encrypted_region_info = encryption_data_dict["EncryptedRegionInfo"] region_info = _EncryptedRegionInfo( - encrypted_region_info["DataLength"], encrypted_region_info["NonceLength"], _GCM_TAG_LENGTH + encrypted_region_info["DataLength"], + encrypted_region_info["NonceLength"], + _GCM_TAG_LENGTH, ) encryption_data = _EncryptionData( - encryption_iv, region_info, encryption_agent, wrapped_content_key, key_wrapping_metadata + encryption_iv, + region_info, + encryption_agent, + wrapped_content_key, + key_wrapping_metadata, ) return encryption_data @@ -656,7 +672,8 @@ def _validate_and_unwrap_cek( raise ValueError("Provided or resolved key-encryption-key does not match the id of key used to encrypt.") # Will throw an exception if the specified algorithm is not supported. content_encryption_key = key_encryption_key.unwrap_key( - encryption_data.wrapped_content_key.encrypted_key, encryption_data.wrapped_content_key.algorithm + encryption_data.wrapped_content_key.encrypted_key, + encryption_data.wrapped_content_key.algorithm, ) # For V2, the version is included with the cek. 
We need to validate it @@ -889,7 +906,10 @@ def decrypt_blob( # pylint: disable=too-many-locals,too-many-statements return content algorithm = encryption_data.encryption_agent.encryption_algorithm - if algorithm not in (_EncryptionAlgorithm.AES_CBC_256, _EncryptionAlgorithm.AES_GCM_256): + if algorithm not in ( + _EncryptionAlgorithm.AES_CBC_256, + _EncryptionAlgorithm.AES_GCM_256, + ): raise ValueError("Specified encryption algorithm is not supported.") version = encryption_data.encryption_agent.protocol diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py index 8ef85723b820..688769fac8c3 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_azure_queue_storage.py @@ -17,7 +17,12 @@ from . import models as _models from ._configuration import AzureQueueStorageConfiguration from ._utils.serialization import Deserializer, Serializer -from .operations import MessageIdOperations, MessagesOperations, QueueOperations, ServiceOperations +from .operations import ( + MessageIdOperations, + MessagesOperations, + QueueOperations, + ServiceOperations, +) class AzureQueueStorage: # pylint: disable=client-accepts-api-version-keyword @@ -59,7 +64,7 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential self._config.custom_hook_policy, self._config.logging_policy, policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + (policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None), self._config.http_logging_policy, ] self._client: PipelineClient = PipelineClient(base_url=base_url, policies=_policies, **kwargs) diff --git 
a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_utils/serialization.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_utils/serialization.py index 6da830e0cf4a..adacf7b3e18a 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_utils/serialization.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/_utils/serialization.py @@ -235,9 +235,17 @@ def __init__(self, **kwargs: Any) -> None: self.additional_properties: Optional[dict[str, Any]] = {} for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: - _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + _LOGGER.warning( + "%s is not a known attribute of class %s and will be ignored", + k, + self.__class__, + ) elif k in self._validation and self._validation[k].get("readonly", False): - _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + _LOGGER.warning( + "Readonly attribute %s will be ignored in class %s", + k, + self.__class__, + ) else: setattr(self, k, kwargs[k]) @@ -288,7 +296,11 @@ def _create_xml_node(cls): except AttributeError: xml_map = {} - return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + return _create_xml_node( + xml_map.get("name", cls.__name__), + xml_map.get("prefix", None), + xml_map.get("ns", None), + ) def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: """Return the JSON that would be sent to server from this model. 
@@ -449,7 +461,11 @@ def _classify(cls, response, objects): ) break else: - _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + _LOGGER.warning( + "Discriminator %s is absent or null, use base class %s.", + subtype_key, + cls.__name__, + ) break return cls @@ -916,7 +932,11 @@ def serialize_iter(self, data, iter_type, div=None, **kwargs): if isinstance(el, ET.Element): el_node = el else: - el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + el_node = _create_xml_node( + node_name, + xml_desc.get("prefix", None), + xml_desc.get("ns", None), + ) if el is not None: # Otherwise it writes "None" :-p el_node.text = str(el) final_result.append(el_node) @@ -1153,7 +1173,12 @@ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument if microseconds: microseconds = "." + microseconds date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( - utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + utc.tm_year, + utc.tm_mon, + utc.tm_mday, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, ) return date + microseconds + "Z" except (ValueError, OverflowError) as err: @@ -1429,7 +1454,10 @@ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] try: - for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + for ( + attr, + mapconfig, + ) in data._attribute_map.items(): # pylint: disable=protected-access if attr in constants: continue value = getattr(data, attr) @@ -1547,7 +1575,8 @@ def failsafe_deserialize(self, target_obj, data, content_type=None): return self(target_obj, data, content_type=content_type) except: # pylint: disable=bare-except _LOGGER.debug( - "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", + exc_info=True, ) return None @@ -1849,7 +1878,11 @@ def deserialize_enum(data, enum_obj): if enum_value.value.lower() == str(data).lower(): return enum_value # We don't fail anymore for unknown value, we deserialize as a string - _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + _LOGGER.warning( + "Deserializer is not able to find %s as valid enum in %s", + data, + enum_obj, + ) return Deserializer.deserialize_unicode(data) @staticmethod diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py index 3b6b202768e2..5a2ba62e4064 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/_azure_queue_storage.py @@ -17,7 +17,12 @@ from .. 
import models as _models from .._utils.serialization import Deserializer, Serializer from ._configuration import AzureQueueStorageConfiguration -from .operations import MessageIdOperations, MessagesOperations, QueueOperations, ServiceOperations +from .operations import ( + MessageIdOperations, + MessagesOperations, + QueueOperations, + ServiceOperations, +) class AzureQueueStorage: # pylint: disable=client-accepts-api-version-keyword @@ -59,7 +64,7 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential self._config.custom_hook_policy, self._config.logging_policy, policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + (policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None), self._config.http_logging_policy, ] self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=base_url, policies=_policies, **kwargs) diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py index 997087907907..bc26d91d2d9d 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_message_id_operations.py @@ -25,7 +25,10 @@ from ... 
import models as _models from ..._utils.serialization import Deserializer, Serializer -from ...operations._message_id_operations import build_delete_request, build_update_request +from ...operations._message_id_operations import ( + build_delete_request, + build_update_request, +) from .._configuration import AzureQueueStorageConfiguration T = TypeVar("T") diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_patch.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_patch.py index 5755fd181b3f..2e25743cab74 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_patch.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_patch.py @@ -8,6 +8,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ + from typing import List __all__: List[str] = [] # Add all objects you want publicly available to users at this package level diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py index 76787fa67345..252b7d54f4c5 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/aio/operations/_queue_operations.py @@ -460,7 +460,10 @@ async def set_access_policy( serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True}} if queue_acl is not None: _content = self._serialize.body( - queue_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt + queue_acl, + "[SignedIdentifier]", + is_xml=True, + serialization_ctxt=serialization_ctxt, ) else: _content = None diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py 
b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py index 825f04418cf0..e28743c64d07 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_models_py3.py @@ -404,7 +404,11 @@ class ListQueuesSegmentResponse(_serialization.Model): } _attribute_map = { - "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}}, + "service_endpoint": { + "key": "ServiceEndpoint", + "type": "str", + "xml": {"attr": True}, + }, "prefix": {"key": "Prefix", "type": "str"}, "marker": {"key": "Marker", "type": "str"}, "max_results": {"key": "MaxResults", "type": "int"}, diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_patch.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_patch.py index 5755fd181b3f..2e25743cab74 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_patch.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/models/_patch.py @@ -8,6 +8,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ + from typing import List __all__: List[str] = [] # Add all objects you want publicly available to users at this package level diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_patch.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_patch.py index 5755fd181b3f..2e25743cab74 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_patch.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_patch.py @@ -8,6 +8,7 @@ Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ + from typing import List __all__: List[str] = [] # Add all objects you want publicly available to users at this package level 
diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py index fd4370c3801f..ddd138f17de9 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_queue_operations.py @@ -662,7 +662,10 @@ def set_access_policy( # pylint: disable=inconsistent-return-statements serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True}} if queue_acl is not None: _content = self._serialize.body( - queue_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt + queue_acl, + "[SignedIdentifier]", + is_xml=True, + serialization_ctxt=serialization_ctxt, ) else: _content = None diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_message_encoding.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_message_encoding.py index bd62f9933338..5dc0dc8f6a26 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_message_encoding.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_message_encoding.py @@ -9,7 +9,12 @@ from azure.core.exceptions import DecodeError -from ._encryption import decrypt_queue_message, encrypt_queue_message, KeyEncryptionKey, _ENCRYPTION_PROTOCOL_V1 +from ._encryption import ( + decrypt_queue_message, + encrypt_queue_message, + KeyEncryptionKey, + _ENCRYPTION_PROTOCOL_V1, +) if TYPE_CHECKING: from azure.core.pipeline import PipelineResponse @@ -78,7 +83,11 @@ def __call__(self, response: "PipelineResponse", obj: Iterable, headers: Dict[st content = message.message_text if (self.key_encryption_key is not None) or (self.resolver is not None): content = decrypt_queue_message( - content, response, self.require_encryption, self.key_encryption_key, self.resolver + content, + response, + self.require_encryption, + 
self.key_encryption_key, + self.resolver, ) message.message_text = self.decode(content, response) return obj diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_models.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_models.py index 6d7faf9e860e..5fd01179d74e 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_models.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_models.py @@ -9,7 +9,10 @@ from typing import Any, Callable, Dict, List, Optional, Tuple, TYPE_CHECKING, Union from azure.core.exceptions import HttpResponseError from azure.core.paging import PageIterator -from ._shared.response_handlers import process_storage_error, return_context_and_deserialized +from ._shared.response_handlers import ( + process_storage_error, + return_context_and_deserialized, +) from ._shared.models import DictMixin from ._generated.models import AccessPolicy as GenAccessPolicy from ._generated.models import CorsRule as GeneratedCorsRule @@ -198,7 +201,9 @@ def __init__(self, allowed_origins: List[str], allowed_methods: List[str], **kwa self.max_age_in_seconds = kwargs.get("max_age_in_seconds", 0) @staticmethod - def _to_generated(rules: Optional[List["CorsRule"]]) -> Optional[List[GeneratedCorsRule]]: + def _to_generated( + rules: Optional[List["CorsRule"]], + ) -> Optional[List[GeneratedCorsRule]]: if rules is None: return rules @@ -251,7 +256,13 @@ class QueueSasPermissions(object): process: bool = False """Get and delete messages from the queue.""" - def __init__(self, read: bool = False, add: bool = False, update: bool = False, process: bool = False) -> None: + def __init__( + self, + read: bool = False, + add: bool = False, + update: bool = False, + process: bool = False, + ) -> None: self.read = read self.add = add self.update = update diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py index 7812bffb9602..17205bbf69eb 100644 
--- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client.py @@ -23,10 +23,18 @@ from ._serialize import get_api_version from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin from ._shared.request_handlers import add_metadata_headers, serialize_iso -from ._shared.response_handlers import process_storage_error, return_headers_and_deserialized, return_response_headers +from ._shared.response_handlers import ( + process_storage_error, + return_headers_and_deserialized, + return_response_headers, +) if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from azure.core.credentials import ( + AzureNamedKeyCredential, + AzureSasCredential, + TokenCredential, + ) from ._message_encoding import ( BinaryBase64DecodePolicy, BinaryBase64EncodePolicy, @@ -96,7 +104,13 @@ def __init__( account_url: str, queue_name: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "TokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -120,7 +134,10 @@ def __init__( self._message_encode_policy = message_encode_policy or NoEncodePolicy() self._message_decode_policy = message_decode_policy or NoDecodePolicy() self._client = AzureQueueStorage( - self.url, get_api_version(api_version), base_url=self.url, pipeline=self._pipeline + self.url, + get_api_version(api_version), + base_url=self.url, + pipeline=self._pipeline, ) self._configure_encryption(kwargs) @@ -129,7 +146,10 @@ def __enter__(self) -> Self: return self def __exit__( - self, typ: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType] + self, + typ: Optional[type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], ) -> 
None: self._client.__exit__(typ, exc, tb) # pylint: disable=specify-parameter-names-in-call @@ -150,14 +170,25 @@ def _format_url(self, hostname: str) -> str: :returns: The formatted endpoint URL according to the specified location mode hostname. :rtype: str """ - return _format_url(queue_name=self.queue_name, hostname=hostname, scheme=self.scheme, query_str=self._query_str) + return _format_url( + queue_name=self.queue_name, + hostname=hostname, + scheme=self.scheme, + query_str=self._query_str, + ) @classmethod def from_queue_url( cls, queue_url: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "TokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -222,7 +253,13 @@ def from_connection_string( conn_str: str, queue_name: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "TokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -575,7 +612,10 @@ def send_message( """ if self.key_encryption_key: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) try: @@ -663,7 +703,10 @@ def receive_message( """ if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) self._message_decode_policy.configure( @@ -756,7 +799,10 @@ def receive_messages( """ if self.key_encryption_key or self.key_resolver_function: 
modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) self._message_decode_policy.configure( @@ -847,7 +893,10 @@ def update_message( """ if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) if isinstance(message, QueueMessage): @@ -883,7 +932,9 @@ def update_message( Retrying without encryption_version." ) self._message_encode_policy.configure( - self.require_encryption, self.key_encryption_key, self.key_resolver_function + self.require_encryption, + self.key_encryption_key, + self.key_resolver_function, ) encoded_message_text = self._message_encode_policy(message_text) updated = GenQueueMessage(message_text=encoded_message_text) @@ -963,7 +1014,10 @@ def peek_messages( if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) self._message_decode_policy.configure( diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client_helpers.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client_helpers.py index 36e1ddf3a7e7..1ce357070db3 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client_helpers.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_client_helpers.py @@ -9,7 +9,11 @@ from ._shared.base_client import parse_query if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from 
azure.core.credentials import ( + AzureNamedKeyCredential, + AzureSasCredential, + TokenCredential, + ) from azure.core.credentials_async import AsyncTokenCredential from urllib.parse import ParseResult diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py index 4c2004122960..84fbef762fe0 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client.py @@ -26,13 +26,24 @@ from ._queue_client import QueueClient from ._queue_service_client_helpers import _parse_url from ._serialize import get_api_version -from ._shared.base_client import parse_connection_str, StorageAccountHostsMixin, TransportWrapper +from ._shared.base_client import ( + parse_connection_str, + StorageAccountHostsMixin, + TransportWrapper, +) from ._shared.models import LocationMode from ._shared.parser import _to_utc_datetime -from ._shared.response_handlers import parse_to_internal_user_delegation_key, process_storage_error +from ._shared.response_handlers import ( + parse_to_internal_user_delegation_key, + process_storage_error, +) if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from azure.core.credentials import ( + AzureNamedKeyCredential, + AzureSasCredential, + TokenCredential, + ) from datetime import datetime from ._models import Metrics, QueueAnalyticsLogging from ._shared.models import UserDelegationKey @@ -98,7 +109,13 @@ def __init__( self, account_url: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "TokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -117,7 +134,10 @@ def __init__( **kwargs, ) self._client = 
AzureQueueStorage( - self.url, get_api_version(api_version), base_url=self.url, pipeline=self._pipeline + self.url, + get_api_version(api_version), + base_url=self.url, + pipeline=self._pipeline, ) self._configure_encryption(kwargs) @@ -126,7 +146,10 @@ def __enter__(self) -> Self: return self def __exit__( - self, typ: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType] + self, + typ: Optional[type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], ) -> None: self._client.__exit__(typ, exc, tb) # pylint: disable=specify-parameter-names-in-call @@ -154,7 +177,13 @@ def from_connection_string( cls, conn_str: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "TokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "TokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -426,7 +455,12 @@ def list_queues( @distributed_trace def create_queue( - self, name: str, metadata: Optional[Dict[str, str]] = None, *, timeout: Optional[int] = None, **kwargs: Any + self, + name: str, + metadata: Optional[Dict[str, str]] = None, + *, + timeout: Optional[int] = None, + **kwargs: Any, ) -> QueueClient: """Creates a new queue under the specified account. @@ -459,7 +493,11 @@ def create_queue( @distributed_trace def delete_queue( - self, queue: Union["QueueProperties", str], *, timeout: Optional[int] = None, **kwargs: Any + self, + queue: Union["QueueProperties", str], + *, + timeout: Optional[int] = None, + **kwargs: Any, ) -> None: """Deletes the specified queue and any messages it contains. 
diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client_helpers.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client_helpers.py index 9e29d00b4dd5..62266ac058f6 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client_helpers.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_queue_service_client_helpers.py @@ -9,7 +9,11 @@ from ._shared.base_client import parse_query if TYPE_CHECKING: - from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential, TokenCredential + from azure.core.credentials import ( + AzureNamedKeyCredential, + AzureSasCredential, + TokenCredential, + ) from azure.core.credentials_async import AsyncTokenCredential from urllib.parse import ParseResult diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/authentication.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/authentication.py index f778dc71eec4..cdbeeace2427 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/authentication.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/authentication.py @@ -16,7 +16,9 @@ pass try: - from azure.core.pipeline.transport import AioHttpTransport # pylint: disable=non-abstract-transport-import + from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import + AioHttpTransport, + ) except ImportError: AioHttpTransport = None @@ -152,9 +154,16 @@ def _get_canonicalized_resource(self, request): try: if ( isinstance(request.context.transport, AioHttpTransport) - or isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport) or isinstance( - getattr(getattr(request.context.transport, "_transport", None), "_transport", None), + getattr(request.context.transport, "_transport", None), + AioHttpTransport, + ) + or isinstance( + getattr( + getattr(request.context.transport, "_transport", None), + "_transport", + None, + ), 
AioHttpTransport, ) ): diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client.py index 3f7609b9f026..86734de7a20b 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client.py @@ -17,7 +17,11 @@ ) from urllib.parse import parse_qs, quote -from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential, TokenCredential +from azure.core.credentials import ( + AzureSasCredential, + AzureNamedKeyCredential, + TokenCredential, +) from azure.core.exceptions import HttpResponseError from azure.core.pipeline import Pipeline from azure.core.pipeline.transport import ( # pylint: disable=non-abstract-transport-import, no-name-in-module @@ -64,7 +68,10 @@ if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential - from azure.core.pipeline.transport import HttpRequest, HttpResponse # pylint: disable=C4756 + from azure.core.pipeline.transport import ( # pylint: disable=C4756 + HttpRequest, + HttpResponse, + ) _LOGGER = logging.getLogger(__name__) _SERVICE_PARAMS = { @@ -73,6 +80,38 @@ "file": {"primary": "FILEENDPOINT", "secondary": "FILESECONDARYENDPOINT"}, "dfs": {"primary": "BLOBENDPOINT", "secondary": "BLOBENDPOINT"}, } +_SECONDARY_SUFFIX = "-secondary" +_KNOWN_FEATURE_SUFFIXES = {"-ipv6", "-dualstack"} + + +def _construct_endpoints(netloc: str, account_part: str) -> Tuple[str, str, str]: + """ + Construct primary and secondary hostnames from a storage account URL's netloc. + + :param str netloc: The network location in a URL. + :param str account_part: The account part after parsing the URL. + :return: The account name, primary hostname, and secondary hostname. 
+ :rtype: Tuple[str, str, str] + """ + domain_suffix = netloc[len(account_part) :] + secondary_idx = account_part.find(_SECONDARY_SUFFIX) + + # Case where customer provides secondary URL + if secondary_idx >= 0: + account_name = account_part[:secondary_idx] + primary_hostname = secondary_hostname = f"{account_part}{domain_suffix}" + else: + feature_suffix = "" + account_name = account_part + for suffix in _KNOWN_FEATURE_SUFFIXES: + if account_name.endswith(suffix): + feature_suffix = suffix + account_name = account_name[: -len(suffix)] + break + primary_hostname = f"{account_part}{domain_suffix}" + secondary_hostname = f"{account_name}{_SECONDARY_SUFFIX}{feature_suffix}{domain_suffix}" + + return account_name, primary_hostname, secondary_hostname class StorageAccountHostsMixin(object): @@ -115,22 +154,32 @@ def __init__( self._is_localhost = True self.account_name = parsed_url.path.strip("/") + secondary_hostname = "" + if len(account) > 1: + self.account_name, primary_hostname, secondary_hostname = _construct_endpoints( + parsed_url.netloc, account[0] + ) + else: + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/") + self.credential = _format_shared_key_credential(self.account_name, credential) if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"): raise ValueError("Token credential is only supported with HTTPS.") - secondary_hostname = "" if hasattr(self.credential, "account_name"): + if not self.account_name: + secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" self.account_name = self.credential.account_name - secondary_hostname = f"{self.credential.account_name}-secondary.{service_name}.{SERVICE_HOST_BASE}" if not self._hosts: - if len(account) > 1: - secondary_hostname = parsed_url.netloc.replace(account[0], account[0] + "-secondary") if kwargs.get("secondary_hostname"): secondary_hostname = kwargs["secondary_hostname"] - primary_hostname = (parsed_url.netloc + 
parsed_url.path).rstrip("/") - self._hosts = {LocationMode.PRIMARY: primary_hostname, LocationMode.SECONDARY: secondary_hostname} + if not primary_hostname: + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/") + self._hosts = { + LocationMode.PRIMARY: primary_hostname, + LocationMode.SECONDARY: secondary_hostname, + } self._sdk_moniker = f"storage-{service}/{VERSION}" self._config, self._pipeline = self._create_pipeline(self.credential, sdk_moniker=self._sdk_moniker, **kwargs) @@ -224,12 +273,27 @@ def _format_query_string( self, sas_token: Optional[str], credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + TokenCredential, + ] ], snapshot: Optional[str] = None, share_snapshot: Optional[str] = None, ) -> Tuple[ - str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", TokenCredential]] + str, + Optional[ + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + TokenCredential, + ] + ], ]: query_str = "?" 
if snapshot: @@ -251,7 +315,13 @@ def _format_query_string( def _create_pipeline( self, credential: Optional[ - Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential] + Union[ + str, + Dict[str, str], + AzureNamedKeyCredential, + AzureSasCredential, + TokenCredential, + ] ] = None, **kwargs: Any, ) -> Tuple[StorageConfiguration, Pipeline]: @@ -347,7 +417,9 @@ def _batch_send(self, *reqs: "HttpRequest", **kwargs: Any) -> Iterator["HttpResp parts = list(response.parts()) if any(p for p in parts if not 200 <= p.status_code < 300): error = PartialBatchErrorException( - message="There is a partial failure in the batch operation.", response=response, parts=parts + message="There is a partial failure in the batch operation.", + response=response, + parts=parts, ) raise error return iter(parts) @@ -384,7 +456,14 @@ def __exit__(self, *args): def _format_shared_key_credential( account_name: Optional[str], credential: Optional[ - Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, "AsyncTokenCredential", TokenCredential] + Union[ + str, + Dict[str, str], + AzureNamedKeyCredential, + AzureSasCredential, + "AsyncTokenCredential", + TokenCredential, + ] ] = None, ) -> Any: if isinstance(credential, str): @@ -404,12 +483,28 @@ def _format_shared_key_credential( def parse_connection_str( conn_str: str, - credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]], + credential: Optional[ + Union[ + str, + Dict[str, str], + AzureNamedKeyCredential, + AzureSasCredential, + TokenCredential, + ] + ], service: str, ) -> Tuple[ str, Optional[str], - Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, TokenCredential]], + Optional[ + Union[ + str, + Dict[str, str], + AzureNamedKeyCredential, + AzureSasCredential, + TokenCredential, + ] + ], ]: conn_str = conn_str.rstrip(";") conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")] @@ -423,7 
+518,10 @@ def parse_connection_str( secondary = None if not credential: try: - credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} + credential = { + "account_name": conn_settings["ACCOUNTNAME"], + "account_key": conn_settings["ACCOUNTKEY"], + } except KeyError: credential = conn_settings.get("SHAREDACCESSSIGNATURE") if endpoints["primary"] in conn_settings: diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client_async.py index 400f7d6f6dff..54446f7fc5b4 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/base_client_async.py @@ -41,12 +41,18 @@ StorageHosts, StorageRequestHook, ) -from .policies_async import AsyncStorageBearerTokenCredentialPolicy, AsyncStorageResponseHook +from .policies_async import ( + AsyncStorageBearerTokenCredentialPolicy, + AsyncStorageResponseHook, +) from .response_handlers import PartialBatchErrorException, process_storage_error from .._shared_access_signature import _is_credential_sastoken if TYPE_CHECKING: - from azure.core.pipeline.transport import HttpRequest, HttpResponse # pylint: disable=C4756 + from azure.core.pipeline.transport import ( # pylint: disable=C4756 + HttpRequest, + HttpResponse, + ) _LOGGER = logging.getLogger(__name__) _SERVICE_PARAMS = { @@ -63,12 +69,27 @@ def _format_query_string( self, sas_token: Optional[str], credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + AsyncTokenCredential, + ] ], snapshot: Optional[str] = None, share_snapshot: Optional[str] = None, ) -> Tuple[ - str, Optional[Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", AsyncTokenCredential]] + 
str, + Optional[ + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + AsyncTokenCredential, + ] + ], ]: query_str = "?" if snapshot: @@ -89,12 +110,22 @@ def _format_query_string( def _create_pipeline( self, credential: Optional[ - Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential] + Union[ + str, + Dict[str, str], + AzureNamedKeyCredential, + AzureSasCredential, + AsyncTokenCredential, + ] ] = None, **kwargs: Any, ) -> Tuple[StorageConfiguration, AsyncPipeline]: self._credential_policy: Optional[ - Union[AsyncStorageBearerTokenCredentialPolicy, SharedKeyCredentialPolicy, AzureSasCredentialPolicy] + Union[ + AsyncStorageBearerTokenCredentialPolicy, + SharedKeyCredentialPolicy, + AzureSasCredentialPolicy, + ] ] = None if hasattr(credential, "get_token"): if kwargs.get("audience"): @@ -198,12 +229,28 @@ async def _batch_send(self, *reqs: "HttpRequest", **kwargs: Any) -> AsyncList["H def parse_connection_str( conn_str: str, - credential: Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]], + credential: Optional[ + Union[ + str, + Dict[str, str], + AzureNamedKeyCredential, + AzureSasCredential, + AsyncTokenCredential, + ] + ], service: str, ) -> Tuple[ str, Optional[str], - Optional[Union[str, Dict[str, str], AzureNamedKeyCredential, AzureSasCredential, AsyncTokenCredential]], + Optional[ + Union[ + str, + Dict[str, str], + AzureNamedKeyCredential, + AzureSasCredential, + AsyncTokenCredential, + ] + ], ]: conn_str = conn_str.rstrip(";") conn_settings_list = [s.split("=", 1) for s in conn_str.split(";")] @@ -217,7 +264,10 @@ def parse_connection_str( secondary = None if not credential: try: - credential = {"account_name": conn_settings["ACCOUNTNAME"], "account_key": conn_settings["ACCOUNTKEY"]} + credential = { + "account_name": conn_settings["ACCOUNTNAME"], + "account_key": conn_settings["ACCOUNTKEY"], + } except KeyError: credential = 
conn_settings.get("SHAREDACCESSSIGNATURE") if endpoints["primary"] in conn_settings: diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/constants.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/constants.py index 2bf865acf343..c9c2ba8f74d0 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/constants.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/constants.py @@ -6,7 +6,6 @@ from .._serialize import _SUPPORTED_API_VERSIONS - X_MS_VERSION = _SUPPORTED_API_VERSIONS[-1] # Connection defaults diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py index aa900a4f404a..51565b636f6c 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/models.py @@ -292,7 +292,10 @@ class ResourceTypes(object): _str: str def __init__( - self, service: bool = False, container: bool = False, object: bool = False # pylint: disable=redefined-builtin + self, + service: bool = False, + container: bool = False, + object: bool = False, # pylint: disable=redefined-builtin ) -> None: self.service = service self.container = container diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/parser.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/parser.py index 7755398d8090..f16230c9d702 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/parser.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/parser.py @@ -53,7 +53,10 @@ def _filetime_to_datetime(filetime: str) -> Optional[datetime]: if temp_filetime == 0: return None - return datetime.fromtimestamp((temp_filetime - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS, tz=timezone.utc) + return datetime.fromtimestamp( + (temp_filetime - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS, + tz=timezone.utc, + ) except 
ValueError: pass diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py index b343373dfce5..0127d571f003 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies.py @@ -55,7 +55,12 @@ def encode_base64(data): # Are we out of retries? def is_exhausted(settings): - retry_counts = (settings["total"], settings["connect"], settings["read"], settings["status"]) + retry_counts = ( + settings["total"], + settings["connect"], + settings["read"], + settings["status"], + ) retry_counts = list(filter(None, retry_counts)) if not retry_counts: return False @@ -228,7 +233,16 @@ def on_request(self, request: "PipelineRequest") -> None: parsed_qs["sig"] = "*****" # the SAS needs to be put back together - value = urlunparse((scheme, netloc, path, params, urlencode(parsed_qs), fragment)) + value = urlunparse( + ( + scheme, + netloc, + path, + params, + urlencode(parsed_qs), + fragment, + ) + ) _LOGGER.debug(" %r: %r", header, value) _LOGGER.debug("Request body:") @@ -572,11 +586,16 @@ def send(self, request): response = self.next.send(request) if is_retry(response, retry_settings["mode"]) or is_checksum_retry(response): retries_remaining = self.increment( - retry_settings, request=request.http_request, response=response.http_response + retry_settings, + request=request.http_request, + response=response.http_response, ) if retries_remaining: retry_hook( - retry_settings, request=request.http_request, response=response.http_response, error=None + retry_settings, + request=request.http_request, + response=response.http_response, + error=None, ) self.sleep(retry_settings, request.context.transport) continue @@ -586,7 +605,12 @@ def send(self, request): raise retries_remaining = self.increment(retry_settings, request=request.http_request, error=err) if retries_remaining: - 
retry_hook(retry_settings, request=request.http_request, response=None, error=err) + retry_hook( + retry_settings, + request=request.http_request, + response=None, + error=err, + ) self.sleep(retry_settings, request.context.transport) continue raise err diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py index 4cb32f23248b..f4d235c082d8 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/policies_async.py @@ -11,11 +11,19 @@ from typing import Any, Dict, TYPE_CHECKING from azure.core.exceptions import AzureError, StreamClosedError, StreamConsumedError -from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy +from azure.core.pipeline.policies import ( + AsyncBearerTokenCredentialPolicy, + AsyncHTTPPolicy, +) from .authentication import AzureSigningError, StorageHttpChallenge from .constants import DEFAULT_OAUTH_SCOPE -from .policies import encode_base64, is_retry, StorageContentValidation, StorageRetryPolicy +from .policies import ( + encode_base64, + is_retry, + StorageContentValidation, + StorageRetryPolicy, +) if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -125,11 +133,16 @@ async def send(self, request): response = await self.next.send(request) if is_retry(response, retry_settings["mode"]) or await is_checksum_retry(response): retries_remaining = self.increment( - retry_settings, request=request.http_request, response=response.http_response + retry_settings, + request=request.http_request, + response=response.http_response, ) if retries_remaining: await retry_hook( - retry_settings, request=request.http_request, response=response.http_response, error=None + retry_settings, + request=request.http_request, + response=response.http_response, + error=None, ) await 
self.sleep(retry_settings, request.context.transport) continue @@ -139,7 +152,12 @@ async def send(self, request): raise retries_remaining = self.increment(retry_settings, request=request.http_request, error=err) if retries_remaining: - await retry_hook(retry_settings, request=request.http_request, response=None, error=err) + await retry_hook( + retry_settings, + request=request.http_request, + response=None, + error=err, + ) await self.sleep(retry_settings, request.context.transport) continue raise err diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/request_handlers.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/request_handlers.py index b23f65859690..699635565b18 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/request_handlers.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/request_handlers.py @@ -12,7 +12,6 @@ import isodate - _LOGGER = logging.getLogger(__name__) _REQUEST_DELIMITER_PREFIX = "batch_" diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/response_handlers.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/response_handlers.py index 40faa840cfbf..8c4e5dfb80c2 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/response_handlers.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/response_handlers.py @@ -21,7 +21,6 @@ from .models import get_enum_value, StorageErrorCode, UserDelegationKey from .parser import _to_utc_datetime - SV_DOCS_URL = "https://learn.microsoft.com/rest/api/storageservices/versioning-for-the-azure-storage-services" _LOGGER = logging.getLogger(__name__) @@ -80,7 +79,10 @@ def return_context_and_deserialized(response, deserialized, response_headers): def return_raw_deserialized(response, *_): - return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME] + return ( + response.http_response.location_mode, + 
response.context[ContentDecodePolicy.CONTEXT_NAME], + ) def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches @@ -96,7 +98,12 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p # If it is one of those three then it has been serialized prior by the generated layer. if isinstance( storage_error, - (PartialBatchErrorException, ClientAuthenticationError, ResourceNotFoundError, ResourceExistsError), + ( + PartialBatchErrorException, + ClientAuthenticationError, + ResourceNotFoundError, + ResourceExistsError, + ), ): serialized = True error_code = storage_error.response.headers.get("x-ms-error-code") @@ -118,7 +125,8 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p error_dict = error_body.get("error", {}) elif not error_code: _LOGGER.warning( - "Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.", type(error_body) + "Unexpected return type %s from ContentDecodePolicy.deserialize_from_http_generics.", + type(error_body), ) error_dict = {"message": str(error_body)} @@ -135,9 +143,15 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p # This check would be unnecessary if we have already serialized the error if error_code and not serialized: error_code = StorageErrorCode(error_code) - if error_code in [StorageErrorCode.condition_not_met, StorageErrorCode.blob_overwritten]: + if error_code in [ + StorageErrorCode.condition_not_met, + StorageErrorCode.blob_overwritten, + ]: raise_error = ResourceModifiedError - if error_code in [StorageErrorCode.invalid_authentication_info, StorageErrorCode.authentication_failed]: + if error_code in [ + StorageErrorCode.invalid_authentication_info, + StorageErrorCode.authentication_failed, + ]: raise_error = ClientAuthenticationError if error_code in [ StorageErrorCode.resource_not_found, diff --git 
a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/shared_access_signature.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/shared_access_signature.py index b8582a8f71f4..60d2a3475f94 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/shared_access_signature.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/shared_access_signature.py @@ -226,7 +226,12 @@ def add_account(self, services, resource_types): self._add_query(QueryStringConstants.SIGNED_RESOURCE_TYPES, resource_types) def add_override_response_headers( - self, cache_control, content_disposition, content_encoding, content_language, content_type + self, + cache_control, + content_disposition, + content_encoding, + content_language, + content_type, ): self._add_query(QueryStringConstants.SIGNED_CACHE_CONTROL, cache_control) self._add_query(QueryStringConstants.SIGNED_CONTENT_DISPOSITION, content_disposition) @@ -275,7 +280,10 @@ def get_value_to_append(query): + "\n" # Signed Encryption Scope - always empty for queue ) - self._add_query(QueryStringConstants.SIGNED_SIGNATURE, sign_string(account_key, string_to_sign)) + self._add_query( + QueryStringConstants.SIGNED_SIGNATURE, + sign_string(account_key, string_to_sign), + ) self.string_to_sign = string_to_sign def get_token(self) -> str: diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads.py index 7a5fb3f3dc91..341d034fd07c 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads.py @@ -16,7 +16,6 @@ from .request_handlers import get_length from .response_handlers import return_response_headers - _LARGE_BLOB_UPLOAD_MAX_READ_BUFFER_SIZE = 4 * 1024 * 1024 _ERROR_VALUE_SHOULD_BE_SEEKABLE_STREAM = "{0} should be a seekable file-like/io.IOBase type stream object." 
@@ -113,7 +112,12 @@ def upload_substream_blocks( executor.submit(with_current_context(uploader.process_substream_block), u) for u in islice(upload_tasks, 0, max_concurrency) ] - range_ids = _parallel_uploads(executor, uploader.process_substream_block, upload_tasks, running_futures) + range_ids = _parallel_uploads( + executor, + uploader.process_substream_block, + upload_tasks, + running_futures, + ) else: range_ids = [uploader.process_substream_block(b) for b in uploader.get_substream_blocks()] if any(range_ids): @@ -166,7 +170,10 @@ def get_chunk_streams(self): # Buffer until we either reach the end of the stream or get a whole chunk. while True: if self.total_size: - read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) + read_size = min( + self.chunk_size - len(data), + self.total_size - (index + len(data)), + ) temp = self.stream.read(read_size) if not isinstance(temp, bytes): raise TypeError("Blob data should be of type bytes.") diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads_async.py index 6ed5ba1d0f91..388429a288a4 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared/uploads_async.py @@ -189,7 +189,10 @@ async def get_chunk_streams(self): # Buffer until we either reach the end of the stream or get a whole chunk. while True: if self.total_size: - read_size = min(self.chunk_size - len(data), self.total_size - (index + len(data))) + read_size = min( + self.chunk_size - len(data), + self.total_size - (index + len(data)), + ) temp = self.stream.read(read_size) if inspect.isawaitable(temp): temp = await temp @@ -437,7 +440,11 @@ class AsyncIterStreamer: File-like streaming object for AsyncGenerators. 
""" - def __init__(self, generator: AsyncGenerator[Union[bytes, str], None], encoding: str = "UTF-8"): + def __init__( + self, + generator: AsyncGenerator[Union[bytes, str], None], + encoding: str = "UTF-8", + ): self.iterator = generator.__aiter__() self.leftover = b"" self.encoding = encoding diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared_access_signature.py b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared_access_signature.py index 465835d6ad69..75f49f70ec64 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/_shared_access_signature.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/_shared_access_signature.py @@ -18,7 +18,11 @@ ) if TYPE_CHECKING: - from azure.storage.queue import AccountSasPermissions, QueueSasPermissions, ResourceTypes + from azure.storage.queue import ( + AccountSasPermissions, + QueueSasPermissions, + ResourceTypes, + ) from datetime import datetime @@ -116,7 +120,10 @@ def generate_queue( sas.add_id(policy_id) sas.add_user_delegation_oid(user_delegation_oid) sas.add_resource_signature( - self.account_name, self.account_key, queue_name, user_delegation_key=self.user_delegation_key + self.account_name, + self.account_key, + queue_name, + user_delegation_key=self.user_delegation_key, ) if sts_hook is not None: @@ -150,11 +157,21 @@ def get_value_to_append(query): self._add_query(QueryStringConstants.SIGNED_OID, user_delegation_key.signed_oid) self._add_query(QueryStringConstants.SIGNED_TID, user_delegation_key.signed_tid) self._add_query(QueryStringConstants.SIGNED_KEY_START, user_delegation_key.signed_start) - self._add_query(QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry) - self._add_query(QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service) - self._add_query(QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version) self._add_query( - QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, 
user_delegation_key.signed_delegated_user_tid + QueryStringConstants.SIGNED_KEY_EXPIRY, + user_delegation_key.signed_expiry, + ) + self._add_query( + QueryStringConstants.SIGNED_KEY_SERVICE, + user_delegation_key.signed_service, + ) + self._add_query( + QueryStringConstants.SIGNED_KEY_VERSION, + user_delegation_key.signed_version, + ) + self._add_query( + QueryStringConstants.SIGNED_KEY_DELEGATED_USER_TID, + user_delegation_key.signed_delegated_user_tid, ) string_to_sign += ( @@ -182,7 +199,10 @@ def get_value_to_append(query): self._add_query( QueryStringConstants.SIGNED_SIGNATURE, - sign_string(account_key if user_delegation_key is None else user_delegation_key.value, string_to_sign), + sign_string( + (account_key if user_delegation_key is None else user_delegation_key.value), + string_to_sign, + ), ) self.string_to_sign = string_to_sign diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/__init__.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/__init__.py index 434d5fe99bba..114731165ee8 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/__init__.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/__init__.py @@ -7,7 +7,6 @@ from ._queue_client_async import QueueClient from ._queue_service_client_async import QueueServiceClient - __all__ = [ "QueueClient", "QueueServiceClient", diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_models.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_models.py index 5a1dfba70bde..cbd506fbd183 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_models.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_models.py @@ -10,7 +10,10 @@ from azure.core.async_paging import AsyncPageIterator from azure.core.exceptions import HttpResponseError from .._models import QueueMessage, QueueProperties -from .._shared.response_handlers import process_storage_error, return_context_and_deserialized +from 
.._shared.response_handlers import ( + process_storage_error, + return_context_and_deserialized, +) class MessagesPaged(AsyncPageIterator): diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py index 5134ea26a5ad..ed1c7e8b64b6 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_client_async.py @@ -24,10 +24,17 @@ from .._queue_client_helpers import _format_url, _from_queue_url, _parse_url from .._serialize import get_api_version from .._shared.base_client import StorageAccountHostsMixin -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, parse_connection_str +from .._shared.base_client_async import ( + AsyncStorageAccountHostsMixin, + parse_connection_str, +) from .._shared.policies_async import ExponentialRetry from .._shared.request_handlers import add_metadata_headers, serialize_iso -from .._shared.response_handlers import process_storage_error, return_headers_and_deserialized, return_response_headers +from .._shared.response_handlers import ( + process_storage_error, + return_headers_and_deserialized, + return_response_headers, +) if TYPE_CHECKING: from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential @@ -106,7 +113,13 @@ def __init__( account_url: str, queue_name: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "AsyncTokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -133,7 +146,11 @@ def __init__( self._message_encode_policy = message_encode_policy or NoEncodePolicy() self._message_decode_policy = message_decode_policy or NoDecodePolicy() self._client = AzureQueueStorage( - self.url, 
get_api_version(api_version), base_url=self.url, pipeline=self._pipeline, loop=loop + self.url, + get_api_version(api_version), + base_url=self.url, + pipeline=self._pipeline, + loop=loop, ) self._loop = loop self._configure_encryption(kwargs) @@ -143,7 +160,10 @@ async def __aenter__(self) -> Self: return self async def __aexit__( - self, typ: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType] + self, + typ: Optional[type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], ) -> None: await self._client.__aexit__(typ, exc, tb) # pylint: disable=specify-parameter-names-in-call @@ -164,14 +184,25 @@ def _format_url(self, hostname: str) -> str: :returns: The formatted endpoint URL according to the specified location mode hostname. :rtype: str """ - return _format_url(queue_name=self.queue_name, hostname=hostname, scheme=self.scheme, query_str=self._query_str) + return _format_url( + queue_name=self.queue_name, + hostname=hostname, + scheme=self.scheme, + query_str=self._query_str, + ) @classmethod def from_queue_url( cls, queue_url: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "AsyncTokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -236,7 +267,13 @@ def from_connection_string( conn_str: str, queue_name: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "AsyncTokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -591,7 +628,10 @@ async def send_message( """ if self.key_encryption_key: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + 
self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) try: @@ -679,7 +719,10 @@ async def receive_message( """ if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) self._message_decode_policy.configure( @@ -762,7 +805,10 @@ def receive_messages( """ if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) self._message_decode_policy.configure( @@ -853,7 +899,10 @@ async def update_message( """ if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) if isinstance(message, QueueMessage): @@ -889,7 +938,9 @@ async def update_message( Retrying without encryption_version." 
) self._message_encode_policy.configure( - self.require_encryption, self.key_encryption_key, self.key_resolver_function + self.require_encryption, + self.key_encryption_key, + self.key_resolver_function, ) encoded_message_text = self._message_encode_policy(message_text) updated = GenQueueMessage(message_text=encoded_message_text) @@ -969,7 +1020,10 @@ async def peek_messages( if self.key_encryption_key or self.key_resolver_function: modify_user_agent_for_encryption( - self._config.user_agent_policy.user_agent, self._sdk_moniker, self.encryption_version, kwargs + self._config.user_agent_policy.user_agent, + self._sdk_moniker, + self.encryption_version, + kwargs, ) self._message_decode_policy.configure( diff --git a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py index dfb65bfccd3a..bd694412a038 100644 --- a/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py +++ b/sdk/storage/azure-storage-queue/azure/storage/queue/aio/_queue_service_client_async.py @@ -19,15 +19,27 @@ from .._encryption import StorageEncryptionMixin from .._generated.aio import AzureQueueStorage from .._generated.models import KeyInfo, StorageServiceProperties -from .._models import CorsRule, QueueProperties, service_properties_deserialize, service_stats_deserialize +from .._models import ( + CorsRule, + QueueProperties, + service_properties_deserialize, + service_stats_deserialize, +) from .._queue_service_client_helpers import _parse_url from .._serialize import get_api_version from .._shared.base_client import StorageAccountHostsMixin -from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper, parse_connection_str +from .._shared.base_client_async import ( + AsyncStorageAccountHostsMixin, + AsyncTransportWrapper, + parse_connection_str, +) from .._shared.models import LocationMode from 
.._shared.parser import _to_utc_datetime from .._shared.policies_async import ExponentialRetry -from .._shared.response_handlers import parse_to_internal_user_delegation_key, process_storage_error +from .._shared.response_handlers import ( + parse_to_internal_user_delegation_key, + process_storage_error, +) if TYPE_CHECKING: from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential @@ -95,7 +107,13 @@ def __init__( self, account_url: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "AsyncTokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -116,7 +134,11 @@ def __init__( **kwargs, ) self._client = AzureQueueStorage( - self.url, get_api_version(api_version), base_url=self.url, pipeline=self._pipeline, loop=loop + self.url, + get_api_version(api_version), + base_url=self.url, + pipeline=self._pipeline, + loop=loop, ) self._loop = loop self._configure_encryption(kwargs) @@ -126,7 +148,10 @@ async def __aenter__(self) -> Self: return self async def __aexit__( - self, typ: Optional[type[BaseException]], exc: Optional[BaseException], tb: Optional[TracebackType] + self, + typ: Optional[type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], ) -> None: await self._client.__aexit__(typ, exc, tb) # pylint: disable=specify-parameter-names-in-call @@ -154,7 +179,13 @@ def from_connection_string( cls, conn_str: str, credential: Optional[ - Union[str, Dict[str, str], "AzureNamedKeyCredential", "AzureSasCredential", "AsyncTokenCredential"] + Union[ + str, + Dict[str, str], + "AzureNamedKeyCredential", + "AzureSasCredential", + "AsyncTokenCredential", + ] ] = None, *, api_version: Optional[str] = None, @@ -425,7 +456,12 @@ def list_queues( @distributed_trace_async async def create_queue( - self, name: str, metadata: Optional[Dict[str, str]] = 
None, *, timeout: Optional[int] = None, **kwargs: Any + self, + name: str, + metadata: Optional[Dict[str, str]] = None, + *, + timeout: Optional[int] = None, + **kwargs: Any, ) -> QueueClient: """Creates a new queue under the specified account. @@ -458,7 +494,11 @@ async def create_queue( @distributed_trace_async async def delete_queue( - self, queue: Union["QueueProperties", str], *, timeout: Optional[int] = None, **kwargs: Any + self, + queue: Union["QueueProperties", str], + *, + timeout: Optional[int] = None, + **kwargs: Any, ) -> None: """Deletes the specified queue and any messages it contains. diff --git a/sdk/storage/azure-storage-queue/samples/queue_samples_authentication.py b/sdk/storage/azure-storage-queue/samples/queue_samples_authentication.py index 6219caf5436b..0b0790576860 100644 --- a/sdk/storage/azure-storage-queue/samples/queue_samples_authentication.py +++ b/sdk/storage/azure-storage-queue/samples/queue_samples_authentication.py @@ -25,7 +25,6 @@ 4) STORAGE_ACCOUNT_KEY - the storage account access key """ - from datetime import datetime, timedelta import os import sys @@ -119,7 +118,11 @@ def authentication_by_shared_access_signature(self): queue_service = QueueServiceClient.from_connection_string(conn_str=self.connection_string) # Create a SAS token to use for authentication of a client - from azure.storage.queue import generate_account_sas, ResourceTypes, AccountSasPermissions + from azure.storage.queue import ( + generate_account_sas, + ResourceTypes, + AccountSasPermissions, + ) sas_token = generate_account_sas( self.account_name, diff --git a/sdk/storage/azure-storage-queue/samples/queue_samples_authentication_async.py b/sdk/storage/azure-storage-queue/samples/queue_samples_authentication_async.py index c109df896b73..1948ba8a4478 100644 --- a/sdk/storage/azure-storage-queue/samples/queue_samples_authentication_async.py +++ b/sdk/storage/azure-storage-queue/samples/queue_samples_authentication_async.py @@ -25,7 +25,6 @@ 4) 
STORAGE_ACCOUNT_KEY - the storage account access key """ - from datetime import datetime, timedelta import asyncio import os @@ -122,7 +121,11 @@ async def authentication_by_shared_access_signature_async(self): queue_service = QueueServiceClient.from_connection_string(conn_str=self.connection_string) # Create a SAS token to use for authentication of a client - from azure.storage.queue import generate_account_sas, ResourceTypes, AccountSasPermissions + from azure.storage.queue import ( + generate_account_sas, + ResourceTypes, + AccountSasPermissions, + ) sas_token = generate_account_sas( self.account_name, diff --git a/sdk/storage/azure-storage-queue/samples/queue_samples_hello_world_async.py b/sdk/storage/azure-storage-queue/samples/queue_samples_hello_world_async.py index 46803f90cda5..dc7d1dee4342 100644 --- a/sdk/storage/azure-storage-queue/samples/queue_samples_hello_world_async.py +++ b/sdk/storage/azure-storage-queue/samples/queue_samples_hello_world_async.py @@ -20,7 +20,6 @@ 1) STORAGE_CONNECTION_STRING - the connection string to your storage account """ - import asyncio import os import sys @@ -71,7 +70,8 @@ async def queue_and_messages_example_async(self): try: # Send messages await asyncio.gather( - queue.send_message("I'm using queues!"), queue.send_message("This is my second message") + queue.send_message("I'm using queues!"), + queue.send_message("This is my second message"), ) # Receive the messages diff --git a/sdk/storage/azure-storage-queue/samples/queue_samples_message.py b/sdk/storage/azure-storage-queue/samples/queue_samples_message.py index 1320ab463224..946f64e8e321 100644 --- a/sdk/storage/azure-storage-queue/samples/queue_samples_message.py +++ b/sdk/storage/azure-storage-queue/samples/queue_samples_message.py @@ -22,7 +22,6 @@ 1) STORAGE_CONNECTION_STRING - the connection string to your storage account """ - from datetime import datetime, timedelta import os import sys @@ -72,7 +71,10 @@ def set_access_policy(self): from 
azure.storage.queue import generate_queue_sas sas_token = generate_queue_sas( - queue.account_name, queue.queue_name, queue.credential.account_key, policy_id="my-access-policy-id" + queue.account_name, + queue.queue_name, + queue.credential.account_key, + policy_id="my-access-policy-id", ) # [END queue_client_sas_token] @@ -333,7 +335,10 @@ def update_message(self): # Update the message list_result = next(messages) message = queue.update_message( - list_result.id, pop_receipt=list_result.pop_receipt, visibility_timeout=0, content="updated" + list_result.id, + pop_receipt=list_result.pop_receipt, + visibility_timeout=0, + content="updated", ) # [END update_message] diff --git a/sdk/storage/azure-storage-queue/samples/queue_samples_message_async.py b/sdk/storage/azure-storage-queue/samples/queue_samples_message_async.py index 1fa4ac924b99..d04393a6d3b1 100644 --- a/sdk/storage/azure-storage-queue/samples/queue_samples_message_async.py +++ b/sdk/storage/azure-storage-queue/samples/queue_samples_message_async.py @@ -72,7 +72,10 @@ async def set_access_policy_async(self): from azure.storage.queue import generate_queue_sas sas_token = generate_queue_sas( - queue.account_name, queue.queue_name, queue.credential.account_key, policy_id="my-access-policy-id" + queue.account_name, + queue.queue_name, + queue.credential.account_key, + policy_id="my-access-policy-id", ) # Authenticate with the sas token @@ -180,7 +183,9 @@ async def receive_one_message_from_queue(self): try: await asyncio.gather( - queue.send_message("message1"), queue.send_message("message2"), queue.send_message("message3") + queue.send_message("message1"), + queue.send_message("message2"), + queue.send_message("message3"), ) # [START receive_one_message] diff --git a/sdk/storage/azure-storage-queue/samples/queue_samples_service.py b/sdk/storage/azure-storage-queue/samples/queue_samples_service.py index b537470edb93..e34a1afc7724 100644 --- a/sdk/storage/azure-storage-queue/samples/queue_samples_service.py +++ 
b/sdk/storage/azure-storage-queue/samples/queue_samples_service.py @@ -40,17 +40,31 @@ def queue_service_properties(self): # [START set_queue_service_properties] # Create service properties - from azure.storage.queue import QueueAnalyticsLogging, Metrics, CorsRule, RetentionPolicy + from azure.storage.queue import ( + QueueAnalyticsLogging, + Metrics, + CorsRule, + RetentionPolicy, + ) # Create logging settings logging = QueueAnalyticsLogging( - read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5) + read=True, + write=True, + delete=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) # Create metrics for requests statistics - hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5)) + hour_metrics = Metrics( + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=5), + ) minute_metrics = Metrics( - enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) # Create CORS rules @@ -58,8 +72,18 @@ def queue_service_properties(self): allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"] allowed_methods = ["GET", "PUT"] max_age_in_seconds = 500 - exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"] - allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"] + exposed_headers = [ + "x-ms-meta-data*", + "x-ms-meta-source*", + "x-ms-meta-abc", + "x-ms-meta-bcd", + ] + allowed_headers = [ + "x-ms-meta-data*", + "x-ms-meta-target*", + "x-ms-meta-xyz", + "x-ms-meta-foo", + ] cors_rule2 = CorsRule( allowed_origins, allowed_methods, diff --git a/sdk/storage/azure-storage-queue/samples/queue_samples_service_async.py b/sdk/storage/azure-storage-queue/samples/queue_samples_service_async.py index 7b6d669fa6f5..79296c6d6545 100644 --- 
a/sdk/storage/azure-storage-queue/samples/queue_samples_service_async.py +++ b/sdk/storage/azure-storage-queue/samples/queue_samples_service_async.py @@ -20,7 +20,6 @@ 1) STORAGE_CONNECTION_STRING - the connection string to your storage account """ - import asyncio import os import sys @@ -43,19 +42,31 @@ async def queue_service_properties_async(self): async with queue_service: # [START async_set_queue_service_properties] # Create service properties - from azure.storage.queue import QueueAnalyticsLogging, Metrics, CorsRule, RetentionPolicy + from azure.storage.queue import ( + QueueAnalyticsLogging, + Metrics, + CorsRule, + RetentionPolicy, + ) # Create logging settings logging = QueueAnalyticsLogging( - read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5) + read=True, + write=True, + delete=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) # Create metrics for requests statistics hour_metrics = Metrics( - enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) minute_metrics = Metrics( - enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) # Create CORS rules @@ -63,8 +74,18 @@ async def queue_service_properties_async(self): allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"] allowed_methods = ["GET", "PUT"] max_age_in_seconds = 500 - exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"] - allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"] + exposed_headers = [ + "x-ms-meta-data*", + "x-ms-meta-source*", + "x-ms-meta-abc", + "x-ms-meta-bcd", + ] + allowed_headers = [ + "x-ms-meta-data*", + "x-ms-meta-target*", + "x-ms-meta-xyz", + "x-ms-meta-foo", + ] cors_rule2 = 
CorsRule( allowed_origins, allowed_methods, diff --git a/sdk/storage/azure-storage-queue/setup.py b/sdk/storage/azure-storage-queue/setup.py index ac028b43a7a5..a3791f2a8895 100644 --- a/sdk/storage/azure-storage-queue/setup.py +++ b/sdk/storage/azure-storage-queue/setup.py @@ -69,7 +69,12 @@ ] ), python_requires=">=3.9", - install_requires=["azure-core>=1.37.0", "cryptography>=2.1.4", "typing-extensions>=4.6.0", "isodate>=0.6.1"], + install_requires=[ + "azure-core>=1.37.0", + "cryptography>=2.1.4", + "typing-extensions>=4.6.0", + "isodate>=0.6.1", + ], extras_require={ "aio": [ "azure-core[aio]>=1.37.0", diff --git a/sdk/storage/azure-storage-queue/tests/conftest.py b/sdk/storage/azure-storage-queue/tests/conftest.py index ca09905c2ca6..8a80ea7d6c86 100644 --- a/sdk/storage/azure-storage-queue/tests/conftest.py +++ b/sdk/storage/azure-storage-queue/tests/conftest.py @@ -14,6 +14,7 @@ add_header_regex_sanitizer, add_oauth_response_sanitizer, add_uri_string_sanitizer, + add_uri_regex_sanitizer, test_proxy, ) @@ -30,6 +31,13 @@ def add_sanitizers(test_proxy): add_header_regex_sanitizer(key="x-ms-copy-source-authorization", value="Sanitized") add_header_regex_sanitizer(key="x-ms-encryption-key", value="Sanitized") - add_general_regex_sanitizer(regex=r'"EncryptionLibrary": "Python .*?"', value='"EncryptionLibrary": "Python x.x.x"') + add_general_regex_sanitizer( + regex=r'"EncryptionLibrary": "Python .*?"', + value='"EncryptionLibrary": "Python x.x.x"', + ) add_uri_string_sanitizer(target=".preprod.", value=".") + add_uri_regex_sanitizer( + regex=r"(?<=[?&]sktid=)[^&#]+", + value="00000000-0000-0000-0000-000000000000", + ) diff --git a/sdk/storage/azure-storage-queue/tests/encryption_test_helper.py b/sdk/storage/azure-storage-queue/tests/encryption_test_helper.py index d54df3de963b..7d657b882dd5 100644 --- a/sdk/storage/azure-storage-queue/tests/encryption_test_helper.py +++ b/sdk/storage/azure-storage-queue/tests/encryption_test_helper.py @@ -62,7 +62,8 @@ def 
__init__(self, kid="local:key2"): def wrap_key(self, key, algorithm="RSA"): if algorithm == "RSA": return self.public_key.encrypt( - key, OAEP(mgf=MGF1(algorithm=SHA1()), algorithm=SHA1(), label=None) # nosec # nosec + key, + OAEP(mgf=MGF1(algorithm=SHA1()), algorithm=SHA1(), label=None), # nosec # nosec ) raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) @@ -70,7 +71,8 @@ def wrap_key(self, key, algorithm="RSA"): def unwrap_key(self, key, algorithm): if algorithm == "RSA": return self.private_key.decrypt( - key, OAEP(mgf=MGF1(algorithm=SHA1()), algorithm=SHA1(), label=None) # nosec # nosec + key, + OAEP(mgf=MGF1(algorithm=SHA1()), algorithm=SHA1(), label=None), # nosec # nosec ) raise ValueError(_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM) diff --git a/sdk/storage/azure-storage-queue/tests/test_queue.py b/sdk/storage/azure-storage-queue/tests/test_queue.py index b010159d7490..7f956db3f17e 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue.py @@ -215,7 +215,11 @@ def test_list_queues_with_metadata(self, **kwargs): queue.set_queue_metadata(metadata={"val1": "test", "val2": "blah"}) listed_queue = list( - qsc.list_queues(name_starts_with=queue.queue_name, results_per_page=1, include_metadata=True) + qsc.list_queues( + name_starts_with=queue.queue_name, + results_per_page=1, + include_metadata=True, + ) )[0] # Asserts @@ -775,7 +779,10 @@ def test_update_message_content(self, **kwargs): messages = queue_client.receive_messages() list_result1 = next(messages) message = queue_client.update_message( - list_result1.id, pop_receipt=list_result1.pop_receipt, visibility_timeout=0, content="new text" + list_result1.id, + pop_receipt=list_result1.pop_receipt, + visibility_timeout=0, + content="new text", ) list_result2 = next(messages) @@ -863,7 +870,8 @@ def test_account_sas_raises_if_sas_already_in_uri(self, **kwargs): with pytest.raises(ValueError): QueueServiceClient( - self.account_url(storage_account_name, 
"queue") + "?sig=foo", credential=AzureSasCredential("?foo=bar") + self.account_url(storage_account_name, "queue") + "?sig=foo", + credential=AzureSasCredential("?foo=bar"), ) @pytest.mark.live_test_only @@ -1205,7 +1213,11 @@ def test_set_queue_acl_with_signed_identifiers(self, **kwargs): # Act expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow() - timedelta(minutes=5)) - access_policy = AccessPolicy(permission=QueueSasPermissions(read=True), expiry=expiry_time, start=start_time) + access_policy = AccessPolicy( + permission=QueueSasPermissions(read=True), + expiry=expiry_time, + start=start_time, + ) identifiers = {"testid": access_policy} resp = queue_client.set_queue_access_policy(signed_identifiers=identifiers) @@ -1333,7 +1345,9 @@ def test_transport_closed_only_once(self, **kwargs): prefix = TEST_QUEUE_PREFIX queue_name = self.get_resource_name(prefix) with QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret, transport=transport + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, + transport=transport, ) as qsc: qsc.get_service_properties() assert transport.session is not None @@ -1392,7 +1406,11 @@ def test_storage_account_audience_queue_client(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - queue = QueueClient(self.account_url(storage_account_name, "queue"), "testqueue1", storage_account_key.secret) + queue = QueueClient( + self.account_url(storage_account_name, "queue"), + "testqueue1", + storage_account_key.secret, + ) queue.create_queue() # Act @@ -1415,7 +1433,11 @@ def test_bad_audience_queue_client(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") # Arrange - queue = QueueClient(self.account_url(storage_account_name, "queue"), "testqueue2", 
storage_account_key.secret) + queue = QueueClient( + self.account_url(storage_account_name, "queue"), + "testqueue2", + storage_account_key.secret, + ) queue.create_queue() # Act diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_api_version.py b/sdk/storage/azure-storage-queue/tests/test_queue_api_version.py index 2afdab424c7c..61eed578c786 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_api_version.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_api_version.py @@ -10,7 +10,6 @@ from devtools_testutils.storage import StorageRecordedTestCase - # ------------------------------------------------------------------------------ @@ -31,7 +30,9 @@ def test_service_client_api_version_property(self): service_client.api_version = "foo" service_client = QueueServiceClient( - "https://foo.queue.core.windows.net/account", credential="fake_key", api_version=self.api_version_1 + "https://foo.queue.core.windows.net/account", + credential="fake_key", + api_version=self.api_version_1, ) assert service_client.api_version == self.api_version_1 assert service_client._client._config.version == self.api_version_1 @@ -51,7 +52,11 @@ def test_queue_client_api_version_property(self): assert queue_client.api_version == self.api_version_1 assert queue_client._client._config.version == self.api_version_1 - queue_client = QueueClient("https://foo.queue.core.windows.net/account", "queue_name", credential="fake_key") + queue_client = QueueClient( + "https://foo.queue.core.windows.net/account", + "queue_name", + credential="fake_key", + ) assert queue_client.api_version == self.api_version_2 assert queue_client._client._config.version == self.api_version_2 diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_api_version_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_api_version_async.py index 3f3dc8ad6a5a..2bae873c98e3 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_api_version_async.py +++ 
b/sdk/storage/azure-storage-queue/tests/test_queue_api_version_async.py @@ -10,7 +10,6 @@ from devtools_testutils.storage.aio import AsyncStorageRecordedTestCase - # ------------------------------------------------------------------------------ @@ -31,7 +30,9 @@ def test_service_client_api_version_property(self): service_client.api_version = "foo" service_client = QueueServiceClient( - "https://foo.queue.core.windows.net/account", credential="fake_key", api_version=self.api_version_1 + "https://foo.queue.core.windows.net/account", + credential="fake_key", + api_version=self.api_version_1, ) assert service_client.api_version == self.api_version_1 assert service_client._client._config.version == self.api_version_1 @@ -51,7 +52,11 @@ def test_queue_client_api_version_property(self): assert queue_client.api_version == self.api_version_1 assert queue_client._client._config.version == self.api_version_1 - queue_client = QueueClient("https://foo.queue.core.windows.net/account", "queue_name", credential="fake_key") + queue_client = QueueClient( + "https://foo.queue.core.windows.net/account", + "queue_name", + credential="fake_key", + ) assert queue_client.api_version == self.api_version_2 assert queue_client._client._config.version == self.api_version_2 diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_async.py index c79124c5ab6f..8878f8c401b6 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_async.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_async.py @@ -780,7 +780,10 @@ async def test_update_message_content(self, **kwargs): messages.append(m) list_result1 = messages[0] message = await queue_client.update_message( - list_result1.id, pop_receipt=list_result1.pop_receipt, visibility_timeout=0, content="new text" + list_result1.id, + pop_receipt=list_result1.pop_receipt, + visibility_timeout=0, + content="new text", ) assert "new text" == message.content @@ -872,7 +875,8 @@ async 
def test_account_sas_raises_if_sas_already_in_uri(self, **kwargs): with pytest.raises(ValueError): QueueServiceClient( - self.account_url(storage_account_name, "queue") + "?sig=foo", credential=AzureSasCredential("?foo=bar") + self.account_url(storage_account_name, "queue") + "?sig=foo", + credential=AzureSasCredential("?foo=bar"), ) @pytest.mark.live_test_only @@ -1221,7 +1225,11 @@ async def test_set_queue_acl_with_signed_identifiers(self, **kwargs): # Act expiry_time = self.get_datetime_variable(variables, "expiry_time", datetime.utcnow() + timedelta(hours=1)) start_time = self.get_datetime_variable(variables, "start_time", datetime.utcnow() - timedelta(minutes=5)) - access_policy = AccessPolicy(permission=QueueSasPermissions(read=True), expiry=expiry_time, start=start_time) + access_policy = AccessPolicy( + permission=QueueSasPermissions(read=True), + expiry=expiry_time, + start=start_time, + ) identifiers = {"testid": access_policy} resp = await queue_client.set_queue_access_policy(signed_identifiers=identifiers) @@ -1349,7 +1357,9 @@ async def test_transport_closed_only_once(self, **kwargs): prefix = TEST_QUEUE_PREFIX queue_name = self.get_resource_name(prefix) async with QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret, transport=transport + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, + transport=transport, ) as qsc: await qsc.get_service_properties() assert transport.session is not None @@ -1409,7 +1419,11 @@ async def test_storage_account_audience_queue_client(self, **kwargs): # Arrange queue_name = self.get_resource_name(TEST_QUEUE_PREFIX) - queue = QueueClient(self.account_url(storage_account_name, "queue"), queue_name, storage_account_key.secret) + queue = QueueClient( + self.account_url(storage_account_name, "queue"), + queue_name, + storage_account_key.secret, + ) await queue.create_queue() # Act @@ -1433,7 +1447,11 @@ async def 
test_bad_audience_queue_client(self, **kwargs): # Arrange queue_name = self.get_resource_name(TEST_QUEUE_PREFIX) - queue = QueueClient(self.account_url(storage_account_name, "queue"), queue_name, storage_account_key.secret) + queue = QueueClient( + self.account_url(storage_account_name, "queue"), + queue_name, + storage_account_key.secret, + ) await queue.create_queue() # Act diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_client.py b/sdk/storage/azure-storage-queue/tests/test_queue_client.py index eefb88c6ce13..d7359bef3fc0 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_client.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_client.py @@ -11,12 +11,16 @@ from azure.storage.queue import ( AccountSasPermissions, generate_account_sas, + LocationMode, QueueClient, QueueServiceClient, ResourceTypes, VERSION, ) -from azure.storage.queue._shared.parser import DEVSTORE_ACCOUNT_KEY, DEVSTORE_ACCOUNT_NAME +from azure.storage.queue._shared.parser import ( + DEVSTORE_ACCOUNT_KEY, + DEVSTORE_ACCOUNT_NAME, +) from devtools_testutils import recorded_by_proxy from devtools_testutils.storage import StorageRecordedTestCase @@ -47,6 +51,15 @@ def validate_standard_account_endpoints(self, service, url_type, account_name, a assert f"{account_name}.{url_type}.core.windows.net" in service.url assert f"{account_name}-secondary.{url_type}.core.windows.net" in service.secondary_endpoint + def validate_ipv6_account_endpoints(self, service, account_name, account_key, primary_endpoint, secondary_endpoint): + assert service is not None + assert service.scheme == "https" + assert service.account_name == account_name + assert service.credential.account_name == account_name + assert service.credential.account_key == account_key + assert service._hosts[LocationMode.PRIMARY] == primary_endpoint + assert service._hosts[LocationMode.SECONDARY] == secondary_endpoint + def generate_fake_sas_token(self): fake_key = "a" * 30 + "b" * 30 @@ -70,7 +83,9 @@ def 
test_create_service_with_key(self, **kwargs): for client, url in SERVICES.items(): # Act service = client( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret, queue_name="foo" + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, + queue_name="foo", ) # Assert @@ -85,7 +100,8 @@ def test_create_service_with_connection_string(self, **kwargs): for service_type in SERVICES.items(): # Act service = service_type[0].from_connection_string( - self.connection_string(storage_account_name, storage_account_key.secret), queue_name="test" + self.connection_string(storage_account_name, storage_account_key.secret), + queue_name="test", ) # Assert @@ -104,7 +120,9 @@ def test_create_service_with_sas(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "queue"), credential=self.sas_token, queue_name="foo" + self.account_url(storage_account_name, "queue"), + credential=self.sas_token, + queue_name="foo", ) # Assert @@ -123,7 +141,9 @@ def test_create_service_with_token(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "queue"), credential=self.token_credential, queue_name="foo" + self.account_url(storage_account_name, "queue"), + credential=self.token_credential, + queue_name="foo", ) # Assert @@ -219,7 +239,9 @@ def test_create_service_with_socket_timeout(self, **kwargs): for service_type in SERVICES.items(): # Act default_service = service_type[0]( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret, queue_name="foo" + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, + queue_name="foo", ) service = service_type[0]( self.account_url(storage_account_name, "queue"), @@ -235,6 +257,106 @@ def test_create_service_with_socket_timeout(self, **kwargs): assert 
service._client._client._pipeline._transport.connection_config.timeout == 22 assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] + @pytest.mark.parametrize( + "account_url, expected_primary, expected_secondary", + [ + ( + "https://myaccount.queue.core.windows.net/", + "myaccount.queue.core.windows.net", + "myaccount-secondary.queue.core.windows.net", + ), + ( + "https://myaccount-secondary.queue.core.windows.net/", + "myaccount-secondary.queue.core.windows.net", + "myaccount-secondary.queue.core.windows.net", + ), + ( + "https://myaccount-dualstack.queue.core.windows.net/", + "myaccount-dualstack.queue.core.windows.net", + "myaccount-secondary-dualstack.queue.core.windows.net", + ), + ( + "https://myaccount-ipv6.queue.core.windows.net/", + "myaccount-ipv6.queue.core.windows.net", + "myaccount-secondary-ipv6.queue.core.windows.net", + ), + ( + "https://myaccount-secondary-dualstack.queue.core.windows.net/", + "myaccount-secondary-dualstack.queue.core.windows.net", + "myaccount-secondary-dualstack.queue.core.windows.net", + ), + ( + "https://myaccount-secondary-ipv6.queue.core.windows.net/", + "myaccount-secondary-ipv6.queue.core.windows.net", + "myaccount-secondary-ipv6.queue.core.windows.net", + ), + ], + ) + @QueuePreparer() + def test_create_service_ipv6(self, account_url, expected_primary, expected_secondary, **kwargs): + storage_account_name = "myaccount" + storage_account_key = kwargs.pop("storage_account_key") + + queue_name = "queue" + + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=storage_account_key.secret, + queue_name=queue_name, + ) + self.validate_ipv6_account_endpoints( + service, + storage_account_name, + storage_account_key.secret, + expected_primary, + expected_secondary, + ) + + conn_str = ( + "DefaultEndpointsProtocol=https;" + f"AccountName={storage_account_name};" + f"AccountKey={storage_account_key.secret};" + f"QueueEndpoint={account_url};" + ) 
+ service = service_type.from_connection_string( + conn_str, credential=storage_account_key.secret, queue_name=queue_name + ) + self.validate_ipv6_account_endpoints( + service, + storage_account_name, + storage_account_key.secret, + expected_primary, + expected_secondary, + ) + + service = QueueClient.from_queue_url( + queue_url=f"{account_url}/{queue_name}-secondary", + credential=storage_account_key.secret, + ) + self.validate_ipv6_account_endpoints( + service, + storage_account_name, + storage_account_key.secret, + expected_primary, + expected_secondary, + ) + + @QueuePreparer() + def test_create_service_ipv6_custom_domain(self): + token_credential = self.get_credential(QueueServiceClient) + + hostname = "github.com" + account_url = f"https://{hostname}" + for service_type in SERVICES.keys(): + service = service_type(account_url, credential=token_credential, queue_name="foo") + assert service is not None + assert service.scheme == "https" + assert service.account_name is None + assert service.credential is not None + assert service._hosts[LocationMode.PRIMARY] == hostname + assert service._hosts[LocationMode.SECONDARY] == "" + # --Connection String Test Cases -------------------------------------------- @QueuePreparer() def test_create_service_with_connection_string_key(self, **kwargs): @@ -497,7 +619,8 @@ def test_request_callback_signed_header(self, **kwargs): # Arrange service = QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, ) name = self.get_resource_name("cont") @@ -520,7 +643,8 @@ def test_response_callback(self, **kwargs): # Arrange service = QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, ) name = self.get_resource_name("cont") queue = 
service.get_queue_client(name) @@ -541,7 +665,8 @@ def test_user_agent_default(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") service = QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, ) def callback(response): @@ -589,7 +714,8 @@ def test_user_agent_append(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") service = QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, ) def callback(response): @@ -617,7 +743,16 @@ def test_create_queue_client_with_complete_queue_url(self, **kwargs): def test_error_with_malformed_conn_str(self): # Arrange - for conn_str in ["", "foobar", "foobar=baz=foo", "foo;bar;baz", "foo=;bar=;", "=", ";", "=;=="]: + for conn_str in [ + "", + "foobar", + "foobar=baz=foo", + "foo;bar;baz", + "foo=;bar=;", + "=", + ";", + "=;==", + ]: for service_type in SERVICES.items(): # Act with pytest.raises(ValueError) as e: diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py index 1a2366e8dc41..5b942028a125 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_client_async.py @@ -8,8 +8,17 @@ from datetime import datetime, timedelta import pytest -from azure.storage.queue import AccountSasPermissions, generate_account_sas, ResourceTypes, VERSION -from azure.storage.queue._shared.parser import DEVSTORE_ACCOUNT_KEY, DEVSTORE_ACCOUNT_NAME +from azure.storage.queue import ( + AccountSasPermissions, + generate_account_sas, + LocationMode, + ResourceTypes, + VERSION, +) +from azure.storage.queue._shared.parser import ( + DEVSTORE_ACCOUNT_KEY, + 
DEVSTORE_ACCOUNT_NAME, +) from azure.storage.queue.aio import QueueClient, QueueServiceClient from devtools_testutils.aio import recorded_by_proxy_async @@ -40,6 +49,15 @@ def validate_standard_account_endpoints(self, service, url_type, storage_account assert f"{storage_account_name}.{url_type}.core.windows.net" in service.url assert f"{storage_account_name}-secondary.{url_type}.core.windows.net" in service.secondary_endpoint + def validate_ipv6_account_endpoints(self, service, account_name, account_key, primary_endpoint, secondary_endpoint): + assert service is not None + assert service.scheme == "https" + assert service.account_name == account_name + assert service.credential.account_name == account_name + assert service.credential.account_key == account_key + assert service._hosts[LocationMode.PRIMARY] == primary_endpoint + assert service._hosts[LocationMode.SECONDARY] == secondary_endpoint + def generate_fake_sas_token(self): fake_key = "a" * 30 + "b" * 30 @@ -63,7 +81,9 @@ def test_create_service_with_key(self, **kwargs): for client, url in SERVICES.items(): # Act service = client( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret, queue_name="foo" + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, + queue_name="foo", ) # Assert @@ -78,7 +98,8 @@ def test_create_service_with_connection_string(self, **kwargs): for service_type in SERVICES.items(): # Act service = service_type[0].from_connection_string( - self.connection_string(storage_account_name, storage_account_key.secret), queue_name="test" + self.connection_string(storage_account_name, storage_account_key.secret), + queue_name="test", ) # Assert @@ -97,7 +118,9 @@ def test_create_service_with_sas(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "queue"), credential=self.sas_token, queue_name="foo" + self.account_url(storage_account_name, "queue"), + 
credential=self.sas_token, + queue_name="foo", ) # Assert @@ -115,7 +138,9 @@ async def test_create_service_with_token(self, **kwargs): for service_type in SERVICES: # Act service = service_type( - self.account_url(storage_account_name, "queue"), credential=self.token_credential, queue_name="foo" + self.account_url(storage_account_name, "queue"), + credential=self.token_credential, + queue_name="foo", ) # Assert @@ -210,7 +235,9 @@ def test_create_service_with_socket_timeout(self, **kwargs): for service_type in SERVICES.items(): # Act default_service = service_type[0]( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret, queue_name="foo" + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, + queue_name="foo", ) service = service_type[0]( self.account_url(storage_account_name, "queue"), @@ -226,6 +253,106 @@ def test_create_service_with_socket_timeout(self, **kwargs): assert service._client._client._pipeline._transport.connection_config.timeout == 22 assert default_service._client._client._pipeline._transport.connection_config.timeout in [20, (20, 2000)] + @pytest.mark.parametrize( + "account_url, expected_primary, expected_secondary", + [ + ( + "https://myaccount.queue.core.windows.net/", + "myaccount.queue.core.windows.net", + "myaccount-secondary.queue.core.windows.net", + ), + ( + "https://myaccount-secondary.queue.core.windows.net/", + "myaccount-secondary.queue.core.windows.net", + "myaccount-secondary.queue.core.windows.net", + ), + ( + "https://myaccount-dualstack.queue.core.windows.net/", + "myaccount-dualstack.queue.core.windows.net", + "myaccount-secondary-dualstack.queue.core.windows.net", + ), + ( + "https://myaccount-ipv6.queue.core.windows.net/", + "myaccount-ipv6.queue.core.windows.net", + "myaccount-secondary-ipv6.queue.core.windows.net", + ), + ( + "https://myaccount-secondary-dualstack.queue.core.windows.net/", + "myaccount-secondary-dualstack.queue.core.windows.net", + 
"myaccount-secondary-dualstack.queue.core.windows.net", + ), + ( + "https://myaccount-secondary-ipv6.queue.core.windows.net/", + "myaccount-secondary-ipv6.queue.core.windows.net", + "myaccount-secondary-ipv6.queue.core.windows.net", + ), + ], + ) + @QueuePreparer() + def test_create_service_ipv6(self, account_url, expected_primary, expected_secondary, **kwargs): + storage_account_name = "myaccount" + storage_account_key = kwargs.pop("storage_account_key") + + queue_name = "queue" + + for service_type in SERVICES.keys(): + service = service_type( + account_url, + credential=storage_account_key.secret, + queue_name=queue_name, + ) + self.validate_ipv6_account_endpoints( + service, + storage_account_name, + storage_account_key.secret, + expected_primary, + expected_secondary, + ) + + conn_str = ( + "DefaultEndpointsProtocol=https;" + f"AccountName={storage_account_name};" + f"AccountKey={storage_account_key.secret};" + f"QueueEndpoint={account_url};" + ) + service = service_type.from_connection_string( + conn_str, credential=storage_account_key.secret, queue_name=queue_name + ) + self.validate_ipv6_account_endpoints( + service, + storage_account_name, + storage_account_key.secret, + expected_primary, + expected_secondary, + ) + + service = QueueClient.from_queue_url( + queue_url=f"{account_url}/{queue_name}-secondary", + credential=storage_account_key.secret, + ) + self.validate_ipv6_account_endpoints( + service, + storage_account_name, + storage_account_key.secret, + expected_primary, + expected_secondary, + ) + + @QueuePreparer() + def test_create_service_ipv6_custom_domain(self): + token_credential = self.get_credential(QueueServiceClient, is_async=True) + + hostname = "github.com" + account_url = f"https://{hostname}" + for service_type in SERVICES.keys(): + service = service_type(account_url, credential=token_credential, queue_name="foo") + assert service is not None + assert service.scheme == "https" + assert service.account_name is None + assert 
service.credential is not None + assert service._hosts[LocationMode.PRIMARY] == hostname + assert service._hosts[LocationMode.SECONDARY] == "" + # --Connection String Test Cases -------------------------------------------- @QueuePreparer() def test_create_service_with_connection_string_key(self, **kwargs): @@ -484,7 +611,8 @@ async def test_request_callback_signed_header(self, **kwargs): # Arrange service = QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, ) name = self.get_resource_name("cont") @@ -508,7 +636,8 @@ async def test_response_callback(self, **kwargs): # Arrange service = QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, ) name = self.get_resource_name("cont") queue = service.get_queue_client(name) @@ -528,7 +657,8 @@ async def test_user_agent_default(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") service = QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, ) def callback(response): @@ -576,7 +706,8 @@ async def test_user_agent_append(self, **kwargs): storage_account_key = kwargs.pop("storage_account_key") service = QueueServiceClient( - self.account_url(storage_account_name, "queue"), credential=storage_account_key.secret + self.account_url(storage_account_name, "queue"), + credential=storage_account_key.secret, ) def callback(response): diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_encryption.py b/sdk/storage/azure-storage-queue/tests/test_queue_encryption.py index 9aab6293c2f7..08feb2090119 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_encryption.py 
+++ b/sdk/storage/azure-storage-queue/tests/test_queue_encryption.py @@ -41,7 +41,6 @@ from encryption_test_helper import KeyResolver, KeyWrapper, mock_urandom, RSAKeyWrapper from settings.testcase import QueuePreparer - # ------------------------------------------------------------------------------ TEST_QUEUE_PREFIX = "encryptionqueue" # ------------------------------------------------------------------------------ @@ -204,7 +203,9 @@ def test_update_encrypted_binary_message(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) queue = self._create_queue( - qsc, message_encode_policy=BinaryBase64EncodePolicy(), message_decode_policy=BinaryBase64DecodePolicy() + qsc, + message_encode_policy=BinaryBase64EncodePolicy(), + message_decode_policy=BinaryBase64DecodePolicy(), ) queue.key_encryption_key = KeyWrapper("key1") @@ -438,7 +439,8 @@ def test_validate_encryption(self, **kwargs): message = message["EncryptedMessageContents"] content_encryption_key = kek.unwrap_key( - encryption_data.wrapped_content_key.encrypted_key, encryption_data.wrapped_content_key.algorithm + encryption_data.wrapped_content_key.encrypted_key, + encryption_data.wrapped_content_key.algorithm, ) # Create decryption cipher @@ -664,7 +666,9 @@ def test_update_encrypted_binary_message_v2(self, **kwargs): key_encryption_key=KeyWrapper("key1"), ) queue = self._create_queue( - qsc, message_encode_policy=BinaryBase64EncodePolicy(), message_decode_policy=BinaryBase64DecodePolicy() + qsc, + message_encode_policy=BinaryBase64EncodePolicy(), + message_decode_policy=BinaryBase64DecodePolicy(), ) queue.key_encryption_key = KeyWrapper("key1") diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_encryption_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_encryption_async.py index 05b881c686d8..955c6ebdad7c 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_encryption_async.py +++ 
b/sdk/storage/azure-storage-queue/tests/test_queue_encryption_async.py @@ -12,7 +12,11 @@ import pytest from azure.core.exceptions import HttpResponseError, ResourceExistsError -from azure.storage.queue import BinaryBase64DecodePolicy, BinaryBase64EncodePolicy, VERSION +from azure.storage.queue import ( + BinaryBase64DecodePolicy, + BinaryBase64EncodePolicy, + VERSION, +) from azure.storage.queue.aio import QueueServiceClient from azure.storage.queue._encryption import ( _dict_to_encryption_data, @@ -206,7 +210,9 @@ async def test_update_encrypted_binary_message(self, **kwargs): qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) # Arrange queue = await self._create_queue( - qsc, message_encode_policy=BinaryBase64EncodePolicy(), message_decode_policy=BinaryBase64DecodePolicy() + qsc, + message_encode_policy=BinaryBase64EncodePolicy(), + message_decode_policy=BinaryBase64DecodePolicy(), ) queue.key_encryption_key = KeyWrapper("key1") @@ -446,7 +452,8 @@ async def test_validate_encryption(self, **kwargs): message = message["EncryptedMessageContents"] content_encryption_key = kek.unwrap_key( - encryption_data.wrapped_content_key.encrypted_key, encryption_data.wrapped_content_key.algorithm + encryption_data.wrapped_content_key.encrypted_key, + encryption_data.wrapped_content_key.algorithm, ) # Create decryption cipher @@ -676,7 +683,9 @@ async def test_update_encrypted_binary_message_v2(self, **kwargs): key_encryption_key=KeyWrapper("key1"), ) queue = await self._create_queue( - qsc, message_encode_policy=BinaryBase64EncodePolicy(), message_decode_policy=BinaryBase64DecodePolicy() + qsc, + message_encode_policy=BinaryBase64EncodePolicy(), + message_decode_policy=BinaryBase64DecodePolicy(), ) queue.key_encryption_key = KeyWrapper("key1") diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_service_properties.py b/sdk/storage/azure-storage-queue/tests/test_queue_service_properties.py index 
6d154ecf46a2..bfb063905112 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_service_properties.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_service_properties.py @@ -9,13 +9,18 @@ import pytest from azure.core.exceptions import HttpResponseError -from azure.storage.queue import CorsRule, Metrics, QueueAnalyticsLogging, QueueServiceClient, RetentionPolicy +from azure.storage.queue import ( + CorsRule, + Metrics, + QueueAnalyticsLogging, + QueueServiceClient, + RetentionPolicy, +) from devtools_testutils import recorded_by_proxy from devtools_testutils.storage import StorageRecordedTestCase from settings.testcase import QueuePreparer - # ------------------------------------------------------------------------------ @@ -106,7 +111,10 @@ def test_queue_service_properties(self, **kwargs): qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) # Act resp = qsc.set_service_properties( - analytics_logging=QueueAnalyticsLogging(), hour_metrics=Metrics(), minute_metrics=Metrics(), cors=[] + analytics_logging=QueueAnalyticsLogging(), + hour_metrics=Metrics(), + minute_metrics=Metrics(), + cors=[], ) # Assert @@ -124,7 +132,10 @@ def test_set_logging(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) logging = QueueAnalyticsLogging( - read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5) + read=True, + write=True, + delete=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) # Act @@ -142,7 +153,11 @@ def test_set_hour_metrics(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) - hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5)) + hour_metrics = Metrics( + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, 
days=5), + ) # Act qsc.set_service_properties(hour_metrics=hour_metrics) @@ -160,7 +175,9 @@ def test_set_minute_metrics(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) minute_metrics = Metrics( - enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) # Act @@ -183,8 +200,18 @@ def test_set_cors(self, **kwargs): allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"] allowed_methods = ["GET", "PUT"] max_age_in_seconds = 500 - exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"] - allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"] + exposed_headers = [ + "x-ms-meta-data*", + "x-ms-meta-source*", + "x-ms-meta-abc", + "x-ms-meta-bcd", + ] + allowed_headers = [ + "x-ms-meta-data*", + "x-ms-meta-target*", + "x-ms-meta-xyz", + "x-ms-meta-foo", + ] cors_rule2 = CorsRule( allowed_origins, allowed_methods, @@ -235,7 +262,9 @@ def test_retention_too_long(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) minute_metrics = Metrics( - enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=366) + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=366), ) # Assert diff --git a/sdk/storage/azure-storage-queue/tests/test_queue_service_properties_async.py b/sdk/storage/azure-storage-queue/tests/test_queue_service_properties_async.py index f8bb7e39231e..d461bac0d042 100644 --- a/sdk/storage/azure-storage-queue/tests/test_queue_service_properties_async.py +++ b/sdk/storage/azure-storage-queue/tests/test_queue_service_properties_async.py @@ -7,7 +7,12 @@ import pytest from azure.core.exceptions import HttpResponseError -from azure.storage.queue 
import CorsRule, Metrics, QueueAnalyticsLogging, RetentionPolicy +from azure.storage.queue import ( + CorsRule, + Metrics, + QueueAnalyticsLogging, + RetentionPolicy, +) from azure.storage.queue.aio import QueueServiceClient from devtools_testutils.aio import recorded_by_proxy_async @@ -103,7 +108,10 @@ async def test_queue_service_properties(self, **kwargs): # Act resp = await qsc.set_service_properties( - analytics_logging=QueueAnalyticsLogging(), hour_metrics=Metrics(), minute_metrics=Metrics(), cors=[] + analytics_logging=QueueAnalyticsLogging(), + hour_metrics=Metrics(), + minute_metrics=Metrics(), + cors=[], ) # Assert @@ -121,7 +129,10 @@ async def test_set_logging(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) logging = QueueAnalyticsLogging( - read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5) + read=True, + write=True, + delete=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) # Act @@ -139,7 +150,11 @@ async def test_set_hour_metrics(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) - hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5)) + hour_metrics = Metrics( + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=5), + ) # Act await qsc.set_service_properties(hour_metrics=hour_metrics) @@ -157,7 +172,9 @@ async def test_set_minute_metrics(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) minute_metrics = Metrics( - enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5) + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=5), ) # Act @@ -180,8 +197,18 @@ async def test_set_cors(self, **kwargs): 
allowed_origins = ["www.xyz.com", "www.ab.com", "www.bc.com"] allowed_methods = ["GET", "PUT"] max_age_in_seconds = 500 - exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"] - allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"] + exposed_headers = [ + "x-ms-meta-data*", + "x-ms-meta-source*", + "x-ms-meta-abc", + "x-ms-meta-bcd", + ] + allowed_headers = [ + "x-ms-meta-data*", + "x-ms-meta-target*", + "x-ms-meta-xyz", + "x-ms-meta-foo", + ] cors_rule2 = CorsRule( allowed_origins, allowed_methods, @@ -235,7 +262,9 @@ async def test_retention_too_long(self, **kwargs): # Arrange qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key.secret) minute_metrics = Metrics( - enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=366) + enabled=True, + include_apis=True, + retention_policy=RetentionPolicy(enabled=True, days=366), ) # Assert