Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion sdk/storage/azure-storage-blob/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Release History

## 12.30.0b1 (Unreleased)
## 12.30.0b1 (2026-03-26)

### Features Added
- Added support for service version 2026-06-06.
Expand All @@ -12,6 +12,7 @@ for `BlobServiceClient`, `ContainerClient`, and `BlobClient`.
which is optimized to automatically determine the most cost-effective access with no performance impact.
When set, `BlobProperties.smart_access_tier` will reveal the service's current access
tier choice between `Hot`, `Cool`, and `Archive`.
- Added support for the `is_directory` keyword argument in `generate_blob_sas`, which generates a directory-level SAS for blobs.

### Other Changes
- Consolidated the behavior of `max_concurrency=None` by defaulting to the shared `DEFAULT_MAX_CONCURRENCY` constant.
Expand Down
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-blob/assets.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/storage/azure-storage-blob",
"Tag": "python/storage/azure-storage-blob_054396a10c"
"Tag": "python/storage/azure-storage-blob_a6eb270886"
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,19 +26,19 @@ class BlobQueryStringConstants(object):


class BlobSharedAccessSignature(SharedAccessSignature):
'''
"""
Provides a factory for creating blob and container access
signature tokens with a common account name and account key. Users can either
use the factory or can construct the appropriate service and use the
generate_*_shared_access_signature method directly.
'''
generate_*_sas method directly.
"""

def __init__(
self, account_name: str,
account_key: Optional[str] = None,
user_delegation_key: Optional[UserDelegationKey] = None
) -> None:
'''
"""
:param str account_name:
The storage account name used to generate the shared access signatures.
:param Optional[str] account_key:
Expand All @@ -47,7 +47,7 @@ def __init__(
Instead of an account key, the user could pass in a user delegation key.
A user delegation key can be obtained from the service by authenticating with an AAD identity;
this can be accomplished by calling get_user_delegation_key on any Blob service object.
'''
"""
super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION)
self.user_delegation_key = user_delegation_key

Expand All @@ -74,7 +74,7 @@ def generate_blob(
sts_hook: Optional[Callable[[str], None]] = None,
**kwargs: Any
) -> str:
'''
"""
Generates a shared access signature for the blob or one of its snapshots.
Use the returned signature with the sas_token parameter of any BlobService.

Expand Down Expand Up @@ -160,7 +160,7 @@ def generate_blob(
:type sts_hook: Optional[Callable[[str], None]]
:return: A Shared Access Signature (sas) token.
:rtype: str
'''
"""
resource_path = container_name + '/' + blob_name

sas = _BlobSharedAccessHelper()
Expand All @@ -170,14 +170,18 @@ def generate_blob(

resource = 'bs' if snapshot else 'b'
resource = 'bv' if version_id else resource
resource = 'd' if kwargs.pop("is_directory", None) else resource
resource = 'd' if is_directory else resource
sas.add_resource(resource)

sas.add_timestamp(snapshot or version_id)
sas.add_override_response_headers(cache_control, content_disposition,
content_encoding, content_language,
content_type)
sas.add_encryption_scope(**kwargs)

if is_directory:
sas.add_directory_depth(blob_name, kwargs.pop('sdd', None))

sas.add_info_for_hns_account(**kwargs)
sas.add_resource_signature(
self.account_name,
Expand Down Expand Up @@ -212,7 +216,7 @@ def generate_container(
sts_hook: Optional[Callable[[str], None]] = None,
**kwargs: Any
) -> str:
'''
"""
Generates a shared access signature for the container.
Use the returned signature with the sas_token parameter of any BlobService.

Expand Down Expand Up @@ -284,7 +288,7 @@ def generate_container(
:type sts_hook: Optional[Callable[[str], None]]
:return: A Shared Access Signature (sas) token.
:rtype: str
'''
"""
sas = _BlobSharedAccessHelper()
sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version)
sas.add_id(policy_id)
Expand Down Expand Up @@ -315,8 +319,17 @@ class _BlobSharedAccessHelper(_SharedAccessHelper):
def add_timestamp(self, timestamp):
    """Attach the signed timestamp value to the SAS query string.

    :param timestamp: The snapshot or version timestamp to sign
        (may be None, in which case the helper decides whether to emit it).
    """
    query_key = BlobQueryStringConstants.SIGNED_TIMESTAMP
    self._add_query(query_key, timestamp)

def add_directory_depth(self, blob_name, sdd):
    """Add the signed directory depth ('sdd') query parameter for a directory SAS.

    :param str blob_name: The directory path the SAS is scoped to. Used to
        derive the depth when no explicit depth is supplied.
    :param sdd: A pre-computed signed directory depth (may be provided by the
        Datalake package). If None, the depth is computed from blob_name.
    """
    if sdd is None:
        # Depth is the number of path segments. Strip surrounding slashes
        # first so that "", "/", and slash-only paths ("//", ...) all count
        # as the root (depth 0) — the previous equality check missed the
        # slash-only case and reported depth 1 for it.
        stripped = blob_name.strip("/")
        sdd = len(stripped.split("/")) if stripped else 0
    self._add_query(QueryStringConstants.SIGNED_DIRECTORY_DEPTH, str(sdd))

def add_info_for_hns_account(self, **kwargs):
self._add_query(QueryStringConstants.SIGNED_DIRECTORY_DEPTH, kwargs.pop('sdd', None))
self._add_query(QueryStringConstants.SIGNED_AUTHORIZED_OID, kwargs.pop('preauthorized_agent_object_id', None))
self._add_query(QueryStringConstants.SIGNED_UNAUTHORIZED_OID, kwargs.pop('agent_object_id', None))
self._add_query(QueryStringConstants.SIGNED_CORRELATION_ID, kwargs.pop('correlation_id', None))
Expand Down Expand Up @@ -792,6 +805,7 @@ def generate_blob_sas(
user_delegation_oid=user_delegation_oid,
request_headers=request_headers,
request_query_params=request_query_params,
is_directory=is_directory,
sts_hook=sts_hook,
**kwargs
)
Expand Down
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-blob/swagger/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ autorest --v3 --python

### Settings
``` yaml
input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2026-04-06/blob.json
input-file: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/main/specification/storage/data-plane/Microsoft.BlobStorage/stable/2026-06-06/blob.json
output-folder: ../azure/storage/blob/_generated
namespace: azure.storage.blob
no-namespace-folders: true
Expand Down
86 changes: 85 additions & 1 deletion sdk/storage/azure-storage-blob/tests/test_common_blob.py
Original file line number Diff line number Diff line change
Expand Up @@ -3830,4 +3830,88 @@ def test_smart_rehydrate(self, **kwargs):
assert props is not None
assert props.archive_status == "rehydrate-pending-to-smart"

# ------------------------------------------------------------------------------
# Verifies that a directory-scoped blob SAS (is_directory=True) authorizes
# both the blob named exactly like the directory and blobs nested under it.
@BlobPreparer()
@recorded_by_proxy
def test_blob_fns_directory(self, **kwargs):
storage_account_name = kwargs.pop("storage_account_name")
variables = kwargs.pop("variables", {})

# AAD credential: directory SAS here is generated from a user delegation key.
token_credential = self.get_credential(BlobServiceClient)
service = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=token_credential
)
container_name = self.get_resource_name("directorysascontainer")

try:
service.create_container(container_name)

# Recorded datetimes so playback uses the same SAS window as the recording.
start = self.get_datetime_variable(variables, 'start', datetime.utcnow())
expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1))
user_delegation_key = service.get_user_delegation_key(start, expiry)

# Exercise directories at depth 1, 2, and 3.
for blob_name in ["foo", "foo/bar", "foo/bar/hello"]:
token = self.generate_sas(
generate_blob_sas,
account_name=storage_account_name,
container_name=container_name,
blob_name=blob_name,
user_delegation_key=user_delegation_key,
permission=BlobSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
expiry=expiry,
is_directory=True,
)

# Upload to the blob whose name equals the directory path itself.
exact_blob = service.get_blob_client(container_name, blob_name)
BlobClient.from_blob_url(exact_blob.url, credential=token).upload_blob(b"data", overwrite=True)

# Blob whose name has the SAS directory name as a prefix should also succeed
child_blob = service.get_blob_client(container_name, blob_name + "/test")
BlobClient.from_blob_url(child_blob.url, credential=token).upload_blob(b"data", overwrite=True)
finally:
service.delete_container(container_name)

return variables

# Verifies that a directory-scoped blob SAS does NOT authorize access to a
# blob outside the signed directory, even one sharing a name prefix.
@BlobPreparer()
@recorded_by_proxy
def test_blob_fns_directory_fail(self, **kwargs):
storage_account_name = kwargs.pop("storage_account_name")
variables = kwargs.pop("variables", {})

token_credential = self.get_credential(BlobServiceClient)
service = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=token_credential
)
container_name = self.get_resource_name("directorysascontainer")

try:
service.create_container(container_name)

# Recorded datetimes so playback uses the same SAS window as the recording.
start = self.get_datetime_variable(variables, 'start', datetime.utcnow())
expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1))
user_delegation_key = service.get_user_delegation_key(start, expiry)

# SAS is scoped to the directory "foo/bar/baz/".
blob_name = "foo/bar/baz/"
token = self.generate_sas(
generate_blob_sas,
account_name=storage_account_name,
container_name=container_name,
blob_name=blob_name,
user_delegation_key=user_delegation_key,
permission=BlobSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
expiry=expiry,
is_directory=True,
)

# "foo/bar" is a parent of the signed directory, not inside it, so the
# service must reject the request made with this token.
non_prefix_blob = service.get_blob_client(container_name, "foo/bar")
non_prefix_blob_with_sas = BlobClient.from_blob_url(non_prefix_blob.url, credential=token)
with pytest.raises(HttpResponseError):
non_prefix_blob_with_sas.upload_blob(b"data", overwrite=True)
finally:
service.delete_container(container_name)

return variables

# ------------------------------------------------------------------------------
86 changes: 86 additions & 0 deletions sdk/storage/azure-storage-blob/tests/test_common_blob_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -3768,4 +3768,90 @@ async def test_smart_rehydrate(self, **kwargs):
assert props is not None
assert props.archive_status == "rehydrate-pending-to-smart"

# Async variant: a directory-scoped blob SAS (is_directory=True) authorizes
# both the blob named exactly like the directory and blobs nested under it.
@BlobPreparer()
@recorded_by_proxy_async
async def test_blob_fns_directory(self, **kwargs):
storage_account_name = kwargs.pop("storage_account_name")
variables = kwargs.pop("variables", {})

# AAD credential: directory SAS here is generated from a user delegation key.
token_credential = self.get_credential(BlobServiceClient, is_async=True)
service = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=token_credential
)
container_name = self.get_resource_name("directorysascontainer")

try:
await service.create_container(container_name)

# Recorded datetimes so playback uses the same SAS window as the recording.
start = self.get_datetime_variable(variables, 'start', datetime.utcnow())
expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1))
user_delegation_key = await service.get_user_delegation_key(start, expiry)

# Exercise directories at depth 1, 2, and 3.
for blob_name in ["foo", "foo/bar", "foo/bar/hello"]:
token = self.generate_sas(
generate_blob_sas,
account_name=storage_account_name,
container_name=container_name,
blob_name=blob_name,
user_delegation_key=user_delegation_key,
permission=BlobSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
expiry=expiry,
is_directory=True,
)

# Upload to the blob whose name equals the directory path itself.
exact_blob = service.get_blob_client(container_name, blob_name)
await BlobClient.from_blob_url(
exact_blob.url, credential=token).upload_blob(b"data", overwrite=True)

# Blob whose name has the SAS directory name as a prefix should also succeed
child_blob = service.get_blob_client(container_name, blob_name + "/test")
await BlobClient.from_blob_url(
child_blob.url, credential=token).upload_blob(b"data", overwrite=True)
finally:
await service.delete_container(container_name)

return variables

# Async variant: a directory-scoped blob SAS must NOT authorize access to a
# blob outside the signed directory, even one sharing a name prefix.
@BlobPreparer()
@recorded_by_proxy_async
async def test_blob_fns_directory_fail(self, **kwargs):
storage_account_name = kwargs.pop("storage_account_name")
variables = kwargs.pop("variables", {})

token_credential = self.get_credential(BlobServiceClient, is_async=True)
service = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=token_credential
)
container_name = self.get_resource_name("directorysascontainer")

try:
await service.create_container(container_name)

# Recorded datetimes so playback uses the same SAS window as the recording.
start = self.get_datetime_variable(variables, 'start', datetime.utcnow())
expiry = self.get_datetime_variable(variables, 'expiry', datetime.utcnow() + timedelta(hours=1))
user_delegation_key = await service.get_user_delegation_key(start, expiry)

# SAS is scoped to the directory "foo/bar/baz/".
blob_name = "foo/bar/baz/"
token = self.generate_sas(
generate_blob_sas,
account_name=storage_account_name,
container_name=container_name,
blob_name=blob_name,
user_delegation_key=user_delegation_key,
permission=BlobSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
expiry=expiry,
is_directory=True,
)

# "foo/bar" is a parent of the signed directory, not inside it, so the
# service must reject the request made with this token.
non_prefix_blob = service.get_blob_client(container_name, "foo/bar")
non_prefix_blob_with_sas = BlobClient.from_blob_url(non_prefix_blob.url, credential=token)
with pytest.raises(HttpResponseError):
await non_prefix_blob_with_sas.upload_blob(b"data", overwrite=True)
finally:
await service.delete_container(container_name)

return variables

# ------------------------------------------------------------------------------
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-file-datalake/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Release History

## 12.25.0b1 (Unreleased)
## 12.25.0b1 (2026-03-26)

### Features Added
- Added support for service version 2026-06-06.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -116,8 +116,13 @@ def __init__(
# ADLS doesn't support secondary endpoint, make sure it's empty
self._hosts[LocationMode.SECONDARY] = ""

self._client = AzureDataLakeStorageRESTAPI(self.url, base_url=self.url, pipeline=self._pipeline)
self._client._config.version = get_api_version(kwargs) # type: ignore [assignment]
self._api_version = get_api_version(kwargs)
self._client = AzureDataLakeStorageRESTAPI(
self.url,
version=self._api_version,
base_url=self.url,
pipeline=self._pipeline
)

def __enter__(self) -> Self:
self._client.__enter__()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -157,11 +157,11 @@ def close(self) -> None:
def _build_generated_client(self, url: str) -> AzureDataLakeStorageRESTAPI:
client = AzureDataLakeStorageRESTAPI(
url,
version=self._api_version,
base_url=url,
file_system=self.file_system_name,
pipeline=self._pipeline
)
client._config.version = self._api_version # type: ignore [assignment] # pylint: disable=protected-access
return client

def _format_url(self, hostname: str) -> str:
Expand Down Expand Up @@ -968,6 +968,7 @@ def _undelete_path(
)
path_client = AzureDataLakeStorageRESTAPI(
url,
version=self._api_version,
filesystem=self.file_system_name,
path=deleted_path_name,
pipeline=pipeline
Expand Down
Loading
Loading