Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -87,4 +87,24 @@ def blob_to_proto(blob):
retain_until_time=retain_until_time_proto,
)

contexts = getattr(blob, "contexts", None)
if contexts:
custom_contexts = {}
for key, payload in contexts.custom.items():
payload_params = {"value": payload.value}
if payload.create_time is not None:
create_time_proto = timestamp_pb2.Timestamp()
create_time_proto.FromDatetime(payload.create_time)
payload_params["create_time"] = create_time_proto
if payload.update_time is not None:
update_time_proto = timestamp_pb2.Timestamp()
update_time_proto.FromDatetime(payload.update_time)
payload_params["update_time"] = update_time_proto

custom_contexts[key] = _storage_v2.ObjectCustomContextPayload(
**payload_params
)

resource_params["contexts"] = _storage_v2.ObjectContexts(custom=custom_contexts)

return _storage_v2.Object(**resource_params)
144 changes: 144 additions & 0 deletions packages/google-cloud-storage/google/cloud/storage/blob.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@
"name",
"retention",
"storageClass",
"contexts",
)
_READ_LESS_THAN_SIZE = (
"Size {:d} was specified but the file-like object only had {:d} bytes remaining."
Expand Down Expand Up @@ -5008,6 +5009,29 @@ def retention(self):
info = self._properties.get("retention", {})
return Retention.from_api_repr(info, self)

@property
def contexts(self):
    """Retrieve the contexts for this object.

    :rtype: :class:`ObjectContexts`
    :returns: an instance for managing the object's contexts.
    """
    info = self._properties.get("contexts", {})
    return ObjectContexts.from_api_repr(info, self)

@contexts.setter
def contexts(self, value):
    """Update the contexts for this object.

    :type value: :class:`ObjectContexts` or dict or None
    :param value: the new contexts for the object.  ``None`` clears the
        object's contexts on the next ``patch()``.
    """
    # Always route through _patch_property so the change is tracked for
    # patch().  The previous implementation only tracked non-None values
    # (writing _properties directly for None), so setting contexts to
    # None was silently omitted from the next patch request.
    self._patch_property("contexts", value)

@property
def soft_delete_time(self):
"""If this object has been soft-deleted, returns the time at which it became soft-deleted.
Expand Down Expand Up @@ -5300,3 +5324,123 @@ def retention_expiration_time(self):
retention_expiration_time = self.get("retentionExpirationTime")
if retention_expiration_time is not None:
return _rfc3339_nanos_to_datetime(retention_expiration_time)


class ObjectCustomContextPayload(dict):
    """Payload for a custom context.

    The instance stores its JSON/API representation (a ``dict`` with
    camelCase keys); the properties below expose those entries using
    Python types.

    :type value: str or ``NoneType``
    :param value: (Optional) The value of the custom context.

    :type create_time: :class:`datetime.datetime` or ``NoneType``
    :param create_time: (Optional) Creation time of the custom context.

    :type update_time: :class:`datetime.datetime` or ``NoneType``
    :param update_time: (Optional) Last update time of the custom context.
    """

    def __init__(self, value=None, create_time=None, update_time=None):
        super(ObjectCustomContextPayload, self).__init__(value=value)
        # Timestamps are serialized to RFC3339 strings, as the API expects.
        if create_time is not None:
            self["createTime"] = _datetime_to_rfc3339(create_time)
        if update_time is not None:
            self["updateTime"] = _datetime_to_rfc3339(update_time)

    @property
    def value(self):
        """The value of the custom context.

        :rtype: str or ``NoneType``
        :returns: The value of the custom context.
        """
        return self.get("value")

    @value.setter
    def value(self, new_value):
        self["value"] = new_value

    @property
    def create_time(self):
        """Creation time of the custom context.

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime parsed from the RFC3339 ``createTime`` entry,
            or ``None`` if not present.
        """
        raw = self.get("createTime")
        return None if raw is None else _rfc3339_nanos_to_datetime(raw)

    @property
    def update_time(self):
        """Last update time of the custom context.

        :rtype: :class:`datetime.datetime` or ``NoneType``
        :returns: Datetime parsed from the RFC3339 ``updateTime`` entry,
            or ``None`` if not present.
        """
        raw = self.get("updateTime")
        return None if raw is None else _rfc3339_nanos_to_datetime(raw)


class ObjectContexts(dict):
    """Container for an object's contexts.

    The instance stores its JSON/API representation; ``custom`` exposes
    the per-key payload mapping.

    :type blob: :class:`Blob`
    :param blob: blob for which these contexts apply to.

    :type custom: dict or ``NoneType``
    :param custom: (Optional) Custom contexts mapping.
    """

    def __init__(self, blob, custom=None):
        initial = {} if custom is None else {"custom": custom}
        super(ObjectContexts, self).__init__(initial)
        self._blob = blob

    @classmethod
    def from_api_repr(cls, resource, blob):
        """Factory: construct instance from resource.

        :type resource: dict
        :param resource: mapping as returned from API call.

        :type blob: :class:`Blob`
        :param blob: Blob for which these contexts apply to.

        :rtype: :class:`ObjectContexts`
        :returns: ObjectContexts instance created from resource.
        """
        payloads = {}
        for key, raw_payload in resource.get("custom", {}).items():
            # Wrap each raw mapping so callers get typed accessors.
            wrapped = ObjectCustomContextPayload()
            wrapped.update(raw_payload)
            payloads[key] = wrapped
        return cls(blob, custom=payloads)

    @property
    def blob(self):
        """Blob for which these contexts apply to.

        :rtype: :class:`Blob`
        :returns: the instance's blob.
        """
        return self._blob

    @property
    def custom(self):
        """Custom contexts mapping.

        :rtype: dict
        :returns: Mapping of keys to :class:`ObjectCustomContextPayload`
            instances.
        """
        # Lazily insert an empty mapping so callers can mutate in place.
        return self.setdefault("custom", {})

    @custom.setter
    def custom(self, value):
        self["custom"] = value
        # Mark the owning blob's "contexts" property dirty for patch().
        self.blob._patch_property("contexts", self)
Original file line number Diff line number Diff line change
Expand Up @@ -1423,6 +1423,7 @@ def list_blobs(
include_folders_as_prefixes=None,
soft_deleted=None,
page_size=None,
filter_=None,
):
"""Return an iterator used to find blobs in the bucket.

Expand Down Expand Up @@ -1516,6 +1517,10 @@ def list_blobs(
Note ``soft_deleted`` and ``versions`` cannot be set to True simultaneously. See:
https://cloud.google.com/storage/docs/soft-delete

:type filter_: str
:param filter_:
(Optional) Filter string used to filter objects.

:type page_size: int
:param page_size:
(Optional) Maximum number of blobs to return in each page.
Expand Down Expand Up @@ -1545,6 +1550,7 @@ def list_blobs(
match_glob=match_glob,
include_folders_as_prefixes=include_folders_as_prefixes,
soft_deleted=soft_deleted,
filter_=filter_,
)

def list_notifications(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1291,6 +1291,7 @@ def list_blobs(
match_glob=None,
include_folders_as_prefixes=None,
soft_deleted=None,
filter_=None,
):
"""Return an iterator used to find blobs in the bucket.

Expand Down Expand Up @@ -1400,6 +1401,9 @@ def list_blobs(
Note ``soft_deleted`` and ``versions`` cannot be set to True simultaneously. See:
https://cloud.google.com/storage/docs/soft-delete

filter_ (str):
(Optional) Filter string used to filter objects.

Returns:
Iterator of all :class:`~google.cloud.storage.blob.Blob`
in this bucket matching the arguments. The RPC call
Expand Down Expand Up @@ -1443,6 +1447,9 @@ def list_blobs(
if soft_deleted is not None:
extra_params["softDeleted"] = soft_deleted

if filter_ is not None:
extra_params["filter"] = filter_

if bucket.user_project is not None:
extra_params["userProject"] = bucket.user_project

Expand Down
32 changes: 32 additions & 0 deletions packages/google-cloud-storage/tests/system/test_blob.py
Original file line number Diff line number Diff line change
Expand Up @@ -554,6 +554,38 @@ def test_blob_patch_metadata(
assert blob.metadata == {"foo": "Foo"}


def test_blob_contexts_crud(
    shared_bucket,
    blobs_to_delete,
    file_data,
    service_account,
):
    """Attach, read back, and delete a custom context on a live object."""
    from google.cloud.storage.blob import ObjectContexts, ObjectCustomContextPayload

    source_path = file_data["logo"]["path"]
    blob = shared_bucket.blob(os.path.basename(source_path))
    blob.upload_from_filename(source_path)
    blobs_to_delete.append(blob)

    # Create: attach a custom context; the server populates timestamps.
    blob.contexts = ObjectContexts(
        blob, custom={"foo": ObjectCustomContextPayload(value="bar")}
    )
    blob.patch()
    blob.reload()
    assert "foo" in blob.contexts.custom
    payload = blob.contexts.custom["foo"]
    assert payload.value == "bar"
    assert payload.create_time is not None
    assert payload.update_time is not None

    # Ensure that context keys can be deleted by setting equal to None.
    blob.contexts = ObjectContexts(blob, custom={"foo": None})
    blob.patch()
    blob.reload()
    assert "foo" not in blob.contexts.custom


def test_blob_direct_write_and_read_into_file(
shared_bucket,
blobs_to_delete,
Expand Down
29 changes: 29 additions & 0 deletions packages/google-cloud-storage/tests/system/test_bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -731,6 +731,35 @@ def test_bucket_list_blobs_w_match_glob(
assert [blob.name for blob in blobs] == expected_names


@_helpers.retry_failures
def test_bucket_list_blobs_w_filter(
    storage_client,
    buckets_to_delete,
    blobs_to_delete,
):
    """``list_blobs(filter_=...)`` returns only objects matching a context filter."""
    from google.cloud.storage.blob import ObjectContexts, ObjectCustomContextPayload

    bucket = _helpers.retry_429_503(storage_client.create_bucket)(
        _helpers.unique_name("w-filter")
    )
    buckets_to_delete.append(bucket)

    content = b"helloworld"
    for name in ("foo", "bar", "baz"):
        blob = bucket.blob(name)
        blob.upload_from_string(content)
        if name == "bar":
            # Only 'bar' carries the context the filter below matches.
            blob.contexts = ObjectContexts(
                blob, custom={"target": ObjectCustomContextPayload(value="match")}
            )
            blob.patch()
        blobs_to_delete.append(blob)

    # List with filter matching only 'bar'
    listed = list(bucket.list_blobs(filter_='contexts."target"="match"'))
    assert [item.name for item in listed] == ["bar"]


def test_bucket_list_blobs_include_managed_folders(
storage_client,
buckets_to_delete,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

from google.cloud import _storage_v2
from google.cloud.storage import Blob, Bucket
from google.cloud.storage.blob import ObjectContexts, ObjectCustomContextPayload
from google.cloud.storage.asyncio.async_write_object_stream import (
_AsyncWriteObjectStream,
)
Expand Down Expand Up @@ -198,6 +199,9 @@ async def test_open_new_object_with_blob_sync_attrs(
"retain_until_time": retain_until_time,
}

payload = ObjectCustomContextPayload(value="context-value")
mock_blob.contexts = ObjectContexts(mock_blob, custom={"context-key": payload})

stream = _AsyncWriteObjectStream(mock_client, BUCKET, OBJECT, blob=mock_blob)
await stream.open()

Expand Down Expand Up @@ -226,6 +230,9 @@ async def test_open_new_object_with_blob_sync_attrs(
retain_until_time.timestamp()
)

assert "context-key" in resource.contexts.custom
assert resource.contexts.custom["context-key"].value == "context-value"

@pytest.mark.asyncio
async def test_open_already_open_raises(self, mock_client):
stream = _AsyncWriteObjectStream(mock_client, BUCKET, OBJECT)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,3 +134,31 @@ def test_blob_to_proto_retention():
assert int(proto.retention.retain_until_time.timestamp()) == int(
retain_until_time.timestamp()
)


def test_blob_to_proto_contexts():
    """blob_to_proto carries custom contexts (value + create_time) into the proto."""
    from google.cloud.storage.blob import ObjectContexts, ObjectCustomContextPayload

    blob = mock.Mock(
        spec=["name", "bucket", "contexts", "custom_time", "acl", "retention"]
    )
    blob.name = "blob-name"
    blob.bucket.name = "bucket-name"

    created = datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc)
    context_payload = ObjectCustomContextPayload(value="val", create_time=created)
    blob.contexts = ObjectContexts(blob, custom={"key": context_payload})

    # Null out every other attribute the converter inspects.
    blob.custom_time = None
    blob.acl = None
    blob.retention = None
    for attr in _grpc_conversions._BLOB_ATTR_TO_PROTO_FIELD:
        setattr(blob, attr, None)

    proto = _grpc_conversions.blob_to_proto(blob)

    # Check membership first: indexing a protobuf map would create the entry.
    assert "key" in proto.contexts.custom
    converted = proto.contexts.custom["key"]
    assert converted.value == "val"
    assert int(converted.create_time.timestamp()) == int(created.timestamp())
Loading
Loading