Skip to content

Commit d59c30b

Browse files
committed
chore: add system and unit tests
1 parent 7156787 commit d59c30b

7 files changed

Lines changed: 188 additions & 25 deletions

File tree

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
# Copyright 2026 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
from google.cloud import _storage_v2
16+
17+
# Map Python Blob attribute names to GCS V2 ``Object`` proto field names.
# Attributes whose value is ``None`` on the Blob are skipped when the proto
# is built, so only explicitly-set attributes are sent to the server.
_BLOB_ATTR_TO_PROTO_FIELD = {
    "content_type": "content_type",
    "metadata": "metadata",
    # NOTE: the Blob attribute and the proto field are named differently here.
    "kms_key_name": "kms_key",
}
23+
24+
25+
def blob_to_proto(blob):
    """Build a GCS V2 ``Object`` proto message from a ``Blob`` instance.

    Copies the object name, the fully-qualified bucket path (when the blob
    has a bucket), and every attribute listed in
    ``_BLOB_ATTR_TO_PROTO_FIELD`` whose value is not ``None``.

    :type blob: :class:`google.cloud.storage.Blob`
    :param blob: the blob to convert.

    :rtype: :class:`google.cloud._storage_v2.Object`
    :returns: the equivalent V2 Object proto message.
    """
    fields = {"name": blob.name}

    if blob.bucket:
        # The proto expects the full resource path, not the bare bucket name.
        fields["bucket"] = f"projects/_/buckets/{blob.bucket.name}"

    # Copy each mapped attribute that is actually set on the blob.
    fields.update(
        {
            proto_field: getattr(blob, attr, None)
            for attr, proto_field in _BLOB_ATTR_TO_PROTO_FIELD.items()
            if getattr(blob, attr, None) is not None
        }
    )

    return _storage_v2.Object(**fields)

packages/google-cloud-storage/google/cloud/storage/asyncio/async_appendable_object_writer.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -212,6 +212,7 @@ def __init__(
212212
self._routing_token: Optional[str] = None
213213
self.object_resource: Optional[_storage_v2.Object] = None
214214
self._flush_count = 0
215+
self.blob: Optional[Blob] = None
215216

216217
@classmethod
217218
def from_blob(

packages/google-cloud-storage/google/cloud/storage/asyncio/async_write_object_stream.py

Lines changed: 7 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,13 @@
1919

2020
from google.cloud import _storage_v2
2121
from google.cloud.storage import Blob
22+
from google.cloud.storage import _grpc_conversions
2223
from google.cloud.storage.asyncio import _utils
2324
from google.cloud.storage.asyncio.async_abstract_object_stream import (
2425
_AsyncAbstractObjectStream,
2526
)
2627
from google.cloud.storage.asyncio.async_grpc_client import AsyncGrpcClient
2728

28-
_BLOB_SYNC_ATTRS = ("content_type", "metadata")
2929

3030
class _AsyncWriteObjectStream(_AsyncAbstractObjectStream):
3131
"""Class representing a gRPC bidi-stream for writing data from a GCS
@@ -121,21 +121,15 @@ async def open(self, metadata: Optional[List[Tuple[str, str]]] = None) -> None:
121121
# if `generation_number` == 0 new object will be created only if there
122122
# isn't any existing object.
123123
if self.generation_number is None or self.generation_number == 0:
124-
resource_params = {
125-
"name": self.object_name,
126-
"bucket": self._full_bucket_name,
127-
}
128124
if self.blob:
129-
resource_params.update({
130-
attr: getattr(self.blob, attr)
131-
for attr in _BLOB_SYNC_ATTRS
132-
if getattr(self.blob, attr, None) is not None
133-
})
125+
resource = _grpc_conversions.blob_to_proto(self.blob)
126+
else:
127+
resource = _storage_v2.Object(
128+
name=self.object_name, bucket=self._full_bucket_name
129+
)
134130
self.first_bidi_write_req = _storage_v2.BidiWriteObjectRequest(
135131
write_object_spec=_storage_v2.WriteObjectSpec(
136-
resource=_storage_v2.Object(
137-
**resource_params
138-
),
132+
resource=resource,
139133
appendable=True,
140134
if_generation_match=self.generation_number,
141135
),

packages/google-cloud-storage/google/cloud/storage/asyncio/test_snippet.py

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
from google.cloud.storage.asyncio.async_appendable_object_writer import (
1919
AsyncAppendableObjectWriter,
2020
)
21+
from google.cloud import storage
2122
from google.cloud.storage import Blob
2223
from google.cloud.storage._experimental.asyncio.async_grpc_client import AsyncGrpcClient
2324

@@ -32,7 +33,11 @@ async def storage_create_and_write_appendable_object(
3233

3334
if grpc_client is None:
3435
grpc_client = AsyncGrpcClient()
35-
blob = Blob.from_uri("gs://{}/{}".format(bucket_name, object_name))
36+
storage_client = storage.Client()
37+
bucket = storage_client.get_bucket(bucket_name)
38+
bucket_location = bucket.location.lower()
39+
kms_key_name = f"projects/{storage_client.project}/locations/{bucket_location}/keyRings/gcs-test/cryptoKeys/gcs-test"
40+
blob = Blob(bucket=bucket, name=object_name, kms_key_name=kms_key_name)
3641
blob.content_type = "text/plain"
3742
blob.metadata = {"environment": "dev"}
3843
writer = AsyncAppendableObjectWriter.from_blob(
@@ -53,6 +58,8 @@ async def storage_create_and_write_appendable_object(
5358
print(new_object)
5459
print(new_object.size)
5560
print(new_object.content_type)
61+
print(new_object.metadata)
62+
print(new_object.kms_key)
5663
print(
5764
f"Appended object {object_name} created of size {writer.persisted_size} bytes."
5865
)
@@ -72,4 +79,4 @@ async def storage_create_and_write_appendable_object(
7279
bucket_name=args.bucket_name,
7380
object_name=args.object_name,
7481
)
75-
)
82+
)

packages/google-cloud-storage/tests/system/test_zonal.py

Lines changed: 79 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828

2929

3030
# TODO: replace this with a fixture once zonal bucket creation / deletion
31-
# is supported in grpc client or json client client.
31+
# is supported in grpc client or json client.
3232
_ZONAL_BUCKET = os.getenv("ZONAL_BUCKET")
3333
_CROSS_REGION_BUCKET = os.getenv("CROSS_REGION_BUCKET")
3434
_BYTES_TO_UPLOAD = b"dummy_bytes_to_write_read_and_delete_appendable_object"
@@ -285,6 +285,84 @@ async def _run():
285285
event_loop.run_until_complete(_run())
286286

287287

288+
def test_write_from_blob(
    storage_client,
    blobs_to_delete,
    event_loop,
    grpc_client,
):
    """Verifies from_blob carries content_type and metadata onto the new object."""
    object_name = f"test_from_blob-{str(uuid.uuid4())[:4]}"
    content_type = "text/plain"
    metadata = {"environment": "system-test"}
    test_data = b"system-test-data"

    async def _run():
        # 1. Create a Blob instance
        blob = storage_client.bucket(_ZONAL_BUCKET).blob(object_name)
        blob.content_type = content_type
        blob.metadata = metadata
        # Register for cleanup up front so the object is deleted even when a
        # later assertion fails (previously this ran after the asserts and
        # leaked the object on failure).
        blobs_to_delete.append(blob)

        # 2. Use from_blob to create the writer
        writer = AsyncAppendableObjectWriter.from_blob(grpc_client, blob)
        await writer.open()
        await writer.append(test_data)
        await writer.close(finalize_on_close=True)

        # 3. Verify the object metadata round-tripped through the writer.
        obj = await grpc_client.get_object(
            bucket_name=_ZONAL_BUCKET,
            object_name=object_name,
        )

        assert obj.content_type == content_type
        assert obj.metadata["environment"] == metadata["environment"]

    event_loop.run_until_complete(_run())
323+
324+
325+
def test_write_from_blob_with_kms_key(
    storage_client,
    blobs_to_delete,
    event_loop,
    grpc_client,
):
    """Verifies AsyncAppendableObjectWriter.from_blob correctly applies KMS encryption."""

    object_name = f"test_from_blob_kms-{str(uuid.uuid4())[:4]}"
    test_data = b"kms-protected-data"
    # Use get_bucket() (not bucket()): a bare storage_client.bucket(...) handle
    # never fetches bucket metadata, so its ``location`` is None and
    # ``.lower()`` below would raise AttributeError.
    test_bucket = storage_client.get_bucket(_ZONAL_BUCKET)
    bucket_location = test_bucket.location.lower()
    # TODO: Use a fixture for a zonal kms key once we have fixture for zonal bucket
    kms_key_name = f"projects/{storage_client.project}/locations/{bucket_location}/keyRings/gcs-test/cryptoKeys/gcs-test"

    async def _run():
        # Create a local Blob instance with the KMS key
        blob = test_bucket.blob(object_name, kms_key_name=kms_key_name)
        # Register for cleanup up front so the object is deleted even when a
        # later assertion fails.
        blobs_to_delete.append(blob)

        writer = AsyncAppendableObjectWriter.from_blob(grpc_client, blob)

        await writer.open()
        await writer.append(test_data)

        await writer.close(finalize_on_close=True)

        # Verify the encryption metadata
        obj = await grpc_client.get_object(
            bucket_name=_ZONAL_BUCKET,
            object_name=object_name,
        )

        # Assert that the object was encrypted with the correct key.
        # The gRPC V2 Object proto names this field ``kms_key`` (the JSON-API
        # Blob attribute is ``kms_key_name``); GCS appends a version suffix,
        # so we use startswith().
        assert obj.kms_key.startswith(kms_key_name)

    event_loop.run_until_complete(_run())
364+
365+
288366
def test_read_unfinalized_appendable_object(
289367
storage_client, blobs_to_delete, event_loop, grpc_client_direct
290368
):

packages/google-cloud-storage/tests/unit/asyncio/test_async_appendable_object_writer.py

Lines changed: 28 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
import pytest
2020
from google.api_core import exceptions
2121
from google.rpc import status_pb2
22+
from google.cloud.storage import Blob
2223

2324
from google.cloud._storage_v2.types import storage as storage_type
2425
from google.cloud._storage_v2.types.storage import BidiWriteObjectRedirectedError
@@ -166,6 +167,24 @@ def test_init_raises_if_crc32c_missing(self, mock_appendable_writer):
166167
with pytest.raises(exceptions.FailedPrecondition):
167168
self._make_one(mock_appendable_writer["mock_client"])
168169

170+
def test_from_blob(self, mock_appendable_writer):
    """from_blob must copy identity, generation and writer options to the writer."""
    blob = mock.Mock(spec=Blob)
    blob.name = OBJECT
    blob.bucket.name = BUCKET

    writer = AsyncAppendableObjectWriter.from_blob(
        mock_appendable_writer["mock_client"],
        blob,
        generation=GENERATION,
        writer_options={"FLUSH_INTERVAL_BYTES": EIGHT_MIB},
    )

    # Identity comes from the blob; tuning knobs come from the kwargs.
    assert (writer.bucket_name, writer.object_name) == (BUCKET, OBJECT)
    assert writer.generation == GENERATION
    assert writer.flush_interval == EIGHT_MIB
    assert writer.blob == blob
187+
169188
# -------------------------------------------------------------------------
170189
# Stream Lifecycle Tests
171190
# -------------------------------------------------------------------------
@@ -176,9 +195,9 @@ async def test_state_lookup(self, mock_appendable_writer):
176195
writer._is_stream_open = True
177196
writer.write_obj_stream = mock_appendable_writer["mock_stream"]
178197

179-
mock_appendable_writer[
180-
"mock_stream"
181-
].recv.return_value = storage_type.BidiWriteObjectResponse(persisted_size=100)
198+
mock_appendable_writer["mock_stream"].recv.return_value = (
199+
storage_type.BidiWriteObjectResponse(persisted_size=100)
200+
)
182201

183202
size = await writer.state_lookup()
184203

@@ -389,9 +408,9 @@ async def test_flush_resets_counters(self, mock_appendable_writer):
389408
writer.write_obj_stream = mock_appendable_writer["mock_stream"]
390409
writer.bytes_appended_since_last_flush = 100
391410

392-
mock_appendable_writer[
393-
"mock_stream"
394-
].recv.return_value = storage_type.BidiWriteObjectResponse(persisted_size=200)
411+
mock_appendable_writer["mock_stream"].recv.return_value = (
412+
storage_type.BidiWriteObjectResponse(persisted_size=200)
413+
)
395414

396415
await writer.flush()
397416

@@ -432,9 +451,9 @@ async def test_finalize_lifecycle(self, mock_appendable_writer):
432451
writer.write_obj_stream = mock_appendable_writer["mock_stream"]
433452

434453
resource = storage_type.Object(size=999)
435-
mock_appendable_writer[
436-
"mock_stream"
437-
].recv.return_value = storage_type.BidiWriteObjectResponse(resource=resource)
454+
mock_appendable_writer["mock_stream"].recv.return_value = (
455+
storage_type.BidiWriteObjectResponse(resource=resource)
456+
)
438457

439458
res = await writer.finalize()
440459

packages/google-cloud-storage/tests/unit/asyncio/test_async_write_object_stream.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
import pytest
2020

2121
from google.cloud import _storage_v2
22+
from google.cloud.storage import Blob
2223
from google.cloud.storage.asyncio.async_write_object_stream import (
2324
_AsyncWriteObjectStream,
2425
)
@@ -151,6 +152,29 @@ async def test_open_metadata_merging(self, mock_rpc_cls, mock_client):
151152
assert f"bucket={FULL_BUCKET_PATH}" in params
152153
assert "extra=param" in params
153154

155+
@mock.patch("google.cloud.storage.asyncio.async_write_object_stream.AsyncBidiRpc")
@pytest.mark.asyncio
async def test_open_new_object_with_blob_sync_attrs(
    self, mock_rpc_cls, mock_client
):
    """open() must copy the blob's content_type and metadata into the
    resource of the initial BidiWriteObjectRequest."""
    rpc = mock_rpc_cls.return_value
    rpc.open = AsyncMock()
    rpc.recv = AsyncMock(return_value=MagicMock(resource=None))

    blob = mock.Mock(spec=Blob)
    blob.content_type = "text/plain"
    blob.metadata = {"test-key": "test-value"}

    stream = _AsyncWriteObjectStream(mock_client, BUCKET, OBJECT, blob=blob)
    await stream.open()

    # Inspect the first bidi request handed to the RPC factory and verify
    # the blob attributes were synced onto its resource.
    initial_request = mock_rpc_cls.call_args.kwargs["initial_request"]
    resource = initial_request.write_object_spec.resource

    assert resource.content_type == "text/plain"
    assert resource.metadata == {"test-key": "test-value"}
177+
154178
@pytest.mark.asyncio
155179
async def test_open_already_open_raises(self, mock_client):
156180
stream = _AsyncWriteObjectStream(mock_client, BUCKET, OBJECT)

0 commit comments

Comments
 (0)