Skip to content

Commit 20c0aad

Browse files
Ignas and jgarciao
committed
fix: replace broken S3 cleanup with LlamaStack files API (opendatahub-io#1210)
The S3 file cleanup was failing because boto3 is not a project dependency. Replace the direct S3 access with the LlamaStack client's files API (client.files.list/delete), which requires no additional dependencies.

- Remove _cleanup_s3_files helper and its boto3 dependency
- Remove S3 credential parameters from distribution fixtures
- Add _cleanup_files using the LlamaStack files API
- Snapshot existing file IDs before tests to only delete files created during the current test execution

Signed-off-by: Ignas Baranauskas <ibaranau@redhat.com>
Co-authored-by: Jorge <jgarciao@users.noreply.github.com>
1 parent 7adb482 commit 20c0aad

1 file changed

Lines changed: 29 additions & 104 deletions

File tree

tests/llama_stack/conftest.py

Lines changed: 29 additions & 104 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import pytest
77
from _pytest.fixtures import FixtureRequest
88
from kubernetes.dynamic import DynamicClient
9-
from llama_stack_client import LlamaStackClient
9+
from llama_stack_client import APIError, LlamaStackClient
1010
from llama_stack_client.types.vector_store import VectorStore
1111
from ocp_resources.data_science_cluster import DataScienceCluster
1212
from ocp_resources.deployment import Deployment
@@ -69,59 +69,6 @@
6969
distribution_name = generate_random_name(prefix="llama-stack-distribution")
7070

7171

72-
def _cleanup_s3_files(
73-
bucket_name: str,
74-
endpoint_url: str,
75-
region: str,
76-
access_key_id: str,
77-
secret_access_key: str,
78-
) -> None:
79-
"""
80-
Clean up files from S3 bucket that were uploaded during tests.
81-
82-
Args:
83-
bucket_name: S3 bucket name
84-
endpoint_url: S3 endpoint URL
85-
region: S3 region
86-
access_key_id: AWS access key ID
87-
secret_access_key: AWS secret access key
88-
"""
89-
90-
try:
91-
import boto3
92-
from botocore.exceptions import ClientError
93-
94-
s3_client = boto3.client(
95-
service_name="s3",
96-
endpoint_url=endpoint_url,
97-
aws_access_key_id=access_key_id,
98-
aws_secret_access_key=secret_access_key,
99-
region_name=region,
100-
)
101-
102-
response = s3_client.list_objects_v2(Bucket=bucket_name)
103-
104-
if "Contents" not in response:
105-
LOGGER.info("No files found to clean up from S3")
106-
return
107-
108-
# We only want to delete files that start with "file-"
109-
for obj in response["Contents"]:
110-
key = obj["Key"]
111-
if key.startswith("file-"):
112-
s3_client.delete_object(Bucket=bucket_name, Key=key)
113-
LOGGER.debug(f"Deleted file from S3: {key}")
114-
115-
response = s3_client.list_objects_v2(Bucket=bucket_name)
116-
117-
if "Contents" not in response:
118-
LOGGER.info("No files found to clean up from S3")
119-
return
120-
121-
except ClientError as e:
122-
LOGGER.warning(f"Failed to clean up S3 files: {e}")
123-
124-
12572
@pytest.fixture(scope="class")
12673
def enabled_llama_stack_operator(dsc_resource: DataScienceCluster) -> Generator[DataScienceCluster, Any, Any]:
12774
with update_components_in_dsc(
@@ -380,12 +327,7 @@ def unprivileged_llama_stack_distribution(
380327
unprivileged_model_namespace: Namespace,
381328
enabled_llama_stack_operator: DataScienceCluster,
382329
request: FixtureRequest,
383-
llama_stack_server_config: Dict[str, Any],
384-
ci_s3_bucket_name: str,
385-
ci_s3_bucket_endpoint: str,
386-
ci_s3_bucket_region: str,
387-
aws_access_key_id: str,
388-
aws_secret_access_key: str,
330+
llama_stack_server_config: dict[str, Any],
389331
unprivileged_llama_stack_distribution_secret: Secret,
390332
unprivileged_postgres_deployment: Deployment,
391333
unprivileged_postgres_service: Service,
@@ -402,38 +344,14 @@ def unprivileged_llama_stack_distribution(
402344
lls_dist.wait_for_status(status=LlamaStackDistribution.Status.READY, timeout=600)
403345
yield lls_dist
404346

405-
try:
406-
env_vars = llama_stack_server_config.get("containerSpec", {}).get("env", [])
407-
enable_s3 = any(env.get("name") == "ENABLE_S3" and env.get("value") == "s3" for env in env_vars)
408-
409-
if enable_s3:
410-
try:
411-
_cleanup_s3_files(
412-
bucket_name=ci_s3_bucket_name,
413-
endpoint_url=ci_s3_bucket_endpoint,
414-
region=ci_s3_bucket_region,
415-
access_key_id=aws_access_key_id,
416-
secret_access_key=aws_secret_access_key,
417-
)
418-
except Exception as e:
419-
LOGGER.warning(f"Failed to clean up S3 files: {e}")
420-
421-
except Exception as e:
422-
LOGGER.warning(f"Failed to clean up S3 files: {e}")
423-
424347

425348
@pytest.fixture(scope="class")
426349
def llama_stack_distribution(
427350
admin_client: DynamicClient,
428351
model_namespace: Namespace,
429352
enabled_llama_stack_operator: DataScienceCluster,
430353
request: FixtureRequest,
431-
llama_stack_server_config: Dict[str, Any],
432-
ci_s3_bucket_name: str,
433-
ci_s3_bucket_endpoint: str,
434-
ci_s3_bucket_region: str,
435-
aws_access_key_id: str,
436-
aws_secret_access_key: str,
354+
llama_stack_server_config: dict[str, Any],
437355
llama_stack_distribution_secret: Secret,
438356
postgres_deployment: Deployment,
439357
postgres_service: Service,
@@ -449,25 +367,6 @@ def llama_stack_distribution(
449367
lls_dist.wait_for_status(status=LlamaStackDistribution.Status.READY, timeout=600)
450368
yield lls_dist
451369

452-
try:
453-
env_vars = llama_stack_server_config.get("containerSpec", {}).get("env", [])
454-
enable_s3 = any(env.get("name") == "ENABLE_S3" and env.get("value") == "s3" for env in env_vars)
455-
456-
if enable_s3:
457-
try:
458-
_cleanup_s3_files(
459-
bucket_name=ci_s3_bucket_name,
460-
endpoint_url=ci_s3_bucket_endpoint,
461-
region=ci_s3_bucket_region,
462-
access_key_id=aws_access_key_id,
463-
secret_access_key=aws_secret_access_key,
464-
)
465-
except Exception as e:
466-
LOGGER.warning(f"Failed to clean up S3 files: {e}")
467-
468-
except Exception as e:
469-
LOGGER.warning(f"Failed to clean up S3 files: {e}")
470-
471370

472371
def _get_llama_stack_distribution_deployment(
473372
client: DynamicClient,
@@ -636,11 +535,37 @@ def _create_llama_stack_client(
636535
http_client=http_client,
637536
)
638537
wait_for_llama_stack_client_ready(client=client)
538+
existing_file_ids = {f.id for f in client.files.list().data}
539+
639540
yield client
541+
542+
_cleanup_files(client=client, existing_file_ids=existing_file_ids)
640543
finally:
641544
http_client.close()
642545

643546

547+
def _cleanup_files(client: LlamaStackClient, existing_file_ids: set[str]) -> None:
548+
"""Delete files created during test execution via the LlamaStack files API.
549+
550+
Only deletes files whose IDs were not present before the test ran,
551+
avoiding interference with other test sessions.
552+
553+
Args:
554+
client: The LlamaStackClient used during the test
555+
existing_file_ids: File IDs that existed before the test started
556+
"""
557+
try:
558+
for file in client.files.list().data:
559+
if file.id not in existing_file_ids:
560+
try:
561+
client.files.delete(file_id=file.id)
562+
LOGGER.debug(f"Deleted file: {file.id}")
563+
except APIError as e:
564+
LOGGER.warning(f"Failed to delete file {file.id}: {e}")
565+
except APIError as e:
566+
LOGGER.warning(f"Failed to clean up files: {e}")
567+
568+
644569
@pytest.fixture(scope="class")
645570
def unprivileged_llama_stack_client(
646571
unprivileged_llama_stack_test_route: Route,

0 commit comments

Comments (0)