# Shared distribution name; the random suffix works around bug RHAIENG-999 / RHAIENG-1139
# (duplicate-name collisions between test classes reusing the same distribution).
distribution_name = generate_random_name(prefix="llama-stack-distribution")
3535
3636
def _cleanup_s3_files(
    bucket_name: str,
    endpoint_url: str,
    region: str,
    access_key_id: str,
    secret_access_key: str,
) -> None:
    """
    Best-effort removal of test-uploaded files from an S3 bucket.

    Only objects whose key starts with "file-" (the prefix used for files
    uploaded during tests) are deleted; all other objects are left intact.
    All failures are logged as warnings and never raised, so fixture
    teardown cannot fail because of cleanup problems.

    Args:
        bucket_name: S3 bucket name
        endpoint_url: S3 endpoint URL
        region: S3 region
        access_key_id: AWS access key ID
        secret_access_key: AWS secret access key
    """
    # Import lazily so the module stays importable when boto3 is absent
    # (e.g. runs that never enable the S3 files provider). Import errors are
    # handled separately: previously `except ClientError` would raise a
    # NameError if the boto3 import itself failed, since ClientError was
    # bound inside the same try block.
    try:
        import boto3
        from botocore.exceptions import ClientError
    except ImportError as e:
        LOGGER.warning(f"boto3 is not available, skipping S3 cleanup: {e}")
        return

    try:
        s3_client = boto3.client(
            service_name="s3",
            endpoint_url=endpoint_url,
            aws_access_key_id=access_key_id,
            aws_secret_access_key=secret_access_key,
            region_name=region,
        )

        # Paginate: list_objects_v2 returns at most 1000 keys per call, so a
        # single call could silently miss objects in a well-used CI bucket.
        paginator = s3_client.get_paginator("list_objects_v2")
        deleted_count = 0
        for page in paginator.paginate(Bucket=bucket_name):
            # We only want to delete files that start with "file-"
            for obj in page.get("Contents", []):
                key = obj["Key"]
                if key.startswith("file-"):
                    s3_client.delete_object(Bucket=bucket_name, Key=key)
                    LOGGER.debug(f"Deleted file from S3: {key}")
                    deleted_count += 1

        if deleted_count == 0:
            LOGGER.info("No files found to clean up from S3")
        else:
            LOGGER.info(f"Cleaned up {deleted_count} file(s) from S3")

    except ClientError as e:
        LOGGER.warning(f"Failed to clean up S3 files: {e}")
89+
3790@pytest .fixture (scope = "class" )
3891def enabled_llama_stack_operator (dsc_resource : DataScienceCluster ) -> Generator [DataScienceCluster , Any , Any ]:
3992 with update_components_in_dsc (
@@ -65,6 +118,8 @@ def llama_stack_server_config(
65118 request: Pytest fixture request object containing test parameters
66119 vector_io_provider_deployment_config_factory: Factory function to deploy vector I/O providers
67120 and return their configuration environment variables
121+ files_provider_config_factory: Factory function to configure files storage providers
122+ and return their configuration environment variables
68123
69124 Returns:
70125 Dict containing server configuration with the following structure:
@@ -79,6 +134,8 @@ def llama_stack_server_config(
79134 - VLLM_URL: URL for VLLM service endpoint
80135 - VLLM_TLS_VERIFY: TLS verification setting (defaults to "false")
81136 - FMS_ORCHESTRATOR_URL: FMS orchestrator service URL
137+ - Files provider specific variables (configured via factory):
138+ * For "s3": S3_BUCKET_NAME, S3_ENDPOINT_URL, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, etc.
82139 - Vector I/O provider specific variables (deployed via factory):
83140 * For "milvus": MILVUS_DB_PATH
84141 * For "milvus-remote": MILVUS_ENDPOINT, MILVUS_TOKEN, MILVUS_CONSISTENCY_LEVEL
@@ -89,6 +146,7 @@ def llama_stack_server_config(
89146 - vllm_api_token: Override for VLLM_API_TOKEN environment variable
90147 - vllm_url_fixture: Fixture name to get VLLM URL from
91148 - fms_orchestrator_url_fixture: Fixture name to get FMS orchestrator URL from
149+ - files_provider: Files storage provider type ("local" or "s3", defaults to "local")
92150 - vector_io_provider: Vector I/O provider type ("milvus" or "milvus-remote")
93151 - llama_stack_storage_size: Storage size for the deployment
94152 - embedding_model: Embedding model identifier for inference
@@ -225,6 +283,7 @@ def unprivileged_llama_stack_distribution(
225283 unprivileged_client : DynamicClient ,
226284 unprivileged_model_namespace : Namespace ,
227285 enabled_llama_stack_operator : DataScienceCluster ,
286+ request : FixtureRequest ,
228287 llama_stack_server_config : Dict [str , Any ],
229288) -> Generator [LlamaStackDistribution , None , None ]:
230289 # Distribution name needs a random substring due to bug RHAIENG-999 / RHAIENG-1139
@@ -239,12 +298,32 @@ def unprivileged_llama_stack_distribution(
239298 lls_dist .wait_for_status (status = LlamaStackDistribution .Status .READY , timeout = 600 )
240299 yield lls_dist
241300
301+ try :
302+ env_vars = llama_stack_server_config .get ("containerSpec" , {}).get ("env" , [])
303+ enable_s3 = any (env .get ("name" ) == "ENABLE_S3" and env .get ("value" ) == "s3" for env in env_vars )
304+
305+ if enable_s3 :
306+ try :
307+ _cleanup_s3_files (
308+ bucket_name = request .getfixturevalue (argname = "ci_s3_bucket_name" ),
309+ endpoint_url = request .getfixturevalue (argname = "ci_s3_bucket_endpoint" ),
310+ region = request .getfixturevalue (argname = "ci_s3_bucket_region" ),
311+ access_key_id = request .getfixturevalue (argname = "aws_access_key_id" ),
312+ secret_access_key = request .getfixturevalue (argname = "aws_secret_access_key" ),
313+ )
314+ except Exception as e :
315+ LOGGER .warning (f"Failed to clean up S3 files: { e } " )
316+
317+ except Exception as e :
318+ LOGGER .warning (f"Failed to clean up S3 files: { e } " )
319+
242320
243321@pytest .fixture (scope = "class" )
244322def llama_stack_distribution (
245323 admin_client : DynamicClient ,
246324 model_namespace : Namespace ,
247325 enabled_llama_stack_operator : DataScienceCluster ,
326+ request : FixtureRequest ,
248327 llama_stack_server_config : Dict [str , Any ],
249328) -> Generator [LlamaStackDistribution , None , None ]:
250329 # Distribution name needs a random substring due to bug RHAIENG-999 / RHAIENG-1139
@@ -258,6 +337,25 @@ def llama_stack_distribution(
258337 lls_dist .wait_for_status (status = LlamaStackDistribution .Status .READY , timeout = 600 )
259338 yield lls_dist
260339
340+ try :
341+ env_vars = llama_stack_server_config .get ("containerSpec" , {}).get ("env" , [])
342+ enable_s3 = any (env .get ("name" ) == "ENABLE_S3" and env .get ("value" ) == "s3" for env in env_vars )
343+
344+ if enable_s3 :
345+ try :
346+ _cleanup_s3_files (
347+ bucket_name = request .getfixturevalue (argname = "ci_s3_bucket_name" ),
348+ endpoint_url = request .getfixturevalue (argname = "ci_s3_bucket_endpoint" ),
349+ region = request .getfixturevalue (argname = "ci_s3_bucket_region" ),
350+ access_key_id = request .getfixturevalue (argname = "aws_access_key_id" ),
351+ secret_access_key = request .getfixturevalue (argname = "aws_secret_access_key" ),
352+ )
353+ except Exception as e :
354+ LOGGER .warning (f"Failed to clean up S3 files: { e } " )
355+
356+ except Exception as e :
357+ LOGGER .warning (f"Failed to clean up S3 files: { e } " )
358+
261359
262360def _get_llama_stack_distribution_deployment (
263361 client : DynamicClient ,
0 commit comments