Skip to content

Commit 4bb92e2

Browse files
committed
fix: use secretKeyRef for VLLM tokens and AWS credentials to prevent credential leaks in logs
Move VLLM_API_TOKEN and VLLM_EMBEDDING_API_TOKEN from plain env var values to K8s secretKeyRef, matching the existing pattern used for POSTGRES_PASSWORD. Add both tokens to LLAMA_STACK_DISTRIBUTION_SECRET_DATA and relocate the dict after all secret variables are defined. Add AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY to the llamastack-distribution-secret and reference them via secretKeyRef instead of passing plaintext values, consistent with the VLLM token handling.

Signed-off-by: Ignas Baranauskas <[email protected]>
1 parent cc43a76 commit 4bb92e2

File tree

2 files changed

+32
-13
lines changed

2 files changed

+32
-13
lines changed

tests/fixtures/files.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,11 +64,15 @@ def _factory(provider_name: str) -> list[dict[str, str]]:
6464
})
6565
env_vars.append({
6666
"name": "AWS_ACCESS_KEY_ID",
67-
"value": request.getfixturevalue(argname="aws_access_key_id"),
67+
"valueFrom": {
68+
"secretKeyRef": {"name": "llamastack-distribution-secret", "key": "aws-access-key-id"}
69+
},
6870
})
6971
env_vars.append({
7072
"name": "AWS_SECRET_ACCESS_KEY",
71-
"value": request.getfixturevalue(argname="aws_secret_access_key"),
73+
"valueFrom": {
74+
"secretKeyRef": {"name": "llamastack-distribution-secret", "key": "aws-secret-access-key"}
75+
},
7276
})
7377
env_vars.append({"name": "S3_AUTO_CREATE_BUCKET", "value": S3_AUTO_CREATE_BUCKET})
7478

tests/llama_stack/conftest.py

Lines changed: 26 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -48,11 +48,6 @@
4848
POSTGRESQL_USER = os.getenv("LLS_VECTOR_IO_POSTGRESQL_USER", "ps_user")
4949
POSTGRESQL_PASSWORD = os.getenv("LLS_VECTOR_IO_POSTGRESQL_PASSWORD", "ps_password")
5050

51-
LLAMA_STACK_DISTRIBUTION_SECRET_DATA = {
52-
"postgres-user": POSTGRESQL_USER,
53-
"postgres-password": POSTGRESQL_PASSWORD,
54-
}
55-
5651
LLS_CORE_INFERENCE_MODEL = os.getenv("LLS_CORE_INFERENCE_MODEL", "")
5752
LLS_CORE_VLLM_URL = os.getenv("LLS_CORE_VLLM_URL", "")
5853
LLS_CORE_VLLM_API_TOKEN = os.getenv("LLS_CORE_VLLM_API_TOKEN", "")
@@ -68,6 +63,18 @@
6863
LLS_CORE_VLLM_EMBEDDING_MAX_TOKENS = os.getenv("LLS_CORE_VLLM_EMBEDDING_MAX_TOKENS", "8192")
6964
LLS_CORE_VLLM_EMBEDDING_TLS_VERIFY = os.getenv("LLS_CORE_VLLM_EMBEDDING_TLS_VERIFY", "true")
7065

66+
LLS_CORE_AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID", "")
67+
LLS_CORE_AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY", "")
68+
69+
LLAMA_STACK_DISTRIBUTION_SECRET_DATA = {
70+
"postgres-user": POSTGRESQL_USER,
71+
"postgres-password": POSTGRESQL_PASSWORD,
72+
"vllm-api-token": LLS_CORE_VLLM_API_TOKEN,
73+
"vllm-embedding-api-token": LLS_CORE_VLLM_EMBEDDING_API_TOKEN,
74+
"aws-access-key-id": LLS_CORE_AWS_ACCESS_KEY_ID,
75+
"aws-secret-access-key": LLS_CORE_AWS_SECRET_ACCESS_KEY,
76+
}
77+
7178
IBM_EARNINGS_DOC_URL = "https://www.ibm.com/downloads/documents/us-en/1550f7eea8c0ded6"
7279

7380
UPGRADE_DISTRIBUTION_NAME = "llama-stack-distribution-upgrade"
@@ -171,11 +178,12 @@ def test_with_remote_milvus(llama_stack_server_config):
171178
inference_model = LLS_CORE_INFERENCE_MODEL
172179
env_vars.append({"name": "INFERENCE_MODEL", "value": inference_model})
173180

174-
if params.get("vllm_api_token"):
175-
vllm_api_token = str(params.get("vllm_api_token"))
176-
else:
177-
vllm_api_token = LLS_CORE_VLLM_API_TOKEN
178-
env_vars.append({"name": "VLLM_API_TOKEN", "value": vllm_api_token})
181+
env_vars.append(
182+
{
183+
"name": "VLLM_API_TOKEN",
184+
"valueFrom": {"secretKeyRef": {"name": "llamastack-distribution-secret", "key": "vllm-api-token"}},
185+
},
186+
)
179187

180188
if params.get("vllm_url_fixture"):
181189
vllm_url = str(request.getfixturevalue(argname=params.get("vllm_url_fixture")))
@@ -200,7 +208,14 @@ def test_with_remote_milvus(llama_stack_server_config):
200208
env_vars.append({"name": "EMBEDDING_MODEL", "value": LLS_CORE_EMBEDDING_MODEL})
201209
env_vars.append({"name": "EMBEDDING_PROVIDER_MODEL_ID", "value": LLS_CORE_EMBEDDING_PROVIDER_MODEL_ID})
202210
env_vars.append({"name": "VLLM_EMBEDDING_URL", "value": LLS_CORE_VLLM_EMBEDDING_URL})
203-
env_vars.append({"name": "VLLM_EMBEDDING_API_TOKEN", "value": LLS_CORE_VLLM_EMBEDDING_API_TOKEN})
211+
env_vars.append(
212+
{
213+
"name": "VLLM_EMBEDDING_API_TOKEN",
214+
"valueFrom": {
215+
"secretKeyRef": {"name": "llamastack-distribution-secret", "key": "vllm-embedding-api-token"}
216+
},
217+
},
218+
)
204219
env_vars.append({"name": "VLLM_EMBEDDING_MAX_TOKENS", "value": LLS_CORE_VLLM_EMBEDDING_MAX_TOKENS})
205220
env_vars.append({"name": "VLLM_EMBEDDING_TLS_VERIFY", "value": LLS_CORE_VLLM_EMBEDDING_TLS_VERIFY})
206221
elif embedding_provider == "sentence-transformers":

0 commit comments

Comments (0)