11import pytest
2- import os
3- from tests .llama_stack .constants import LlamaStackProviders
42from llama_stack_client import LlamaStackClient , NotFoundError
5- from llama_stack_client .types import Model
3+ from llama_stack_client .types import Model , ModelRetrieveResponse
64
75
86@pytest .mark .parametrize (
@@ -39,24 +37,25 @@ def test_models_list(
3937 assert isinstance (models , list ), "models.list() should return a list"
4038 assert len (models ) > 0 , "At least one model should be available"
4139
42- llm_model = next ((model for model in models if model .api_model_type == "llm" ), None )
40+ llm_model = next ((model for model in models if model .custom_metadata [ "model_type" ] == "llm" ), None )
4341 assert llm_model is not None , "No LLM model found in available models"
4442 assert isinstance (llm_model , Model ), "LLM model should be a Model instance"
45- assert llm_model .identifier is not None , "No identifier set in LLM model"
46- assert len (llm_model .identifier ) > 0 , "LLM model identifier should not be empty"
43+ assert llm_model .id is not None , "No identifier set in LLM model"
44+ assert len (llm_model .id ) > 0 , "LLM model identifier should not be empty"
4745
48- embedding_model = next ((model for model in models if model .api_model_type == "embedding" ), None )
46+ embedding_model = next ((model for model in models if model .custom_metadata [ "model_type" ] == "embedding" ), None )
4947 assert embedding_model is not None , "No embedding model found in available models"
5048 assert isinstance (embedding_model , Model ), "Embedding model should be a Model instance"
51- assert embedding_model .identifier is not None , "No identifier set in embedding model"
52- assert len (embedding_model .identifier ) > 0 , "Embedding model identifier should not be empty"
53- assert "embedding_dimension" in embedding_model .metadata , "embedding_dimension not found in model metadata"
54- embedding_dimension = embedding_model .metadata ["embedding_dimension" ]
49+ assert embedding_model .id is not None , "No identifier set in embedding model"
50+ assert len (embedding_model .id ) > 0 , "Embedding model identifier should not be empty"
51+ assert "embedding_dimension" in embedding_model .custom_metadata , (
52+ "embedding_dimension not found in custom_metadata"
53+ )
54+ embedding_dimension = embedding_model .custom_metadata ["embedding_dimension" ]
5555 assert embedding_dimension is not None , "No embedding_dimension set in embedding model"
56- # API returns dimension as float (e.g., 768.0) though conceptually an integer
57- assert isinstance (embedding_dimension , float ), "embedding_dimension should be a float "
56+ # API returns dimension as integer (e.g., 768)
57+ assert isinstance (embedding_dimension , int ), "embedding_dimension should be an integer "
5858 assert embedding_dimension > 0 , "embedding_dimension should be positive"
59- assert embedding_dimension .is_integer (), "embedding_dimension should be a whole number"
6059
6160 def test_models_list_structure (
6261 self ,
@@ -71,15 +70,14 @@ def test_models_list_structure(
7170 assert models is not None , "No models returned from LlamaStackClient"
7271
7372 for model in models :
74- assert hasattr (model , "identifier" ), "Model should have identifier attribute"
75- assert hasattr (model , "api_model_type" ), "Model should have api_model_type attribute"
76- assert model .identifier is not None , f"Model { model } should have a non-None identifier"
77- assert model .api_model_type in ["llm" , "embedding" ], (
78- f"Model { model .identifier } should have api_model_type 'llm' or 'embedding', "
79- f"got '{ model .api_model_type } '"
73+ assert hasattr (model , "id" ), "Model should have identifier attribute"
74+ assert hasattr (model , "custom_metadata" ), "Model should have custom_metadata attribute"
75+ assert isinstance (model .custom_metadata , dict ), "Model custom_metadata should be a dictionary"
76+ assert model .id is not None , f"Model { model } should have a non-None identifier"
77+ assert model .custom_metadata ["model_type" ] in ["llm" , "embedding" ], (
78+ f"Model { model .id } should have custom_metadata[\" model_type\" ] 'llm' or 'embedding', "
79+ f"got '{ model .custom_metadata ['model_type' ]} '"
8080 )
81- assert hasattr (model , "metadata" ), "Model should have metadata attribute"
82- assert isinstance (model .metadata , dict ), "Model metadata should be a dictionary"
8381
8482 def test_models_retrieve_existing (
8583 self ,
@@ -94,17 +92,16 @@ def test_models_retrieve_existing(
9492 assert len (models ) > 0 , "At least one model should be available"
9593
9694 test_model = models [0 ]
97- retrieved_model = unprivileged_llama_stack_client .models .retrieve (model_id = test_model .identifier )
95+ retrieved_model = unprivileged_llama_stack_client .models .retrieve (model_id = test_model .id )
9896
99- assert retrieved_model is not None , f"Model { test_model .identifier } should be retrievable"
100- assert isinstance (retrieved_model , Model ), "Retrieved model should be a Model instance"
101- assert retrieved_model .identifier == test_model .identifier , (
102- f"Retrieved model identifier '{ retrieved_model .identifier } ' "
103- f"should match requested '{ test_model .identifier } '"
97+ assert retrieved_model is not None , f"Model { test_model .id } should be retrievable"
98+ assert isinstance (retrieved_model , ModelRetrieveResponse ), "Retrieved model should be a ModelRetrieveResponse"
99+ assert retrieved_model .identifier == test_model .id , (
100+ f"Retrieved model identifier '{ retrieved_model .identifier } ' should match requested '{ test_model .id } '"
104101 )
105- assert retrieved_model .api_model_type == test_model .api_model_type , (
102+ assert retrieved_model .api_model_type == test_model .custom_metadata [ "model_type" ] , (
106103 f"Retrieved model type '{ retrieved_model .api_model_type } ' "
107- f"should match original '{ test_model .api_model_type } '"
104+ f"should match original '{ test_model .custom_metadata [ 'model_type' ] } '"
108105 )
109106
110107 def test_models_retrieve_nonexistent (
@@ -120,75 +117,3 @@ def test_models_retrieve_nonexistent(
120117
121118 with pytest .raises (NotFoundError ):
122119 unprivileged_llama_stack_client .models .retrieve (model_id = nonexistent_model_id )
123-
124- def test_models_register (
125- self ,
126- unprivileged_llama_stack_client : LlamaStackClient ,
127- ) -> None :
128- """Test registering a new model.
129-
130- Verifies that models.register() successfully registers a new model
131- and it appears in the models list.
132- """
133- inference_model = os .getenv ("LLS_CORE_INFERENCE_MODEL" )
134- assert inference_model , "LLS_CORE_INFERENCE_MODEL environment variable must be set"
135- test_model_id = f"{ inference_model } -test-register"
136-
137- response = unprivileged_llama_stack_client .models .register (
138- model_id = test_model_id ,
139- model_type = "llm" ,
140- provider_id = LlamaStackProviders .Inference .VLLM_INFERENCE ,
141- )
142- assert response is not None , "Model registration should return a response"
143-
144- registered_model_id = f"{ LlamaStackProviders .Inference .VLLM_INFERENCE .value } /{ test_model_id } "
145- try :
146- models = unprivileged_llama_stack_client .models .list ()
147- registered_model_ids = [model .identifier for model in models ]
148- assert registered_model_id in registered_model_ids , (
149- f"Registered model { registered_model_id } should appear in models list"
150- )
151- finally :
152- unprivileged_llama_stack_client .models .unregister (model_id = registered_model_id )
153-
154- def test_models_register_retrieve_unregister (
155- self ,
156- unprivileged_llama_stack_client : LlamaStackClient ,
157- ) -> None :
158- """Test complete model lifecycle: register, retrieve, and unregister.
159-
160- Verifies the full workflow of registering a model, retrieving it,
161- verifying its properties, and then unregistering it.
162- """
163- inference_model = os .getenv ("LLS_CORE_INFERENCE_MODEL" )
164- assert inference_model , "LLS_CORE_INFERENCE_MODEL environment variable must be set"
165- test_model_id = f"{ inference_model } -test-lifecycle"
166-
167- response = unprivileged_llama_stack_client .models .register (
168- model_id = test_model_id ,
169- model_type = "llm" ,
170- provider_id = LlamaStackProviders .Inference .VLLM_INFERENCE ,
171- )
172- assert response is not None , "Model registration should return a response"
173-
174- registered_model_id = f"{ LlamaStackProviders .Inference .VLLM_INFERENCE .value } /{ test_model_id } "
175- try :
176- registered_model = unprivileged_llama_stack_client .models .retrieve (model_id = registered_model_id )
177- assert registered_model is not None , f"LLM { registered_model_id } not found using models.retrieve"
178- assert isinstance (registered_model , Model ), "Retrieved model should be a Model instance"
179- expected_id_suffix = f"/{ test_model_id } "
180- assert registered_model .identifier .endswith (expected_id_suffix ), (
181- f"Model identifier '{ registered_model .identifier } ' should end with '{ expected_id_suffix } '"
182- )
183- assert registered_model .api_model_type == "llm" , (
184- f"Registered model should have api_model_type 'llm', got '{ registered_model .api_model_type } '"
185- )
186- assert registered_model .provider_id == LlamaStackProviders .Inference .VLLM_INFERENCE .value , (
187- f"Registered model provider_id should be '{ LlamaStackProviders .Inference .VLLM_INFERENCE .value } ', "
188- f"got '{ registered_model .provider_id } '"
189- )
190- finally :
191- unprivileged_llama_stack_client .models .unregister (model_id = registered_model_id )
192-
193- with pytest .raises (NotFoundError ):
194- unprivileged_llama_stack_client .models .retrieve (model_id = registered_model_id )