|
def mock_llm() -> Mock:
    """Return a mocked HuggingFaceEndpoint pre-loaded with generation params.

    The values mirror a realistic endpoint configuration so the inheritance
    tests below can assert on each one individually.
    """
    endpoint = Mock(spec=HuggingFaceEndpoint)
    endpoint.inference_server_url = "test endpoint url"
    config = {
        "temperature": 0.7,
        "max_new_tokens": 512,
        "top_p": 0.9,
        "seed": 42,
        "streaming": True,
        "repetition_penalty": 1.1,
        "stop_sequences": ["</s>", "<|end|>"],
        "model_kwargs": {"do_sample": True, "top_k": 50},
        "server_kwargs": {"timeout": 120},
        "repo_id": "test/model",
        "model": "test/model",
    }
    for attr, value in config.items():
        setattr(endpoint, attr, value)
    return endpoint
27 | 38 |
|
28 | 39 |
|
@@ -209,3 +220,108 @@ def test_bind_tools(chat_hugging_face: Any) -> None: |
209 | 220 | _, kwargs = mock_super_bind.call_args |
210 | 221 | assert kwargs["tools"] == tools |
211 | 222 | assert kwargs["tool_choice"] == "auto" |
| 223 | + |
| 224 | + |
def test_property_inheritance_integration(chat_hugging_face: Any) -> None:
    """Test that ChatHuggingFace inherits params from LLM object."""
    # Numeric parameters carried over from the wrapped LLM.
    expected_values = {"temperature": 0.7, "max_tokens": 512, "top_p": 0.9}
    for attr, expected in expected_values.items():
        assert getattr(chat_hugging_face, attr, None) == expected
    # The streaming flag is inherited as-is.
    assert getattr(chat_hugging_face, "streaming", None) is True
| 231 | + |
| 232 | + |
def test_default_params_includes_inherited_values(chat_hugging_face: Any) -> None:
    """Test that _default_params includes inherited max_tokens from max_new_tokens."""
    defaults = chat_hugging_face._default_params
    # The LLM's max_new_tokens surfaces as max_tokens on the chat model.
    assert defaults["max_tokens"] == 512
    # temperature is carried over unchanged.
    assert defaults["temperature"] == 0.7
    # The LLM's streaming flag becomes the OpenAI-style ``stream`` key.
    assert defaults["stream"] is True
| 240 | + |
def test_create_message_dicts_includes_inherited_params(chat_hugging_face: Any) -> None:
    """Test that _create_message_dicts includes inherited parameters in API call."""
    history = [HumanMessage(content="test message")]
    dicts, call_params = chat_hugging_face._create_message_dicts(history, None)

    # Inherited generation parameters must reach the API payload.
    for key, expected in (("max_tokens", 512), ("temperature", 0.7)):
        assert call_params[key] == expected
    assert call_params["stream"] is True

    # The single human message converts to one OpenAI-style dict.
    assert len(dicts) == 1
    converted = dicts[0]
    assert converted["role"] == "user"
    assert converted["content"] == "test message"
| 255 | + |
| 256 | + |
def test_model_kwargs_inheritance(mock_llm: Any) -> None:
    """Test that model_kwargs are inherited when not explicitly set."""
    # Stub model-id resolution so construction needs no network access.
    target = (
        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
    )
    with patch(target):
        wrapper = ChatHuggingFace(llm=mock_llm)
        assert wrapper.model_kwargs == {"do_sample": True, "top_k": 50}
| 264 | + |
| 265 | + |
def test_huggingface_endpoint_specific_inheritance(mock_llm: Any) -> None:
    """Test HuggingFaceEndpoint specific parameter inheritance."""
    # Stub model-id resolution and force the endpoint-type check to succeed.
    resolve_patch = patch(
        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
    )
    endpoint_check_patch = patch(
        "langchain_huggingface.chat_models.huggingface._is_huggingface_endpoint",
        return_value=True,
    )
    with resolve_patch, endpoint_check_patch:
        wrapper = ChatHuggingFace(llm=mock_llm)
        # The endpoint's repetition_penalty maps onto frequency_penalty.
        assert getattr(wrapper, "frequency_penalty", None) == 1.1
| 281 | + |
| 282 | + |
def test_parameter_precedence_explicit_over_inherited(mock_llm: Any) -> None:
    """Test that explicitly set parameters take precedence over inherited ones."""
    with patch(
        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
    ):
        # Pass explicit values that conflict with the LLM's own settings.
        chat_model = ChatHuggingFace(llm=mock_llm, max_tokens=256, temperature=0.5)
        # Constructor arguments win over the inherited 512 / 0.7.
        assert chat_model.max_tokens == 256
        assert chat_model.temperature == 0.5
| 292 | + |
| 293 | + |
def test_inheritance_with_no_llm_properties(mock_llm: Any) -> None:
    """Test inheritance when LLM doesn't have expected properties."""
    # Deleting an attribute on a Mock makes later access raise
    # AttributeError, simulating an LLM that lacks these fields.
    del mock_llm.temperature
    del mock_llm.top_p

    with patch(
        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
    ):
        chat_model = ChatHuggingFace(llm=mock_llm)
        # max_new_tokens is still present, so max_tokens is inherited.
        assert chat_model.max_tokens == 512
        # The deleted attributes fall back to their defaults.
        assert getattr(chat_model, "temperature", None) is None
        assert getattr(chat_model, "top_p", None) is None
| 309 | + |
| 310 | + |
def test_inheritance_with_empty_llm() -> None:
    """Test that inheritance handles LLM with no relevant attributes gracefully."""
    with patch(
        "langchain_huggingface.chat_models.huggingface.ChatHuggingFace._resolve_model_id"
    ):
        # Minimal mock: enough to pass validation, but none of the
        # generation attributes the inheritance logic looks for.
        bare_llm = Mock(spec=HuggingFaceEndpoint)
        bare_llm.repo_id = "test/model"
        bare_llm.model = "test/model"

        chat_model = ChatHuggingFace(llm=bare_llm)
        # With nothing to inherit, the chat model's defaults stay untouched.
        assert chat_model.max_tokens is None
        assert chat_model.temperature is None
0 commit comments