
Commit 2ba3ce8

Abhinavexists and mdrxy authored
fix(openai): make GPT-5 temperature validation case-insensitive (#34012)
Fixed a bug where GPT-5 temperature validation was case-sensitive, causing issues when users specified Azure deployment names or model names in uppercase (e.g., `"GPT-5-2025-01-01"`, `"GPT-5-NANO"`). The validation now correctly handles model names regardless of case.

Changes made:
- Updated `validate_temperature()` method in `BaseChatOpenAI` to perform case-insensitive model name comparisons
- Updated `_get_encoding_model()` method to use case-insensitive checks for tiktoken encoder selection
- Added comprehensive unit tests to verify case-insensitive behavior with various case combinations

**Issue:** Fixes #34003

**Dependencies:** None

**Test Coverage:**
- All existing tests pass
- New test `test_gpt_5_temperature_case_insensitive` covers uppercase, lowercase, and mixed-case model names
- Tests verify both non-chat GPT-5 models (temperature removed) and chat models (temperature preserved)
- Lint and format checks pass (`make lint`, `make format`)

---------

Co-authored-by: Mason Daugherty <[email protected]>
1 parent 4e4e5d7 commit 2ba3ce8
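
For illustration, here is a minimal standalone sketch of the case-insensitive rule this commit applies. The `strip_unsupported_temperature` helper and its dict-based parameters are hypothetical, not part of `BaseChatOpenAI`; the real logic lives in `validate_temperature()`.

```python
# Hypothetical helper sketching the case-insensitive GPT-5 temperature rule.
from typing import Any


def strip_unsupported_temperature(params: dict[str, Any]) -> dict[str, Any]:
    """Drop ``temperature`` for non-chat GPT-5 models, regardless of case."""
    model = params.get("model_name") or params.get("model") or ""
    model_lower = model.lower()  # normalizes e.g. "GPT-5-NANO", "Gpt-5-Turbo"

    if model_lower.startswith("gpt-5") and "chat" not in model_lower:
        temperature = params.get("temperature")
        if temperature is not None and temperature != 1:
            # Non-chat GPT-5 models only accept the default temperature=1.
            params.pop("temperature")
    return params


# Uppercase Azure deployment-style names are now treated the same as lowercase:
assert "temperature" not in strip_unsupported_temperature(
    {"model": "GPT-5-NANO", "temperature": 0.5}
)
# gpt-5-chat variants keep a user-supplied temperature:
assert strip_unsupported_temperature(
    {"model": "Gpt-5-Chat", "temperature": 0.7}
)["temperature"] == 0.7
```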

File tree

  • libs/partners/openai

2 files changed: +36, -7 lines changed

libs/partners/openai/langchain_openai/chat_models/base.py

Lines changed: 6 additions & 7 deletions
```diff
@@ -826,14 +826,15 @@ def validate_temperature(cls, values: dict[str, Any]) -> Any:
         (Defaults to 1)
         """
         model = values.get("model_name") or values.get("model") or ""
+        model_lower = model.lower()
 
         # For o1 models, set temperature=1 if not provided
-        if model.startswith("o1") and "temperature" not in values:
+        if model_lower.startswith("o1") and "temperature" not in values:
             values["temperature"] = 1
 
         # For gpt-5 models, handle temperature restrictions
         # Note that gpt-5-chat models do support temperature
-        if model.startswith("gpt-5") and "chat" not in model:
+        if model_lower.startswith("gpt-5") and "chat" not in model_lower:
             temperature = values.get("temperature")
             if temperature is not None and temperature != 1:
                 # For gpt-5 (non-chat), only temperature=1 is supported
@@ -1668,15 +1669,13 @@ def _get_encoding_model(self) -> tuple[str, tiktoken.Encoding]:
             model = self.tiktoken_model_name
         else:
             model = self.model_name
+
         try:
             encoding = tiktoken.encoding_for_model(model)
         except KeyError:
+            model_lower = model.lower()
             encoder = "cl100k_base"
-            if (
-                self.model_name.startswith("gpt-4o")
-                or self.model_name.startswith("gpt-4.1")
-                or self.model_name.startswith("gpt-5")
-            ):
+            if model_lower.startswith(("gpt-4o", "gpt-4.1", "gpt-5")):
                 encoder = "o200k_base"
             encoding = tiktoken.get_encoding(encoder)
         return model, encoding
```
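
As a rough standalone sketch of the encoder fallback in the second hunk above (assuming `tiktoken` is installed; the `pick_encoding` function name and example model strings are illustrative, not part of the library):

```python
# Sketch of the case-insensitive tiktoken encoder fallback shown in the diff
# above (standalone; not the BaseChatOpenAI method itself).
import tiktoken


def pick_encoding(model: str) -> tiktoken.Encoding:
    try:
        # Known model names resolve directly via tiktoken's own mapping.
        return tiktoken.encoding_for_model(model)
    except KeyError:
        # Unrecognized spellings (e.g. uppercase Azure deployment names) fall
        # back to a default, using o200k_base for gpt-4o / gpt-4.1 / gpt-5.
        model_lower = model.lower()
        encoder = "cl100k_base"
        if model_lower.startswith(("gpt-4o", "gpt-4.1", "gpt-5")):
            encoder = "o200k_base"
        return tiktoken.get_encoding(encoder)


print(pick_encoding("GPT-5-NANO").name)       # expected: o200k_base
print(pick_encoding("MY-CUSTOM-MODEL").name)  # expected: cl100k_base
```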

libs/partners/openai/tests/unit_tests/chat_models/test_base.py

Lines changed: 30 additions & 0 deletions
```diff
@@ -3026,3 +3026,33 @@ def test_gpt_5_temperature(use_responses_api: bool) -> None:
     messages = [HumanMessage(content="Hello")]
     payload = llm._get_request_payload(messages)
     assert payload["temperature"] == 0.5  # gpt-5-chat is exception
+
+
+@pytest.mark.parametrize("use_responses_api", [False, True])
+@pytest.mark.parametrize(
+    "model_name",
+    [
+        "GPT-5-NANO",
+        "GPT-5-2025-01-01",
+        "Gpt-5-Turbo",
+        "gPt-5-mini",
+    ],
+)
+def test_gpt_5_temperature_case_insensitive(
+    use_responses_api: bool, model_name: str
+) -> None:
+    llm = ChatOpenAI(
+        model=model_name, temperature=0.5, use_responses_api=use_responses_api
+    )
+
+    messages = [HumanMessage(content="Hello")]
+    payload = llm._get_request_payload(messages)
+    assert "temperature" not in payload
+
+    for chat_model in ["GPT-5-CHAT", "Gpt-5-Chat", "gpt-5-chat"]:
+        llm = ChatOpenAI(
+            model=chat_model, temperature=0.7, use_responses_api=use_responses_api
+        )
+        messages = [HumanMessage(content="Hello")]
+        payload = llm._get_request_payload(messages)
+        assert payload["temperature"] == 0.7
```
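
For context, the behavior the new test pins down can be observed directly. This is a sketch only: `_get_request_payload` is a private helper that builds the outgoing request without sending it, and the dummy `api_key` is assumed just so the client can be constructed outside a configured environment.

```python
# Sketch of the behavior exercised by test_gpt_5_temperature_case_insensitive.
from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="GPT-5-NANO", temperature=0.5, api_key="sk-test")
payload = llm._get_request_payload([HumanMessage(content="Hello")])
assert "temperature" not in payload  # dropped for non-chat GPT-5, any casing

chat_llm = ChatOpenAI(model="GPT-5-CHAT", temperature=0.7, api_key="sk-test")
chat_payload = chat_llm._get_request_payload([HumanMessage(content="Hello")])
assert chat_payload["temperature"] == 0.7  # gpt-5-chat keeps temperature
```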
