Skip to content

Commit f5c1332

Browse files
committed
chore(core): drop gpt-3.5-turbo from docstrings
1 parent dd63731 commit f5c1332

File tree

3 files changed

+6
-6
lines changed

3 files changed

+6
-6
lines changed

libs/core/langchain_core/caches.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -166,14 +166,14 @@ class InMemoryCache(BaseCache):
166166
# Update cache
167167
cache.update(
168168
prompt="What is the capital of France?",
169-
llm_string="model='gpt-3.5-turbo', temperature=0.1",
169+
llm_string="model='gpt-5.4-mini'",
170170
return_val=[Generation(text="Paris")],
171171
)
172172
173173
# Lookup cache
174174
result = cache.lookup(
175175
prompt="What is the capital of France?",
176-
llm_string="model='gpt-3.5-turbo', temperature=0.1",
176+
llm_string="model='gpt-5.4-mini'",
177177
)
178178
# result is [Generation(text="Paris")]
179179
```

libs/core/langchain_core/runnables/configurable.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -499,7 +499,7 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
499499
# When invoking the created RunnableSequence, you can pass in the
500500
# value for your ConfigurableField's id which in this case will either be
501501
# `joke` or `poem`.
502-
chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
502+
chain = prompt | ChatOpenAI(model="gpt-5.4-mini")
503503
504504
# The `with_config` method brings in the desired Prompt Runnable in your
505505
# Runnable Sequence.
@@ -525,7 +525,7 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
525525
"poem": PromptTemplate.from_template("Write a short poem about {topic}")
526526
},
527527
)
528-
chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
528+
chain = prompt | ChatOpenAI(model="gpt-5.4-mini")
529529
chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})
530530
```
531531
"""

libs/core/langchain_core/runnables/fallbacks.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,8 +54,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
5454
from langchain_core.chat_models.openai import ChatOpenAI
5555
from langchain_core.chat_models.anthropic import ChatAnthropic
5656
57-
model = ChatAnthropic(model="claude-3-haiku-20240307").with_fallbacks(
58-
[ChatOpenAI(model="gpt-3.5-turbo-0125")]
57+
model = ChatAnthropic(model="claude-sonnet-4-6").with_fallbacks(
58+
[ChatOpenAI(model="gpt-5.4-mini")]
5959
)
6060
# Will usually use ChatAnthropic, but fallback to ChatOpenAI
6161
# if ChatAnthropic fails.

0 commit comments

Comments
 (0)