
Commit 5071da1

core: deprecate problematic dict() method

1 parent: 9c21f83

File tree

4 files changed: +78 -38 lines changed
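Taken together, the four files make the same change: the `dict()` method on chat models, LLMs, output parsers, and prompt templates now delegates to a new `asdict()` and emits a deprecation warning until its removal in 2.0. A minimal migration sketch, assuming a langchain-core build that includes this commit; `FakeListLLM` is the library's fake test model, used here only as a stand-in, and the warning text is assumed to mention the `asdict` alternative:

```python
# Minimal migration sketch, assuming a langchain-core build that includes
# this commit. FakeListLLM is the in-repo fake test model, used here only
# as a stand-in for a real LLM.
import warnings

from langchain_core.language_models import FakeListLLM

llm = FakeListLLM(responses=["pong"])

# New spelling: asdict() returns the identifying params plus a "_type" key.
params = llm.asdict()
assert params["_type"] == llm._llm_type

# Old spelling keeps working until 2.0, but now warns; the message is
# assumed to name asdict() via the decorator's `alternative` argument.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    legacy = llm.dict()
assert legacy == params
assert any("asdict" in str(w.message) for w in caught)
```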

libs/core/langchain_core/language_models/chat_models.py

Lines changed: 23 additions & 14 deletions

@@ -3,9 +3,9 @@
 from __future__ import annotations
 
 import asyncio
+import builtins
 import inspect
 import json
-import typing
 from abc import ABC, abstractmethod
 from collections.abc import AsyncIterator, Callable, Iterator, Sequence
 from functools import cached_property
@@ -15,6 +15,7 @@
 from pydantic import BaseModel, ConfigDict, Field
 from typing_extensions import override
 
+from langchain_core._api import deprecated
 from langchain_core.caches import BaseCache
 from langchain_core.callbacks import (
     AsyncCallbackManager,
@@ -357,7 +358,7 @@ class BaseChatModel(BaseLanguageModel[AIMessage], ABC):
     )
 
     @cached_property
-    def _serialized(self) -> dict[str, Any]:
+    def _serialized(self) -> builtins.dict[str, Any]:
         return dumpd(self)
 
     # --- Runnable methods ---
@@ -730,7 +731,10 @@ async def astream(
 
     # --- Custom methods ---
 
-    def _combine_llm_outputs(self, llm_outputs: list[dict | None]) -> dict:  # noqa: ARG002
+    def _combine_llm_outputs(
+        self,
+        llm_outputs: list[builtins.dict | None],  # noqa: ARG002
+    ) -> builtins.dict:
         return {}
 
     def _convert_cached_generations(self, cache_val: list) -> list[ChatGeneration]:
@@ -776,8 +780,8 @@ def _get_invocation_params(
         self,
         stop: list[str] | None = None,
         **kwargs: Any,
-    ) -> dict:
-        params = self.dict()
+    ) -> builtins.dict:
+        params = self.asdict()
         params["stop"] = stop
         return {**params, **kwargs}
 
@@ -842,7 +846,7 @@ def generate(
         callbacks: Callbacks = None,
         *,
         tags: list[str] | None = None,
-        metadata: dict[str, Any] | None = None,
+        metadata: builtins.dict[str, Any] | None = None,
         run_name: str | None = None,
         run_id: uuid.UUID | None = None,
         **kwargs: Any,
@@ -965,7 +969,7 @@ async def agenerate(
         callbacks: Callbacks = None,
         *,
         tags: list[str] | None = None,
-        metadata: dict[str, Any] | None = None,
+        metadata: builtins.dict[str, Any] | None = None,
         run_name: str | None = None,
         run_id: uuid.UUID | None = None,
         **kwargs: Any,
@@ -1491,18 +1495,23 @@ async def _call_async(
     def _llm_type(self) -> str:
         """Return type of chat model."""
 
-    @override
-    def dict(self, **kwargs: Any) -> dict:
+    @deprecated("1.0.2", alternative="asdict", removal="2.0")
+    def dict(self, **_kwargs: Any) -> builtins.dict[str, Any]:
+        """DEPRECATED - use `asdict()` instead.
+
+        Return a dictionary of the LLM.
+        """
+        return self.asdict()
+
+    def asdict(self) -> builtins.dict[str, Any]:
         """Return a dictionary of the LLM."""
         starter_dict = dict(self._identifying_params)
         starter_dict["_type"] = self._llm_type
         return starter_dict
 
     def bind_tools(
         self,
-        tools: Sequence[
-            typing.Dict[str, Any] | type | Callable | BaseTool  # noqa: UP006
-        ],
+        tools: Sequence[builtins.dict[str, Any] | type | Callable | BaseTool],
         *,
         tool_choice: str | None = None,
         **kwargs: Any,
@@ -1521,11 +1530,11 @@ def bind_tools(
 
     def with_structured_output(
         self,
-        schema: typing.Dict | type,  # noqa: UP006
+        schema: builtins.dict | type,
         *,
         include_raw: bool = False,
         **kwargs: Any,
-    ) -> Runnable[LanguageModelInput, typing.Dict | BaseModel]:  # noqa: UP006
+    ) -> Runnable[LanguageModelInput, builtins.dict | BaseModel]:
         """Model wrapper that returns outputs formatted to match the given schema.
 
         Args:
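The new `import builtins` (and the `typing.Dict  # noqa: UP006` workaround it retires) is needed because these classes define a method named `dict`: inside a class body that name shadows the builtin type, so annotations must spell out `builtins.dict`. A self-contained toy illustration of the shadowing, not taken from the commit:

```python
# Toy illustration (not from the commit) of why `builtins.dict` is needed
# in a class that defines its own `dict` method.
import builtins


class Config:
    def dict(self) -> builtins.dict[str, int]:
        return {"retries": 3}

    # Without `builtins.`, the bare name `dict` below would resolve to the
    # method above (class-body names shadow builtins) and, absent deferred
    # annotations, raise "TypeError: 'function' object is not subscriptable"
    # at class creation time.
    def merged(self, extra: builtins.dict[str, int]) -> builtins.dict[str, int]:
        return {**self.dict(), **extra}


assert Config().merged({"timeout": 30}) == {"retries": 3, "timeout": 30}
```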

libs/core/langchain_core/language_models/llms.py

Lines changed: 20 additions & 11 deletions

@@ -6,6 +6,7 @@
 from __future__ import annotations
 
 import asyncio
+import builtins
 import functools
 import inspect
 import json
@@ -32,6 +33,7 @@
 )
 from typing_extensions import override
 
+from langchain_core._api import deprecated
 from langchain_core.caches import BaseCache
 from langchain_core.callbacks import (
     AsyncCallbackManager,
@@ -298,7 +300,7 @@ class BaseLLM(BaseLanguageModel[str], ABC):
     )
 
     @functools.cached_property
-    def _serialized(self) -> dict[str, Any]:
+    def _serialized(self) -> builtins.dict[str, Any]:
         return dumpd(self)
 
     # --- Runnable methods ---
@@ -514,7 +516,7 @@ def stream(
         else:
             prompt = self._convert_input(input).to_string()
         config = ensure_config(config)
-        params = self.dict()
+        params = self.asdict()
         params["stop"] = stop
         params = {**params, **kwargs}
         options = {"stop": stop}
@@ -584,7 +586,7 @@ async def astream(
 
         prompt = self._convert_input(input).to_string()
         config = ensure_config(config)
-        params = self.dict()
+        params = self.asdict()
         params["stop"] = stop
         params = {**params, **kwargs}
         options = {"stop": stop}
@@ -839,7 +841,7 @@ def generate(
         callbacks: Callbacks | list[Callbacks] | None = None,
         *,
         tags: list[str] | list[list[str]] | None = None,
-        metadata: dict[str, Any] | list[dict[str, Any]] | None = None,
+        metadata: builtins.dict[str, Any] | list[builtins.dict[str, Any]] | None = None,
         run_name: str | list[str] | None = None,
         run_id: uuid.UUID | list[uuid.UUID | None] | None = None,
         **kwargs: Any,
@@ -972,7 +974,7 @@ def generate(
         ] * len(prompts)
         run_name_list = [cast("str | None", run_name)] * len(prompts)
         run_ids_list = self._get_run_ids_list(run_id, prompts)
-        params = self.dict()
+        params = self.asdict()
         params["stop"] = stop
         options = {"stop": stop}
         (
@@ -1114,7 +1116,7 @@ async def agenerate(
         callbacks: Callbacks | list[Callbacks] | None = None,
         *,
         tags: list[str] | list[list[str]] | None = None,
-        metadata: dict[str, Any] | list[dict[str, Any]] | None = None,
+        metadata: builtins.dict[str, Any] | list[builtins.dict[str, Any]] | None = None,
         run_name: str | list[str] | None = None,
         run_id: uuid.UUID | list[uuid.UUID | None] | None = None,
         **kwargs: Any,
@@ -1236,7 +1238,7 @@ async def agenerate(
         ] * len(prompts)
         run_name_list = [cast("str | None", run_name)] * len(prompts)
         run_ids_list = self._get_run_ids_list(run_id, prompts)
-        params = self.dict()
+        params = self.asdict()
         params["stop"] = stop
         options = {"stop": stop}
         (
@@ -1328,7 +1330,7 @@ async def _call_async(
         callbacks: Callbacks = None,
         *,
         tags: list[str] | None = None,
-        metadata: dict[str, Any] | None = None,
+        metadata: builtins.dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> str:
         """Check Cache and run the LLM on the given prompt and input."""
@@ -1352,8 +1354,15 @@ def __str__(self) -> str:
     def _llm_type(self) -> str:
         """Return type of llm."""
 
-    @override
-    def dict(self, **kwargs: Any) -> dict:
+    @deprecated("1.0.2", alternative="asdict", removal="2.0")
+    def dict(self, **_kwargs: Any) -> builtins.dict[str, Any]:
+        """DEPRECATED - use `asdict()` instead.
+
+        Return a dictionary of the LLM.
+        """
+        return self.asdict()
+
+    def asdict(self) -> builtins.dict[str, Any]:
        """Return a dictionary of the LLM."""
         starter_dict = dict(self._identifying_params)
         starter_dict["_type"] = self._llm_type
@@ -1380,7 +1389,7 @@ def save(self, file_path: Path | str) -> None:
         directory_path.mkdir(parents=True, exist_ok=True)
 
         # Fetch dictionary to save
-        prompt_dict = self.dict()
+        prompt_dict = self.asdict()
 
         if save_path.suffix == ".json":
             with save_path.open("w", encoding="utf-8") as f:
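Every internal caller in this file (`stream`, `astream`, both `generate` variants, and `save`) now goes through `asdict()`, so none of them trips the new deprecation warning. A hedged sketch of the `save()` path, again with `FakeListLLM` standing in for a real model and the on-disk payload assumed to be exactly what `asdict()` returns:

```python
# Sketch of save() after this commit; FakeListLLM is a stand-in model.
import json
from pathlib import Path

from langchain_core.language_models import FakeListLLM

llm = FakeListLLM(responses=["pong"])
llm.save("llm.json")  # internally: prompt_dict = self.asdict()

saved = json.loads(Path("llm.json").read_text(encoding="utf-8"))
assert saved == llm.asdict()  # includes the "_type" discriminator
```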

libs/core/langchain_core/output_parsers/base.py

Lines changed: 8 additions & 1 deletion

@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import builtins
 import contextlib
 from abc import ABC, abstractmethod
 from typing import (
@@ -13,6 +14,7 @@
 
 from typing_extensions import override
 
+from langchain_core._api import deprecated
 from langchain_core.language_models import LanguageModelOutput
 from langchain_core.messages import AnyMessage, BaseMessage
 from langchain_core.outputs import ChatGeneration, Generation
@@ -328,7 +330,12 @@ def _type(self) -> str:
         )
         raise NotImplementedError(msg)
 
-    def dict(self, **kwargs: Any) -> dict:
+    @deprecated("1.0.2", alternative="asdict", removal="2.0")
+    @override
+    def dict(self, **kwargs: Any) -> builtins.dict[str, Any]:
+        return self.asdict()
+
+    def asdict(self, **kwargs: Any) -> builtins.dict[str, Any]:
         """Return dictionary representation of output parser."""
         output_parser_dict = super().model_dump(**kwargs)
         with contextlib.suppress(NotImplementedError):
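One wrinkle worth noting in this hunk: unlike the prompt version below, the shim here drops its `**kwargs` rather than forwarding them to `asdict()`. The new method itself is `model_dump()` plus an optional `_type` key. A small hedged example with the stock `StrOutputParser`:

```python
# Hedged example: StrOutputParser ships with langchain-core; the exact
# field set in the output depends on the parser's pydantic fields.
from langchain_core.output_parsers import StrOutputParser

rep = StrOutputParser().asdict()
# model_dump() fields plus "_type"; parsers that don't implement _type
# simply omit the key (the NotImplementedError is suppressed).
print(rep.get("_type"))
```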

libs/core/langchain_core/prompts/base.py

Lines changed: 27 additions & 12 deletions

@@ -2,9 +2,9 @@
 
 from __future__ import annotations
 
+import builtins
 import contextlib
 import json
-import typing
 from abc import ABC, abstractmethod
 from collections.abc import Mapping
 from functools import cached_property
@@ -20,6 +20,7 @@
 from pydantic import BaseModel, ConfigDict, Field, model_validator
 from typing_extensions import Self, override
 
+from langchain_core._api import deprecated
 from langchain_core.exceptions import ErrorCode, create_message
 from langchain_core.load import dumpd
 from langchain_core.output_parsers.base import BaseOutputParser
@@ -56,7 +57,7 @@ class BasePromptTemplate(
 
     These variables are auto inferred from the prompt and user need not provide them.
     """
-    input_types: typing.Dict[str, Any] = Field(default_factory=dict, exclude=True)  # noqa: UP006
+    input_types: builtins.dict[str, Any] = Field(default_factory=dict, exclude=True)
     """A dictionary of the types of the variables the prompt template expects.
 
     If not provided, all variables are assumed to be strings.
@@ -69,7 +70,7 @@ class BasePromptTemplate(
     Partial variables populate the template so that you don't need to pass them in every
     time you call the prompt.
     """
-    metadata: typing.Dict[str, Any] | None = None  # noqa: UP006
+    metadata: builtins.dict[str, Any] | None = None
     """Metadata to be used for tracing."""
     tags: list[str] | None = None
     """Tags to be used for tracing."""
@@ -121,7 +122,7 @@ def is_lc_serializable(cls) -> bool:
         )
 
     @cached_property
-    def _serialized(self) -> dict[str, Any]:
+    def _serialized(self) -> builtins.dict[str, Any]:
         return dumpd(self)
 
     @property
@@ -152,7 +153,7 @@ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseMod
         field_definitions={**required_input_variables, **optional_input_variables},
     )
 
-    def _validate_input(self, inner_input: Any) -> dict:
+    def _validate_input(self, inner_input: Any) -> builtins.dict:
         if not isinstance(inner_input, dict):
             if len(self.input_variables) == 1:
                 var_name = self.input_variables[0]
@@ -186,19 +187,23 @@ def _validate_input(self, inner_input: Any) -> dict:
         )
         return inner_input
 
-    def _format_prompt_with_error_handling(self, inner_input: dict) -> PromptValue:
+    def _format_prompt_with_error_handling(
+        self,
+        inner_input: builtins.dict,
+    ) -> PromptValue:
         inner_input_ = self._validate_input(inner_input)
         return self.format_prompt(**inner_input_)
 
     async def _aformat_prompt_with_error_handling(
-        self, inner_input: dict
+        self,
+        inner_input: builtins.dict,
     ) -> PromptValue:
         inner_input_ = self._validate_input(inner_input)
         return await self.aformat_prompt(**inner_input_)
 
     @override
     def invoke(
-        self, input: dict, config: RunnableConfig | None = None, **kwargs: Any
+        self, input: builtins.dict, config: RunnableConfig | None = None, **kwargs: Any
     ) -> PromptValue:
         """Invoke the prompt.
@@ -224,7 +229,7 @@ def invoke(
 
     @override
     async def ainvoke(
-        self, input: dict, config: RunnableConfig | None = None, **kwargs: Any
+        self, input: builtins.dict, config: RunnableConfig | None = None, **kwargs: Any
     ) -> PromptValue:
         """Async invoke the prompt.
@@ -286,7 +291,9 @@ def partial(self, **kwargs: str | Callable[[], str]) -> BasePromptTemplate:
         prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs}
         return type(self)(**prompt_dict)
 
-    def _merge_partial_and_user_variables(self, **kwargs: Any) -> dict[str, Any]:
+    def _merge_partial_and_user_variables(
+        self, **kwargs: Any
+    ) -> builtins.dict[str, Any]:
         # Get partial params:
         partial_kwargs = {
             k: v if not callable(v) else v() for k, v in self.partial_variables.items()
@@ -330,7 +337,15 @@ def _prompt_type(self) -> str:
         """Return the prompt type key."""
         raise NotImplementedError
 
-    def dict(self, **kwargs: Any) -> dict:
+    @deprecated("1.0.2", alternative="asdict", removal="2.0")
+    def dict(self, **kwargs: Any) -> builtins.dict[str, Any]:
+        """DEPRECATED - use `asdict()` instead.
+
+        Return a dictionary of the LLM.
+        """
+        return self.asdict(**kwargs)
+
+    def asdict(self, **kwargs: Any) -> builtins.dict[str, Any]:
         """Return dictionary representation of prompt.
 
         Args:
@@ -365,7 +380,7 @@ def save(self, file_path: Path | str) -> None:
             raise ValueError(msg)
 
         # Fetch dictionary to save
-        prompt_dict = self.dict()
+        prompt_dict = self.asdict()
         if "_type" not in prompt_dict:
             msg = f"Prompt {self} does not support saving."
             raise NotImplementedError(msg)
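For prompts the shim does forward `**kwargs`, and `save()` depends on the `_type` key that `asdict()` injects. A minimal sketch with the concrete `PromptTemplate` subclass; the field names in the dump follow its pydantic model:

```python
# Minimal sketch using PromptTemplate, a concrete BasePromptTemplate
# subclass from langchain-core.
from langchain_core.prompts import PromptTemplate

prompt = PromptTemplate.from_template("Tell me a joke about {topic}")

rep = prompt.asdict()
assert rep["_type"] == "prompt"  # the key that save() checks for
assert rep["template"] == "Tell me a joke about {topic}"

prompt.save("joke_prompt.json")  # now routed through asdict() internally
```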
