Skip to content

Commit 02c3fa5

Browse files
authored
Merge pull request #219 from an7oine/async
Enable langgraph astream usage
2 parents d52ba1b + aa97586 commit 02c3fa5

3 files changed

Lines changed: 312 additions & 19 deletions

File tree

django_ai_assistant/helpers/assistants.py

Lines changed: 92 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,20 @@
11
import abc
22
import inspect
33
import re
4-
from typing import Annotated, Any, ClassVar, Dict, Sequence, Type, TypedDict, cast
4+
from typing import (
5+
Annotated,
6+
Any,
7+
AsyncIterable,
8+
AsyncIterator,
9+
ClassVar,
10+
Dict,
11+
Literal,
12+
Sequence,
13+
Type,
14+
TypedDict,
15+
cast,
16+
overload,
17+
)
518

619
from langchain_core.language_models import BaseChatModel
720
from langchain_core.messages import (
@@ -415,16 +428,20 @@ def get_history_aware_retriever(self) -> Runnable[dict, RetrieverOutput]:
415428
)
416429

417430
@with_cast_id
418-
def as_graph(self, thread_id: Any | None = None) -> Runnable[dict, dict]:
431+
def as_graph(
432+
self, thread_id: Any | None = None, thread: Any | None = None
433+
) -> Runnable[dict, dict]:
419434
"""Create the LangGraph graph for the assistant.\n
420435
This graph is an agent that supports chat history, tool calling, and RAG (if `has_rag=True`).\n
421436
`as_graph` uses many other methods to create the graph for the assistant.
422437
Prefer to override the other methods to customize the graph for the assistant.
423438
Only override this method if you need to customize the graph at a lower level.
424439
440+
If both arguments are `None`, an in-memory chat message history is used.
441+
425442
Args:
426443
thread_id (Any | None): The thread ID for the chat message history.
427-
If `None`, an in-memory chat message history is used.
444+
thread (Any | None): The thread object for the chat message history.
428445
429446
Returns:
430447
the compiled graph
@@ -434,21 +451,11 @@ def as_graph(self, thread_id: Any | None = None) -> Runnable[dict, dict]:
434451
llm = self.get_llm()
435452
tools = self.get_tools()
436453
llm_with_tools = llm.bind_tools(tools) if tools else llm
437-
if thread_id:
454+
if thread is None and thread_id is not None:
438455
thread = Thread.objects.get(id=thread_id)
439-
else:
440-
thread = None
441-
442-
def custom_add_messages(left: list[BaseMessage], right: list[BaseMessage]):
443-
result = add_messages(left, right) # type: ignore
444-
if thread:
445-
# Save all messages, except the initial system message:
446-
thread_messages = [m for m in result if not isinstance(m, SystemMessage)]
447-
save_django_messages(cast(list[BaseMessage], thread_messages), thread=thread)
448-
return result
449456

450457
class AgentState(TypedDict):
451-
messages: Annotated[list[AnyMessage], custom_add_messages]
458+
messages: Annotated[list[AnyMessage], add_messages]
452459
input: str | None # noqa: A003
453460
output: Any
454461

@@ -522,6 +529,10 @@ def record_response(state: AgentState):
522529
else:
523530
response = state["messages"][-1].content
524531

532+
if thread:
533+
# Save all messages, except the initial system message:
534+
thread_messages = [m for m in state["messages"] if not isinstance(m, SystemMessage)]
535+
save_django_messages(cast(list[BaseMessage], thread_messages), thread=thread)
525536
return {"output": response}
526537

527538
workflow = StateGraph(AgentState)
@@ -550,28 +561,62 @@ def record_response(state: AgentState):
550561

551562
return workflow.compile()
552563

564+
@overload
565+
def invoke(
566+
self,
567+
*args: Any,
568+
thread_id: Any | None,
569+
thread: Any | None = None,
570+
mode: Literal["invoke"] = "invoke",
571+
**kwargs: Any,
572+
) -> dict:
573+
... # pragma: no cover
574+
575+
@overload
576+
def invoke(
577+
self,
578+
*args: Any,
579+
thread_id: Any | None,
580+
thread: Any | None = None,
581+
mode: Literal["astream"],
582+
**kwargs: Any,
583+
) -> AsyncIterator[dict]:
584+
... # pragma: no cover
585+
553586
@with_cast_id
554-
def invoke(self, *args: Any, thread_id: Any | None, **kwargs: Any) -> dict:
587+
def invoke(
588+
self,
589+
*args: Any,
590+
thread_id: Any | None = None,
591+
thread: Any | None = None,
592+
mode: Literal["invoke", "astream"] = "invoke",
593+
**kwargs: Any,
594+
) -> dict | AsyncIterator[dict]:
555595
"""Invoke the assistant LangChain graph with the given arguments and keyword arguments.\n
556596
This is the lower-level method to run the assistant.\n
557597
The graph is created by the `as_graph` method.\n
558598
599+
If both `thread_id` and `thread` are `None`, an in-memory chat message history is used.
600+
559601
Args:
560602
*args: Positional arguments to pass to the graph.
561603
To add a new message, use a dict like `{"input": "user message"}`.
562604
If thread already has a `HumanMessage` in the end, you can invoke without args.
563605
thread_id (Any | None): The thread ID for the chat message history.
564-
If `None`, an in-memory chat message history is used.
606+
thread (Any | None): The thread object for the chat message history.
607+
mode ("invoke" | "astream"): The name of the graph method to call.
565608
**kwargs: Keyword arguments to pass to the graph.
566609
567610
Returns:
568611
dict: The output of the assistant graph,
569612
structured like `{"output": "assistant response", "history": ...}`.
570613
"""
571-
graph = self.as_graph(thread_id)
614+
graph = self.as_graph(thread_id=thread_id, thread=thread)
572615
config = kwargs.pop("config", {})
573616
config["max_concurrency"] = config.pop("max_concurrency", self.tool_max_concurrency)
574-
return graph.invoke(*args, config=config, **kwargs)
617+
if mode not in ("invoke", "astream"):
618+
raise NotImplementedError(f"mode={mode!r}")
619+
return getattr(graph, mode)(*args, config=config, **kwargs)
575620

576621
@with_cast_id
577622
def run(self, message: str, thread_id: Any | None = None, **kwargs: Any) -> Any:
@@ -595,6 +640,34 @@ def run(self, message: str, thread_id: Any | None = None, **kwargs: Any) -> Any:
595640
**kwargs,
596641
)["output"]
597642

643+
@with_cast_id
644+
async def astream(
645+
self, message: str, thread: Any | None = None, **kwargs: Any
646+
) -> AsyncIterable[Any]:
647+
"""Async-stream the assistant with the given message and thread.\n
648+
This is the higher-level method to run the assistant.\n
649+
650+
Args:
651+
message (str): The user message to pass to the assistant.
652+
thread (Any | None): The thread object for the chat message history.
653+
If `None`, an in-memory chat message history is used.
654+
**kwargs: Additional keyword arguments to pass to the graph.
655+
656+
Yields:
657+
Any: The next streamed chunk of the assistant response to the user message.
658+
"""
659+
async for output, metadata in self.invoke(
660+
{
661+
"input": message,
662+
},
663+
thread=thread,
664+
mode="astream",
665+
stream_mode="messages",
666+
**kwargs,
667+
):
668+
if metadata.get("langgraph_node") == "agent" and (content := output.content):
669+
yield content
670+
598671
def _run_as_tool(self, message: str, **kwargs: Any) -> Any:
599672
return self.run(message, thread_id=None, **kwargs)
600673

Lines changed: 200 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,200 @@
1+
interactions:
2+
- request:
3+
body: '{"messages":[{"content":"You are a temperature bot. Today is 2024-06-09.","role":"system"},{"content":"What
4+
is the temperature today in Recife?","role":"user"}],"model":"gpt-4o","stream":true,"stream_options":{"include_usage":true},"temperature":1.0,"tools":[{"type":"function","function":{"name":"fetch_current_temperature","description":"Fetch
5+
the current temperature data for a location","parameters":{"properties":{"location":{"type":"string"}},"required":["location"],"type":"object"}}},{"type":"function","function":{"name":"fetch_forecast_temperature","description":"Fetch
6+
the forecast temperature data for a location","parameters":{"type":"object","properties":{"location":{"type":"string"},"dt_str":{"description":"Date
7+
in the format ''YYYY-MM-DD''","type":"string"}},"required":["location","dt_str"]}}}]}'
8+
headers:
9+
Accept:
10+
- application/json
11+
Accept-Encoding:
12+
- gzip, deflate, zstd
13+
Connection:
14+
- keep-alive
15+
Content-Length:
16+
- '812'
17+
Content-Type:
18+
- application/json
19+
Host:
20+
- api.openai.com
21+
X-Stainless-Raw-Response:
22+
- 'true'
23+
authorization:
24+
- DUMMY
25+
user-agent:
26+
- OpenAI/Python
27+
x-stainless-retry-count:
28+
- '0'
29+
method: POST
30+
uri: https://api.openai.com/v1/chat/completions
31+
response:
32+
body:
33+
string: 'data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_zuq818gZWJJlDhdrLcI5IPMm","type":"function","function":{"name":"fetch_current_temperature","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"NW7i9IqiUa8"}
34+
35+
36+
data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"2K6"}
37+
38+
39+
data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"location"}}]},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"sQRXefPeqyM0h3"}
40+
41+
42+
data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"1"}
43+
44+
45+
data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Rec"}}]},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"bbt"}
46+
47+
48+
data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ife"}}]},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"2tw"}
49+
50+
51+
data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"sf0"}
52+
53+
54+
data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null,"obfuscation":"gBbl"}
55+
56+
57+
data: {"id":"chatcmpl-DIDpzDJSqT4EW7UPZCL857JRcFTq6","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[],"usage":{"prompt_tokens":111,"completion_tokens":16,"total_tokens":127,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}},"obfuscation":"UEfMPsiTrLLjA"}
58+
59+
60+
data: [DONE]
61+
62+
63+
'
64+
headers:
65+
Connection:
66+
- keep-alive
67+
Content-Type:
68+
- text/event-stream; charset=utf-8
69+
Date: Sun, 09 Jun 2024 23:39:08 GMT
70+
Server: DUMMY
71+
Strict-Transport-Security:
72+
- max-age=31536000; includeSubDomains; preload
73+
Transfer-Encoding:
74+
- chunked
75+
X-Content-Type-Options:
76+
- nosniff
77+
access-control-expose-headers:
78+
- X-Request-ID
79+
cf-cache-status:
80+
- DYNAMIC
81+
openai-project:
82+
- proj_dummy_openai_id
83+
x-openai-proxy-wasm:
84+
- v0.1
85+
status:
86+
code: 200
87+
message: OK
88+
- request:
89+
body: '{"messages":[{"content":"You are a temperature bot. Today is 2024-06-09.","role":"system"},{"content":"What
90+
is the temperature today in Recife?","role":"user"},{"content":null,"role":"assistant","tool_calls":[{"type":"function","id":"call_zuq818gZWJJlDhdrLcI5IPMm","function":{"name":"fetch_current_temperature","arguments":"{\"location\":
91+
\"Recife\"}"}}]},{"content":"32 degrees Celsius","role":"tool","tool_call_id":"call_zuq818gZWJJlDhdrLcI5IPMm"}],"model":"gpt-4o","stream":true,"stream_options":{"include_usage":true},"temperature":1.0,"tools":[{"type":"function","function":{"name":"fetch_current_temperature","description":"Fetch
92+
the current temperature data for a location","parameters":{"properties":{"location":{"type":"string"}},"required":["location"],"type":"object"}}},{"type":"function","function":{"name":"fetch_forecast_temperature","description":"Fetch
93+
the forecast temperature data for a location","parameters":{"type":"object","properties":{"location":{"type":"string"},"dt_str":{"description":"Date
94+
in the format ''YYYY-MM-DD''","type":"string"}},"required":["location","dt_str"]}}}]}'
95+
headers:
96+
Accept:
97+
- application/json
98+
Accept-Encoding:
99+
- gzip, deflate, zstd
100+
Connection:
101+
- keep-alive
102+
Content-Length:
103+
- '1103'
104+
Content-Type:
105+
- application/json
106+
Host:
107+
- api.openai.com
108+
X-Stainless-Raw-Response:
109+
- 'true'
110+
authorization:
111+
- DUMMY
112+
cookie:
113+
- DUMMY
114+
user-agent:
115+
- OpenAI/Python
116+
x-stainless-retry-count:
117+
- '0'
118+
method: POST
119+
uri: https://api.openai.com/v1/chat/completions
120+
response:
121+
body:
122+
string: 'data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"XpqH8r6lKhOKG6"}
123+
124+
125+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"cIlWkGBpC6Lml"}
126+
127+
128+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"
129+
current"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"t2j4AdCr"}
130+
131+
132+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"
133+
temperature"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"d4F5"}
134+
135+
136+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"
137+
in"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"HMWCJkSoPRIze"}
138+
139+
140+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"
141+
Recife"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"b21LgWyaZ"}
142+
143+
144+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"
145+
is"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"si0Fdd77rKVjw"}
146+
147+
148+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"
149+
"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"sXqWNkI7XGaTUss"}
150+
151+
152+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"32"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"RrCp3ZZkst7zWH"}
153+
154+
155+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"
156+
degrees"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"SsSscPaq"}
157+
158+
159+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"
160+
Celsius"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"SbS9VfIu"}
161+
162+
163+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"uKYplWNzsKkx9s3"}
164+
165+
166+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null,"obfuscation":"doOrarXmUb"}
167+
168+
169+
data: {"id":"chatcmpl-DIDpzBVzIdaiHSEUtACeflIWDRaWo","object":"chat.completion.chunk","created":1773235467,"model":"gpt-4o-2024-08-06","service_tier":"default","system_fingerprint":"fp_c7a156cce7","choices":[],"usage":{"prompt_tokens":139,"completion_tokens":12,"total_tokens":151,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}},"obfuscation":"jW6QWGp9HDgt3"}
170+
171+
172+
data: [DONE]
173+
174+
175+
'
176+
headers:
177+
Connection:
178+
- keep-alive
179+
Content-Type:
180+
- text/event-stream; charset=utf-8
181+
Date: Sun, 09 Jun 2024 23:39:08 GMT
182+
Server: DUMMY
183+
Strict-Transport-Security:
184+
- max-age=31536000; includeSubDomains; preload
185+
Transfer-Encoding:
186+
- chunked
187+
X-Content-Type-Options:
188+
- nosniff
189+
access-control-expose-headers:
190+
- X-Request-ID
191+
cf-cache-status:
192+
- DYNAMIC
193+
openai-project:
194+
- proj_dummy_openai_id
195+
x-openai-proxy-wasm:
196+
- v0.1
197+
status:
198+
code: 200
199+
message: OK
200+
version: 1

0 commit comments

Comments
 (0)