
Commit 8340dd6

This is good for now; merge it.

1 parent da9f6e2 · commit 8340dd6

6 files changed

Lines changed: 85 additions & 11 deletions


backend/src/flows/__init__.py

Lines changed: 3 additions & 1 deletion
@@ -21,6 +21,7 @@
 from src.utils.format import init_system_prompt
 from src.schemas.contexts import ContextSchema
 from src.schemas.entities.a2a import A2AServers
+from src.utils.middleware import add_ai_message_metadata, pii_middleware


 async def add_memories_to_system():
@@ -51,7 +52,7 @@ def graph_builder(
     subagents: list[SubAgent] = [],
     prompt: str = "You are a helpful assistant.",
     model: str = "openai:gpt-5-nano",
-    context_schema: Type[Any] | None = None,
+    context_schema: Type[ContextSchema] | None = None,
     checkpointer: BaseCheckpointSaver | None = None,
     store: BaseStore | None = None,
     graph_id: Literal[
@@ -75,6 +76,7 @@ def graph_builder(
         system_prompt=prompt,
         checkpointer=checkpointer,
         context_schema=context_schema,
+        middleware=[add_ai_message_metadata] + pii_middleware(),
         store=store,
     )
     return deep_agent
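
As a usage reference, here is a minimal sketch of calling the updated builder; it assumes graph_builder is importable from src.flows and that the remaining parameters keep the defaults shown above:

    from src.flows import graph_builder
    from src.schemas.contexts import ContextSchema

    # context_schema is now typed against the shared ContextSchema dataclass,
    # so the context passed at stream time can be checked statically.
    agent = graph_builder(
        prompt="You are a helpful assistant.",
        model="openai:gpt-5-nano",
        context_schema=ContextSchema,
    )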

backend/src/schemas/contexts/__init__.py

Lines changed: 2 additions & 1 deletion
@@ -3,4 +3,5 @@

 @dataclass
 class ContextSchema:
-    user_id: str
+    model: str = None
+    user_id: str = None
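
A caveat on the new defaults: model: str = None and user_id: str = None assign None to fields annotated as plain str, which strict type checkers (mypy, pyright) will flag. A sketch of an equivalent, checker-friendly variant, assuming both fields are meant to be optional:

    from dataclasses import dataclass

    @dataclass
    class ContextSchema:
        model: str | None = None
        user_id: str | None = None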

backend/src/utils/middleware.py

Lines changed: 38 additions & 0 deletions
@@ -0,0 +1,38 @@
+from langchain.agents.middleware import after_model
+from langchain.agents import AgentState
+from langchain_core.messages import AIMessage
+from langgraph.runtime import Runtime
+from src.schemas.contexts import ContextSchema
+from langchain.agents.middleware import PIIMiddleware
+
+@after_model
+def add_ai_message_metadata(state: AgentState, runtime: Runtime[ContextSchema]) -> dict | None:
+    """Attach AI message metadata to final response."""
+    if state["messages"]:
+        last_msg = state["messages"][-1]
+        if isinstance(last_msg, AIMessage) and not last_msg.tool_calls:
+            last_msg.model = runtime.context.model
+    return None
+
+def pii_middleware() -> dict | None:
+    return [
+        # Redact email addresses
+        # PIIMiddleware(
+        #     "email",
+        #     strategy="redact",
+        #     apply_to_input=True,
+        # ),
+        # Mask credit card numbers
+        PIIMiddleware(
+            "credit_card",
+            strategy="mask",
+            apply_to_input=True,
+        ),
+        # Block API keys - raise error if detected
+        PIIMiddleware(
+            "api_key",
+            detector=r"sk-[A-Za-z0-9]+",
+            strategy="block",
+            apply_to_input=True,
+        ),
+    ]
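
One nit in the new module: pii_middleware is annotated -> dict | None but actually returns a list of PIIMiddleware instances. A hedged signature fix, assuming PIIMiddleware is the intended element type:

    from langchain.agents.middleware import PIIMiddleware

    def pii_middleware() -> list[PIIMiddleware]:
        """Build the PII middleware chain applied to agent input."""
        ...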

backend/src/utils/stream.py

Lines changed: 22 additions & 7 deletions
@@ -1,3 +1,4 @@
+from langchain.agents.middleware import PIIDetectionError
 import ujson
 from langchain_core.language_models import BaseChatModel
 from langchain_core.runnables import RunnableConfig
@@ -7,16 +8,16 @@
 from langgraph.types import StreamMode
 from deepagents import SubAgent

+from src.schemas.contexts import ContextSchema
 from src.contexts.service import ServiceContext
-from src.schemas.entities import LLMInput, LLMRequest
+from src.schemas.entities import LLMInput
 from src.constants import APP_LOG_LEVEL
 from src.flows import construct_agent
 from src.services.db import get_checkpoint_db
 from src.utils.messages import from_message_to_dict
 from langchain_core.messages import (
-    AIMessage,
     AIMessageChunk,
-    BaseMessage,
+    HumanMessage,
     ToolMessage,
 )
 from src.utils.logger import log_to_file, logger
@@ -189,9 +190,9 @@ async def stream_generator(
             {"messages": input.messages},
             stream_mode=["messages", "values"],
             config=config,
-            context=None
+            context=ContextSchema(model=agent.model)
         ):
-            # Serialize and yield each chunk as SSEq
+            # Serialize and yield each chunk as SSE
             stream_chunk = handle_multi_mode(chunk)
             if stream_chunk:
                 stream_type = stream_chunk[0]
@@ -202,10 +203,16 @@ async def stream_generator(
                 log_to_file(str(data), agent.model) and APP_LOG_LEVEL == "DEBUG"
                 logger.debug(f"data: {str(data)}")
                 yield f"data: {data}\n\n"
+    except PIIDetectionError as e:
+        # Yield error as SSE if streaming fails
+        logger.warning(f"Sensitive data detected in the query: {e}")
+        # raise HTTPException(status_code=500, detail=str(e))
+        error_msg = ujson.dumps(("error", str(e)))
+        yield f"data: {error_msg}\n\n"

     except Exception as e:
         # Yield error as SSE if streaming fails
-        logger.exception("Error in event_generator: %s", e)
+        logger.exception("Error in stream_generator: %s", e)
         # raise HTTPException(status_code=500, detail=str(e))
         error_msg = ujson.dumps(("error", str(e)))
         yield f"data: {error_msg}\n\n"
@@ -214,12 +221,20 @@ async def stream_generator(
     final_state = await agent.graph.aget_state(config)
     configurable = final_state.config.get("configurable", {})
     messages = final_state.values.get('messages', [])
+
+    # Get the last HumanMessage
+    last_human_message = None
+    for message in reversed(messages):
+        if isinstance(message, HumanMessage):
+            last_human_message = message
+            break
+
     await service_context.thread_service.update(
         thread_id=configurable.get("thread_id"),
         data={
             "thread_id": configurable.get("thread_id"),
             "checkpoint_id": configurable.get("checkpoint_id"),
-            "messages": [messages[-1].model_dump()],
+            "messages": [last_human_message.model_dump()] if last_human_message else [],
             "files": files_map,
             "updated_at": get_time(),
         }
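
With this change, a blocked api_key no longer crashes the stream: the generator emits one structured SSE error frame and ends. A sketch of what a client receives (the message text here is illustrative, not the library's exact wording):

    import ujson

    # ujson serializes the ("error", ...) tuple as a JSON array, without spaces.
    frame = ujson.dumps(("error", "PII detected: api_key"))
    print(f"data: {frame}\n\n")
    # -> data: ["error","PII detected: api_key"]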

frontend/src/components/lists/ChatMessages.tsx

Lines changed: 6 additions & 2 deletions
@@ -12,6 +12,7 @@ import SearchEngineTool from "../tools/SearchEngine";
 import ChartRenderWidget from "../tools/ChartRenderWidget";
 import CopyTextButton from "../buttons/CopyTextButton";
 import FileViewer from "../viewers/FileViewer";
+import { latestHumanMessage } from "@/lib/utils/message";

 const MAX_LENGTH = 1000;

@@ -62,9 +63,11 @@ function ToolAction({
 export function Message({
   message,
   isLatest = false,
+  messages,
 }: {
   message: any;
   isLatest?: boolean;
+  messages: any[];
 }) {
   const ICON_SIZE = 4;
   const [isEditing, setIsEditing] = useState(false);
@@ -256,14 +259,14 @@ export function Message({

       <div className="flex items-center gap-2">
         <button className="text-sm text-muted-foreground">
-          {message.model}
+          {message.model || latestHumanMessage(messages)?.model || "Unknown model"}
         </button>

         {isLatest && streamingRate?.rate && (
           <span
             className={`text-sm text-muted-foreground/70 ${loading ? "animate-pulse" : ""}`}
           >
-            {streamingRate.rate} tok/s
+            {streamingRate.rate} tok/s{streamingRate.count} tokens
           </span>
         )}
       </div>
@@ -315,6 +318,7 @@ const ChatMessages = ({ messages }: { messages: any[] }) => {
             key={message.id}
             message={message}
             isLatest={index === messages.length - 1}
+            messages={messages}
           />
         ))
       ) : (
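
One small display nit in the streaming stats: {streamingRate.rate} tok/s{streamingRate.count} tokens has no separator between the two figures, so it likely renders as "12 tok/s345 tokens"; if both stats are wanted, a delimiter between them (a space, dot, or similar) may be intended.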

frontend/src/lib/utils/message.ts

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+
+
+export function latestHumanMessage(messages: any[] | undefined | null) {
+  if (!Array.isArray(messages) || messages.length === 0) {
+    return null;
+  }
+  for (let i = messages.length - 1; i >= 0; i--) {
+    const msg = messages[i];
+    if (msg && msg.type === "human") {
+      return msg;
+    }
+  }
+  return null;
+}
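
This helper scans the array from the end and returns the most recent entry whose type is "human", mirroring the reversed(messages) loop added to stream.py on the backend. Both sides now agree on which message anchors the thread update and the model-name fallback, and both return a safe empty result when no human message exists.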
