Skip to content

Commit d23ba31

Browse files
authored
feat: map conversational input to attachments for nodes [JAR-9193] (#535)
1 parent 487659a commit d23ba31

10 files changed

Lines changed: 576 additions & 9 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-langchain"
3-
version = "0.5.70"
3+
version = "0.5.71"
44
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"

src/uipath_langchain/agent/react/init_node.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88

99
from .job_attachments import (
1010
get_job_attachments,
11+
parse_attachments_from_conversation_messages,
1112
)
1213
from .types import AgentSettings
1314

@@ -43,6 +44,12 @@ def graph_state_init(state: Any) -> Any:
4344
job_attachments_dict = {
4445
str(att.id): att for att in job_attachments if att.id is not None
4546
}
47+
# Merge attachments from preserved messages for conversational agents
48+
if is_conversational:
49+
message_attachments = parse_attachments_from_conversation_messages(
50+
preserved_messages
51+
)
52+
job_attachments_dict.update(message_attachments)
4653

4754
return {
4855
"messages": resolved_messages,

src/uipath_langchain/agent/react/job_attachments.py

Lines changed: 43 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,10 @@
22

33
import copy
44
import uuid
5-
from typing import Any
5+
from typing import Any, Sequence
66

77
from jsonpath_ng import parse # type: ignore[import-untyped]
8+
from langchain_core.messages import BaseMessage, HumanMessage
89
from pydantic import BaseModel
910
from uipath.platform.attachments import Attachment
1011

@@ -125,3 +126,44 @@ def _create_job_attachment_error_message(attachment_id_str: str) -> str:
125126
f"Try invoking the tool again and please make sure that you pass "
126127
f"valid JobAttachment IDs associated with existing JobAttachments in the current context."
127128
)
129+
130+
131+
def parse_attachments_from_conversation_messages(
    messages: Sequence[BaseMessage],
) -> dict[str, Attachment]:
    """Parse attachments from HumanMessage additional_kwargs.

    Extracts attachment information from HumanMessages whose additional_kwargs
    contains an 'attachments' list of dicts with the keys 'id', 'full_name'
    and 'mime_type'.

    Args:
        messages: Sequence of messages to parse.

    Returns:
        Dictionary mapping the stringified attachment ID to Attachment objects.
        Entries missing an id or a full_name are skipped.
    """
    attachments: dict[str, Attachment] = {}

    for message in messages:
        # Only user messages carry conversational attachments.
        if not isinstance(message, HumanMessage):
            continue

        kwargs = getattr(message, "additional_kwargs", None)
        if not kwargs:
            continue

        # Handle attachments list in additional_kwargs.
        for att in kwargs.get("attachments", []):
            att_id = att.get("id")  # renamed to avoid shadowing the `id` builtin
            full_name = att.get("full_name")

            # Skip malformed entries that lack an id or a file name;
            # mime_type is optional and may be None.
            if att_id and full_name:
                attachments[str(att_id)] = Attachment(
                    id=att_id,
                    full_name=full_name,
                    mime_type=att.get("mime_type"),
                )

    return attachments

src/uipath_langchain/agent/tools/internal_tools/analyze_files_tool.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,10 @@ def create_analyze_file_tool(
5151
input_model = create_model(resource.input_schema)
5252
output_model = create_model(resource.output_schema)
5353

54+
# Disable streaming so for conversational loops, the internal LLM call doesn't leak
55+
# AIMessageChunk events into the graph stream.
56+
non_streaming_llm = llm.model_copy(update={"disable_streaming": True})
57+
5458
@mockable(
5559
name=resource.name,
5660
description=resource.description,
@@ -81,7 +85,7 @@ async def tool_fn(**kwargs: Any):
8185
cast(AnyMessage, human_message_with_files),
8286
]
8387
config = var_child_runnable_config.get(None)
84-
result = await llm.ainvoke(messages, config=config)
88+
result = await non_streaming_llm.ainvoke(messages, config=config)
8589

8690
analysis_result = extract_text_content(result)
8791
return analysis_result

src/uipath_langchain/runtime/messages.py

Lines changed: 57 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import asyncio
22
import json
33
import logging
4+
import uuid
45
from datetime import datetime, timezone
56
from typing import Any, cast
67

@@ -28,6 +29,7 @@
2829
UiPathConversationToolCallEndEvent,
2930
UiPathConversationToolCallEvent,
3031
UiPathConversationToolCallStartEvent,
32+
UiPathExternalValue,
3133
UiPathInlineValue,
3234
)
3335
from uipath.runtime import UiPathRuntimeStorageProtocol
@@ -90,7 +92,6 @@ def map_messages(self, messages: list[Any]) -> list[Any]:
9092
return self._map_messages_internal(
9193
cast(list[UiPathConversationMessage], messages)
9294
)
93-
9495
# Case3: List[dict] -> parse to List[UiPathConversationMessage]
9596
if isinstance(first, dict):
9697
try:
@@ -118,9 +119,9 @@ def _map_messages_internal(
118119

119120
for uipath_message in messages:
120121
content_blocks: list[ContentBlock] = []
122+
attachments: list[dict[str, Any]] = []
121123

122124
# Convert content_parts to content_blocks
123-
# TODO: Convert file-attachment content-parts to content_blocks as well
124125
if uipath_message.content_parts:
125126
for uipath_content_part in uipath_message.content_parts:
126127
data = uipath_content_part.data
@@ -134,13 +135,36 @@ def _map_messages_internal(
134135
text, id=uipath_content_part.content_part_id
135136
)
136137
)
138+
elif isinstance(data, UiPathExternalValue):
139+
attachment_id = self.parse_attachment_id_from_content_part_uri(
140+
data.uri
141+
)
142+
full_name = uipath_content_part.name
143+
if attachment_id and full_name:
144+
attachments.append(
145+
{
146+
"id": attachment_id,
147+
"full_name": full_name,
148+
"mime_type": uipath_content_part.mime_type,
149+
}
150+
)
151+
152+
# Add attachment references as a text block for LLM visibility
153+
if attachments:
154+
content_blocks.append(
155+
create_text_block(
156+
f"<uip:attachments>{json.dumps(attachments)}</uip:attachments>"
157+
)
158+
)
137159

138160
# Metadata for the user/assistant message
139-
metadata = {
161+
metadata: dict[str, Any] = {
140162
"message_id": uipath_message.message_id,
141163
"created_at": uipath_message.created_at,
142164
"updated_at": uipath_message.updated_at,
143165
}
166+
if attachments:
167+
metadata["attachments"] = attachments
144168

145169
role = uipath_message.role
146170
if role == "user":
@@ -244,6 +268,36 @@ def get_timestamp(self):
244268
def get_content_part_id(self, message_id: str) -> str:
245269
return f"chunk-{message_id}-0"
246270

271+
def parse_attachment_id_from_content_part_uri(self, uri: str) -> str | None:
272+
"""Parse attachment ID from a URI.
273+
274+
Extracts the UUID from URIs like:
275+
"urn:uipath:cas:file:orchestrator:a940a416-b97b-4146-3089-08de5f4d0a87"
276+
277+
Args:
278+
uri: The URI to parse
279+
280+
Returns:
281+
The attachment ID if found, None otherwise
282+
"""
283+
if not uri:
284+
return None
285+
286+
# The UUID is the last segment after the final colon
287+
parts = uri.rsplit(":", 1)
288+
if len(parts) != 2:
289+
return None
290+
291+
potential_uuid = parts[1]
292+
if not potential_uuid:
293+
return None
294+
295+
# Validate it's a proper UUID and normalize to lowercase
296+
try:
297+
return str(uuid.UUID(potential_uuid))
298+
except (ValueError, AttributeError):
299+
return None
300+
247301
async def map_ai_message_chunk_to_events(
248302
self, message: AIMessageChunk
249303
) -> list[UiPathConversationMessageEvent]:

tests/agent/react/test_init_node.py

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -214,3 +214,35 @@ def test_inner_state_present_in_conversational_mode(self):
214214

215215
assert "inner_state" in result
216216
assert "job_attachments" in result["inner_state"]
217+
218+
def test_conversational_merges_attachments_from_preserved_messages(self):
    """Conversational mode should merge attachments from preserved message metadata."""
    att_id = "a940a416-b97b-4146-3089-08de5f4d0a87"
    # Preserved history: a stale system prompt plus a user message that
    # carries attachment metadata in additional_kwargs.
    preserved = [
        SystemMessage(content="Old system"),
        HumanMessage(
            content="File here",
            additional_kwargs={
                "attachments": [
                    {
                        "id": att_id,
                        "full_name": "document.pdf",
                        "mime_type": "application/pdf",
                    }
                ],
            },
        ),
    ]
    state = MockState(messages=preserved)

    init_node = create_init_node(
        [SystemMessage(content="New system")],
        input_schema=None,
        is_conversational=True,
    )

    result = init_node(state)

    merged = result["inner_state"]["job_attachments"]
    assert att_id in merged
    assert merged[att_id].full_name == "document.pdf"
    assert merged[att_id].mime_type == "application/pdf"

0 commit comments

Comments (0)