Skip to content

Commit 189a721

Browse files
.
1 parent c79364d commit 189a721

2 files changed

Lines changed: 138 additions & 13 deletions

File tree

deepeval/integrations/portkey/patcher.py

Lines changed: 131 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
1+
import json
12
import functools
23
from typing import Optional, List
34
from deepeval.tracing.context import current_span_context
45
from deepeval.tracing.tracing import Observer
56
from deepeval.tracing.utils import make_json_serializable
67
from deepeval.metrics import BaseMetric
8+
from deepeval.tracing.types import LlmOutput, LlmToolCall
79
try:
810
from portkey_ai import Portkey
911
from portkey_ai.api_resources.apis.chat_complete import Completions, ChatCompletions
@@ -27,7 +29,6 @@ def new_init(*args, metric_collection: Optional[str] = None, metrics: Optional[L
2729

2830
Portkey.__init__ = new_init
2931

30-
3132
def _patch_portkey_chat_completions(completions: Completions, metrics=None, metric_collection=None):
3233
if getattr(completions, "_deepeval_patched", False):
3334
return
@@ -44,36 +45,159 @@ def new_create(*args, **kwargs):
4445
func_name="LLM",
4546
) as observer:
4647
result = original_create(*args, **kwargs)
47-
observer.result = extract_chat_completion_messages(result)
48+
observer.result = extract_llm_output_from_chat_completion_messages(result)
4849
current_span_context.get().input = kwargs.get("messages")
4950
return result
5051

5152
completions.create = new_create
5253
setattr(completions, "_deepeval_patched", True)
5354

55+
5456
def instrument():
    """Install deepeval tracing onto the Portkey SDK.

    First calls ``is_portkey_available()`` (which raises if the
    ``portkey_ai`` package cannot be imported), then patches
    ``Portkey.__init__`` so clients constructed afterwards get their
    chat-completion calls observed.
    """
    is_portkey_available()
    _patch_portkey_init()
5759

58-
def _message_field(obj, key):
    """Read ``key`` from an SDK object (attribute) or a plain dict; else None.

    Attribute access is tried first, mirroring how Portkey/OpenAI response
    objects expose fields, with a dict fallback for raw JSON payloads.
    """
    if hasattr(obj, key):
        return getattr(obj, key)
    if isinstance(obj, dict):
        return obj.get(key)
    return None


def _coerce_tool_args(arguments):
    """Normalize tool-call ``arguments`` into a dict.

    JSON strings are parsed; unparseable strings and other scalar values are
    wrapped as ``{"arguments": <value>}``; dicts pass through; None -> {}.
    """
    if isinstance(arguments, str):
        try:
            return json.loads(arguments)
        except Exception:
            return {"arguments": arguments}
    if isinstance(arguments, dict):
        return arguments
    if arguments is not None:
        return {"arguments": arguments}
    return {}


def _content_to_str(raw_content):
    """Flatten message ``content`` (str, list of parts, or other) to a string.

    List parts may be plain strings or dicts carrying the text under
    ``text``/``content``/``value`` (multimodal-style payloads); dict parts
    with none of those keys contribute nothing.
    """
    if isinstance(raw_content, str):
        return raw_content
    if isinstance(raw_content, list):
        parts = []
        for part in raw_content:
            if isinstance(part, str):
                parts.append(part)
            elif isinstance(part, dict):
                text = part.get("text") or part.get("content") or part.get("value")
                if text is not None:
                    parts.append(str(text))
        return "".join(parts)
    if raw_content is not None:
        return str(raw_content)
    return ""


def extract_llm_output_from_chat_completion_messages(result: ChatCompletions) -> List[LlmOutput]:
    """Convert a chat-completion response into a list of ``LlmOutput``.

    Walks ``result.choices`` (object attributes or raw dict keys), mapping
    each choice's message to an ``LlmOutput`` with:
      - role: "assistant" is rewritten to "AI"; other roles pass through;
        a missing role defaults to "AI";
      - content: flattened to a single string (see ``_content_to_str``);
      - tool_calls: built from ``message.tool_calls`` (including each call's
        ``id``) or, when absent, from the legacy single ``function_call``.

    If no choices/messages can be extracted, or anything raises, falls back
    to ``make_json_serializable(result)`` so tracing never breaks the call.
    """
    try:
        choices = _message_field(result, "choices")

        outputs = []
        if isinstance(choices, list):
            for choice in choices:
                message = _message_field(choice, "message")
                if message is None:
                    continue

                role = _message_field(message, "role")
                role = "AI" if (role or "").lower() == "assistant" else (role or "AI")

                content_str = _content_to_str(_message_field(message, "content"))

                tool_calls = []
                tcs = _message_field(message, "tool_calls")
                if isinstance(tcs, list):
                    for tc in tcs:
                        func = _message_field(tc, "function")
                        tc_id = _message_field(tc, "id")
                        name = _message_field(func, "name") if func is not None else None
                        arguments = (
                            _message_field(func, "arguments") if func is not None else None
                        )
                        if name:
                            tool_calls.append(
                                LlmToolCall(
                                    name=name,
                                    args=_coerce_tool_args(arguments),
                                    id=tc_id,
                                )
                            )
                else:
                    # Fallback for the legacy single function_call field.
                    fc = _message_field(message, "function_call")
                    if fc:
                        name = _message_field(fc, "name")
                        arguments = _message_field(fc, "arguments")
                        if name:
                            tool_calls.append(
                                LlmToolCall(name=name, args=_coerce_tool_args(arguments))
                            )

                outputs.append(
                    LlmOutput(
                        role=role,
                        content=content_str or "",
                        tool_calls=tool_calls or None,
                    )
                )

        return outputs if outputs else make_json_serializable(result)
    except Exception:
        # Defensive: an unexpected payload shape must not break the traced call.
        return make_json_serializable(result)

tests/test_integrations/test_portkey/portkey_app.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,11 @@
1111

1212
client = Portkey(config = config, metric_collection="test_collection_1")
1313

14-
# Example: Send a chat completion request
15-
response = client.chat.completions.create(
16-
messages=[{"role": "user", "content": "Hello, how are you?"}],
17-
model="gpt-4o"
18-
)
1914

20-
print(response.choices[0].message.content)
15+
def execute_chat_completion():
    """Send one example chat completion through the patched client and print the reply."""
    # Example: Send a chat completion request
    reply = client.chat.completions.create(
        messages=[{"role": "user", "content": "Hello, how are you?"}],
        model="gpt-4o",
    )
    print(reply.choices[0].message.content)

0 commit comments

Comments
 (0)