
Commit a6c1240

refactor(models): centralize Langfuse trace code in BaseModelBackend (#3642)
Co-authored-by: Wendong-Fan <[email protected]>
1 parent e56fa93 · commit a6c1240

15 files changed: +49 additions, -347 deletions
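
The change is mechanical and identical across backends: the duplicated Langfuse block at the top of each backend's _run()/_arun() is deleted and replaced by a single call to a helper that now lives once on the base class. Below is a minimal, self-contained sketch of that pattern, condensed from the diffs that follow; the ExampleBackend class and the two stub helpers are illustrative stand-ins, not the camel.utils implementations (which talk to the real Langfuse client).

# Minimal sketch of the refactor pattern. Assumptions: the two stubs below stand in
# for camel.utils.get_current_agent_session_id and camel.utils.update_langfuse_trace,
# and ExampleBackend is a hypothetical subclass, not an actual CAMEL model backend.
from typing import Any, Dict, List, Optional


def get_current_agent_session_id() -> Optional[str]:
    # Stub: the real helper returns the id of the currently active agent session.
    return "session-123"


def update_langfuse_trace(**kwargs: Any) -> None:
    # Stub: the real helper forwards session_id, metadata, and tags to Langfuse.
    print("trace updated:", kwargs)


class BaseModelBackend:
    def __init__(self, model_type: str) -> None:
        self.model_type = model_type

    def _log_and_trace(self) -> None:
        # Defined once here; previously every backend duplicated this block inline.
        agent_session_id = get_current_agent_session_id()
        update_langfuse_trace(
            session_id=agent_session_id,
            metadata={
                "source": "camel",
                "agent_id": agent_session_id,
                "agent_type": "camel_chat_agent",
                "model_type": str(self.model_type),
            },
            tags=["CAMEL-AI", str(self.model_type)],
        )


class ExampleBackend(BaseModelBackend):
    def _run(self, messages: List[Dict[str, str]]) -> None:
        self._log_and_trace()  # one line replaces ~12 duplicated lines per backend
        # ... backend-specific request preparation and the API call follow here ...


if __name__ == "__main__":
    ExampleBackend("example-model")._run([{"role": "user", "content": "hi"}])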

camel/models/azure_openai_model.py

Lines changed: 2 additions & 26 deletions

@@ -34,9 +34,7 @@
 from camel.utils import (
     BaseTokenCounter,
     OpenAITokenCounter,
-    get_current_agent_session_id,
     is_langfuse_available,
-    update_langfuse_trace,
 )
 
 AzureADTokenProvider = Callable[[], str]
@@ -275,18 +273,7 @@ def _run(
                 `ChatCompletionStreamManager[BaseModel]` for
                 structured output streaming.
         """
-
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "agent_id": agent_session_id,
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         response_format = response_format or self.model_config_dict.get(
             "response_format", None
@@ -333,18 +320,7 @@ async def _arun(
                 `AsyncChatCompletionStreamManager[BaseModel]` for
                 structured output streaming.
         """
-
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "agent_id": agent_session_id,
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         response_format = response_format or self.model_config_dict.get(
             "response_format", None

camel/models/base_model.py

Lines changed: 25 additions & 1 deletion

@@ -33,7 +33,12 @@
     ParsedChatCompletion,
     UnifiedModelType,
 )
-from camel.utils import BaseTokenCounter, Constants
+from camel.utils import (
+    BaseTokenCounter,
+    Constants,
+    get_current_agent_session_id,
+    update_langfuse_trace,
+)
 
 if os.environ.get("TRACEROOT_ENABLED", "False").lower() == "true":
     try:
@@ -440,6 +445,25 @@ def _log_response(self, log_path: str, response: Any) -> None:
             json.dump(log_data, f, indent=4)
             f.truncate()
 
+    def _log_and_trace(self) -> None:
+        r"""Update Langfuse trace with session metadata.
+
+        This method updates the current Langfuse trace with agent session
+        information and model metadata. Called at the start of _run() and
+        _arun() methods before API execution.
+        """
+        agent_session_id = get_current_agent_session_id()
+        update_langfuse_trace(
+            session_id=agent_session_id,
+            metadata={
+                "source": "camel",
+                "agent_id": agent_session_id,
+                "agent_type": "camel_chat_agent",
+                "model_type": str(self.model_type),
+            },
+            tags=["CAMEL-AI", str(self.model_type)],
+        )
+
     @abstractmethod
     def _run(
         self,

camel/models/cohere_model.py

Lines changed: 2 additions & 28 deletions

@@ -35,9 +35,7 @@
     BaseTokenCounter,
     OpenAITokenCounter,
     api_keys_required,
-    get_current_agent_session_id,
     update_current_observation,
-    update_langfuse_trace,
 )
 
 if os.environ.get("LANGFUSE_ENABLED", "False").lower() == "true":
@@ -315,19 +313,7 @@ def _run(
             model=str(self.model_type),
             model_parameters=self.model_config_dict,
         )
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "source": "camel",
-                    "agent_id": agent_session_id,
-                    "agent_type": "camel_chat_agent",
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         from cohere.core.api_error import ApiError
 
@@ -396,19 +382,7 @@ async def _arun(
             model=str(self.model_type),
             model_parameters=self.model_config_dict,
         )
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "source": "camel",
-                    "agent_id": agent_session_id,
-                    "agent_type": "camel_chat_agent",
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         from cohere.core.api_error import ApiError
 
camel/models/deepseek_model.py

Lines changed: 2 additions & 26 deletions

@@ -31,8 +31,6 @@
 from camel.utils import (
     BaseTokenCounter,
     api_keys_required,
-    get_current_agent_session_id,
-    update_langfuse_trace,
 )
 
 if os.environ.get("LANGFUSE_ENABLED", "False").lower() == "true":
@@ -183,18 +181,7 @@ def _run(
                 `ChatCompletion` in the non-stream mode, or
                 `Stream[ChatCompletionChunk]` in the stream mode.
         """
-
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "agent_id": agent_session_id,
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         request_config = self._prepare_request(
             messages, response_format, tools
@@ -226,18 +213,7 @@ async def _arun(
                 `ChatCompletion` in the non-stream mode, or
                 `AsyncStream[ChatCompletionChunk]` in the stream mode.
         """
-
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "agent_id": agent_session_id,
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         request_config = self._prepare_request(
             messages, response_format, tools
camel/models/function_gemma_model.py

Lines changed: 2 additions & 26 deletions

@@ -29,9 +29,7 @@
 from camel.utils import (
     BaseTokenCounter,
     OpenAITokenCounter,
-    get_current_agent_session_id,
     update_current_observation,
-    update_langfuse_trace,
 )
 
 # conditional observe import based on environment variables
@@ -822,18 +820,7 @@ def _run(
             model_parameters=self.model_config_dict,
         )
 
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "source": "camel",
-                    "agent_id": agent_session_id,
-                    "agent_type": "camel_chat_agent",
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         prompt = self._format_messages(messages, tools)
         logger.debug(f"FunctionGemma prompt:\n{prompt}")
@@ -876,18 +863,7 @@ async def _arun(
             model_parameters=self.model_config_dict,
         )
 
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "source": "camel",
-                    "agent_id": agent_session_id,
-                    "agent_type": "camel_chat_agent",
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         prompt = self._format_messages(messages, tools)
         logger.debug(f"FunctionGemma prompt:\n{prompt}")

camel/models/gemini_model.py

Lines changed: 2 additions & 26 deletions

@@ -37,8 +37,6 @@
 from camel.utils import (
     BaseTokenCounter,
     api_keys_required,
-    get_current_agent_session_id,
-    update_langfuse_trace,
 )
 
 if os.environ.get("LANGFUSE_ENABLED", "False").lower() == "true":
@@ -475,18 +473,7 @@ def _run(
                 `ChatCompletion` in the non-stream mode, or
                 `Stream[ChatCompletionChunk]` in the stream mode.
         """
-
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "agent_id": agent_session_id,
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         response_format = response_format or self.model_config_dict.get(
             "response_format", None
@@ -528,18 +515,7 @@ async def _arun(
                 `ChatCompletion` in the non-stream mode, or
                 `AsyncStream[ChatCompletionChunk]` in the stream mode.
         """
-
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "agent_id": agent_session_id,
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         response_format = response_format or self.model_config_dict.get(
             "response_format", None

camel/models/litellm_model.py

Lines changed: 1 addition & 15 deletions

@@ -25,9 +25,7 @@
     BaseTokenCounter,
     LiteLLMTokenCounter,
     dependencies_required,
-    get_current_agent_session_id,
     update_current_observation,
-    update_langfuse_trace,
 )
 
 if os.environ.get("LANGFUSE_ENABLED", "False").lower() == "true":
@@ -188,19 +186,7 @@ def _run(
             model=str(self.model_type),
             model_parameters=self.model_config_dict,
         )
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "source": "camel",
-                    "agent_id": agent_session_id,
-                    "agent_type": "camel_chat_agent",
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         response = self.client(
             timeout=self._timeout,

camel/models/mistral_model.py

Lines changed: 2 additions & 28 deletions

@@ -35,9 +35,7 @@
     OpenAITokenCounter,
     api_keys_required,
     dependencies_required,
-    get_current_agent_session_id,
     update_current_observation,
-    update_langfuse_trace,
 )
 
 logger = get_logger(__name__)
@@ -277,19 +275,7 @@ async def _arun(
             model=str(self.model_type),
             model_parameters=self.model_config_dict,
         )
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "source": "camel",
-                    "agent_id": agent_session_id,
-                    "agent_type": "camel_chat_agent",
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         request_config = self._prepare_request(
             messages, response_format, tools
@@ -352,19 +338,7 @@ def _run(
             model=str(self.model_type),
             model_parameters=self.model_config_dict,
         )
-        # Update Langfuse trace with current agent session and metadata
-        agent_session_id = get_current_agent_session_id()
-        if agent_session_id:
-            update_langfuse_trace(
-                session_id=agent_session_id,
-                metadata={
-                    "source": "camel",
-                    "agent_id": agent_session_id,
-                    "agent_type": "camel_chat_agent",
-                    "model_type": str(self.model_type),
-                },
-                tags=["CAMEL-AI", str(self.model_type)],
-            )
+        self._log_and_trace()
 
         request_config = self._prepare_request(
             messages, response_format, tools
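
A recurring context line in these diffs is the guard `if os.environ.get("LANGFUSE_ENABLED", "False").lower() == "true":` followed by a try block, which gates the Langfuse integration behind an environment variable and an optional dependency. Below is a minimal sketch of that conditional-import pattern; the `langfuse.decorators` import path and the no-op fallback decorator are illustrative assumptions, not necessarily CAMEL's exact implementation.

# Sketch of the env-gated optional import pattern seen in the diff context lines.
# Assumption: the `observe` decorator follows the Langfuse v2 SDK import path; the
# fallback below simply leaves functions undecorated when tracing is unavailable.
import os
from functools import wraps
from typing import Any, Callable, TypeVar

F = TypeVar("F", bound=Callable[..., Any])


def _noop_observe(*_args: Any, **_kwargs: Any) -> Callable[[F], F]:
    # Fallback decorator factory: returns the wrapped function unchanged.
    def decorator(func: F) -> F:
        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            return func(*args, **kwargs)
        return wrapper  # type: ignore[return-value]
    return decorator


if os.environ.get("LANGFUSE_ENABLED", "False").lower() == "true":
    try:
        from langfuse.decorators import observe  # real tracing only if installed
    except ImportError:
        observe = _noop_observe
else:
    observe = _noop_observe


@observe()  # traced only when LANGFUSE_ENABLED=true and langfuse is importable
def call_model(prompt: str) -> str:
    return f"echo: {prompt}"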
