Skip to content

Commit c678b22

Browse files
authored
Implement LangSmith tracing for enhanced monitoring and debugging (#13)
1 parent e27626a commit c678b22

File tree

7 files changed

+115
-12
lines changed

7 files changed

+115
-12
lines changed

.env.example

Lines changed: 8 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -26,4 +26,12 @@ BACKOFFICE_DEFAULT_ID_ESTADO=1
2626
# Wizard guardrails
2727
WIZARD_DETECT_JAILBREAK_ENABLED=true
2828
WIZARD_DETECT_JAILBREAK_THRESHOLD=0.9
29+
2930
# GUARDRAILS_HUB_TOKEN=coloca_tu_token_del_guardrails_hub
31+
OPENAI_API_KEY=YOUR_API_KEY_HERE
32+
33+
# LangSmith configuration
34+
LANGCHAIN_TRACING_V2=true
35+
LANGCHAIN_ENDPOINT=https://api.smith.langchain.com
36+
LANGCHAIN_API_KEY=YOUR_LANGCHAIN_API_KEY_HERE
37+
LANGCHAIN_PROJECT=YOUR_LANGCHAIN_PROJECT_NAME_HERE

Dockerfile

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -24,8 +24,8 @@ RUN apt-get update \
2424

2525
# Install Python dependencies with uv
2626
COPY requirements.txt .
27-
RUN pip install --no-cache-dir "uv==${UV_VERSION}" \
28-
&& uv pip install --system -r requirements.txt
27+
RUN pip install --no-cache-dir --upgrade pip \
28+
&& pip install --no-cache-dir -r requirements.txt
2929

3030
# Copy project
3131
COPY . .

app/agents/faq.py

Lines changed: 29 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -10,11 +10,16 @@
1010
import logging
1111
import os
1212
from pathlib import Path
13+
from typing import Any
1314

15+
import numpy as np
1416
import yaml
17+
from jinja2 import Environment, FileSystemLoader
1518
from langchain_core.messages import AIMessage, SystemMessage
1619
from langchain_openai import ChatOpenAI
1720
from langgraph.prebuilt import ToolNode
21+
from langsmith import traceable
22+
from langchain_core.tracers.context import tracing_v2_enabled
1823

1924
from .base import AgentNode
2025
from ..db.config.database import get_async_session
@@ -32,14 +37,23 @@
3237

3338
class FAQAgent(AgentNode):
3439
"""Answers frequently-asked questions using a tool-calling loop."""
35-
36-
name: str = _config["name"]
37-
description: str = _config["description"]
40+
41+
name = "faq"
42+
description = "Answers frequently asked questions about Ithaka using the knowledge base"
3843

3944
def __init__(self):
45+
super().__init__()
46+
4047
model_name = os.getenv("OPENAI_MODEL", "gpt-4o-mini")
4148
model_cfg = _config["model"]
4249

50+
# LangSmith configuration
51+
self.tracing_enabled = os.getenv("LANGCHAIN_TRACING_V2", "false").lower() == "true"
52+
self.project_name = os.getenv("LANGCHAIN_PROJECT", "ithaka-project")
53+
54+
if self.tracing_enabled:
55+
logger.info(f"LangSmith tracing enabled for FAQ agent in project: {self.project_name}")
56+
4357
self.llm = ChatOpenAI(
4458
model=model_name,
4559
temperature=model_cfg["temperature_contextual"],
@@ -50,7 +64,15 @@ def __init__(self):
5064
self.system_message = SystemMessage(
5165
content=_config["system_prompts"]["contextual"]
5266
)
53-
67+
68+
# Cargar templates Jinja2 para uso futuro si es necesario
69+
template_dir = Path(__file__).parent / "prompts"
70+
env = Environment(loader=FileSystemLoader(template_dir))
71+
self.contextual_template = env.get_template("faq_contextual.j2")
72+
self.no_results_template = env.get_template("faq_no_results.j2")
73+
self.system_template = env.get_template("faq_system_contextual.j2")
74+
75+
@traceable(run_type="chain")
5476
async def __call__(self, state: ConversationState) -> ConversationState:
5577
"""Run the tool-calling loop and return the updated conversation state."""
5678

@@ -112,6 +134,7 @@ async def __call__(self, state: ConversationState) -> ConversationState:
112134
# Internal helpers
113135
# ------------------------------------------------------------------
114136

137+
@traceable(run_type="chain")
115138
async def _tool_calling_loop(
116139
self,
117140
messages: list,
@@ -141,6 +164,8 @@ async def _tool_calling_loop(
141164
faq_agent = FAQAgent()
142165

143166

167+
# Función para usar en el grafo LangGraph
168+
@traceable(run_type="chain")
144169
async def handle_faq_query(state: ConversationState) -> ConversationState:
145170
"""Wrapper function for LangGraph."""
146171
return await faq_agent(state)

app/agents/supervisor.py

Lines changed: 13 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -12,6 +12,8 @@
1212
import yaml
1313
from jinja2 import Environment, FileSystemLoader
1414
from openai import AsyncOpenAI
15+
from langsmith import traceable
16+
from langchain_core.tracers.context import tracing_v2_enabled
1517

1618
from ..graph.agent_descriptions import (
1719
DEFAULT_AGENT,
@@ -38,11 +40,19 @@ def __init__(self):
3840

3941
self.client = AsyncOpenAI(api_key=api_key)
4042
self.model = os.getenv("OPENAI_MODEL", "gpt-4o-mini")
43+
44+
# LangSmith configuration
45+
self.tracing_enabled = os.getenv("LANGCHAIN_TRACING_V2", "false").lower() == "true"
46+
self.project_name = os.getenv("LANGCHAIN_PROJECT", "ithaka-project")
47+
48+
if self.tracing_enabled:
49+
logger.info(f"LangSmith tracing enabled for Supervisor agent in project: {self.project_name}")
4150

4251
# ------------------------------------------------------------------
4352
# Public interface used by the LangGraph workflow
4453
# ------------------------------------------------------------------
4554

55+
@traceable(run_type="chain")
4656
async def route_message(self, state: ConversationState) -> ConversationState:
4757
"""Analiza el mensaje del usuario y decide el routing."""
4858

@@ -95,6 +105,7 @@ def decide_next_agent(self, state: ConversationState) -> str:
95105
# Internal helpers
96106
# ------------------------------------------------------------------
97107

108+
@traceable(run_type="llm")
98109
async def _route_by_descriptions(self, message: str, messages: list) -> str:
99110
"""Usa el LLM para elegir el agente cuya descripción mejor
100111
coincide con la intención del usuario."""
@@ -181,11 +192,13 @@ def _route_to(state: ConversationState, agent: str) -> ConversationState:
181192
supervisor_agent = SupervisorAgent()
182193

183194

195+
@traceable(run_type="chain")
184196
async def route_message(state: ConversationState) -> ConversationState:
185197
"""Función wrapper para LangGraph."""
186198
return await supervisor_agent.route_message(state)
187199

188200

201+
@traceable(run_type="chain")
189202
def decide_next_agent_wrapper(state: ConversationState) -> str:
190203
"""Función wrapper para routing condicional en LangGraph."""
191204
return supervisor_agent.decide_next_agent(state)

app/graph/workflow.py

Lines changed: 31 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -3,13 +3,17 @@
33
"""
44

55
import logging
6+
import os
67
from typing import Any
78
import uuid
89

910
from langgraph.checkpoint.memory import InMemorySaver
1011
from langgraph.graph import StateGraph, END
1112
from langgraph.graph.state import CompiledStateGraph
1213
from langchain_core.messages import HumanMessage
14+
from langsmith import traceable
15+
from langchain_core.tracers.context import tracing_v2_enabled
16+
from langchain_core.tracers.langchain import LangChainTracer
1317

1418
from .agent_descriptions import ROUTABLE_AGENTS
1519
from .state import ConversationState
@@ -25,6 +29,13 @@ class IthakaWorkflow:
2529
"""Workflow principal que maneja toda la lógica de conversación"""
2630

2731
def __init__(self):
32+
# Initialize LangSmith tracing
33+
self.tracing_enabled = os.getenv("LANGCHAIN_TRACING_V2", "false").lower() == "true"
34+
self.project_name = os.getenv("LANGCHAIN_PROJECT", "ithaka-project")
35+
36+
if self.tracing_enabled:
37+
logger.info(f"LangSmith tracing enabled for project: {self.project_name}")
38+
2839
self.graph = self._build_graph()
2940

3041
def _build_graph(self) -> CompiledStateGraph:
@@ -59,6 +70,7 @@ def _build_graph(self) -> CompiledStateGraph:
5970
workflow.add_edge("faq", END)
6071

6172
# Compilar el grafo
73+
# Usar configuración básica para el checkpointer
6274
return workflow.compile(checkpointer=InMemorySaver())
6375

6476
def _create_initial_state(
@@ -105,6 +117,7 @@ def _create_initial_state(
105117

106118
return base_state
107119

120+
@traceable(run_type="chain")
108121
async def process_message(
109122
self,
110123
user_message: str,
@@ -142,8 +155,24 @@ async def process_message(
142155
)
143156

144157
logger.info(f"Processing message: {user_message[:50]}...")
145-
config = {"configurable": {"thread_id": thread_id}}
146-
result = await self.graph.ainvoke(initial_state, config=config)
158+
159+
# Use tracing context if enabled
160+
# Generar un ID único para esta conversación
161+
conversation_id = str(uuid.uuid4()) if thread_id == "default" else thread_id
162+
163+
# Configurar parámetros para el checkpointer
164+
config_params = {
165+
"configurable": {
166+
"thread_id": conversation_id,
167+
"checkpoint_ns": "ithaka"
168+
}
169+
}
170+
171+
if self.tracing_enabled:
172+
with tracing_v2_enabled(project_name=self.project_name):
173+
result = await self.graph.ainvoke(initial_state, config=config_params)
174+
else:
175+
result = await self.graph.ainvoke(initial_state, config=config_params)
147176

148177
logger.debug(f"[WORKFLOW] Graph result keys: {list(result.keys())}")
149178
logger.debug(f"[WORKFLOW] Result current_agent: {result.get('current_agent')}")

app/services/embedding_service.py

Lines changed: 28 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -6,6 +6,8 @@
66
from openai import AsyncOpenAI
77
from sqlalchemy import select
88
from sqlalchemy.ext.asyncio import AsyncSession
9+
from langsmith import traceable
10+
from langchain_core.tracers.context import tracing_v2_enabled
911

1012
from ..db.models import FAQEmbedding
1113

@@ -16,11 +18,21 @@ class EmbeddingService:
1618
"""Servicio para generar y gestionar embeddings usando OpenAI"""
1719

1820
def __init__(self):
21+
# Initialize OpenAI client
1922
self.client = AsyncOpenAI(api_key=os.getenv("OPENAI_API_KEY"))
2023
self.model = os.getenv("OPENAI_EMBEDDING_MODEL",
2124
"text-embedding-3-small")
2225
self.dimension = int(os.getenv("EMBEDDING_DIMENSION", "1536"))
23-
26+
27+
# Enable LangSmith tracing
28+
self.tracing_enabled = os.getenv("LANGCHAIN_TRACING_V2", "false").lower() == "true"
29+
self.project_name = os.getenv("LANGCHAIN_PROJECT", "ithaka-project")
30+
31+
logger.info(f"Embedding Service initialized with model: {self.model}")
32+
if self.tracing_enabled:
33+
logger.info(f"LangSmith tracing enabled for project: {self.project_name}")
34+
35+
@traceable(run_type="embedding")
2436
async def generate_embedding(self, text: str) -> List[float]:
2537
"""Genera embedding para un texto dado"""
2638
try:
@@ -33,6 +45,20 @@ async def generate_embedding(self, text: str) -> List[float]:
3345
logger.error(f"Error generating embedding: {e}")
3446
raise
3547

48+
@traceable(run_type="embedding")
49+
async def generate_batch_embeddings(self, texts: List[str]) -> List[List[float]]:
50+
"""Genera embeddings para múltiples textos en batch"""
51+
try:
52+
response = await self.client.embeddings.create(
53+
model=self.model,
54+
input=texts
55+
)
56+
return [data.embedding for data in response.data]
57+
except Exception as e:
58+
logger.error(f"Error generating batch embeddings: {e}")
59+
raise
60+
61+
@traceable(run_type="retriever")
3662
async def search_similar_faqs(
3763
self,
3864
query: str,
@@ -74,6 +100,7 @@ async def search_similar_faqs(
74100
logger.error(f"Error searching similar FAQs: {e}")
75101
return []
76102

103+
@traceable(run_type="tool")
77104
async def add_faq_embedding(
78105
self,
79106
question: str,

requirements.txt

Lines changed: 4 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -11,13 +11,14 @@ asyncpg==0.30.0
1111
pgvector==0.4.1
1212

1313
# AI and LLM
14-
openai==1.99.1
15-
langchain-openai
14+
openai==1.104.2
15+
langchain-openai==0.3.33
1616
langgraph==0.2.76
1717
langgraph-checkpoint==2.1.1
1818
langgraph-sdk==0.1.74
1919
guardrails-ai>=0.5.10
2020
rich<14
21+
langsmith==0.3.45
2122

2223
# Data validation and serialization
2324
pydantic==2.11.7
@@ -36,7 +37,7 @@ websockets==15.0.1
3637
twilio==8.13.0
3738

3839
# Numerical computing (used in embeddings)
39-
numpy==2.3.2
40+
numpy<2.0.0,>=1.26.2
4041

4142
# JWT authentication
4243
PyJWT>=2.8.0

0 commit comments

Comments (0)