1010import logging
1111import os
1212from pathlib import Path
13+ from typing import Any
1314
15+ import numpy as np
1416import yaml
17+ from jinja2 import Environment , FileSystemLoader
1518from langchain_core .messages import AIMessage , SystemMessage
1619from langchain_openai import ChatOpenAI
1720from langgraph .prebuilt import ToolNode
21+ from langsmith import traceable
22+ from langchain_core .tracers .context import tracing_v2_enabled
1823
1924from .base import AgentNode
2025from ..db .config .database import get_async_session
3237
3338class FAQAgent (AgentNode ):
3439 """Answers frequently-asked questions using a tool-calling loop."""
35-
36- name : str = _config [ "name" ]
37- description : str = _config [ "description" ]
40+
41+ name = "faq"
42+ description = "Answers frequently asked questions about Ithaka using the knowledge base"
3843
3944 def __init__ (self ):
45+ super ().__init__ ()
46+
4047 model_name = os .getenv ("OPENAI_MODEL" , "gpt-4o-mini" )
4148 model_cfg = _config ["model" ]
4249
50+ # LangSmith configuration
51+ self .tracing_enabled = os .getenv ("LANGCHAIN_TRACING_V2" , "false" ).lower () == "true"
52+ self .project_name = os .getenv ("LANGCHAIN_PROJECT" , "ithaka-project" )
53+
54+ if self .tracing_enabled :
55+ logger .info (f"LangSmith tracing enabled for FAQ agent in project: { self .project_name } " )
56+
4357 self .llm = ChatOpenAI (
4458 model = model_name ,
4559 temperature = model_cfg ["temperature_contextual" ],
@@ -50,7 +64,15 @@ def __init__(self):
5064 self .system_message = SystemMessage (
5165 content = _config ["system_prompts" ]["contextual" ]
5266 )
53-
67+
        # Load Jinja2 templates for future use if needed
69+ template_dir = Path (__file__ ).parent / "prompts"
70+ env = Environment (loader = FileSystemLoader (template_dir ))
71+ self .contextual_template = env .get_template ("faq_contextual.j2" )
72+ self .no_results_template = env .get_template ("faq_no_results.j2" )
73+ self .system_template = env .get_template ("faq_system_contextual.j2" )
74+
75+ @traceable (run_type = "chain" )
5476 async def __call__ (self , state : ConversationState ) -> ConversationState :
5577 """Run the tool-calling loop and return the updated conversation state."""
5678
@@ -112,6 +134,7 @@ async def __call__(self, state: ConversationState) -> ConversationState:
112134 # Internal helpers
113135 # ------------------------------------------------------------------
114136
137+ @traceable (run_type = "chain" )
115138 async def _tool_calling_loop (
116139 self ,
117140 messages : list ,
@@ -141,6 +164,8 @@ async def _tool_calling_loop(
# Shared module-level FAQ agent instance; constructed once at import time.
faq_agent = FAQAgent()


# Entry point for wiring the FAQ agent into the LangGraph graph.
@traceable(run_type="chain")
async def handle_faq_query(state: ConversationState) -> ConversationState:
    """Delegate the incoming conversation state to the shared FAQ agent.

    Thin async wrapper so LangGraph can register a plain function as a
    node while the tool-calling logic lives on the ``FAQAgent`` instance.
    """
    updated_state = await faq_agent(state)
    return updated_state
0 commit comments