Skip to content

Commit cf26ad3

Browse files
authored
feat(langfuse): Parallel traces in langfuse + obvious step instructions (#1908)
1. Langfuse SDK version bumped 2. Parallel threads will now log under the same langfuse trace 3. Adds the instruction to the root cause obvious call ![CleanShot 2025-02-11 at 16 09 17](https://github.com/user-attachments/assets/35abda72-2bec-46f0-9294-f7eb31c21ea6) ![CleanShot 2025-02-11 at 16 08 44](https://github.com/user-attachments/assets/ab6e4739-ebf9-4f3c-b8a4-ccbf0bf44946)
1 parent 43e0ad4 commit cf26ad3

File tree

7 files changed

+45
-12
lines changed

7 files changed

+45
-12
lines changed

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
mypy_path = []
1818
plugins = ["pydantic.mypy"]
1919
files = ["src/"]
20-
disable_error_code = ["no-untyped-def"]
20+
disable_error_code = ["no-untyped-def", "unused-ignore"]
2121
# minimal strictness settings
2222
no_implicit_reexport = true
2323
warn_unreachable = true

requirements-constraints.txt

+1-1
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,7 @@ google-cloud-storage==2.*
100100
google-cloud-aiplatform==1.*
101101
google-cloud-secret-manager==2.*
102102
anthropic[vertex]==0.45.*
103-
langfuse @ git+https://github.com/jennmueng/langfuse-python.git@9d9350de1e4e84fa548fe84f82c1b826be17956e
103+
langfuse @ git+https://github.com/jennmueng/langfuse-python.git@d7c0127682ddb20f73c5cf4fbb396cdfa8961fc3
104104
watchdog
105105
stumpy==1.13.0
106106
pytest_alembic==0.11.1

requirements.txt

+7-6
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ convertdate==2.4.0
110110
# via -r requirements-constraints.txt
111111
covdefaults==2.3.0
112112
# via -r requirements-constraints.txt
113-
coverage==7.6.11
113+
coverage==7.6.12
114114
# via
115115
# covdefaults
116116
# pytest-cov
@@ -165,7 +165,7 @@ flask-sqlalchemy==3.1.1
165165
# -r requirements-constraints.txt
166166
# flask-migrate
167167
# types-flask-migrate
168-
flatbuffers==25.1.24
168+
flatbuffers==25.2.10
169169
# via onnxruntime
170170
fonttools==4.43.0
171171
# via
@@ -200,7 +200,7 @@ google-auth==2.38.0
200200
# google-cloud-secret-manager
201201
# google-cloud-storage
202202
# google-genai
203-
google-cloud-aiplatform==1.79.0
203+
google-cloud-aiplatform==1.80.0
204204
# via -r requirements-constraints.txt
205205
google-cloud-bigquery==3.29.0
206206
# via google-cloud-aiplatform
@@ -339,7 +339,7 @@ kiwisolver==1.4.5
339339
# matplotlib
340340
kombu==5.4.2
341341
# via celery
342-
langfuse @ git+https://github.com/jennmueng/langfuse-python.git@9d9350de1e4e84fa548fe84f82c1b826be17956e
342+
langfuse @ git+https://github.com/jennmueng/langfuse-python.git@d7c0127682ddb20f73c5cf4fbb396cdfa8961fc3
343343
# via -r requirements-constraints.txt
344344
lazy-object-proxy==1.10.0
345345
# via openapi-spec-validator
@@ -440,7 +440,7 @@ optimum==1.16.2
440440
# via -r requirements-constraints.txt
441441
overrides==7.7.0
442442
# via chromadb
443-
packaging==23.2
443+
packaging==24.2
444444
# via
445445
# -r requirements-constraints.txt
446446
# build
@@ -487,7 +487,7 @@ pip-tools==7.4.1
487487
# via -r requirements-constraints.txt
488488
pluggy==1.5.0
489489
# via pytest
490-
posthog==3.11.0
490+
posthog==3.12.1
491491
# via chromadb
492492
prompt-toolkit==3.0.50
493493
# via click-repl
@@ -644,6 +644,7 @@ requests==2.32.2
644644
# huggingface-hub
645645
# jsonschema-path
646646
# jsonschema-spec
647+
# langfuse
647648
# posthog
648649
# pygithub
649650
# transformers

src/seer/automation/autofix/autofix_agent.py

+10
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,9 @@
33
from concurrent.futures import Executor, Future, ThreadPoolExecutor
44
from typing import Callable, Optional
55

6+
from langfuse.decorators import langfuse_context, observe
7+
from sentry_sdk.ai.monitoring import ai_track
8+
69
from seer.automation.agent.agent import AgentConfig, LlmAgent, RunConfig
710
from seer.automation.agent.models import (
811
LlmGenerateTextResponse,
@@ -167,6 +170,9 @@ def run_iteration(self, run_config: RunConfig):
167170
and not cur.request.options.disable_interactivity
168171
and completion.message.tool_calls # only if the run is in progress
169172
):
173+
trace_id = langfuse_context.get_current_trace_id()
174+
observation_id = langfuse_context.get_current_observation_id()
175+
170176
text_before_tag = completion.message.content.split("<")[0]
171177
text = text_before_tag
172178
if text:
@@ -178,6 +184,8 @@ def run_iteration(self, run_config: RunConfig):
178184
cur_step_idx,
179185
self.context.state,
180186
len(self.memory) - 1,
187+
langfuse_parent_trace_id=trace_id, # type: ignore
188+
langfuse_parent_observation_id=observation_id, # type: ignore
181189
)
182190
)
183191

@@ -226,6 +234,8 @@ def use_user_messages(self):
226234
self.queued_user_messages = []
227235
self.context.event_manager.add_log("Thanks for the input. Thinking through it now...")
228236

237+
@observe(name="Share Insights in parallel")
238+
@ai_track(description="Share Insights in parallel")
229239
def share_insights(
230240
self,
231241
text: str,

src/seer/automation/autofix/components/coding/component.py

+13-2
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import textwrap
55

66
import sentry_sdk
7-
from langfuse.decorators import observe
7+
from langfuse.decorators import langfuse_context, observe
88
from openai import BadRequestError as OpenAiBadRequestError
99
from openai import LengthFinishReasonError as OpenAiLengthFinishReasonError
1010
from pydantic import BaseModel
@@ -290,6 +290,8 @@ def invoke(self, request: CodingRequest) -> CodingOutput | None:
290290
# Resolve LlmClient once in the main thread
291291
resolved_llm_client = self._get_llm_client()
292292

293+
@observe(name="Process Change Task")
294+
@ai_track(description="Process Change Task")
293295
def process_task(task, llm_client):
294296
repo_client = self.context.get_repo_client(task.repo_name)
295297
if task.type == "file_change":
@@ -349,8 +351,17 @@ def process_task(task, llm_client):
349351

350352
# apply change tasks in parallel
351353
with concurrent.futures.ThreadPoolExecutor() as executor:
354+
trace_id = langfuse_context.get_current_trace_id()
355+
observation_id = langfuse_context.get_current_observation_id()
356+
352357
futures = [
353-
executor.submit(process_task, task, resolved_llm_client)
358+
executor.submit(
359+
process_task,
360+
task,
361+
resolved_llm_client,
362+
langfuse_parent_trace_id=trace_id, # type: ignore
363+
langfuse_parent_observation_id=observation_id, # type: ignore
364+
)
354365
for task in code_changes_output.tasks
355366
]
356367
for future in concurrent.futures.as_completed(futures):

src/seer/automation/autofix/components/is_root_cause_obvious.py

+9-1
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,15 @@
55

66
from seer.automation.agent.client import GeminiProvider, LlmClient
77
from seer.automation.autofix.autofix_context import AutofixContext
8+
from seer.automation.autofix.prompts import format_instruction
89
from seer.automation.component import BaseComponent, BaseComponentOutput, BaseComponentRequest
910
from seer.automation.models import EventDetails
1011
from seer.dependency_injection import inject, injected
1112

1213

1314
class IsRootCauseObviousRequest(BaseComponentRequest):
1415
event_details: EventDetails
16+
instruction: str | None = None
1517

1618

1719
class IsRootCauseObviousOutput(BaseComponentOutput):
@@ -22,16 +24,21 @@ class IsRootCauseObviousPrompts:
2224
@staticmethod
2325
def format_default_msg(
2426
event_details: EventDetails,
27+
instruction: str | None = None,
2528
):
2629
return (
2730
textwrap.dedent(
2831
"""\
2932
You are an exceptional principal engineer that is amazing at finding the root cause of any issue. We have an issue in our codebase described below. Is the true, deepest root cause of the issue clear from the details below? Or does it require searching for more information around the codebase?
3033
31-
{event_details}"""
34+
<issue_details>
35+
{event_details}
36+
</issue_details>
37+
{instruction_str}"""
3238
)
3339
.format(
3440
event_details=event_details.format_event(),
41+
instruction_str=format_instruction(instruction),
3542
)
3643
.strip()
3744
)
@@ -51,6 +58,7 @@ def invoke(
5158
output = llm_client.generate_structured(
5259
prompt=IsRootCauseObviousPrompts.format_default_msg(
5360
event_details=request.event_details,
61+
instruction=request.instruction,
5462
),
5563
model=GeminiProvider.model("gemini-2.0-flash-001"),
5664
response_format=IsRootCauseObviousOutput,

src/seer/automation/autofix/components/root_cause/component.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,10 @@ def invoke(
3535
) -> RootCauseAnalysisOutput:
3636
is_obvious = (
3737
IsRootCauseObviousComponent(self.context).invoke(
38-
IsRootCauseObviousRequest(event_details=request.event_details)
38+
IsRootCauseObviousRequest(
39+
event_details=request.event_details,
40+
instruction=request.instruction,
41+
)
3942
)
4043
if not request.initial_memory
4144
else None

0 commit comments

Comments
 (0)