Skip to content

Commit 8a5eac2

Browse files
dnplkndll authored and claude committed
fix(seer): Fix anomaly detection response format and clean up imports
- Fix anomaly detection: use prophet prediction timestamp map instead of index-based lookup (predictions have separate timestamps from timeseries)
- Add required external_alert_id and default yhat bounds to match Sentry's AnomalyThresholdDataPoint TypedDict
- Remove duplicate local imports (translate_query, compare_cohort, ContinuationState) that shadow top-level imports
- Remove unused Session import from update endpoint (now at top level)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 2a92ddf commit 8a5eac2

1 file changed

Lines changed: 20 additions & 17 deletions

File tree

src/seer/app.py

Lines changed: 20 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -890,12 +890,12 @@ def coding_agent_state_update_endpoint(
890890
external coding agent reports status changes, completions, or failures.
891891
"""
892892
from seer.automation.autofix.models import ExternalCodingAgentResult
893-
from seer.db import DbRunState, Session
893+
from seer.automation.autofix.state import ContinuationState
894+
from seer.db import DbRunState
894895

895896
try:
896897
# Find the run containing this agent_id by scanning recent runs
897898
with Session() as session:
898-
# Search recent runs for the agent
899899
recent_runs = (
900900
session.query(DbRunState)
901901
.filter(DbRunState.type == "autofix")
@@ -905,8 +905,6 @@ def coding_agent_state_update_endpoint(
905905
)
906906
target_run_id = None
907907
for run_state in recent_runs:
908-
from seer.automation.autofix.state import ContinuationState
909-
910908
try:
911909
cs = ContinuationState(run_state.id)
912910
cur = cs.get()
@@ -925,8 +923,6 @@ def coding_agent_state_update_endpoint(
925923
status="error", message=f"Agent {data.agent_id} not found"
926924
)
927925

928-
from seer.automation.autofix.state import ContinuationState
929-
930926
state = ContinuationState(target_run_id)
931927
with state.update() as cur:
932928
agent = cur.coding_agents.get(data.agent_id)
@@ -1140,14 +1136,13 @@ def assisted_query_translate_agentic_endpoint(
11401136
11411137
Uses the existing assisted_query translate_query infrastructure.
11421138
"""
1143-
from seer.automation.assisted_query.assisted_query import translate_query as do_translate
11441139
from seer.automation.assisted_query.models import TranslateRequest
11451140

11461141
try:
11471142
if not data.natural_language_query:
11481143
return AssistedQueryTranslateAgenticResponse(query=None)
11491144

1150-
result = do_translate(
1145+
result = translate_query(
11511146
TranslateRequest(
11521147
org_id=data.org_id or 0,
11531148
project_ids=data.project_ids,
@@ -1271,22 +1266,32 @@ def anomaly_detection_alert_data_endpoint(
12711266
success=True, message="Alert not found", data=[]
12721267
)
12731268

1274-
# TimeSeries has timestamps/values as numpy arrays + prophet_predictions
1269+
# Build timestamp→prediction index from prophet predictions
1270+
# Prophet predictions have their own timestamps array, separate from timeseries
1271+
predictions = alert.prophet_predictions
1272+
pred_map: dict[float, int] = {}
1273+
if predictions is not None:
1274+
for i in range(len(predictions.timestamps)):
1275+
pred_map[float(predictions.timestamps[i])] = i
1276+
12751277
ts = alert.timeseries
12761278
result_data = []
1279+
ext_alert_id = alert.external_alert_id or 0
12771280

12781281
for i in range(len(ts.timestamps)):
12791282
timestamp = float(ts.timestamps[i])
12801283
if data.start <= timestamp <= data.end:
1281-
point = {
1284+
point: dict = {
1285+
"external_alert_id": ext_alert_id,
12821286
"timestamp": timestamp,
12831287
"value": float(ts.values[i]),
1288+
"yhat_lower": 0.0,
1289+
"yhat_upper": 0.0,
12841290
}
1285-
# Include prediction bounds from prophet if available
1286-
predictions = ts.prophet_predictions or alert.prophet_predictions
1287-
if predictions and i < len(predictions.yhat_upper):
1288-
point["yhat_upper"] = float(predictions.yhat_upper[i])
1289-
point["yhat_lower"] = float(predictions.yhat_lower[i])
1291+
pred_idx = pred_map.get(timestamp)
1292+
if pred_idx is not None:
1293+
point["yhat_upper"] = float(predictions.yhat_upper[pred_idx])
1294+
point["yhat_lower"] = float(predictions.yhat_lower[pred_idx])
12901295
result_data.append(point)
12911296

12921297
return AnomalyDetectionAlertDataResponse(success=True, data=result_data)
@@ -1307,12 +1312,10 @@ def workflows_compare_cohort_endpoint(
13071312
AttributeDistributions,
13081313
CompareCohortsConfig,
13091314
CompareCohortsMeta,
1310-
CompareCohortsRequest,
13111315
StatsAttribute,
13121316
StatsAttributeBucket,
13131317
StatsCohort,
13141318
)
1315-
from seer.workflows.compare.service import compare_cohort
13161319

13171320
try:
13181321
# Build the proper request model from the raw data

0 commit comments

Comments (0)