Skip to content

Commit 647d965

Browse files
RafaelPo and github-actions[bot]
authored and committed
feat: rehydrate widget activity tab from stored timeline (#5069)
## Summary Builds on PR #5057 (DB tables + timeline endpoint) to wire the widget to rehydrate its Activity tab on re-mount. - **MCP routes**: First call (no cursor) now returns the stored aggregate timeline alongside live data, for both running and terminal tasks - **Widget**: On mount, does a live status check (Claude.ai caches the original tool result so `structuredContent.status` is stale), backfills `aggHistory` from the stored timeline, then either starts polling (running) or fetches results (completed) - **Bug fix**: `pollUrl` was only set in the running-task branch, so `backfillSummaries` silently returned for completed tasks ### Re-mount scenarios | Scenario | Before | After | |---|---|---| | Task completed, return to conversation | Results only, empty Activity tab | Full activity timeline + results | | Leave mid-execution, return | Polling resumes with no history | Stored timeline replayed, then polling continues | ## Test plan - [x] Tested in Claude.ai via CF tunnel against local engine + prod Supabase - [x] Task completed, leave, return → Activity tab shows stored aggregates - [x] Leave mid-execution, return → Activity tab shows prior history + new updates - [x] No duplicate entries in Activity tab - [x] MCP routes lint clean 🤖 Generated with [Claude Code](https://claude.com/claude-code) --------- Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com> Sourced from commit ef235840b023775a5363f71c61f7790681556586
1 parent 8cf7006 commit 647d965

File tree

2 files changed

+158
-30
lines changed

2 files changed

+158
-30
lines changed

futuresearch-mcp/src/futuresearch_mcp/routes.py

Lines changed: 85 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -111,6 +111,35 @@ async def _fetch_summaries_rest(
111111
return None, cursor
112112

113113

114+
async def _fetch_timeline_rest(
    client: AuthenticatedClient, task_id: str
) -> list[dict] | None:
    """Best-effort fetch of the stored aggregate timeline for *task_id*.

    Queries the Engine API timeline endpoint and, on success, returns the
    list of timeline entries (each carrying an aggregate plus its
    micro-summaries, deduplicated in place). Any failure — non-200 response,
    empty/missing timeline, or transport error — yields ``None`` so callers
    can degrade gracefully.
    """
    try:
        http = client.get_async_httpx_client()
        response = await http.request(
            method="get",
            url=f"/tasks/{task_id}/summaries/timeline",
        )
        if response.status_code != 200:
            return None
        entries = response.json().get("timeline")
        if not entries:
            return None
        # Remove duplicate micro-summaries within each timeline entry.
        for item in entries:
            raw_micros = item.get("micro_summaries")
            if raw_micros:
                item["micro_summaries"] = dedupe_summaries(raw_micros)
        return entries
    except Exception:
        # Deliberately broad: timeline backfill is optional decoration,
        # so any error is logged at debug level and treated as "no data".
        logger.debug("Failed to fetch timeline for task %s via REST", task_id)
        return None
141+
142+
114143
async def _fetch_aggregate_rest(
115144
client: AuthenticatedClient, task_id: str, cursor: str | None
116145
) -> tuple[str | None, list[dict] | None, str | None]:
@@ -147,6 +176,48 @@ async def _fetch_aggregate_rest(
147176
return None, summaries, new_cursor
148177

149178

179+
async def _backfill_timeline(
    client: AuthenticatedClient, task_id: str, payload: dict
) -> None:
    """Populate payload with stored timeline + tail aggregate for re-mount.

    Fetches the stored aggregate timeline (no LLM call), then fills any gap
    with one aggregate call for micro-summaries that arrived after the last
    stored aggregate. Mutates *payload* in place; keys are only written when
    the corresponding value is present.
    """
    timeline = await _fetch_timeline_rest(client, task_id)
    if timeline:
        payload["timeline"] = timeline
        # Latest micro-summary timestamp across the stored timeline — used
        # as the cursor so the gap-filling aggregate call only covers
        # micro-summaries newer than the last stored aggregate.
        last_cursor = max(
            (
                ms.get("updated_at", "")
                for entry in timeline
                for ms in entry.get("micro_summaries", [])
            ),
            default=None,
        )
        if last_cursor:
            aggregate, summaries, new_cursor = await _fetch_aggregate_rest(
                client, task_id, last_cursor
            )
            if aggregate:
                payload["aggregate_summary"] = aggregate
            # Guard each field independently (consistent with the fallback
            # branch below). Previously summaries/cursor were written only
            # when an aggregate was present, which dropped the cursor — and
            # could store a None "summaries" value — whenever the tail call
            # returned summaries or a cursor without an aggregate.
            if summaries:
                payload["summaries"] = summaries
            if new_cursor:
                payload["cursor"] = new_cursor
    else:
        # No stored timeline — fall back to generating one aggregate
        aggregate, summaries, new_cursor = await _fetch_aggregate_rest(
            client, task_id, None
        )
        if aggregate:
            payload["aggregate_summary"] = aggregate
        if summaries:
            payload["summaries"] = summaries
        if new_cursor:
            payload["cursor"] = new_cursor
219+
220+
150221
async def api_progress(request: Request) -> Response:
151222
"""REST endpoint for the session widget to poll task progress."""
152223
cors = _cors_headers()
@@ -193,13 +264,21 @@ async def api_progress(request: Request) -> Response:
193264

194265
payload = ts.model_dump(mode="json", exclude=_UI_EXCLUDE)
195266

196-
# Fetch aggregate + micro-summaries for non-terminal tasks,
197-
# or for terminal tasks without cursor (re-mount backfill).
198-
backfill = ts.is_terminal and not request.query_params.get("cursor")
199-
if not ts.is_terminal or backfill:
200-
cursor = request.query_params.get("cursor")
267+
cursor = request.query_params.get("cursor")
268+
# First call (no cursor) = widget just mounted — include stored
269+
# timeline so the activity tab can replay prior history, regardless
270+
# of whether the task is still running or already terminal.
271+
if not cursor:
272+
await _backfill_timeline(client, task_id, payload)
273+
274+
# Fetch live aggregate + micro-summaries for:
275+
# - incremental polling (has cursor)
276+
# - non-terminal first-call (alongside timeline)
277+
# Skip for terminal first-call — _backfill_timeline already
278+
# fetched the tail aggregate covering any gap.
279+
if not ts.is_terminal:
201280
aggregate, summaries, new_cursor = await _fetch_aggregate_rest(
202-
client, task_id, cursor
281+
client, task_id, cursor or payload.get("cursor")
203282
)
204283
if aggregate:
205284
payload["aggregate_summary"] = aggregate

futuresearch-mcp/src/futuresearch_mcp/templates.py

Lines changed: 73 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1002,30 +1002,65 @@
10021002
}
10031003
10041004
/* ── fetch last aggregate summary (for re-mount when task already done) ── */
1005-
async function backfillSummaries(){
1006-
if(!pollUrl||!pollToken)return;
1007-
try{
1008-
const opts=pollToken?{headers:{"Authorization":"Bearer "+pollToken}}:{};
1009-
const r=await fetch(pollUrl,opts);
1010-
if(!r.ok)return;
1011-
const d=await r.json();
1012-
if(d.aggregate_summary||d.summaries)renderProgress(d);
1013-
}catch{}
1005+
/* ── rehydrate activity history from stored timeline, then render ── */
async function backfillSummaries(prefetched){
  let data = prefetched;

  /* No prefetched payload — do a one-off authenticated poll ourselves. */
  if (!data) {
    if (!pollUrl || !pollToken) return;
    try {
      const opts = pollToken ? {headers: {"Authorization": "Bearer " + pollToken}} : {};
      const resp = await fetch(pollUrl, opts);
      if (!resp.ok) return;
      data = await resp.json();
    } catch {
      return;
    }
  }

  /* Timeline rehydration: replay stored aggregates (each with its
     micro-summaries) into the in-memory activity history. */
  const stored = data.timeline;
  if (stored && stored.length) {
    for (const entry of stored) {
      const stamp = new Date(entry.created_at).toLocaleTimeString([], {hour: "2-digit", minute: "2-digit", second: "2-digit"});
      const micros = (entry.micro_summaries || []).map(function(s){
        return {text: s.summary || String(s), row_indices: s.row_indices || null, row_index: s.row_index};
      });
      aggHistory.push({aggregate: entry.summary, micros: micros, ts: stamp});
    }
    if (data.cursor) pollCursor = data.cursor;
    /* Cap history at the 30 most recent entries. */
    if (aggHistory.length > 30) aggHistory.splice(0, aggHistory.length - 30);
  }

  /* Render current state (progress bar + activity list) */
  renderProgress(data);
}
10151031
10161032
/* ── ontoolresult: unified entry point ── */
10171033
app.ontoolresult=({content,structuredContent})=>{
1034+
10181035
/* Entry 1: structuredContent from futuresearch_status (widget data) */
10191036
if(structuredContent&&structuredContent.progress_url){
10201037
enterProgressMode(structuredContent);
1021-
/* If task already completed on re-mount, backfill summaries then fetch results */
1022-
const done=["completed","failed","revoked"].includes(structuredContent.status);
1023-
if(done){
1024-
wasDone=true;
1025-
backfillSummaries().then(()=>{if(!resultsFetched)autoFetchResults();});
1026-
} else if(!pollTimer){
1027-
pollUrl=structuredContent.progress_url;startPoll();
1028-
}
1038+
pollUrl=structuredContent.progress_url;
1039+
/* Claude.ai caches the original tool result — structuredContent.status
1040+
may be stale (e.g. "running" even though the task completed).
1041+
Always do a one-off fetch to get current status before deciding path. */
1042+
(async()=>{
1043+
try{
1044+
const opts=structuredContent.poll_token?{headers:{"Authorization":"Bearer "+structuredContent.poll_token}}:{};
1045+
const r=await fetch(pollUrl,opts);
1046+
if(!r.ok){if(!pollTimer)startPoll();return;}
1047+
const d=await r.json();
1048+
const currentStatus=d.status||structuredContent.status;
1049+
const done=["completed","failed","revoked"].includes(currentStatus);
1050+
1051+
/* Always backfill stored timeline on mount (covers mid-execution re-mount too) */
1052+
await backfillSummaries(d);
1053+
if(done){
1054+
wasDone=true;
1055+
if(!resultsFetched)autoFetchResults();
1056+
} else if(!pollTimer){
1057+
startPoll();
1058+
}
1059+
}catch(e){
1060+
1061+
if(!pollTimer)startPoll();
1062+
}
1063+
})();
10291064
return;
10301065
}
10311066
@@ -1037,13 +1072,27 @@
10371072
const d=JSON.parse(t.text);
10381073
if(d.progress_url){
10391074
enterProgressMode(d);
1040-
const done=["completed","failed","revoked"].includes(d.status);
1041-
if(done){
1042-
wasDone=true;
1043-
backfillSummaries().then(()=>{if(!resultsFetched)autoFetchResults();});
1044-
} else if(!pollTimer){
1045-
pollUrl=d.progress_url;startPoll();
1046-
}
1075+
pollUrl=d.progress_url;
1076+
/* Same live-check as Entry 1 — cached status may be stale */
1077+
(async()=>{
1078+
try{
1079+
const opts=d.poll_token?{headers:{"Authorization":"Bearer "+d.poll_token}}:{};
1080+
const r2=await fetch(pollUrl,opts);
1081+
if(!r2.ok){if(!pollTimer)startPoll();return;}
1082+
const d2=await r2.json();
1083+
const currentStatus=d2.status||d.status;
1084+
const done2=["completed","failed","revoked"].includes(currentStatus);
1085+
await backfillSummaries(d2);
1086+
if(done2){
1087+
wasDone=true;
1088+
if(!resultsFetched)autoFetchResults();
1089+
} else if(!pollTimer){
1090+
startPoll();
1091+
}
1092+
}catch(e2){
1093+
if(!pollTimer)startPoll();
1094+
}
1095+
})();
10471096
return;
10481097
}
10491098
}catch{}

0 commit comments

Comments
 (0)