route.ts

/**
* Chat API route — receives messages from the client transport's HTTP POST,
* streams the AI response back over Ably.
*
* Uses Next.js `after()` to stream the response without blocking the HTTP
* response. See the docs for why this matters: docs/concepts/transport.md
*/
import { after } from 'next/server';
import { streamText, convertToModelMessages } from 'ai';
import type { UIMessage } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';
import Ably from 'ably';
import { createServerTransport } from '@ably/ai-transport/vercel';
import type { MessageNode } from '@ably/ai-transport';
import { tools } from './tools';

/** Shape of the POST body sent by the client transport. */
interface ChatRequestBody {
  turnId: string;
  clientId: string;
  messages: MessageNode<UIMessage>[];
  history?: MessageNode<UIMessage>[];
  chatId: string;
  forkOf?: string;
  parent?: string;
}

// Server-side Ably client — uses API key directly (trusted environment).
const ably = new Ably.Realtime({ key: process.env.ABLY_API_KEY! });

export async function POST(req: Request) {
  const { messages, history, chatId, turnId, clientId, forkOf, parent } = (await req.json()) as ChatRequestBody;
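
  // Open the Ably channel for this chat and begin a new turn on it,
  // forwarding the incoming request's abort signal.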
  const channel = ably.channels.get(chatId);
  const transport = createServerTransport({ channel });
  const turn = transport.newTurn({ turnId, clientId, parent, forkOf, signal: req.signal });
  await turn.start();

  // Publish user messages (if any). Fork metadata (parent/forkOf) is
  // configured at the turn level — addMessages picks it up automatically.
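  // The id of the last published user message is kept so the assistant
  // response can be parented to it in the after() block below.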
  let lastUserMsgId: string | undefined;
  if (messages.length > 0) {
    const { msgIds } = await turn.addMessages(messages, { clientId });
    lastUserMsgId = msgIds.at(-1);
  }

  // Reconstruct the full conversation for the LLM.
  const historyMsgs = (history ?? []).map((h) => h.message);
  const newMsgs = (messages ?? []).map((m) => m.message);
  const allMessages = [...historyMsgs, ...newMsgs];
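
  // Generate the assistant response. The turn's abort signal is passed
  // through so generation is cancelled when it fires.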
  const result = streamText({
    model: anthropic('claude-sonnet-4-6'),
    system: `You are a helpful assistant. When the user asks about weather, use the getWeather tool. If they don't specify a location, call getLocation first to get their coordinates, then call getWeather with a description of that location.`,
    messages: await convertToModelMessages(allMessages),
    tools,
    abortSignal: turn.abortSignal,
  });

  // Stream the response over Ably in the background using after().
  // Pass parent explicitly — the assistant response is a child of the last user message.
  after(async () => {
    const { reason } = await turn.streamResponse(result.toUIMessageStream(), {
      parent: lastUserMsgId,
    });
    await turn.end(reason);
    transport.close();
  });
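
  // Respond immediately; the assistant output reaches the client over Ably,
  // not over this HTTP response body.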
  return new Response(null, { status: 200 });
}