Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions demo/anthropic/react/.env.local.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Ably API key — get one at https://ably.com/accounts
ABLY_API_KEY=your-app.key:secret

# Anthropic API key — get one at https://console.anthropic.com
ANTHROPIC_API_KEY=sk-ant-...

# Ably channel name (optional, default: ai:demo)
# Can also be overridden per-session via ?channel=<name> query param
# NEXT_PUBLIC_ABLY_CHANNEL=ai:demo
6 changes: 6 additions & 0 deletions demo/anthropic/react/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
.next/
node_modules/
package-lock.json
.env.local
next-env.d.ts
tsconfig.tsbuildinfo
35 changes: 35 additions & 0 deletions demo/anthropic/react/next.config.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import path from "path";
import type { NextConfig } from "next";

// Monorepo root (three levels up from demo/anthropic/react).
const repoRoot = path.resolve(__dirname, "..", "..", "..");

// Map @ably/ai-transport entry points onto their TypeScript sources instead of
// the pre-built dist/ bundles. The dist bundles contain a Rolldown CJS runtime
// shim that calls `require("react")` (breaks in the browser) and inline `ably`
// imports that bypass `serverExternalPackages` (breaks native ws on the
// server). With source aliases, webpack compiles the TS directly and can
// properly externalize `ably` on the server via `serverExternalPackages`.
const sourceAliases = {
  "@ably/ai-transport/react": path.join(repoRoot, "src/react/index.ts"),
  "@ably/ai-transport/anthropic": path.join(repoRoot, "src/anthropic/index.ts"),
  "@ably/ai-transport": path.join(repoRoot, "src/index.ts"),
};

const nextConfig: NextConfig = {
  // Strict Mode double-mounts cause TransportProvider to close the transport on
  // the first cleanup cycle while the ref persists, leaving a closed transport on
  // remount. Disable until TransportProvider handles this correctly.
  reactStrictMode: false,
  serverExternalPackages: ["jsonwebtoken", "ably", "@anthropic-ai/claude-agent-sdk"],
  webpack: (config) => {
    config.resolve.alias = { ...config.resolve.alias, ...sourceAliases };

    // Source files use .js extensions in imports (standard TS ESM convention).
    // Webpack needs to resolve .js imports to .ts files.
    config.resolve.extensionAlias = { ".js": [".ts", ".tsx", ".js"] };
    return config;
  },
};

export default nextConfig;
36 changes: 36 additions & 0 deletions demo/anthropic/react/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
{
"name": "anthropic-agent-sdk-demo",
"version": "0.0.1",
"private": true,
"scripts": {
"dev": "next dev",
"build": "tsc --noEmit && next build",
"start": "next start",
"typecheck": "tsc --noEmit",
"lint": "next lint"
},
"dependencies": {
"@ably/ai-transport": "file:../../../",
"@anthropic-ai/claude-agent-sdk": "^0.2.85",
"@anthropic-ai/sdk": "^0.80.0",
"ably": "^2",
"jsonwebtoken": "^9",
"next": "^15",
"react": "^19",
"react-dom": "^19"
},
"overrides": {
"@anthropic-ai/sdk": "$@anthropic-ai/sdk"
},
"devDependencies": {
"@tailwindcss/postcss": "^4",
"@types/jsonwebtoken": "^9",
"@types/node": "^22",
"@types/react": "^19",
"@types/react-dom": "^19",
"eslint": "^9",
"eslint-config-next": "^15",
"tailwindcss": "^4",
"typescript": "^5"
}
}
8 changes: 8 additions & 0 deletions demo/anthropic/react/postcss.config.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
// PostCSS pipeline for the demo app. Tailwind v4 ships its own PostCSS plugin
// (@tailwindcss/postcss), which is the only transform needed here.
/** @type {import('postcss-load-config').Config} */
const config = {
  plugins: {
    "@tailwindcss/postcss": {},
  },
};

export default config;
38 changes: 38 additions & 0 deletions demo/anthropic/react/src/app/api/auth/ably-token/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/**
* Ably JWT token endpoint.
*
* Issues short-lived JWTs signed with the Ably API key secret.
* The client connects to Ably with `authUrl` pointing here.
*/

import jwt from 'jsonwebtoken';
import { NextResponse } from 'next/server';

/**
 * GET handler: mints a short-lived Ably JWT for the requesting client.
 *
 * Query params:
 * - clientId (optional): Ably clientId to embed in the token; a random
 *   "user-xxxxxxxx" id is generated when absent.
 *
 * Returns the raw JWT with Content-Type application/jwt, or a JSON 500 when
 * the server is misconfigured.
 */
export async function GET(req: Request) {
  const apiKey = process.env.ABLY_API_KEY;
  if (!apiKey) {
    return NextResponse.json({ error: 'ABLY_API_KEY not set' }, { status: 500 });
  }

  // Ably API keys look like "<keyName>:<keySecret>". Validate the shape up
  // front so a malformed key yields a clear 500 instead of an opaque
  // jwt.sign() failure on an undefined secret.
  const [keyName, keySecret] = apiKey.split(':');
  if (!keyName || !keySecret) {
    return NextResponse.json({ error: 'ABLY_API_KEY is malformed; expected "keyName:keySecret"' }, { status: 500 });
  }

  const url = new URL(req.url);
  const clientId = url.searchParams.get('clientId') ?? `user-${crypto.randomUUID().slice(0, 8)}`;

  // HS256 JWT signed with the key secret, expiring in 1h. `keyid` tells Ably
  // which API key minted the token. Capabilities are wide open ('*') for demo
  // purposes — scope them down for production use.
  const token = jwt.sign(
    {
      'x-ably-clientId': clientId,
      'x-ably-capability': JSON.stringify({ '*': ['publish', 'subscribe', 'history'] }),
    },
    keySecret,
    {
      algorithm: 'HS256',
      keyid: keyName,
      expiresIn: '1h',
    },
  );

  return new NextResponse(token, {
    headers: { 'Content-Type': 'application/jwt' },
  });
}
127 changes: 127 additions & 0 deletions demo/anthropic/react/src/app/api/chat/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,127 @@
/**
* Chat API route — receives messages from the client transport's HTTP POST,
* streams the AI response back over Ably using the Anthropic Agent SDK.
*
* The Agent SDK's query() function produces an AsyncGenerator<SDKMessage>.
* We filter to conversation-relevant types (AgentCodecEvent) and pipe them
* through the Anthropic transport's encoder to the Ably channel.
*/

import { after } from 'next/server';
import { query } from '@anthropic-ai/claude-agent-sdk';
import type { SDKMessage } from '@anthropic-ai/claude-agent-sdk';
import Ably from 'ably';
import { createServerTransport } from '@ably/ai-transport/anthropic';
import type { AgentCodecEvent, AgentMessage } from '@ably/ai-transport/anthropic';
import type { MessageNode } from '@ably/ai-transport';

/** Shape of the POST body sent by the client transport. */
interface ChatRequestBody {
  /** Identifier for the turn being started (passed through to `transport.newTurn`). */
  turnId: string;
  /** Ably clientId of the sender; attributed to the published user messages. */
  clientId: string;
  /** New messages for this turn, published to the channel and fed to the agent. */
  messages: MessageNode<AgentMessage>[];
  /** Prior conversation context; combined with `messages` when extracting the prompt. */
  history?: MessageNode<AgentMessage>[];
  /** Session id — used as the Ably channel name in the POST handler. */
  id: string;
  /** Id of the turn this one forks from, if any. */
  forkOf?: string;
  /** Parent message id; null/undefined for a root message. */
  parent?: string | null;
}

/** Check if an SDKMessage is a conversation-relevant AgentCodecEvent. */
function isAgentCodecEvent(msg: SDKMessage): msg is AgentCodecEvent {
  switch (msg.type) {
    case 'stream_event':
    case 'assistant':
    case 'user':
    case 'result':
    case 'tool_progress':
      return true;
    default:
      return false;
  }
}

/** Convert the Agent SDK's async generator into a ReadableStream<AgentCodecEvent>. */
function sdkMessageStream(queryResult: AsyncIterable<SDKMessage>): ReadableStream<AgentCodecEvent> {
  // Drain the generator, forwarding only conversation-relevant events.
  const pump = async (controller: ReadableStreamDefaultController<AgentCodecEvent>) => {
    for await (const sdkMsg of queryResult) {
      if (isAgentCodecEvent(sdkMsg)) {
        controller.enqueue(sdkMsg);
      }
    }
    controller.close();
  };

  return new ReadableStream<AgentCodecEvent>({
    start(controller) {
      // Propagate generator failures as a stream error rather than an
      // unhandled rejection.
      return pump(controller).catch((err) => controller.error(err));
    },
  });
}

/** Extract the user's prompt text from the conversation messages. */
function extractPrompt(messages: MessageNode<AgentMessage>[], history: MessageNode<AgentMessage>[]): string {
  // Scan history + new messages from the end to find the latest user message.
  const combined = history.concat(messages);
  let lastUser: MessageNode<AgentMessage> | undefined;
  for (let i = combined.length - 1; i >= 0; i--) {
    if (combined[i].message.type === 'user') {
      lastUser = combined[i];
      break;
    }
  }
  if (!lastUser) return '';

  const content = lastUser.message.message.content;
  if (typeof content === 'string') return content;
  if (!Array.isArray(content)) return '';

  // Content is an array of content blocks — join the text blocks.
  const parts: string[] = [];
  for (const block of content) {
    if (typeof block === 'object' && block !== null && 'type' in block && block.type === 'text') {
      parts.push((block as { type: 'text'; text: string }).text);
    }
  }
  return parts.join('\n');
}

// Server-side Ably client — uses API key directly (trusted environment).
// NOTE(review): the non-null assertion assumes ABLY_API_KEY is always set at
// module load; if it is missing, the client is created with an undefined key
// and authentication fails later at runtime — confirm the deployment env.
const ably = new Ably.Realtime({ key: process.env.ABLY_API_KEY! });

/**
 * POST handler: starts a turn on the session channel, publishes the user's
 * messages, invokes the Agent SDK, and streams the response over Ably in the
 * background via `after()`. Returns 200 immediately — the response itself is
 * delivered through the channel, not the HTTP body.
 */
export async function POST(req: Request) {
  const { messages, history = [], id, turnId, clientId, forkOf, parent } = (await req.json()) as ChatRequestBody;
  // The session id doubles as the Ably channel name.
  const channel = ably.channels.get(id);

  const transport = createServerTransport({ channel });
  const turn = transport.newTurn({ turnId, clientId, parent: parent ?? undefined, forkOf });

  await turn.start();

  // Publish user messages to the channel so all clients see them.
  let lastUserMsgId: string | undefined;
  if (messages.length > 0) {
    const { msgIds } = await turn.addMessages(messages, { clientId });
    lastUserMsgId = msgIds.at(-1);
  }

  // Extract the user's prompt for the Agent SDK.
  const prompt = extractPrompt(messages, history);

  // Bridge the transport's abort signal to an AbortController for the Agent SDK.
  // When the client cancels a turn, the transport fires turn.abortSignal, which
  // propagates to the Agent SDK to stop the LLM call.
  const abortController = new AbortController();
  turn.abortSignal.addEventListener('abort', () => abortController.abort(), { once: true });

  // Call the Agent SDK — this spawns a Claude Code process that calls the
  // Anthropic API with the ANTHROPIC_API_KEY environment variable.
  const conversation = query({
    prompt,
    options: {
      includePartialMessages: true,
      maxTurns: 1,
      systemPrompt: 'You are a helpful assistant.',
      abortController,
    },
  });

  // Stream the response over Ably in the background using after().
  after(async () => {
    try {
      const stream = sdkMessageStream(conversation);
      const { reason } = await turn.streamResponse(stream, {
        parent: lastUserMsgId,
      });
      await turn.end(reason);
    } finally {
      // Always release the transport — without this, a failure inside
      // streamResponse/end would leak the transport for this request.
      transport.close();
    }
  });

  return new Response(null, { status: 200 });
}
93 changes: 93 additions & 0 deletions demo/anthropic/react/src/app/components/chat.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
'use client';

import { useState, useCallback } from 'react';
import { useClientTransport, useView, useActiveTurns, useAblyMessages } from '@ably/ai-transport/react';
import type { AgentCodecEvent, AgentMessage } from '@ably/ai-transport/anthropic';

import { userMessage } from '../helpers';
import { MessageList } from './message-list';
import { DebugPane } from './debug-pane';

interface ChatProps {
  /** Session id; used as the transport's Ably channel name. */
  chatId: string;
  /** This browser's Ably clientId; when absent the Stop button is never shown. */
  clientId?: string;
  /** Max messages kept in the view; defaults to 30. */
  historyLimit?: number;
}

/**
 * Top-level chat UI for one session channel.
 *
 * Wires four hooks from @ably/ai-transport/react:
 * - useClientTransport: connection bound to the Ably channel named `chatId`
 * - useView: the ordered message view (capped at `historyLimit`, default 30)
 * - useActiveTurns: in-flight turns — checked by clientId to toggle the Stop button
 * - useAblyMessages: raw channel traffic, surfaced in the debug pane
 */
export function Chat({ chatId, clientId, historyLimit }: ChatProps) {
  // Draft text for the input box.
  const [input, setInput] = useState('');

  const transport = useClientTransport<AgentCodecEvent, AgentMessage>({ channelName: chatId });
  const view = useView<AgentCodecEvent, AgentMessage>({ transport, limit: historyLimit ?? 30 });
  const activeTurns = useActiveTurns({ transport });
  const ablyMessages = useAblyMessages({ transport });

  // Only offer Stop when this client has a turn of its own in flight.
  const hasOwnTurns = clientId ? activeTurns.has(clientId) : false;

  // Send the drafted text as a user message; whitespace-only input is ignored.
  const handleSubmit = useCallback(() => {
    const text = input.trim();
    if (!text) return;
    setInput('');
    view.send([userMessage(text)]);
  }, [input, view]);

  // Enter submits; Shift+Enter is left alone (no effect for a single-line input).
  const handleKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault();
      handleSubmit();
    }
  };

  return (
    <div className="flex h-dvh">
      <div className="flex flex-1 flex-col">
        {/* Header */}
        <header className="flex items-center gap-2 border-b border-zinc-800 px-4 py-3">
          <div className="h-2 w-2 rounded-full bg-emerald-500" />
          <h1 className="text-sm font-medium text-zinc-300">Ably AI — Anthropic Agent SDK Demo</h1>
          {clientId && <span className="ml-auto text-xs text-zinc-600 font-mono">{clientId}</span>}
        </header>

        {/* Messages */}
        <MessageList view={view} />

        {/* Input */}
        <div className="border-t border-zinc-800 px-4 py-3">
          <div className="flex gap-2">
            <input
              value={input}
              onChange={(e) => setInput(e.target.value)}
              onKeyDown={handleKeyDown}
              placeholder="Type a message..."
              className="flex-1 rounded-md bg-zinc-900 border border-zinc-700 px-3 py-2 text-sm text-zinc-200 placeholder-zinc-600 outline-none focus:border-zinc-500"
              autoFocus
            />
            {hasOwnTurns && (
              <button
                type="button"
                onClick={() => transport.cancel({ own: true })}
                className="rounded-md bg-red-900/60 px-4 py-2 text-sm font-medium text-red-300 hover:bg-red-900/80 transition-colors"
              >
                Stop
              </button>
            )}
            <button
              type="button"
              onClick={handleSubmit}
              disabled={!input.trim()}
              className="rounded-md bg-zinc-700 px-4 py-2 text-sm font-medium text-zinc-200 hover:bg-zinc-600 disabled:opacity-40 disabled:cursor-not-allowed transition-colors"
            >
              Send
            </button>
          </div>
        </div>
      </div>

      <DebugPane
        messages={view.nodes.map((n) => n.message)}
        ablyMessages={ablyMessages}
        activeTurns={activeTurns}
      />
    </div>
  );
}
Loading