Skip to content

Commit 1aa90ea

Browse files
committed
update
1 parent 5f0af1a commit 1aa90ea

8 files changed

Lines changed: 19 additions & 23 deletions

File tree

scripts/generate_code_snippet_mdx.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,9 @@ def _expand_to_deepagents_codegroup(
114114
"""Wrap `content` in a CodeGroup, one tab per quickstart model variant."""
115115
start, end = canonical_span
116116
parts = [
117-
_codegroup_fence(title, fence_lang, _replace_span(content, start, end, model_token))
117+
_codegroup_fence(
118+
title, fence_lang, _replace_span(content, start, end, model_token)
119+
)
118120
for title, model_token in tab_definitions
119121
]
120122
return "<CodeGroup>\n" + "\n\n".join(parts) + "\n</CodeGroup>\n"

src/code-samples/deepagents/content-builder.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,7 @@ function createContentWriter() {
125125
};
126126

127127
return createDeepAgent({
128-
model: "google_genai:gemini-3.1-pro-preview",
128+
model: "anthropic:claude-sonnet-4-6",
129129
memory: ["./AGENTS.md"],
130130
skills: ["./skills/"],
131131
tools: [generateCover, generateSocialImage],

src/code-samples/langchain/nostream-tag.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,9 +7,9 @@
77
from langgraph.graph import START, StateGraph
88

99
# KEEP MODEL
10-
stream_model = ChatAnthropic(model_name="claude-3-haiku-20240307")
10+
stream_model = ChatAnthropic(model_name="claude-haiku-4-5-20251001")
1111
# KEEP MODEL
12-
internal_model = ChatAnthropic(model_name="claude-3-haiku-20240307").with_config(
12+
internal_model = ChatAnthropic(model_name="claude-haiku-4-5-20251001").with_config(
1313
{"tags": ["nostream"]}
1414
)
1515

src/code-samples/langchain/nostream-tag.ts

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,10 @@ import { StateGraph, StateSchema, START } from "@langchain/langgraph";
88
import * as z from "zod";
99

1010
// KEEP MODEL
11-
const streamModel = new ChatAnthropic({ model: "claude-3-haiku-20240307" });
11+
const streamModel = new ChatAnthropic({ model: "claude-haiku-4-5-20251001" });
1212
const internalModel = new ChatAnthropic({
1313
// KEEP MODEL
14-
model: "claude-3-haiku-20240307",
14+
model: "claude-haiku-4-5-20251001",
1515
}).withConfig({
1616
tags: ["nostream"],
1717
});
@@ -44,7 +44,10 @@ const graph = new StateGraph(State)
4444
.addEdge("writeAnswer", "internal_notes")
4545
.compile();
4646

47-
const stream = await graph.stream({ topic: "AI" }, { streamMode: "messages" });
47+
const stream = await graph.stream(
48+
{ topic: "AI", answer: "", notes: "" },
49+
{ streamMode: "messages" },
50+
);
4851
// :snippet-end:
4952

5053
// :remove-start:

src/code-samples/package-lock.json

Lines changed: 0 additions & 9 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/langsmith/trace-bedrock.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ npm install @aws-sdk/client-bedrock-runtime langsmith
2525

2626
</CodeGroup>
2727

28-
This integration uses the native AWS SDKs with LangSmith's tracing capabilities. For Python, you'll use [`boto3`](https://pypi.org/project/boto3/) (the AWS SDK for Python) along with [`langsmith`](https://pypi.org/project/langsmith/) to capture traces. For JavaScript/TypeScript, you'll use [`@aws-sdk/client-bedrock-runtime`](https://www.npmjs.com/package/@aws-sdk/client-bedrock-runtime) with the [`langsmith`](https://www.npmjs.com/package/langsmith) package. Both implementations use the [Bedrock Converse API](https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html), which provides a unified interface for interacting with foundation models.
28+
This integration uses the native AWS SDKs with LangSmith's tracing capabilities. For Python, you'll use [`boto3`](https://pypi.org/project/boto3/) (the AWS SDK for Python) along with [`langsmith`](https://pypi.org/project/langsmith/) to capture traces. For JavaScript/TypeScript, you'll use [`@aws-sdk/client-bedrock-runtime`](https://www.npmjs.com/package/@aws-sdk/client-bedrock-runtime) with the [`langsmith`](https://www.npmjs.com/package/langsmith) package. Both implementations use the [Bedrock Converse API](https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html), which provides a unified interface for interacting with foundation models.
2929

3030
## Setup
3131

@@ -66,7 +66,7 @@ from langsmith import traceable
6666

6767
# Initialize Bedrock runtime client (ensure AWS creds and region are set)
6868
bedrock = boto3.client("bedrock-runtime", region_name="us-east-1")
69-
model_id = "anthropic.claude-3-haiku-20240307-v1:0" # Example Bedrock model ID
69+
model_id = "anthropic.claude-haiku-4-5-20251001-v1:0" # Example Bedrock model ID
7070

7171
# Decorate the model invocation function to auto-capture a trace with tags/metadata
7272
@traceable(tags=["aws-bedrock", "langsmith", "integration-test"],
@@ -96,7 +96,7 @@ import { BedrockRuntimeClient, ConverseCommand } from "@aws-sdk/client-bedrock-r
9696
import { traceable } from "langsmith";
9797

9898
const client = new BedrockRuntimeClient({ region: "us-east-1" });
99-
const modelId = "anthropic.claude-3-haiku-20240307-v1:0";
99+
const modelId = "anthropic.claude-haiku-4-5-20251001-v1:0";
100100

101101
// Wrap the Bedrock invocation in a traceable function with tags and metadata
102102
const invokeBedrock = traceable(

src/snippets/code-samples/nostream-tag-js.mdx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,9 @@ import { ChatAnthropic } from "@langchain/anthropic";
33
import { StateGraph, StateSchema, START } from "@langchain/langgraph";
44
import * as z from "zod";
55

6-
const streamModel = new ChatAnthropic({ model: "claude-3-haiku-20240307" });
6+
const streamModel = new ChatAnthropic({ model: "claude-haiku-4-5-20251001" });
77
const internalModel = new ChatAnthropic({
8-
model: "claude-3-haiku-20240307",
8+
model: "claude-haiku-4-5-20251001",
99
}).withConfig({
1010
tags: ["nostream"],
1111
});

src/snippets/code-samples/nostream-tag-py.mdx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,8 @@ from typing import Any, TypedDict
44
from langchain_anthropic import ChatAnthropic
55
from langgraph.graph import START, StateGraph
66

7-
stream_model = ChatAnthropic(model_name="claude-3-haiku-20240307")
8-
internal_model = ChatAnthropic(model_name="claude-3-haiku-20240307").with_config(
7+
stream_model = ChatAnthropic(model_name="claude-haiku-4-5-20251001")
8+
internal_model = ChatAnthropic(model_name="claude-haiku-4-5-20251001").with_config(
99
{"tags": ["nostream"]}
1010
)
1111

0 commit comments

Comments (0)