Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions test/versioned/langchain-aws/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
{
"name": "langchain-aws-bedrock-tests",
"targets": [
{
"name": "@langchain/core",
"minSupported": "0.1.17",
"minAgentVersion": "11.13.0"
}
],
"version": "0.0.0",
"private": true,
"engines": {
"node": ">=20"
},
"tests": [
{
"engines": {
"node": ">=20"
},
"dependencies": {
"@langchain/aws": ">=1.1.0",
"@langchain/core": ">=1.0.0",
"@langchain/community": ">=1.0.0",
"@elastic/elasticsearch": "8.13.1"
},
"files": [
"runnables.test.js",
"runnables-streaming.test.js",
"vectorstore.test.js"
]
}
]
}
117 changes: 117 additions & 0 deletions test/versioned/langchain-aws/runnables-streaming.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
/*
* Copyright 2025 New Relic Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*/

'use strict'

const test = require('node:test')
const assert = require('node:assert')

const { removeModules } = require('../../lib/cache-buster')
const { match } = require('../../lib/custom-assertions')
const {
runStreamingEnabledTests,
runStreamingDisabledTest,
runAiMonitoringDisabledTests
} = require('../langchain/runnables-streaming')
const { FAKE_CREDENTIALS, getAiResponseServer } = require('../../lib/aws-server-stubs')
const helper = require('../../lib/agent_helper')

// Agent configuration shared by every suite in this file. The
// `streaming.enabled` flag is just a default — each suite's beforeEach
// overrides it via the `enabled` argument.
const config = {
  ai_monitoring: {
    enabled: true,
    streaming: {
      enabled: true
    }
  }
}
// Async factory for the mocked Bedrock response server; each call resolves
// to `{ server, baseUrl }` (see beforeEach), scoped to this test directory.
const createAiResponseServer = getAiResponseServer(__dirname)

/**
 * Per-test setup: starts the mocked Bedrock response server, instruments the
 * agent, and attaches the langchain/Bedrock objects each test needs to
 * `ctx.nr`.
 *
 * @param {object} params
 * @param {boolean} params.enabled value for `ai_monitoring.streaming.enabled`
 * @param {object} params.ctx node:test context for the current test
 */
async function beforeEach({ enabled, ctx }) {
  const nr = {}
  ctx.nr = nr

  const { server, baseUrl } = await createAiResponseServer()
  nr.server = server

  const agent = helper.instrumentMockedAgent(config)
  agent.config.ai_monitoring.streaming.enabled = enabled
  nr.agent = agent

  // Modules are required after the agent is instrumented (the afterEach hook
  // busts the require cache so this re-wraps them on every test).
  const { ChatPromptTemplate } = require('@langchain/core/prompts')
  const { StringOutputParser, CommaSeparatedListOutputParser } = require('@langchain/core/output_parsers')
  const { BaseCallbackHandler } = require('@langchain/core/callbacks/base')
  const { ChatBedrockConverse } = require('@langchain/aws')
  const { BedrockRuntimeClient } = require('@aws-sdk/client-bedrock-runtime')

  nr.ChatPromptTemplate = ChatPromptTemplate
  nr.CommaSeparatedListOutputParser = CommaSeparatedListOutputParser
  nr.BaseCallbackHandler = BaseCallbackHandler
  nr.langchainCoreVersion = require('@langchain/core/package.json').version

  // Route Bedrock traffic to the mock endpoint instead of real AWS.
  const bedrockClient = new BedrockRuntimeClient({
    region: 'us-east-1',
    credentials: FAKE_CREDENTIALS,
    endpoint: baseUrl,
    maxAttempts: 1
  })

  nr.prompt = ChatPromptTemplate.fromMessages([['assistant', 'text converse ultimate question {topic}']])
  nr.model = new ChatBedrockConverse({
    streaming: true,
    model: 'anthropic.claude-instant-v1',
    region: 'us-east-1',
    client: bedrockClient
  })
  nr.outputParser = new StringOutputParser()
}

/**
 * Per-test teardown: tears down the mock server, unloads the agent, and
 * clears instrumented modules from the require cache.
 *
 * @param {object} ctx node:test context for the current test
 */
async function afterEach(ctx) {
  ctx.nr?.server?.destroy()
  // Guard the unload: if beforeEach failed before assigning `ctx.nr.agent`,
  // an unconditional access here would throw and mask the original failure.
  if (ctx.nr?.agent) {
    helper.unloadAgent(ctx.nr.agent)
  }
  // bust the require-cache so it can re-instrument
  removeModules(['@langchain/core', '@langchain/aws', '@aws-sdk'])
}

test('streaming enabled', async (t) => {
  t.beforeEach((ctx) => beforeEach({ enabled: true, ctx }))
  t.afterEach(afterEach)

  // Build the shared streaming suite with Bedrock-specific expectations,
  // then run it against this test context.
  const suite = runStreamingEnabledTests({
    inputData: { topic: 'streamed' },
    expectedInput: '{"topic":"streamed"}',
    expectedContent: () => 'This is a test.',
    errorPromptTemplate: ['assistant', 'text converse ultimate question streamed error'],
    errorFromStreamEventCount: 4,
    errorFromStreamLangchainEventCount: 2,
    errorFromStreamAssertion: (exceptions) => {
      // Expect exactly two recorded exceptions, each carrying the streaming
      // error message and a UUID-shaped completion id.
      assert.equal(exceptions.length, 2)
      exceptions.forEach((exception) => {
        match(exception, {
          customAttributes: {
            'error.message': /Internal server error during streaming/,
            completion_id: /[\w-]{36}/
          }
        })
      })
    }
  })
  await suite(t)
})

test('streaming disabled', async (t) => {
  t.beforeEach((ctx) => beforeEach({ enabled: false, ctx }))
  t.afterEach(afterEach)

  // Shared suite verifying no stream events are recorded when streaming
  // instrumentation is turned off.
  const suite = runStreamingDisabledTest({
    inputData: { topic: 'streamed' },
    expectedContent: () => 'This is a test.',
    streamingDisabledMessage: 'should increment streaming disabled in both langchain and bedrock'
  })
  await suite(t)
})

test('ai_monitoring disabled', async (t) => {
  // NOTE(review): setup here still enables streaming and ai_monitoring;
  // presumably runAiMonitoringDisabledTests flips ai_monitoring off on the
  // agent itself — confirm against ../langchain/runnables-streaming.
  t.beforeEach((ctx) => beforeEach({ enabled: true, ctx }))
  t.afterEach(afterEach)

  const suite = runAiMonitoringDisabledTests({
    inputData: { topic: 'streamed' },
    expectedContent: () => 'This is a test.'
  })
  await suite(t)
})
69 changes: 69 additions & 0 deletions test/versioned/langchain-aws/runnables.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/*
* Copyright 2025 New Relic Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*/

'use strict'

const test = require('node:test')

const { removeModules } = require('../../lib/cache-buster')
const { runRunnablesTests } = require('../langchain/runnables')
const { FAKE_CREDENTIALS, getAiResponseServer } = require('../../lib/aws-server-stubs')
const helper = require('../../lib/agent_helper')

// Agent configuration shared by every test in this file.
const config = {
  ai_monitoring: {
    enabled: true
  }
}
// Async factory for the mocked Bedrock response server; each call resolves
// to `{ server, baseUrl }` (see the beforeEach hook below).
const createAiResponseServer = getAiResponseServer(__dirname)

// Per-test setup: mocked Bedrock server, instrumented agent, and the
// langchain objects the shared runnables suite expects on `ctx.nr`.
test.beforeEach(async (ctx) => {
  const nr = {}
  ctx.nr = nr

  const { server, baseUrl } = await createAiResponseServer()
  nr.server = server
  nr.agent = helper.instrumentMockedAgent(config)

  // Require these after instrumenting the agent (afterEach busts the require
  // cache so each test re-wraps fresh modules).
  const { ChatPromptTemplate } = require('@langchain/core/prompts')
  const { StringOutputParser, CommaSeparatedListOutputParser } = require('@langchain/core/output_parsers')
  const { BaseCallbackHandler } = require('@langchain/core/callbacks/base')
  const { ChatBedrockConverse } = require('@langchain/aws')
  const { BedrockRuntimeClient } = require('@aws-sdk/client-bedrock-runtime')

  nr.ChatPromptTemplate = ChatPromptTemplate
  nr.CommaSeparatedListOutputParser = CommaSeparatedListOutputParser
  nr.BaseCallbackHandler = BaseCallbackHandler
  nr.langchainCoreVersion = require('@langchain/core/package.json').version

  // Route Bedrock traffic to the mock endpoint instead of real AWS.
  const bedrockClient = new BedrockRuntimeClient({
    region: 'us-east-1',
    credentials: FAKE_CREDENTIALS,
    endpoint: baseUrl,
    maxAttempts: 1
  })

  nr.prompt = ChatPromptTemplate.fromMessages([['assistant', 'text converse ultimate {topic}']])
  nr.model = new ChatBedrockConverse({
    model: 'anthropic.claude-3-haiku-20240307-v1:0',
    region: 'us-east-1',
    client: bedrockClient
  })
  nr.outputParser = new StringOutputParser()
})

// Per-test teardown: stop the mock server, unload the agent, and clear the
// instrumented modules from the require cache.
test.afterEach(async (ctx) => {
  ctx.nr?.server?.destroy()
  // Guard the unload: if beforeEach failed before assigning `ctx.nr.agent`,
  // an unconditional access here would throw and mask the original failure.
  if (ctx.nr?.agent) {
    helper.unloadAgent(ctx.nr.agent)
  }
  // bust the require-cache so it can re-instrument
  removeModules(['@langchain/core', '@langchain/aws', '@aws-sdk'])
})

// Run the shared runnables suite, parameterized with the Bedrock converse
// prompt/response expectations set up in beforeEach.
const runnablesSuiteOptions = {
  inputData: { topic: 'question' },
  expectedInput: '{"topic":"question"}',
  expectedOutput: 'This is a test.',
  errorPromptTemplate: ['assistant', 'text converse ultimate question error'],
  errorEventCount: 5,
  arrayParserOutput: '["This is a test."]'
}
runRunnablesTests(runnablesSuiteOptions)
81 changes: 81 additions & 0 deletions test/versioned/langchain-aws/vectorstore.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
/*
* Copyright 2025 New Relic Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*/

'use strict'

const test = require('node:test')

const { removeModules } = require('../../lib/cache-buster')
const { runVectorstoreTests } = require('../langchain/vectorstore')
const { Document } = require('@langchain/core/documents')
const { FAKE_CREDENTIALS, getAiResponseServer } = require('../../lib/aws-server-stubs')
const params = require('../../lib/params')
const helper = require('../../lib/agent_helper')

// Agent configuration shared by every test in this file.
const config = {
  ai_monitoring: {
    enabled: true
  }
}
// Async factory for the mocked Bedrock response server; each call resolves
// to `{ server, baseUrl }` (see the beforeEach hook below).
const createAiResponseServer = getAiResponseServer(__dirname)

// Per-test setup: mocked Bedrock server, instrumented agent, Bedrock
// embeddings, and an Elasticsearch-backed vectorstore seeded with one
// known document.
test.beforeEach(async (ctx) => {
  const nr = {}
  ctx.nr = nr

  const { server, baseUrl } = await createAiResponseServer()
  nr.server = server
  nr.agent = helper.instrumentMockedAgent(config)

  // Require these after instrumenting the agent (afterEach busts the require
  // cache so each test re-wraps fresh modules).
  const { BedrockEmbeddings } = require('@langchain/aws')
  const { BedrockRuntimeClient } = require('@aws-sdk/client-bedrock-runtime')
  nr.langchainCoreVersion = require('@langchain/core/package.json').version

  const { Client } = require('@elastic/elasticsearch')
  const clientArgs = {
    client: new Client({
      node: `http://${params.elastic_host}:${params.elastic_port}`
    }),
    indexName: 'test_langchain_aws_vectorstore'
  }
  const { ElasticVectorSearch } = require('@langchain/community/vectorstores/elasticsearch')

  // Route Bedrock traffic to the mock endpoint instead of real AWS.
  const bedrockClient = new BedrockRuntimeClient({
    region: 'us-east-1',
    credentials: FAKE_CREDENTIALS,
    endpoint: baseUrl,
    maxAttempts: 1
  })

  const embedding = new BedrockEmbeddings({
    model: 'amazon.titan-embed-text-v1',
    region: 'us-east-1',
    client: bedrockClient,
    maxRetries: 0
  })
  nr.embedding = embedding

  // Recreate the index and seed it with a single document whose content the
  // shared vectorstore suite searches for.
  const seedDocuments = [
    new Document({
      metadata: { id: '2' },
      pageContent: 'embed text amazon token count callback response'
    })
  ]
  const vectorStore = new ElasticVectorSearch(embedding, clientArgs)
  await vectorStore.deleteIfExists()
  await vectorStore.addDocuments(seedDocuments)
  nr.vs = vectorStore
})

// Per-test teardown: drop the seeded index, stop the mock server, unload the
// agent, and clear the instrumented modules from the require cache.
test.afterEach(async (ctx) => {
  await ctx.nr?.vs?.deleteIfExists()
  ctx.nr?.server?.destroy()
  // Guard the unload: if beforeEach failed before assigning `ctx.nr.agent`,
  // an unconditional access here would throw and mask the original failure.
  if (ctx.nr?.agent) {
    helper.unloadAgent(ctx.nr.agent)
  }
  // bust the require-cache so it can re-instrument
  removeModules(['@langchain/core', '@langchain/aws', '@aws-sdk', '@elastic', '@langchain/community'])
})

// Run the shared vectorstore suite. Query, expected query, and expected page
// content are all the seed document's text (see beforeEach), so name it once.
const seedPageContent = 'embed text amazon token count callback response'
runVectorstoreTests({
  searchQuery: seedPageContent,
  expectedQuery: seedPageContent,
  expectedPageContent: seedPageContent
})
34 changes: 34 additions & 0 deletions test/versioned/langchain-openai/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
{
"name": "langchain-openai-tests",
"targets": [
{
"name": "@langchain/core",
"minSupported": "0.1.17",
"minAgentVersion": "11.13.0"
}
],
"version": "0.0.0",
"private": true,
"engines": {
"node": ">=20"
},
"tests": [
{
"engines": {
"node": ">=20"
},
"dependencies": {
"@langchain/core": ">=1.0.0",
"@langchain/community": ">=1.0.0",
"@langchain/openai": ">=1.0.0",
"openai": "4.90.0",
"@elastic/elasticsearch": "8.13.1"
},
"files": [
"runnables.test.js",
"runnables-streaming.test.js",
"vectorstore.test.js"
]
}
]
}
Loading