No spans for LangChain #468
-
Hi, I'm trying this app, but I only get the OpenAI spans, not anything from the LangChain abstraction. Am I missing something?
// Load .env file, if any.
// dotenv must run before process.env is read below.
require('dotenv').config()
const traceloop = require('@traceloop/node-server-sdk');
// Reuse the OTEL_EXPORTER_OTLP_ENDPOINT variable instead of TRACELOOP_BASE_URL
const baseUrl = process.env.OTEL_EXPORTER_OTLP_ENDPOINT;
// Reuse the OTEL_SERVICE_NAME variable instead of TRACELOOP_APP_NAME
const appName = process.env.OTEL_SERVICE_NAME;
// Initialize tracing BEFORE requiring the modules to be instrumented, so the
// SDK's auto-instrumentation can patch them as they load. disableBatch sends
// spans without batching (useful when debugging missing spans).
// NOTE(review): per the maintainer's reply below, auto-instrumentation can miss
// some langchain packages; passing `instrumentModules` to initialize() (manual
// instrumentation) is the suggested workaround — confirm against Traceloop docs.
traceloop.initialize({baseUrl, appName, disableBatch: true});
const {ChatOpenAI, OpenAIEmbeddings} = require("@langchain/openai");
const {MemoryVectorStore} = require("langchain/vectorstores/memory");
const {ChatPromptTemplate} = require("@langchain/core/prompts");
const {StringOutputParser} = require("@langchain/core/output_parsers");
const {RunnablePassthrough, RunnableSequence} = require("@langchain/core/runnables");
/**
 * RAG demo: embed a handful of "monster" documents into an in-memory vector
 * store, retrieve the single most relevant one for the question, and ask the
 * chat model to answer using only that retrieved context.
 */
async function main() {
  // Model names suggest a local OpenAI-compatible endpoint (e.g. Ollama) —
  // TODO confirm the base URL / API key configuration matches.
  const llm = new ChatOpenAI({model: 'qwen2.5:0.5b'});
  const embeddings = new OpenAIEmbeddings({model: 'all-minilm:33m'});

  // One Document ({pageContent, metadata}) per monster description.
  const monsters = [
    "Goblin: Weak but numerous, attacks in groups.",
    "Orc: Strong and aggressive, fights head-on.",
    "Skeleton: Undead warrior, immune to poison but fragile.",
    "Giant Spider: Webs players, poisonous bite.",
    "Dragon: Powerful and magical, breathes fire.",
    "Keegorg: Senior Solution Architect at Docker",
  ].map((pageContent) => ({ pageContent, metadata: {} }));

  const vectorStore = new MemoryVectorStore(embeddings);
  // Create embeddings for the monsters
  await vectorStore.addDocuments(monsters);
  // Retrieve only one monster
  const retriever = vectorStore.asRetriever(1);

  // Create prompt template
  const ANSWER_PROMPT = ChatPromptTemplate.fromTemplate(
    `You are a monster expert, and the context includes relevant monsters. Answer the user concisely only using the provided context. If you don't know the answer, just say that you don't know.
context: {context}
Question: "{question}"
Answer:`
  );

  /**
   * Join the retrieved documents into a single context string.
   * BUG FIX: LangChain.js documents expose `pageContent` (camelCase) — exactly
   * as constructed above — not the Python-style `page_content`. The original
   * mapped to `undefined`, so the prompt's {context} never contained the
   * retrieved monster.
   * @param {Array<{pageContent: string}>} docs - retrieved documents
   * @returns {string} documents joined by blank lines
   */
  function onlyContent(docs) {
    return docs.map((doc) => doc.pageContent).join('\n\n');
  }

  const chain = RunnableSequence.from([
    {
      context: retriever.pipe(onlyContent),
      question: new RunnablePassthrough(),
    },
    ANSWER_PROMPT,
    llm,
    new StringOutputParser(),
  ]);

  // Pass the user's question to the sequence
  const response = await chain.invoke("Who is Keegorg?");
  console.log(response);
}
main();
{
"name": "test-app",
"version": "1.0.0",
"private": true,
"type": "commonjs",
"engines": {
"node": ">=16",
"npm": ">=7"
},
"scripts": {
"start": "node index.js"
},
"dependencies": {
"openai": "^4.67.3",
"langchain": "^0.3.2",
"@langchain/core": "^0.3.11",
"@langchain/openai": "^0.3.7",
"dotenv": "^16.4.5",
"@traceloop/node-server-sdk": "^0.11.1"
}
}
Beta Was this translation helpful? Give feedback.
Replies: 7 comments
-
Thanks @codefromthecrypt looking into this! |
Beta Was this translation helpful? Give feedback.
-
@codefromthecrypt so the reason is an issue we have with OpenTelemetry auto-instrumentation. The only way we're able to solve it as of now is by manually instrumenting the module, like this:
For some reason, `import-in-the-middle` (which is used by OpenTelemetry for auto-instrumentation) doesn't catch some of the langchain packages. |
Beta Was this translation helpful? Give feedback.
-
no worries, thanks for the tip. I'll try it! |
Beta Was this translation helpful? Give feedback.
-
fwiw, after trying this, I still have only one span (for the openai chat), so not multiple traces, just one with one span. |
Beta Was this translation helpful? Give feedback.
-
@codefromthecrypt weird, just ran your code and it works! Want to ping me on Slack so we can try and debug it? |
Beta Was this translation helpful? Give feedback.
-
thanks, I'll do another check on my own first, then ping you or close it out! |
Beta Was this translation helpful? Give feedback.
-
Beta Was this translation helpful? Give feedback.
@codefromthecrypt so the reason is an issue we have with OpenTelemetry auto-instrumentation. The only way we're able to solve it as of now is by manually instrumenting the module, like this:
For some reason, `import-in-the-middle` (which is used by OpenTelemetry for auto-instrumentation) doesn't catch some of the langchain packages. I'll continue investigating this.