Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion examples/telegram-bot/.env.example
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# OpenAI API Key
OPENAI_API_KEY=
#OPENAI_API_KEY=

# Polkadot RPC Endpoint
WS_ENDPOINT=
Expand All @@ -10,4 +10,6 @@ PRIVATE_KEY=
# Telegram Bot Token
TELEGRAM_BOT_TOKEN=

#


4 changes: 3 additions & 1 deletion examples/telegram-bot/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@
"telegraf": "^4.16.3",
"telegraf-safe-md-reply": "^1.0.0",
"@langchain/core": "^0.3.40",
"@langchain/openai": "^0.3.17"
"zod": "^3.22.0",
"@langchain/ollama":"^0.2.2",
"@langchain/openai":"^0.5.12"
},
"devDependencies": {
"@types/jest": "^29.5.14",
Expand Down
42 changes: 24 additions & 18 deletions examples/telegram-bot/src/TelegramBot.ts
Original file line number Diff line number Diff line change
@@ -1,31 +1,40 @@
import { Telegraf } from 'telegraf';
import { ChatOpenAI } from '@langchain/openai';
import { Tool } from '@langchain/core/tools';
import { setupHandlers } from './handlers';
import { PolkadotAgentKit } from '@polkadot-agent-kit/sdk';
import { getChainByName, KnownChainId, getAllSupportedChains } from '@polkadot-agent-kit/common';
import { ChatModelFactory, ChatModelOptions, ChatModelWithTools } from './models';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';


interface BotConfig {
botToken: string;
openAiApiKey?: string;
privateKey?: string;
// delegatePrivateKey?: string;
// chains: { url: string; name: string; apiKey: string; type: 'RelayChain' | 'ParaChain'; paraId?: number }[];
}



export class TelegramBot {
private bot: Telegraf;
private agent: PolkadotAgentKit;
private llm: ChatOpenAI;
private llm: ChatModelWithTools;


/**
 * Choose and build the chat model for this bot.
 *
 * Uses OpenAI's 'gpt-4o-mini' when an API key is supplied, otherwise
 * falls back to a local Ollama model ('qwen3:latest').
 *
 * NOTE(review): the key's value is only used as a presence flag here —
 * ChatModelFactory reads OPENAI_API_KEY from the environment itself.
 * Confirm the env var is set whenever a key is passed via config.
 */
private initializeLLM(openAiApiKey?: string): ChatModelWithTools {
  // `options` is explicitly typed, so the literals need no `as const`.
  const options: ChatModelOptions = {
    provider: openAiApiKey ? 'openai' : 'ollama',
    modelName: openAiApiKey ? 'gpt-4o-mini' : 'qwen3:latest',
    temperature: 0.7,
    verbose: false,
  };
  return ChatModelFactory.create(options);
}

constructor(config: BotConfig) {
const {
botToken,
openAiApiKey,
privateKey,
// delegatePrivateKey,
// chains,
} = config;

if (!botToken) {
Expand All @@ -34,29 +43,26 @@ export class TelegramBot {

this.bot = new Telegraf(botToken);

this.agent = new PolkadotAgentKit(privateKey as string, {keyType: 'Sr25519'});
this.agent = new PolkadotAgentKit(privateKey as string, { keyType: 'Sr25519' });


this.llm = this.initializeLLM(openAiApiKey);

this.llm = new ChatOpenAI({
modelName: 'gpt-4',
temperature: 0.7,
openAIApiKey: openAiApiKey,
streaming: true,
});
}

async initialize() {
console.log("Initializing bot...");

try {
// Initialize APIs first
await this.agent.initializeApi();

// Set up tools
// Get balance of agent account
const checkBalance = this.agent.getNativeBalanceTool();
// Transfer native tokens to a recipient address on a specific chain.
const transferNative = this.agent.transferNativeTool();

setupHandlers(this.bot, this.llm, {
checkBalance: checkBalance,
transferNative: transferNative,
Expand All @@ -77,7 +83,7 @@ export class TelegramBot {
await this.initialize();
await this.bot.launch();
console.log('Bot is running!');

} catch (error) {
console.error('Failed to start bot:', error);
throw error;
Expand Down
15 changes: 9 additions & 6 deletions examples/telegram-bot/src/handlers.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
import { Telegraf } from 'telegraf';
import { HumanMessage, SystemMessage } from '@langchain/core/messages';
import { ChatOpenAI } from '@langchain/openai';
import { DynamicStructuredTool, Tool } from '@langchain/core/tools';
import { ChatModelWithTools } from './models';




const SYSTEM_PROMPT = `I am a Telegram bot powered by PolkadotAgentKit. I can assist you with:
- Transferring native tokens on specific chain (e.g., "transfer 1 WND to 5CSox4ZSN4SGLKUG9NYPtfVK9sByXLtxP4hmoF4UgkM4jgDJ on westend_asset_hub")
Expand All @@ -24,7 +27,7 @@ Please provide instructions, and I will assist you!`;

export function setupHandlers(
bot: Telegraf,
llm: ChatOpenAI,
llm: ChatModelWithTools,
toolsByName: Record<string, DynamicStructuredTool>,
): void {

Expand All @@ -42,11 +45,11 @@ export function setupHandlers(

bot.on('text', async (ctx) => {
const message = ctx.message.text;



if (message.startsWith('/')) return;

try {

const llmWithTools = llm.bindTools(Object.values(toolsByName));
const messages = [
new SystemMessage({ content: SYSTEM_PROMPT }),
Expand All @@ -64,11 +67,11 @@ export function setupHandlers(
return;
}
const response = JSON.parse(toolMessage.content || '{}');

if (response.error) {
await ctx.reply(`Error: ${response.message}`);
} else {
await ctx.reply(response.message || response.content || 'No message from tool.');
const content = JSON.parse(response.content || '{}');
await ctx.reply(content.data || 'No message from tool.');
}
} else {
console.warn(`Tool not found: ${toolCall.name}`);
Expand Down
2 changes: 1 addition & 1 deletion examples/telegram-bot/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@ import * as dotenv from 'dotenv';
dotenv.config();

async function runBot() {

const bot = new TelegramBot({
botToken: process.env.TELEGRAM_BOT_TOKEN!,
openAiApiKey: process.env.OPENAI_API_KEY!,
privateKey: process.env.PRIVATE_KEY!,
// delegatePrivateKey: process.env.DELEGATE_PRIVATE_KEY!,
});

await bot.start();
Expand Down
48 changes: 48 additions & 0 deletions examples/telegram-bot/src/models.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatOpenAI } from '@langchain/openai';
import { ChatOllama } from '@langchain/ollama';

/** Supported LLM backends for the Telegram bot. */
export type ChatProvider = 'ollama' | 'openai';

/**
 * A chat model guaranteed to expose `bindTools`.
 * NOTE(review): `any[]`/`any` erases type safety around tool binding —
 * presumably used because `BaseChatModel.bindTools` is optional in
 * LangChain; consider tightening once that surface is confirmed.
 */
export type ChatModelWithTools = BaseChatModel & {
bindTools: (tools: any[]) => any;
};



/** Configuration consumed by ChatModelFactory.create. */
export interface ChatModelOptions {
// Which backend to instantiate.
provider: ChatProvider;
// Sampling temperature; the provider constructors default it to 0.7.
temperature?: number;
// Model identifier; a backend-specific default is applied when omitted.
modelName?: string;
// Forwarded verbatim to the underlying LangChain model.
verbose?: boolean;
}


/**
 * Provider-specific model constructors, keyed by {@link ChatProvider}.
 *
 * Defaults: OpenAI → 'gpt-4o-mini' (streaming enabled), Ollama → 'llama3'.
 * Each entry accepts the shared {@link ChatModelOptions} and returns a
 * model that supports tool binding.
 */
const chatModelConstructors: Record<ChatProvider, (options: ChatModelOptions) => ChatModelWithTools> = {
  openai: ({ modelName, temperature = 0.7, verbose = false }) =>
    new ChatOpenAI({
      modelName: modelName ?? 'gpt-4o-mini',
      temperature,
      streaming: true,
      // No non-null assertion here: the key may legitimately be absent
      // (.env.example ships with it commented out). When undefined,
      // ChatOpenAI falls back to its own environment lookup and raises
      // a clear error if no key can be found.
      openAIApiKey: process.env.OPENAI_API_KEY,
      verbose,
    }),
  ollama: ({ modelName, temperature = 0.7, verbose = false }) =>
    new ChatOllama({
      model: modelName ?? 'llama3',
      temperature,
      verbose,
    }),
};

/**
 * Factory for chat models: dispatches to the constructor registered for
 * the requested provider in `chatModelConstructors`.
 */
export class ChatModelFactory {
  /**
   * Build a chat model from the given options.
   * @throws Error when `options.provider` has no registered constructor.
   */
  static create(options: ChatModelOptions): ChatModelWithTools {
    const { provider } = options;
    const build = chatModelConstructors[provider];
    if (build === undefined) {
      throw new Error(`Unsupported provider: ${provider}`);
    }
    return build(options);
  }
}

2 changes: 0 additions & 2 deletions packages/llm/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,8 @@
"dependencies": {
"@polkadot-agent-kit/common": "workspace:*",
"@polkadot-agent-kit/core": "workspace:*",
"@langchain/community": "^0.3.19",
"@langchain/core": "^0.3.40",
"@langchain/langgraph": "^0.2.33",
"@langchain/openai": "^0.3.17",
"@noble/curves": "^1.6.0",
"@polkadot-labs/hdkd": "^0.0.13",
"@polkadot-labs/hdkd-helpers": "^0.0.13",
Expand Down
Loading