Skip to content

添加一个基于OpenAI SDK的聊天插件,支持上下文,内置了跑团导向的prompt #48

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 37 additions & 0 deletions scripts/娱乐向/aichat/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# 基于OpenAI SDK的聊天插件
## 支持/Supported
1. 接入所有支持OpenAI SDK的大模型开放平台,如OpenAI、Deepseek、Kimi、豆包等
2. 支持分群分用户的上下文记忆,且轮数可控
3. 预置了prompt,确保聊天内容更偏向跑团,如需修改prompt请自行替换 aichat.py 的以下内容,其中 `{botname}` 是骰娘昵称,`{nickname}` 是用户昵称:
```
你是{botname},一个可爱、聪明的AI女孩,专门辅助TRPG跑团,特别擅长CoC(克苏鲁的呼唤)和DND(龙与地下城)。你的任务是帮助{nickname}查询规则、整理人物卡、提供战术建议,并适当参与互动,但不会干涉跑团剧情或替代 DM/KP 的角色。请用可爱、温柔且略带学者气质的语气回答{nickname}的问题。
```

## 需求/Required
1. OpenAI或其他平台的API Key(注意调用API需要付费)
2. 如需在国内调用OpenAI(即ChatGPT),请自行搭建代理
3. Python3.9及以上环境

## 配置/Config
将项目内 aichat.js 以外的内容放置于海豹同一设备的同一目录下,然后执行:
```shell
pip3 install -r requirements.txt
```
而后配置 config.yaml,具体内容已写在配置文件内。

最后执行以下命令:
```shell
python3 aichat.py
```
此时即在本地的13211端口开放一个与AI交互的API服务。

如需后台运行请执行(Linux):
```shell
nohup python3 aichat.py >/dev/null 2>&1 &
```
然后将 aichat.js 上传到海豹即可。

## 用法/Usage
.ask 无上下文的单轮AI对话
.chat 有上下文的AI对话
.aiclear 清除上下文记忆
144 changes: 144 additions & 0 deletions scripts/娱乐向/aichat/aichat.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
// ==UserScript==
// @name         基于OpenAI SDK的聊天插件
// @author 梓漪
// @version 1.0.0
// @description 接入AI模型,和Bot普普通通地聊天
// @timestamp 1739102145
// 2025-02-09
// @license MIT License
// @homepageURL https://github.com/sealdice/javascript
// ==/UserScript==

// Bridge to the local Python API service on 127.0.0.1:13211.
// `route` selects the endpoint (ask / chat / aiclear), `dict` carries the
// request fields (missing ones are filled with defaults), and `callback`
// receives `{ err, msg }`: err is 0 on success, the backend's error code on a
// handled failure, or 504 when the HTTP request itself fails.
const callAI = async (route, dict, callback) => {
    const payload = {
        user_id: dict.user_id,
        group_id: dict.group_id || null,
        bot_id: dict.bot_id || '10001',
        nickname: dict.nickname || '用户',
        botname: dict.botname || '海豹',
        content: dict.content,
        img_url: dict.img_url || null
    };
    try {
        const response = await fetch(`http://127.0.0.1:13211/${route}`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(payload)
        });
        const data = await response.json();
        // Backend convention: code 200 means success, anything else carries
        // an error message in `message`.
        callback({ err: data.code === 200 ? 0 : data.code, msg: data.message });
    } catch (error) {
        console.error('Error: ', error);
        callback({ err: 504, msg: "呜...AI好像出了点问题,等会再试试吧" });
    }
};

if (!seal.ext.find('aichat')) {
const ext = seal.ext.new('aichat', '梓漪', '1.0.0');

const cmdAsk = seal.ext.newCmdItemInfo();
cmdAsk.name = 'ask';
cmdAsk.help = '无上下文的AI聊天,用法:.ask <内容>';

cmdAsk.solve = (ctx, msg, cmdArgs) => {
let val = cmdArgs.getArgN(1);
switch (val) {
case 'help': {
const ret = seal.ext.newCmdExecuteResult(true);
ret.showHelp = true;
return ret;
}
default: {
let atSender = ctx.isPrivate ? '' : `对<${ctx.player.name}>:`;
if (!val) {
seal.replyToSender(ctx, msg, `${atSender}内容不能为空!`);
return seal.ext.newCmdExecuteResult(true);
}
let dict = {
nickname: ctx.player.name,
botname: ctx.endPoint.nickname,
content: val,
}
seal.replyToSender(ctx, msg, `${atSender}${ctx.endPoint.nickname}正在思考中...`);
callAI('ask', dict, (result) => {
seal.replyToSender(ctx, msg, atSender+result.msg);
});
return seal.ext.newCmdExecuteResult(true);
}
}
}

const cmdChat = seal.ext.newCmdItemInfo();
cmdChat.name = 'chat';
cmdChat.help = '有上下文的AI聊天,用法:.chat <内容>';

cmdChat.solve = (ctx, msg, cmdArgs) => {
let val = cmdArgs.getArgN(1);
switch (val) {
case 'help': {
const ret = seal.ext.newCmdExecuteResult(true);
ret.showHelp = true;
return ret;
}
default: {
let atSender = ctx.isPrivate ? '' : `对<${ctx.player.name}>:`;
if (!val) {
seal.replyToSender(ctx, msg, `${atSender}内容不能为空!`);
return seal.ext.newCmdExecuteResult(true);
}
let dict = {
user_id: ctx.player.userId,
group_id: ctx.isPrivate ? null : ctx.group.groupId,
bot_id: ctx.endPoint.userId,
nickname: ctx.player.name,
botname: ctx.endPoint.nickname,
content: val,
}
seal.replyToSender(ctx, msg, `${atSender}${ctx.endPoint.nickname}正在思考中...`);
callAI('chat', dict, (result) => {
seal.replyToSender(ctx, msg, atSender+result.msg);
});
return seal.ext.newCmdExecuteResult(true);
}
}
}

const cmdAIClear = seal.ext.newCmdItemInfo();
cmdAIClear.name = 'aiclear';
cmdAIClear.help = '清除AI聊天记录,用法:.aiclear';

cmdAIClear.solve = (ctx, msg, cmdArgs) => {
let val = cmdArgs.getArgN(1);
switch (val) {
case 'help': {
const ret = seal.ext.newCmdExecuteResult(true);
ret.showHelp = true;
return ret;
}
default: {
let atSender = ctx.isPrivate ? '' : `对<${ctx.player.name}>:`;
let dict = {
user_id: ctx.player.userId,
group_id: ctx.isPrivate ? null : ctx.group.groupId,
bot_id: ctx.endPoint.userId,
}
callAI('aiclear', dict, (result) => {
seal.replyToSender(ctx, msg, atSender+result.msg);
});
return seal.ext.newCmdExecuteResult(true);
}
}
}

ext.cmdMap['ask'] = cmdAsk;
ext.cmdMap['chat'] = cmdChat;
ext.cmdMap['aiclear'] = cmdAIClear;

seal.ext.register(ext);
}
196 changes: 196 additions & 0 deletions scripts/娱乐向/aichat/aichat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,196 @@
import base64
import httpx
import asyncio
import yaml

from flask import Flask, request, jsonify
from openai import AsyncOpenAI

# Flask application serving the local AI bridge API consumed by aichat.js.
app = Flask(__name__)

class Config():
    """Plugin settings loaded from a YAML file.

    Missing optional keys fall back to the defaults below; unknown keys
    are ignored.
    """

    def __init__(self, config_path: str):
        with open(config_path, 'r', encoding='utf-8') as file:
            # safe_load returns None for an empty file; substitute an empty
            # mapping so the .get() calls below don't raise AttributeError.
            config = yaml.safe_load(file) or {}
        # Required: API key for OpenAI or a OneAPI-compatible relay.
        self.oneapi_key = config.get('oneapi_key')
        # Optional relay base URL; None means the official OpenAI endpoint.
        self.oneapi_url = config.get('oneapi_url', None)
        # Optional model name passed straight through to the completion call.
        self.oneapi_model = config.get('oneapi_model', None)
        # Generation / context limits and the request timeout in seconds.
        self.max_tokens = config.get('max_tokens', 150)
        self.max_ctx = config.get('max_ctx', 15)
        self.timeout = config.get('timeout', 60)

# Load settings once at import time; config.yaml must sit next to this file.
plugin_config = Config('config.yaml')

# Use the custom base URL when a relay endpoint is configured, otherwise
# fall back to the official OpenAI endpoint.
if plugin_config.oneapi_url:
    client = AsyncOpenAI(api_key=plugin_config.oneapi_key, base_url=plugin_config.oneapi_url)
else:
    client = AsyncOpenAI(api_key=plugin_config.oneapi_key)

# Module-level shortcuts shared by the route handlers below.
model_id = plugin_config.oneapi_model
max_tokens = plugin_config.max_tokens
max_ctx = plugin_config.max_ctx
timeout = plugin_config.timeout

# In-memory chat history keyed by "{bot}_{group}_{user}" (or "{bot}_Private_{user}").
# NOTE(review): not thread-safe and lost on restart — presumably acceptable
# for a single-worker local service; confirm gunicorn runs one worker.
session = {}

@app.route('/chat', methods=['POST'])
def chat():
    """Multi-turn chat endpoint with per-(bot, group, user) context memory.

    Expects JSON with required ``user_id``, ``bot_id`` and ``content``;
    optional ``group_id``, ``nickname``, ``botname`` and ``img_url``.
    History lives in the module-level ``session`` dict and is trimmed to
    roughly ``max_ctx`` exchanges.  Returns ``{"code", "message"}``.
    """
    data = request.json
    user_id = data.get('user_id', None)
    group_id = data.get('group_id', None)
    bot_id = data.get('bot_id', None)
    nickname = data.get('nickname', "用户")
    botname = data.get('botname', "海豹")
    content = data.get('content', None)
    img_url = data.get('img_url', None)

    if not content:
        return jsonify({"code": 400, "message": "内容不能为空!"}), 400

    if not user_id:
        return jsonify({"code": 400, "message": "用户ID不能为空!"}), 400

    if not bot_id:
        return jsonify({"code": 400, "message": "机器人ID不能为空!"}), 400

    # The system prompt is rebuilt from the latest nickname/botname so that
    # renames are picked up whenever the history is (re-)seeded below.
    system_prompt = f"你是{botname},一个可爱、聪明的AI女孩,专门辅助TRPG跑团,特别擅长CoC(克苏鲁的呼唤)和DND(龙与地下城)。你的任务是帮助{nickname}查询规则、整理人物卡、提供战术建议,并适当参与互动,但不会干涉跑团剧情或替代 DM/KP 的角色。请用可爱、温柔且略带学者气质的语气回答{nickname}的问题。"

    session_id = f"{bot_id}_{group_id}_{user_id}" if group_id else f"{bot_id}_Private_{user_id}"
    if session_id not in session:
        session[session_id] = []
        session[session_id].append({"role": "system", "content": system_prompt})

    # Trim: drop the system prompt plus the oldest user/assistant pair, then
    # re-insert a fresh system prompt at the front.
    if max_ctx > 0 and len(session[session_id]) >= max_ctx*2+1:
        session[session_id] = session[session_id][3:]
        session[session_id].insert(0, {"role": "system", "content": system_prompt})

    async def get_response():
        # `client` is AsyncOpenAI, so the completion call must be awaited.
        return await client.chat.completions.create(
            model=model_id,
            messages=session[session_id],
            max_tokens=max_tokens,
            temperature=1.0,
            stream=False,
            timeout=timeout
        )

    # `model_id` may be None (oneapi_model is optional in config.yaml);
    # guard the substring checks so they don't raise TypeError.
    text_only_model = "moonshot" in (model_id or "") or "deepseek" in (model_id or "")

    if not img_url or text_only_model:
        try:
            session[session_id].append({"role": "user", "content": content})
            response = asyncio.run(get_response())
        except Exception as error:
            if "429" in str(error) or "503" in str(error):
                return jsonify({"code": 503, "message": f"抱歉!出错了!{botname}有些处理不过来消息了...请稍后再试~"}), 503
            return jsonify({"code": 500, "message": str(error)}), 500
    else:
        try:
            # Download the image and inline it as a base64 data URL.
            image_data = base64.b64encode(httpx.get(img_url, timeout=60).content).decode("utf-8")
            session[session_id].append(
                {
                    "role": "user",
                    "content": [
                        {"type": "text", "text": content},
                        {
                            "type": "image_url",
                            "image_url": {"url": f"data:image/png;base64,{image_data}"},
                        },
                    ],
                }
            )
            # Bug fix: the original called the async client without awaiting,
            # so `response` was a coroutine and `.choices` raised.
            response = asyncio.run(get_response())
        except Exception as error:
            if "429" in str(error) or "503" in str(error):
                return jsonify({"code": 503, "message": f"抱歉!出错了!{botname}有些处理不过来消息了...请稍后再试~"}), 503
            return jsonify({"code": 500, "message": str(error)}), 500

    # Bug fix: the original image branch never recorded the assistant reply,
    # dropping it from later context.
    session[session_id].append({"role": "assistant", "content": response.choices[0].message.content})
    return jsonify({"code": 200, "message": response.choices[0].message.content}), 200

@app.route('/ask', methods=['POST'])
def ask():
    """Single-turn (stateless) chat endpoint.

    Expects JSON with required ``content``; optional ``nickname``,
    ``botname`` and ``img_url``.  No history is stored.  Returns
    ``{"code", "message"}``.
    """
    data = request.json
    nickname = data.get('nickname', "用户")
    botname = data.get('botname', "海豹")
    content = data.get('content', None)
    img_url = data.get('img_url', None)

    if not content:
        return jsonify({"code": 400, "message": "内容不能为空!"}), 400

    system_prompt = f"你是{botname},一个可爱、聪明的AI女孩,专门辅助TRPG跑团,特别擅长CoC(克苏鲁的呼唤)和DND(龙与地下城)。你的任务是帮助{nickname}查询规则、整理人物卡、提供战术建议,并适当参与互动,但不会干涉跑团剧情或替代 DM/KP 的角色。请用可爱、温柔且略带学者气质的语气回答{nickname}的问题。"

    # `model_id` may be None (oneapi_model is optional in config.yaml);
    # guard the substring checks so they don't raise TypeError.
    if not img_url or "moonshot" in (model_id or "") or "deepseek" in (model_id or ""):
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": content},
        ]
    else:
        try:
            # Download the image and inline it as a base64 data URL.
            image_data = base64.b64encode(httpx.get(img_url, timeout=60).content).decode("utf-8")
        except Exception as error:
            return jsonify({"code": 500, "message": str(error)}), 500
        messages = [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": content},
                    {
                        "type": "image_url",
                        "image_url": {"url": f"data:image/png;base64,{image_data}"},
                    },
                ],
            }
        ]

    async def get_response():
        # Bug fix: `client` is AsyncOpenAI — the original image branch called
        # it without awaiting, returning a coroutine instead of a completion.
        return await client.chat.completions.create(
            model=model_id,
            messages=messages,
            max_tokens=max_tokens,
            temperature=1.0,
            stream=False,
            timeout=timeout
        )

    try:
        response = asyncio.run(get_response())
    except Exception as error:
        if "429" in str(error) or "503" in str(error):
            return jsonify({"code": 503, "message": f"抱歉!出错了!{botname}有些处理不过来消息了...请稍后再试~"}), 503
        return jsonify({"code": 500, "message": str(error)}), 500
    return jsonify({"code": 200, "message": response.choices[0].message.content}), 200

@app.route('/aiclear', methods=['POST'])
def aiclear():
    """Drop the stored conversation history for one (bot, group, user) tuple.

    Succeeds whether or not a session exists for the given ids.
    """
    payload = request.json
    uid = payload.get('user_id', None)
    gid = payload.get('group_id', None)
    bid = payload.get('bot_id', None)

    if not uid:
        return jsonify({"code": 400, "message": "用户ID不能为空!"}), 400

    if not bid:
        return jsonify({"code": 400, "message": "机器人ID不能为空!"}), 400

    # Same key scheme as /chat; a missing group id marks a private session.
    key = f"{bid}_{gid}_{uid}" if gid else f"{bid}_Private_{uid}"
    session.pop(key, None)
    return jsonify({"code": 200, "message": "成功清除历史记录!"}), 200

if __name__ == '__main__':
    # Serve through gunicorn bound to localhost only; the worker --timeout
    # mirrors the model timeout so long generations aren't killed mid-request.
    from gunicorn.app.wsgiapp import run
    import sys
    sys.argv = ['gunicorn', '-b', '127.0.0.1:13211', '--timeout', str(timeout), 'aichat:app']
    run()
6 changes: 6 additions & 0 deletions scripts/娱乐向/aichat/config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
oneapi_key: "" # (必填)OpenAI或者是支持OneAPI的大模型中转服务商提供的KEY
oneapi_url: https://api.deepseek.com # (可选)大模型中转服务商提供的中转地址,使用OpenAI官方服务不需要填写
oneapi_model: deepseek-chat # (可选)使用的语言大模型,使用识图功能请填写合适的大模型名称
max_tokens: 150 # (可选)生成的最大tokens数量
max_ctx: 15 # (可选)上下文的最大轮数,超过该轮数会删除最旧的对话,设置为0表示不限制
timeout: 60 # (可选)请求AI模型的超时时间
5 changes: 5 additions & 0 deletions scripts/娱乐向/aichat/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
Flask==3.1.0
gunicorn==23.0.0
httpx==0.28.1
openai==1.61.1
PyYAML==6.0.2