🚀 feat: support for multiple instances and introduction of configuration file

Version 3.0.0 introduces support for running multiple bots in a single process, eliminating the need to run a separate Docker container for each LLM. Configuration management also moves from environment variables to a `config.yaml` file, which adds flexibility and removes the need to set environment variables manually. Together, these changes improve the application's scalability and ease of management.
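
For illustration, a minimal `config.yaml` in the shape of the `config-sample.yaml` added by this commit: two bot instances served by one process. All URLs, tokens, and keys below are placeholders, and the keys shown follow the sample rather than a definitive schema.

```yaml
# System-wide defaults (as in config-sample.yaml)
MATTERMOST_URL: 'https://your-mattermost-url.example.com'
OPENAI_MAX_TOKENS: 2000

bots:
  - name: '@OpenAI'                            # bot account name in Mattermost
    mattermostUrl: 'https://your-mattermost-url.example.com'
    mattermostToken: 'your-mattermost-token'
    type: 'openai'
    apiKey: 'your-openai-api-key'
    modelName: 'gpt-4o-mini'
  - name: '@Gemini'
    mattermostUrl: 'https://your-mattermost-url.example.com'
    mattermostToken: 'your-mattermost-token'
    type: 'google'
    apiKey: 'your-google-api-key'
```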
takuya-o committed Dec 29, 2024
1 parent 008cfbb commit 9575c4b
Showing 29 changed files with 3,651 additions and 2,991 deletions.
37 changes: 1 addition & 36 deletions .env-sample
@@ -1,39 +1,4 @@
MATTERMOST_URL=https://mattermost.server.example
MATTERMOST_TOKEN=abababacdcdcd
# MATTERMOST_BOTNAME=@chatgpt
PLUGINS=image-plugin

# MAX_PROMPT_TOKEN=3000
# OPENAI_MAX_TOKENS=2000
# OPENAI_TEMPERATURE=1

OPENAI_API_KEY=sk-234234234234234234
# OPENAI_MODEL_NAME=gpt-3.5-turbo
# OPENAI_IMAGE_MODEL_NAME=dall-e-2
# OPENAI_VISION_MODEL_NAME=gpt-4-vision-preview

# AZURE_OPENAI_API_KEY=0123456789abcdefghijklmno
# AZURE_OPENAI_API_INSTANCE_NAME=example-name
# AZURE_OPENAI_API_DEPLOYMENT_NAME=gpt-35-turbo
# AZURE_OPENAI_API_VERSION=2024-03-01-preview

# AZURE_OPENAI_IMAGE_API_KEY=0123456789abcdefghijklmno
# AZURE_OPENAI_IMAGE_API_INSTANCE_NAME=example-name
# AZURE_OPENAI_IMAGE_API_DEPLOYMENT_NAME=dall-e-2

# AZURE_OPENAI_VISION_API_KEY=0123456789abcdefghijklmno
# AZURE_OPENAI_VISION_API_INSTANCE_NAME=example-name
# AZURE_OPENAI_VISION_API_DEPLOYMENT_NAME=gpt-4-vision-preview

# COHERE_API_KEY=abcdefghijklmnopqrstuvw
# OPENAI_MODEL_NAME=command-r-plus
# MAX_PROMPT_TOKENS=123904
# OPENAI_MAX_TOKENS=4000

# GOOGLE_API_KEY=abcdefghijklmnopqrstuvw
# OPENAI_MODEL_NAME=gemini-1.5-pro-latest
# MAX_PROMPT_TOKENS=1048576
# OPENAI_MAX_TOKENS=8192
# CONFIG_FILE=./config.yaml

# DEBUG_LEVEL=INFO
# DEBUG_JSON=true
3 changes: 3 additions & 0 deletions .github/copilot-instructions.md
@@ -0,0 +1,3 @@
- Write newly generated comments in Japanese; however, if the original comment is in English, write it in English.
- When changing code, leave existing comments as they are.
- Omit trailing semicolons in JavaScript and TypeScript.
3 changes: 3 additions & 0 deletions .gitignore
@@ -5,6 +5,8 @@ docker-compose.override.yml

!.env-sample
.env*
!config-sample.yaml
config*.yaml
# No problem as long as envFile is used /.vscode/launch.json
/out/

@@ -124,3 +126,4 @@ $RECYCLE.BIN/
*.lnk

# End of https://www.toptal.com/developers/gitignore/api/emacs,MacOS,Windows

2 changes: 1 addition & 1 deletion Dockerfile
@@ -37,4 +37,4 @@ COPY [ "./license.md", "./" ]
# Avoid running as root:
USER nonroot

CMD [ "src/botservice.mjs" ]
CMD [ "src/MultiInstance.mjs" ]
201 changes: 135 additions & 66 deletions README.md

Large diffs are not rendered by default.

37 changes: 33 additions & 4 deletions chatgpt-mattermost-bot.code-workspace
@@ -11,13 +11,42 @@
"version": "0.2.0",
"configurations": [
{ // https://www.npmjs.com/package/tsx#debugging-method-1-run-tsx-directly-from-vscode
"name": "tsx - workspace",
"name": "tsx - v3 workspace",
"type": "node",
"request": "launch",
"envFile": "${workspaceFolder}/.env.debug",
//"envFile": "${workspaceFolder}/.env",
// Debug current file in VSCode
"program": "${workspaceFolder}/src/botservice.ts", //"${file}",
"program": "${workspaceFolder}/src/MultiInstance.ts", //"${file}",
//"stopOnEntry": true,
/*
Path to tsx binary
Assuming locally installed
*/
"runtimeExecutable": "${workspaceFolder}/node_modules/.bin/tsx",
/*
Open terminal when debugging starts (Optional)
Useful to see console.logs
*/
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen", //デバッグ セッション中の [デバッグ コンソール] パネルの表示/非表示を制御
//"outputCapture": "std", //LogがDEBUG CONSOLEに出る
// Files to exclude from debugger (e.g. call stack)
"skipFiles": [
// Node.js internal core modules
"<node_internals>/**",
// Ignore all dependencies (optional)
"${workspaceFolder}/node_modules/**",
],
},
{ // https://www.npmjs.com/package/tsx#debugging-method-1-run-tsx-directly-from-vscode
"name": "tsx - workspace",
"type": "node",
"request": "launch",
"envFile": "${workspaceFolder}/.env.debug.old",
//"envFile": "${workspaceFolder}/.env",
// Debug current file in VSCode
"program": "${workspaceFolder}/src/MultiInstance.ts", //"${file}",
//"stopOnEntry": true,
/*
Path to tsx binary
@@ -44,7 +73,7 @@
"type": "node",
"request": "launch",
"envFile": "${workspaceFolder}/.env.cohere.debug",
"program": "${workspaceFolder}/src/botservice.ts", //"${file}",
"program": "${workspaceFolder}/src/MultiInstance.ts", //"${file}",
/*
Path to tsx binary
Assuming locally installed
@@ -68,7 +97,7 @@
"type": "node",
"request": "launch",
"envFile": "${workspaceFolder}/.env.gemini.debug",
"program": "${workspaceFolder}/src/botservice.ts", //"${file}",
"program": "${workspaceFolder}/src/MultiInstance.ts", //"${file}",
/*
Path to tsx binary
Assuming locally installed
35 changes: 3 additions & 32 deletions compose.yaml
@@ -12,38 +12,9 @@ services:
env_file: .env # Required so the variables also take effect in the Dockerfile
# To set environment variables directly instead of via the .env file
# environment:
# MATTERMOST_URL: ${MATTERMOST_URL}
# MATTERMOST_TOKEN: ${MATTERMOST_TOKEN}
# MATTERMOST_BOTNAME: ${MATTERMOST_BOTNAME}
# OPENAI_API_KEY: ${OPENAI_API_KEY}
# OPENAI_MODEL_NAME: ${OPENAI_MODEL_NAME}
# OPENAI_IMAGE_MODEL_NAME: ${OPENAI_IMAGE_MODEL_NAME}
# OPENAI_VISION_MODEL_NAME: ${OPENAI_VISION_MODEL_NAME}
# OPENAI_MAX_TOKENS: ${OPENAI_MAX_TOKENS}
# MAX_PROMPT_TOKENS: ${MAX_PROMPT_TOKENS}
# PLUGINS: ${PLUGINS}
# AZURE_OPENAI_API_KEY: ${AZURE_OPENAI_API_KEY}
# AZURE_OPENAI_API_INSTANCE_NAME: ${AZURE_OPENAI_API_INSTANCE_NAME}
# AZURE_OPENAI_API_DEPLOYMENT_NAME: ${AZURE_OPENAI_API_DEPLOYMENT_NAME}
# AZURE_OPENAI_API_VERSION: ${AZURE_OPENAI_API_VERSION}
# CONFIG_FILE: ./config.yaml
# DEBUG_LEVEL: ${DEBUG_LEVEL}
# DEBUG_JSON: ${DEBUG_JSON}
# DEBUG_COLORS: ${DEBUG_COLORS}

cohere:
image: ${CI_REGISTRY_IMAGE:-gitlab.example.com/user/chatgpt-mattermost-bot}:${CI_COMMIT_REF_NAME:-local}
build:
context: .
args:
NODE_ENV: ${NODE_ENV:-production}
restart: always
env_file: .env.cohere

gemini:
image: ${CI_REGISTRY_IMAGE:-gitlab.example.com/user/chatgpt-mattermost-bot}:${CI_COMMIT_REF_NAME:-local}
build:
context: .
args:
NODE_ENV: ${NODE_ENV:-production}
restart: always
env_file: .env.gemini
volumes:
- ./config.yaml:/app/config.yaml
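
For context, a rough sketch of the single-service `compose.yaml` this change points toward: one container with `config.yaml` mounted in, instead of one service per LLM. The service name `chatbot` is a placeholder; the full service definition sits above the visible hunk.

```yaml
services:
  chatbot:   # hypothetical service name; the real one is outside the visible hunk
    image: ${CI_REGISTRY_IMAGE:-gitlab.example.com/user/chatgpt-mattermost-bot}:${CI_COMMIT_REF_NAME:-local}
    restart: always
    env_file: .env                         # still loaded for the remaining settings
    volumes:
      - ./config.yaml:/app/config.yaml     # all bot instances are defined here
```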
72 changes: 72 additions & 0 deletions config-sample.yaml
@@ -0,0 +1,72 @@
# System default settings
MATTERMOST_URL: 'https://your-mattermost-url.example.com'
BOT_CONTEXT_MSG: 100
PLUGINS: image-plugin
OPENAI_MAX_TOKENS: 2000
OPENAI_TEMPERATURE: 1
MAX_PROMPT_TOKENS: 2000

bots:
- name: '@OpenAI'
mattermostUrl: 'https://your-mattermost-url.example.com'
mattermostToken: 'your-mattermost-token'
type: 'openai'
apiKey: 'your-openai-api-key'
apiBase: 'https://api.openai.com/v1'
modelName: 'gpt-4o-mini'
visionModelName: 'gpt-4-vision-preview'
imageModelName: 'dall-e-3'
maxTokens: 16384
temperature: 1
maxPromptTokens: 123904
plugins: 'image-plugin'
- name: '@ChatGPT'
mattermostUrl: 'https://your-mattermost-url.example.com'
mattermostToken: 'your-mattermost-token'
type: 'azure'
apiKey: 'your-azure-openai-api-key'
apiVersion: '2024-10-21'
instanceName: 'your-azure-instance-name'
deploymentName: 'gpt-4o-mini'
visionKey: 'your-azure-openai-vision-key'
visionInstanceName: 'your-azure-vision-instance-name'
visionDeploymentName: 'gpt-4v'
imageKey: 'your-azure-openai-image-key'
imageInstanceName: 'your-azure-image-instance-name'
imageDeploymentName: 'dall-e-3'
maxTokens: 16384
temperature: 1
maxPromptTokens: 123904
plugins: 'image-plugin'
- name: '@Gemini'
mattermostUrl: 'https://your-mattermost-url.example.com'
mattermostToken: 'your-mattermost-token'
type: 'google'
apiKey: 'your-google-api-key'
imageModelName: 'dall-e-3'
maxTokens: 8192
temperature: 1
maxPromptTokens: 1048576
plugins: ''
- name: '@Cohere'
mattermostUrl: 'https://your-mattermost-url.example.com'
mattermostToken: 'your-mattermost-token'
type: 'cohere'
apiKey: 'your-cohere-api-key'
imageModelName: 'dall-e-3'
maxTokens: 4000
temperature: 1
maxPromptTokens: 123904
plugins: ''
- name: '@Anthropic'
mattermostUrl: 'https://your-mattermost-url.example.com'
mattermostToken: 'your-mattermost-token'
type: 'anthropic'
apiKey: 'your-anthropic-api-key'
imageModelName: 'dall-e-3'
maxTokens: 4096
temperature: 1
maxPromptTokens: 123904
plugins: ''
# Bot instructions
BOT_INSTRUCTION: "You are a helpful assistant. Whenever users ask you for help, you will provide them with succinct answers formatted using Markdown. You know the user's name as it is provided within the metadata of the messages."