
Commit e22d58c

added openrouter as a provider
1 parent fdba838 commit e22d58c

6 files changed: +315 −4

README.md

+2 −2

```diff
@@ -2,7 +2,7 @@

 Milla is an IRC bot that:

-- sends things over to an LLM when you ask it questions and prints the answer with optional syntax-highlighting. Currently supported providers: Ollama, Openai, Gemini <br/>
+- sends things over to an LLM when you ask it questions and prints the answer with optional syntax-highlighting. Currently supported providers: Ollama, Openai, Gemini, Openrouter <br/>
 - Milla can run more than one instance of itself
 - Each instance can connect to a different ircd, and will get the full set of configs, e.g. different proxies, different postgres instance, ...
 - You can define custom commands in the form of SQL queries to the database with the SQL query result being passed to the bot along with the given prompt and an optional limit so you don't go bankrupt (unless you are running ollama locally like the smart cookie that you are).<br/>
@@ -45,7 +45,7 @@ The SASL username.

 The SASL password for SASL plain authentication. Can also be passed as an environment variable.

-#### ollamaEndpoint
+#### Endpoint

 The address for the Ollama chat endpoint.
```
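Since the renamed Endpoint key now serves every provider (see the main.go changes below), a small illustrative sketch of the values it might take follows. The URLs are the providers' publicly documented defaults and the provider names are only assumed to match the switch cases in main.go; none of this is taken from milla's docs.

```go
// Illustrative only: typical Endpoint values per provider. An empty string for
// the OpenAI-backed provider keeps go-openai's default BaseURL, matching the
// `if appConfig.Endpoint != ""` check added to DoChatGPTRequest in this commit.
package main

import "fmt"

func main() {
	endpoints := map[string]string{
		"ollama":     "http://localhost:11434/api/chat",               // Ollama's default chat API
		"chatgpt":    "",                                              // keep the default OpenAI BaseURL
		"openrouter": "https://openrouter.ai/api/v1/chat/completions", // OpenRouter's OpenAI-compatible route
	}

	for provider, endpoint := range endpoints {
		fmt.Printf("%s -> %q\n", provider, endpoint)
	}
}
```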

main.go

+37 −1

```diff
@@ -405,6 +405,27 @@ func handleCustomCommand(
 		if result != "" {
 			sendToIRC(client, event, result, appConfig.ChromaFormatter)
 		}
+	case "openrouter":
+		var memory []MemoryElement
+
+		for _, log := range logs {
+			memory = append(memory, MemoryElement{
+				Role:    "user",
+				Content: log.Log,
+			})
+		}
+
+		for _, customContext := range customCommand.Context {
+			memory = append(memory, MemoryElement{
+				Role:    "user",
+				Content: customContext,
+			})
+		}
+
+		result := ORRequestProcessor(appConfig, client, event, &memory, customCommand.Prompt)
+		if result != "" {
+			sendToIRC(client, event, result, appConfig.ChromaFormatter)
+		}
 	default:
 	}
 }
@@ -681,7 +702,7 @@ func DoOllamaRequest(
 	ctx, cancel := context.WithTimeout(context.Background(), time.Duration(appConfig.RequestTimeout)*time.Second)
 	defer cancel()

-	request, err := http.NewRequest(http.MethodPost, appConfig.OllamaEndpoint, bytes.NewBuffer(jsonPayload))
+	request, err := http.NewRequest(http.MethodPost, appConfig.Endpoint, bytes.NewBuffer(jsonPayload))
 	if err != nil {

 		return "", err
@@ -1011,6 +1032,10 @@ func DoChatGPTRequest(

 	config := openai.DefaultConfig(appConfig.Apikey)
 	config.HTTPClient = &httpClient
+	if appConfig.Endpoint != "" {
+		config.BaseURL = appConfig.Endpoint
+		log.Print(config.BaseURL)
+	}

 	gptClient := openai.NewClientWithConfig(config)

@@ -1264,6 +1289,8 @@ func runIRC(appConfig TomlConfig) {

 	var GPTMemory []openai.ChatCompletionMessage

+	var ORMemory []MemoryElement
+
 	poolChan := make(chan *pgxpool.Pool, 1)

 	irc := girc.New(girc.Config{
@@ -1363,6 +1390,15 @@ func runIRC(appConfig TomlConfig) {
 		}

 		ChatGPTHandler(irc, &appConfig, &GPTMemory)
+	case "openrouter":
+		for _, context := range appConfig.Context {
+			ORMemory = append(ORMemory, MemoryElement{
+				Role:    "user",
+				Content: context,
+			})
+		}
+
+		ORHandler(irc, &appConfig, &ORMemory)
 	}

 	go LoadAllPlugins(&appConfig, irc)
```
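The config.BaseURL override added to DoChatGPTRequest means the existing chatgpt provider can also be pointed at any OpenAI-compatible endpoint. Below is a minimal, self-contained sketch of that pattern with sashabaranov/go-openai; the base URL and model slug are illustrative assumptions, not values taken from milla.

```go
package main

import (
	"context"
	"fmt"
	"log"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	config := openai.DefaultConfig("YOUR_API_KEY")
	// Same idea as the new `if appConfig.Endpoint != ""` branch above:
	// overriding BaseURL redirects the client to an OpenAI-compatible API
	// (OpenRouter's base URL is assumed here).
	config.BaseURL = "https://openrouter.ai/api/v1"

	client := openai.NewClientWithConfig(config)

	resp, err := client.CreateChatCompletion(context.Background(), openai.ChatCompletionRequest{
		Model: "mistralai/mistral-7b-instruct", // hypothetical model slug
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "hello"},
		},
	})
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(resp.Choices[0].Message.Content)
}
```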

makefile

+30 (new file)

```makefile
.PHONY: d_test d_deploy d_down d_build help

IMAGE_NAME=milla

d_test:
	nq docker compose -f ./docker-compose-devi.yaml up --build

d_deploy:
	nq docker compose -f ./docker-compose.yaml up --build

d_down:
	docker compose -f ./docker-compose.yaml down
	docker compose -f ./docker-compose-devi.yaml down

d_build: d_build_distroless_vendored

d_build_regular:
	docker build -t $(IMAGE_NAME) -f ./Dockerfile .

d_build_distroless:
	docker build -t $(IMAGE_NAME) -f ./Dockerfile_distroless .

d_build_distroless_vendored:
	docker build -t $(IMAGE_NAME) -f ./Dockerfile_distroless_vendored .

help:
	@echo "d_test"
	@echo "d_deploy"
	@echo "d_down"
	@echo "d_build"
```

openrouter.go

+200 (new file)

```go
package main

import (
	"bytes"
	"context"
	"encoding/json"
	"log"
	"net"
	"net/http"
	"net/url"
	"strings"
	"time"

	"github.com/alecthomas/chroma/v2/quick"
	"github.com/lrstanley/girc"
	"golang.org/x/net/proxy"
)

// DoORRequest sends the conversation memory plus the new prompt to the
// OpenRouter chat endpoint and returns the concatenated choice contents.
func DoORRequest(
	appConfig *TomlConfig,
	memory *[]MemoryElement,
	prompt string,
) (string, error) {
	var jsonPayload []byte

	var err error

	memoryElement := MemoryElement{
		Role:    "user",
		Content: prompt,
	}

	if len(*memory) > appConfig.MemoryLimit {
		*memory = []MemoryElement{}

		for _, context := range appConfig.Context {
			*memory = append(*memory, MemoryElement{
				Role:    "assistant",
				Content: context,
			})
		}
	}

	*memory = append(*memory, memoryElement)

	// OpenRouter accepts the same {model, messages} payload shape, so the
	// Ollama chat request struct is reused here.
	ollamaRequest := OllamaChatRequest{
		Model:    appConfig.Model,
		Messages: *memory,
	}

	jsonPayload, err = json.Marshal(ollamaRequest)
	if err != nil {

		return "", err
	}

	log.Printf("json payload: %s", string(jsonPayload))

	ctx, cancel := context.WithTimeout(context.Background(), time.Duration(appConfig.RequestTimeout)*time.Second)
	defer cancel()

	request, err := http.NewRequest(http.MethodPost, appConfig.Endpoint, bytes.NewBuffer(jsonPayload))
	if err != nil {

		return "", err
	}

	request = request.WithContext(ctx)
	request.Header.Set("content-type", "application/json")
	request.Header.Set("Authorization", "Bearer "+appConfig.Apikey)

	var httpClient http.Client

	var dialer proxy.Dialer

	if appConfig.LLMProxy != "" {
		proxyURL, err := url.Parse(appConfig.LLMProxy)
		if err != nil {
			cancel()

			log.Fatal(err.Error())
		}

		dialer, err = proxy.FromURL(proxyURL, &net.Dialer{Timeout: time.Duration(appConfig.RequestTimeout) * time.Second})
		if err != nil {
			cancel()

			log.Fatal(err.Error())
		}

		httpClient = http.Client{
			Transport: &http.Transport{
				Dial: dialer.Dial,
			},
		}
	}
	response, err := httpClient.Do(request)

	if err != nil {
		return "", err
	}

	defer response.Body.Close()

	log.Println("response body:", response.Body)

	var orresponse ORResponse

	err = json.NewDecoder(response.Body).Decode(&orresponse)
	if err != nil {
		return "", err
	}

	var result string

	for _, choice := range orresponse.Choices {
		result += choice.Message.Content + "\n"
	}

	return result, nil
}

// ORRequestProcessor runs an OpenRouter request, appends the assistant reply
// to memory and returns it highlighted with chroma, replying on IRC on error.
func ORRequestProcessor(
	appConfig *TomlConfig,
	client *girc.Client,
	event girc.Event,
	memory *[]MemoryElement,
	prompt string,
) string {
	response, err := DoORRequest(appConfig, memory, prompt)
	if err != nil {
		client.Cmd.ReplyTo(event, "error: "+err.Error())

		return ""
	}

	assistantElement := MemoryElement{
		Role:    "assistant",
		Content: response,
	}

	*memory = append(*memory, assistantElement)

	log.Println(response)

	var writer bytes.Buffer

	err = quick.Highlight(&writer,
		response,
		"markdown",
		appConfig.ChromaFormatter,
		appConfig.ChromaStyle)
	if err != nil {
		client.Cmd.ReplyTo(event, "error: "+err.Error())

		return ""
	}

	return writer.String()
}

// ORHandler registers the PRIVMSG handler that feeds messages addressed to
// the bot to OpenRouter, honouring AdminOnly and the "/" command prefix.
func ORHandler(
	irc *girc.Client,
	appConfig *TomlConfig,
	memory *[]MemoryElement) {
	irc.Handlers.AddBg(girc.PRIVMSG, func(client *girc.Client, event girc.Event) {
		if !strings.HasPrefix(event.Last(), appConfig.IrcNick+": ") {
			return
		}

		if appConfig.AdminOnly {
			byAdmin := false

			for _, admin := range appConfig.Admins {
				if event.Source.Name == admin {
					byAdmin = true
				}
			}

			if !byAdmin {
				return
			}
		}

		prompt := strings.TrimPrefix(event.Last(), appConfig.IrcNick+": ")
		log.Println(prompt)

		if string(prompt[0]) == "/" {
			runCommand(client, event, appConfig)

			return
		}

		result := ORRequestProcessor(appConfig, client, event, memory, prompt)
		if result != "" {
			sendToIRC(client, event, result, appConfig.ChromaFormatter)
		}
	})
}
```
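ORResponse itself is defined elsewhere in the repo, so this commit does not show it. Judging from the loop over orresponse.Choices above and OpenRouter's OpenAI-compatible response format, the relevant part of the struct presumably looks roughly like the sketch below; the exact field set and JSON tags are assumptions, not code copied from milla.

```go
// Assumed shape only; the real ORResponse likely carries more fields
// (id, model, usage, ...).
type ORResponse struct {
	Choices []struct {
		Message struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"message"`
	} `json:"choices"`
}
```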

plugins.go

+16

```diff
@@ -238,6 +238,21 @@ func ircPartChannelClosure(luaState *lua.LState, client *girc.Client) func(*lua.
 	}
 }

+func orRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
+	return func(luaState *lua.LState) int {
+		prompt := luaState.CheckString(1)
+
+		result, err := DoORRequest(appConfig, &[]MemoryElement{}, prompt)
+		if err != nil {
+			LogError(err)
+		}
+
+		luaState.Push(lua.LString(result))
+
+		return 1
+	}
+}
+
 func ollamaRequestClosure(luaState *lua.LState, appConfig *TomlConfig) func(*lua.LState) int {
 	return func(luaState *lua.LState) int {
 		prompt := luaState.CheckString(1)
@@ -334,6 +349,7 @@ func millaModuleLoaderClosure(luaState *lua.LState, client *girc.Client, appConf
 		"send_ollama_request":  lua.LGFunction(ollamaRequestClosure(luaState, appConfig)),
 		"send_gemini_request":  lua.LGFunction(geminiRequestClosure(luaState, appConfig)),
 		"send_chatgpt_request": lua.LGFunction(chatGPTRequestClosure(luaState, appConfig)),
+		"send_or_request":      lua.LGFunction(orRequestClosure(luaState, appConfig)),
 		"query_db":             lua.LGFunction(dbQueryClosure(luaState, appConfig)),
 		"register_cmd":         lua.LGFunction(registerLuaCommand(luaState, appConfig)),
 		"url_encode":           lua.LGFunction(urlEncode(luaState)),
```

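With send_or_request registered in the milla Lua module, a plugin can call OpenRouter the same way it already calls the other providers. The snippet below is a self-contained gopher-lua sketch of that wiring rather than milla's actual loader: the module name and the echo function standing in for DoORRequest are assumptions for illustration.

```go
package main

import (
	"fmt"

	lua "github.com/yuin/gopher-lua"
)

func main() {
	L := lua.NewState()
	defer L.Close()

	// Preload a "milla"-like module that exposes one Go-backed function,
	// mirroring how millaModuleLoaderClosure maps names to closures.
	L.PreloadModule("milla", func(L *lua.LState) int {
		mod := L.SetFuncs(L.NewTable(), map[string]lua.LGFunction{
			"send_or_request": func(L *lua.LState) int {
				prompt := L.CheckString(1)
				// Stand-in for DoORRequest(appConfig, &[]MemoryElement{}, prompt).
				L.Push(lua.LString("echo: " + prompt))

				return 1
			},
		})
		L.Push(mod)

		return 1
	})

	// What a plugin would do: require the module and call the new binding.
	if err := L.DoString(`
		local milla = require("milla")
		print(milla.send_or_request("hello"))
	`); err != nil {
		fmt.Println(err)
	}
}
```

In an actual plugin the call would presumably be require("milla").send_or_request(prompt), with the reply coming back as a plain string.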