Skip to content

Commit fdba838

Browse files
Added a new option, context. Fixed a bug with the custom commands where the context was not being treated as such.
1 parent fa11101 commit fdba838

File tree

4 files changed

+64
-3
lines changed

4 files changed

+64
-3
lines changed

README.md

+13
Original file line numberDiff line numberDiff line change
@@ -235,6 +235,18 @@ webirc password to use.
235235

236236
webirc address to use.
237237

238+
#### context
239+
240+
The context to use for the normal conversations with the bot. Yes, this is how you tell your milla instance to act like a pirate.
241+
242+
```toml
243+
context = ["you are a pirate. use the language and words a pirate would unless you are asked to do otherwise explicitly", "your name is captain blackbeard"]
244+
```
245+
246+
```toml
247+
context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french", "your name is terra"]
248+
```
249+
238250
#### rssFile
239251

240252
The file that contains the RSS feeds.
@@ -351,6 +363,7 @@ skipTLSVerify = false
351363
useTLS = true
352364
adminOnly = false
353365
plugins = ["/plugins/ip.lua", "/plugins/urban.lua"]
366+
context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french"]
354367
[ircd.devinet.watchlist.security]
355368
watchList = ["#securityfeeds"]
356369
watchFiles = ["/watchfiles/voidbox.list"]

config-example.toml

+1
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ llmProxy = "http://127.0.0.1:8180"
2828
skipTLSVerify = false
2929
useTLS = true
3030
adminOnly = false
31+
context = ["please respond in french even if i use another language unless you are specifically asked to use any language other than french", "your name is terra"]
3132
plugins = ["/plugins/ip.lua", "/plugins/urban.lua"]
3233
[ircd.devinet.watchlist.security]
3334
watchList = ["#securityfeeds"]

main.go

+49-3
Original file line numberDiff line numberDiff line change
@@ -345,7 +345,7 @@ func handleCustomCommand(
345345

346346
for _, customContext := range customCommand.Context {
347347
gptMemory = append(gptMemory, openai.ChatCompletionMessage{
348-
Role: openai.ChatMessageRoleUser,
348+
Role: openai.ChatMessageRoleAssistant,
349349
Content: customContext,
350350
})
351351
}
@@ -376,7 +376,7 @@ func handleCustomCommand(
376376
Parts: []genai.Part{
377377
genai.Text(customContext),
378378
},
379-
Role: "user",
379+
Role: "model",
380380
})
381381
}
382382

@@ -396,7 +396,7 @@ func handleCustomCommand(
396396

397397
for _, customContext := range customCommand.Context {
398398
ollamaMemory = append(ollamaMemory, MemoryElement{
399-
Role: "user",
399+
Role: "assistant",
400400
Content: customContext,
401401
})
402402
}
@@ -649,6 +649,13 @@ func DoOllamaRequest(
649649

650650
if len(*ollamaMemory) > appConfig.MemoryLimit {
651651
*ollamaMemory = []MemoryElement{}
652+
653+
for _, context := range appConfig.Context {
654+
*ollamaMemory = append(*ollamaMemory, MemoryElement{
655+
Role: "assistant",
656+
Content: context,
657+
})
658+
}
652659
}
653660

654661
*ollamaMemory = append(*ollamaMemory, memoryElement)
@@ -887,6 +894,15 @@ func GeminiRequestProcessor(
887894

888895
if len(*geminiMemory) > appConfig.MemoryLimit {
889896
*geminiMemory = []*genai.Content{}
897+
898+
for _, context := range appConfig.Context {
899+
*geminiMemory = append(*geminiMemory, &genai.Content{
900+
Parts: []genai.Part{
901+
genai.Text(context),
902+
},
903+
Role: "model",
904+
})
905+
}
890906
}
891907

892908
*geminiMemory = append(*geminiMemory, &genai.Content{
@@ -1036,6 +1052,13 @@ func ChatGPTRequestProcessor(
10361052

10371053
if len(*gptMemory) > appConfig.MemoryLimit {
10381054
*gptMemory = []openai.ChatCompletionMessage{}
1055+
1056+
for _, context := range appConfig.Context {
1057+
*gptMemory = append(*gptMemory, openai.ChatCompletionMessage{
1058+
Role: openai.ChatMessageRoleAssistant,
1059+
Content: context,
1060+
})
1061+
}
10391062
}
10401063

10411064
var writer bytes.Buffer
@@ -1312,10 +1335,33 @@ func runIRC(appConfig TomlConfig) {
13121335

13131336
switch appConfig.Provider {
13141337
case "ollama":
1338+
for _, context := range appConfig.Context {
1339+
OllamaMemory = append(OllamaMemory, MemoryElement{
1340+
Role: "assistant",
1341+
Content: context,
1342+
})
1343+
}
1344+
13151345
OllamaHandler(irc, &appConfig, &OllamaMemory)
13161346
case "gemini":
1347+
for _, context := range appConfig.Context {
1348+
GeminiMemory = append(GeminiMemory, &genai.Content{
1349+
Parts: []genai.Part{
1350+
genai.Text(context),
1351+
},
1352+
Role: "model",
1353+
})
1354+
}
1355+
13171356
GeminiHandler(irc, &appConfig, &GeminiMemory)
13181357
case "chatgpt":
1358+
for _, context := range appConfig.Context {
1359+
GPTMemory = append(GPTMemory, openai.ChatCompletionMessage{
1360+
Role: openai.ChatMessageRoleAssistant,
1361+
Content: context,
1362+
})
1363+
}
1364+
13191365
ChatGPTHandler(irc, &appConfig, &GPTMemory)
13201366
}
13211367

types.go

+1
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,7 @@ type TomlConfig struct {
8080
WebIRCAddress string `toml:"webIRCAddress"`
8181
RSSFile string `toml:"rssFile"`
8282
Plugins []string `toml:"plugins"`
83+
Context []string `toml:"context"`
8384
CustomCommands map[string]CustomCommand `toml:"customCommands"`
8485
WatchLists map[string]WatchList `toml:"watchList"`
8586
LuaStates map[string]LuaLstates

0 commit comments

Comments
 (0)