Skip to content

Commit

Permalink
refactor: foundations for chatbot (code divided into scenario selecti…
Browse files Browse the repository at this point in the history
…on, scenario handling, and data retrieval)

Signed-off-by: Hunter Gregory <[email protected]>
  • Loading branch information
huntergregory committed Aug 5, 2024
1 parent df4a302 commit 74b9fc4
Show file tree
Hide file tree
Showing 17 changed files with 695 additions and 345 deletions.
4 changes: 0 additions & 4 deletions ai/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,3 @@
- `$env:AOAI_COMPLETIONS_ENDPOINT = Read-Host 'Enter AOAI_COMPLETIONS_ENDPOINT'`
- `$env:AOAI_DEPLOYMENT_NAME = Read-Host 'Enter AOAI_DEPLOYMENT_NAME'`
- `go run main.go`

## Development

Modify prompts in the folders within *pkg/analysis/* (e.g. *pkg/analysis/flows/prompt.go* or *analyzer.go*)
29 changes: 4 additions & 25 deletions ai/main.go
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
package main

import (
"context"

"github.com/microsoft/retina/ai/pkg/chat"
"github.com/microsoft/retina/ai/pkg/lm"
flowscenario "github.com/microsoft/retina/ai/pkg/scenarios/flows"

"github.com/sirupsen/logrus"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
)

Expand Down Expand Up @@ -43,26 +40,8 @@ func main() {
}
log.Info("initialized Azure OpenAI model")

handleChat(log, config, clientset, model)
}

// pretend there's input from chat interface
func handleChat(log logrus.FieldLogger, config *rest.Config, clientset *kubernetes.Clientset, model lm.Model) {
question := "what's wrong with my app?"
var chat lm.ChatHistory

h := flowscenario.NewHandler(log, config, clientset, model)
params := &flowscenario.ScenarioParams{
Scenario: flowscenario.DropScenario,
Namespace1: "default",
Namespace2: "default",
bot := chat.NewBot(log, config, clientset, model)
if err := bot.Loop(); err != nil {
log.WithError(err).Fatal("error running chat loop")
}

ctx := context.TODO()
response, err := h.Handle(ctx, question, chat, params)
if err != nil {
log.WithError(err).Fatal("error running flows scenario")
}

_ = response
}
26 changes: 0 additions & 26 deletions ai/pkg/analysis/flows/analyzer.go

This file was deleted.

78 changes: 0 additions & 78 deletions ai/pkg/analysis/flows/types.go

This file was deleted.

91 changes: 91 additions & 0 deletions ai/pkg/chat/chat.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
package chat

import (
"context"
"fmt"

"github.com/microsoft/retina/ai/pkg/lm"
flowretrieval "github.com/microsoft/retina/ai/pkg/retrieval/flows"
"github.com/microsoft/retina/ai/pkg/scenarios"
"github.com/microsoft/retina/ai/pkg/scenarios/dns"
"github.com/microsoft/retina/ai/pkg/scenarios/drops"

"github.com/sirupsen/logrus"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
)

// definitions is the registry of scenarios the Bot can dispatch to.
// Note: selectScenario currently hard-codes the first entry (drops),
// so ordering is significant for now.
var (
	definitions = []*scenarios.Definition{
		drops.Definition,
		dns.Definition,
	}
)

// Bot drives the chat loop: for each user question it selects a
// scenario definition and hands it the shared Kubernetes and model
// handles below.
type Bot struct {
	log       logrus.FieldLogger    // tagged with component=chat by NewBot
	config    *rest.Config          // REST config, also used to build the flow retriever
	clientset *kubernetes.Clientset // shared client for scenario handlers
	model     lm.Model              // language model used to generate responses
}

// input log, config, clientset, model
func NewBot(log logrus.FieldLogger, config *rest.Config, clientset *kubernetes.Clientset, model lm.Model) *Bot {
return &Bot{
log: log.WithField("component", "chat"),
config: config,
clientset: clientset,
model: model,
}
}

// Loop runs the chat session: it repeatedly takes a user question,
// selects a scenario to handle it, runs the scenario, and prints the
// response. It returns a non-nil error if scenario selection or
// handling fails. User input and multi-turn looping are still TODO,
// so the loop currently executes exactly once.
func (b *Bot) Loop() error {
	var history lm.ChatHistory

	// The retriever and scenario config do not change between
	// iterations, so build them once up front rather than inside the
	// loop as before.
	flowRetriever := flowretrieval.NewRetriever(b.log, b.config, b.clientset)
	cfg := &scenarios.Config{
		Log:           b.log,
		Config:        b.config,
		Clientset:     b.clientset,
		Model:         b.model,
		FlowRetriever: flowRetriever,
	}

	// cfg.FlowRetriever.UseFile()

	ctx := context.TODO()

	for {
		// TODO get user input
		question := "what's wrong with my app?"

		// select scenario and get parameters
		definition, params, err := b.selectScenario(question, history)
		if err != nil {
			return fmt.Errorf("selecting scenario: %w", err)
		}

		response, err := definition.Handle(ctx, cfg, params, question, history)
		if err != nil {
			return fmt.Errorf("handling scenario: %w", err)
		}

		fmt.Println(response)

		// TODO keep chat loop going
		break
	}

	return nil
}

// selectScenario decides which scenario definition should handle the
// question and returns it along with the parameters to run it with.
// FIXME: selection is hard-coded for now — it always picks the first
// registered definition and uses the default namespace for both ends;
// TODO drive this from the chat interface instead.
func (b *Bot) selectScenario(question string, history lm.ChatHistory) (*scenarios.Definition, map[string]string, error) {
	chosen := definitions[0]
	params := map[string]string{
		scenarios.Namespace1.Name: "default",
		scenarios.Namespace2.Name: "default",
	}
	return chosen, params, nil
}
4 changes: 2 additions & 2 deletions ai/pkg/lm/azure-openai.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,11 +65,11 @@ func NewAzureOpenAI() (*AzureOpenAI, error) {
return aoai, nil
}

func (m *AzureOpenAI) Generate(ctx context.Context, systemPrompt string, chat ChatHistory, message string) (string, error) {
func (m *AzureOpenAI) Generate(ctx context.Context, systemPrompt string, history ChatHistory, message string) (string, error) {
messages := []azopenai.ChatRequestMessageClassification{
&azopenai.ChatRequestSystemMessage{Content: to.Ptr(systemPrompt)},
}
for _, pair := range chat {
for _, pair := range history {
messages = append(messages, &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent(pair.User)})
messages = append(messages, &azopenai.ChatRequestAssistantMessage{Content: to.Ptr(pair.Assistant)})
}
Expand Down
6 changes: 3 additions & 3 deletions ai/pkg/lm/echo.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@ func NewEchoModel() *EchoModel {
return &EchoModel{}
}

func (m *EchoModel) Generate(ctx context.Context, systemPrompt string, chat ChatHistory, message string) (string, error) {
chatStrings := make([]string, 0, len(chat))
for _, pair := range chat {
func (m *EchoModel) Generate(ctx context.Context, systemPrompt string, history ChatHistory, message string) (string, error) {
chatStrings := make([]string, 0, len(history))
for _, pair := range history {
chatStrings = append(chatStrings, fmt.Sprintf("USER: %s\nASSISTANT: %s\n", pair.User, pair.Assistant))
}
resp := fmt.Sprintf("systemPrompt: %s\nhistory: %s\nmessage: %s", systemPrompt, strings.Join(chatStrings, "\n"), message)
Expand Down
2 changes: 1 addition & 1 deletion ai/pkg/lm/model.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,5 @@ type MessagePair struct {
type ChatHistory []MessagePair

type Model interface {
Generate(ctx context.Context, systemPrompt string, chat ChatHistory, message string) (string, error)
Generate(ctx context.Context, systemPrompt string, history ChatHistory, message string) (string, error)
}
16 changes: 8 additions & 8 deletions ai/pkg/analysis/flows/parser.go → ai/pkg/parse/flows/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,19 @@ import (
)

type Parser struct {
log logrus.FieldLogger
summary FlowSummary
log logrus.FieldLogger
connections Connections
}

func NewParser(log logrus.FieldLogger) *Parser {
return &Parser{
log: log.WithField("component", "flow-parser"),
summary: make(map[string]*Connection),
log: log.WithField("component", "flow-parser"),
connections: make(map[string]*Connection),
}
}

func (p *Parser) Summary() FlowSummary {
return p.summary
func (p *Parser) Connections() Connections {
return p.connections
}

func (p *Parser) Parse(flows []*flowpb.Flow) {
Expand Down Expand Up @@ -57,15 +57,15 @@ func (p *Parser) addFlow(f *flowpb.Flow) error {
pod1, pod2 := pods[0], pods[1]
key := pod1 + "#" + pod2

conn, exists := p.summary[key]
conn, exists := p.connections[key]
if !exists {
conn = &Connection{
Pod1: pod1,
Pod2: pod2,
Key: key,
Flows: []*flowpb.Flow{},
}
p.summary[key] = conn
p.connections[key] = conn
}

conn.Flows = append(conn.Flows, f)
Expand Down
Loading

0 comments on commit 74b9fc4

Please sign in to comment.