
feat(lark): Add /ask command using OpenAI #251

Open · wants to merge 5 commits into base: main
Changes from 1 commit
1 change: 1 addition & 0 deletions chatops-lark/go.mod
@@ -10,6 +10,7 @@ require (
	github.com/google/go-github/v68 v68.0.0
	github.com/larksuite/oapi-sdk-go/v3 v3.4.7
	github.com/rs/zerolog v1.33.0
+	github.com/sashabaranov/go-openai v1.9.3
	gopkg.in/yaml.v3 v3.0.1
)

2 changes: 2 additions & 0 deletions chatops-lark/go.sum
@@ -79,6 +79,8 @@ github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
+github.com/sashabaranov/go-openai v1.9.3 h1:uNak3Rn5pPsKRs9bdT7RqRZEyej/zdZOEI2/8wvrFtM=
+github.com/sashabaranov/go-openai v1.9.3/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w=
134 changes: 134 additions & 0 deletions chatops-lark/pkg/events/handler/ask.go
@@ -0,0 +1,134 @@
package handler

import (
	"context"
	"fmt"
	"strings"

	"github.com/rs/zerolog/log"
	"github.com/sashabaranov/go-openai"
)

const (
	askHelpText = `missing question

Usage: /ask <question...>

Example:
  /ask What is the on-call schedule for the infra team this week?
  /ask How do I debug error code 1234 in service X?

For more details, use: /ask --help
`

	askDetailedHelpText = `Usage: /ask <question...>

Description:
  Asks an AI assistant a question. The assistant may leverage internal tools (MCP context)
  to provide relevant and up-to-date information alongside its general knowledge.

Examples:
  /ask What is the current status of the main production cluster?
  /ask Explain the purpose of the 'widget-processor' microservice.
  /ask Summarize the recent alerts for the database tier.
  /ask How do I request access to the staging environment?

Use '/ask --help' or '/ask -h' to see this message.
`
)

// runCommandAsk handles the /ask command logic.
func runCommandAsk(ctx context.Context, args []string) (string, error) {
	if len(args) == 0 {
		// No question provided.
		return "", fmt.Errorf(askHelpText)
	}

	// Check for help flags explicitly, as there are no subcommands.
	firstArg := args[0]
	if firstArg == "-h" || firstArg == "--help" {
		if len(args) == 1 {
			return askDetailedHelpText, nil
		}
		// This could allow asking help *about* something, e.g. /ask --help tools.
		// For now, treat any args after --help as part of the help request itself
		// and simply return the detailed help.
		// Alternatively, this could error out:
		// return "", fmt.Errorf("unknown arguments after %s: %v", firstArg, args[1:])
		return askDetailedHelpText, nil
	}

	// The entire argument list forms the question.
	question := strings.Join(args, " ")

	// --- Placeholder for actual AI/Tool interaction ---
	// Here you would:
	// 1. Parse the question for intent or specific tool requests (if applicable).
	// 2. Potentially query MCP tools based on the question to gather context.
	// 3. Format a prompt including the user's question and any gathered context.
	// 4. Send the prompt to the configured LLM.
	// 5. Receive the LLM's response.
	// 6. Format the response for Lark.
	result, err := processAskRequest(ctx, question)
	if err != nil {
		return "", fmt.Errorf("failed to process ask request: %w", err)
Comment on lines +88 to +89 (Contributor, medium):
Consider adding more context to the error message, such as the question that was asked. This can help with debugging when the ask request fails.

Suggested change:

	return "", fmt.Errorf("failed to process ask request for question %q: %w", question, err)

	}
	// --- End Placeholder ---

	return result, nil
}

// processAskRequest is a placeholder for the core logic interacting with the LLM and tools.
// TODO: Implement the actual interaction with the AI model and MCP tools.
Comment (Contributor, high):
This is an important TODO that needs to be addressed before merging. Ensure that the actual interaction with the AI model and MCP tools is implemented.

Suggested change:

	// TODO: Implement the actual interaction with the AI model and MCP tools.
	// This implementation should include parsing the question, querying MCP tools, formatting a prompt, sending the prompt to the LLM, receiving the response, and formatting the response for Lark.

func processAskRequest(ctx context.Context, question string) (string, error) {
	// Simulate processing and generating a response.
	// In a real implementation, this would involve API calls to an LLM service
	// and potentially calls to internal "MCP tools" APIs.
	fmt.Printf("Processing ask request for question: %q\n", question) // Log for debugging

	openaiCfg := ctx.Value(ctxKeyOpenAIConfig)
	openaiModel := ctx.Value(ctxKeyOpenAIModel)
	systemPrompt := ctx.Value(ctxKeyOpenAISystemPrompt)
	log.Debug().
		Any("base_url", openaiCfg.(openai.ClientConfig).BaseURL).
		Any("systemPrompt", systemPrompt).
		Msg("debug vars")
	client := openai.NewClientWithConfig(openaiCfg.(openai.ClientConfig))

	resp, err := client.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openaiModel.(string),
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleSystem,
					Content: systemPrompt.(string),
				},
				{
					Role:    openai.ChatMessageRoleUser,
					Content: question,
				},
			},
		},
	)

	if err != nil {
		log.Err(err).Msg("failed to create chat completion")
		return "", fmt.Errorf("failed to create chat completion: %w", err)
Comment (Contributor, medium):
Consider including the model name in the error message for better debugging.

Suggested change:

	return "", fmt.Errorf("failed to create chat completion with model %s: %w", openaiModel.(string), err)

	}
	response := resp.Choices[0].Message.Content

	// // Example of how context *could* be added (replace with actual tool calls)
	// if strings.Contains(strings.ToLower(question), "status") && strings.Contains(strings.ToLower(question), "production") {
	// 	// Pretend we called an MCP tool for cluster status
	// 	mcpContext := "\n[MCP Tool Context: Production cluster status is currently nominal.]"
	// 	response += mcpContext
	// }

	// // Simulate potential error
	// if strings.Contains(strings.ToLower(question), "error simulation") {
	// 	return "", fmt.Errorf("simulated error during AI processing")
	// }

	return response, nil
}
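
As a quick sanity check of the help-flag path above, a minimal test sketch could look like the following. This is a hypothetical test file, not part of this PR; TestRunCommandAskHelp is an assumed name, and it relies only on the handler package as defined in this diff (no OpenAI context values are needed on this path).

	package handler

	import (
		"context"
		"testing"
	)

	// TestRunCommandAskHelp checks that a lone --help flag returns the
	// detailed help text and no error, per the branch in runCommandAsk.
	func TestRunCommandAskHelp(t *testing.T) {
		out, err := runCommandAsk(context.Background(), []string{"--help"})
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		if out != askDetailedHelpText {
			t.Errorf("expected detailed help text, got %q", out)
		}
	}

Assuming the usual layout, this would run with `go test ./pkg/events/handler/` from the chatops-lark module root.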
30 changes: 28 additions & 2 deletions chatops-lark/pkg/events/handler/root.go
@@ -16,11 +16,15 @@ import (
	larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1"
	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
+	"github.com/sashabaranov/go-openai"
)

const (
-	ctxKeyGithubToken     = "github_token"
-	ctxKeyLarkSenderEmail = "lark.sender.email"
+	ctxKeyGithubToken        = "github_token"
+	ctxKeyLarkSenderEmail    = "lark.sender.email"
+	ctxKeyOpenAIConfig       = "openai.config"
+	ctxKeyOpenAIModel        = "openai.model"
+	ctxKeyOpenAISystemPrompt = "openai.system_prompt"

	// Message types
	msgTypePrivate = "private"
@@ -52,6 +56,28 @@ var commandConfigs = map[string]CommandConfig{
			return context.WithValue(ctx, ctxKeyLarkSenderEmail, sender.Email)
		},
	},
+	"/ask": {
+		Handler: runCommandAsk,
+		SetupContext: func(ctx context.Context, config map[string]any, sender *CommandSender) context.Context {
+			cfg := config["openai.config"]
+			var openaiCfg openai.ClientConfig
+			switch v := cfg.(type) {
+			case map[string]any:
+				openaiCfg = openai.DefaultConfig(v["api_key"].(string))
+				openaiCfg.BaseURL = v["base_url"].(string)
+				openaiCfg.APIType = openai.APIType(v["api_type"].(string))
+				openaiCfg.APIVersion = v["api_version"].(string)
+				openaiCfg.Engine = v["engine"].(string)
Comment (Contributor, medium):
It would be beneficial to add error checking after each type assertion to ensure the values are of the expected type. This can prevent unexpected panics if the configuration is not set up correctly.

Suggested change:

	if apiKey, ok := v["api_key"].(string); ok {
		openaiCfg = openai.DefaultConfig(apiKey)
	} else {
		log.Error().Msg("api_key not found or not a string")
		return ctx // or return an error, depending on desired behavior
	}
	if baseURL, ok := v["base_url"].(string); ok {
		openaiCfg.BaseURL = baseURL
	} else {
		log.Error().Msg("base_url not found or not a string")
	}
	if apiType, ok := v["api_type"].(string); ok {
		openaiCfg.APIType = openai.APIType(apiType)
	} else {
		log.Error().Msg("api_type not found or not a string")
	}
	if apiVersion, ok := v["api_version"].(string); ok {
		openaiCfg.APIVersion = apiVersion
	} else {
		log.Error().Msg("api_version not found or not a string")
	}
	if engine, ok := v["engine"].(string); ok {
		openaiCfg.Engine = engine
	} else {
		log.Error().Msg("engine not found or not a string")
	}

+			default:
+				openaiCfg = openai.DefaultConfig("")
+			}
+
+			newCtx := context.WithValue(ctx, ctxKeyOpenAIConfig, openaiCfg)
+			newCtx = context.WithValue(newCtx, ctxKeyOpenAIModel, config["openai.model"])
+			newCtx = context.WithValue(newCtx, ctxKeyOpenAISystemPrompt, config["openai.system_prompt"])
Comment (Contributor, medium):
Consider adding error checking to ensure that config["openai.model"] and config["openai.system_prompt"] exist and are of the expected type (string).

Suggested change:

	model, ok := config["openai.model"].(string)
	if !ok {
		log.Error().Msg("openai.model not found or not a string")
		return ctx // or return an error
	}
	systemPrompt, ok := config["openai.system_prompt"].(string)
	if !ok {
		log.Error().Msg("openai.system_prompt not found or not a string")
		return ctx // or return an error
	}

	newCtx := context.WithValue(ctx, ctxKeyOpenAIConfig, openaiCfg)
	newCtx = context.WithValue(newCtx, ctxKeyOpenAIModel, model)
	newCtx = context.WithValue(newCtx, ctxKeyOpenAISystemPrompt, systemPrompt)
	return newCtx

+			return newCtx
+		},
+	},
}

type Command struct {
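
For reference, the /ask SetupContext above expects the config map to carry five string values under "openai.config" plus two top-level keys. Below is a hypothetical wiring sketch; the key strings and the go-openai constants come from this diff and that library, while exampleAskConfig and every value shown are placeholders, not part of the PR.

	package handler

	import "github.com/sashabaranov/go-openai"

	// exampleAskConfig is a hypothetical sketch of the config map the /ask
	// SetupContext reads; every value here is a placeholder.
	var exampleAskConfig = map[string]any{
		"openai.config": map[string]any{
			"api_key":     "sk-REPLACE_ME",              // placeholder credential
			"base_url":    "https://api.openai.com/v1",  // stock OpenAI endpoint
			"api_type":    string(openai.APITypeOpenAI), // "OPEN_AI"; "AZURE"/"AZURE_AD" also exist
			"api_version": "",                           // only meaningful for Azure
			"engine":      "",                           // only meaningful for Azure
		},
		"openai.model":         openai.GPT4, // any chat-capable model ID
		"openai.system_prompt": "You are a helpful chatops assistant.",
	}

As written, SetupContext type-asserts each nested value, so every key must be present as a string (even if empty) to avoid a panic at runtime — the concern raised in the review comments above.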