Description
I'm working with the Assistants API (via github.com/sashabaranov/go-openai), and I want to stream the response content instead of polling until the run completes. Here's my current code:
```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	openai "github.com/sashabaranov/go-openai"
)

func testGPT_thread(messageData string, threadId string) string {
	client := openai.NewClient("your-api-key")
	ctx := context.Background()
	assistantId := "your-assistant-id"

	// Create a new thread if we don't have one yet.
	if threadId == "" {
		thread, err := client.CreateThread(ctx, openai.ThreadRequest{})
		if err != nil {
			fmt.Printf("Create thread error: %v\n", err)
			return ""
		}
		threadId = thread.ID
	}

	// Add the user message to the thread.
	_, err := client.CreateMessage(ctx, threadId, openai.MessageRequest{
		Role:    openai.ChatMessageRoleUser,
		Content: messageData,
	})
	if err != nil {
		fmt.Printf("Create message error: %v\n", err)
		return threadId
	}

	// Start a run for the assistant on this thread.
	run, err := client.CreateRun(ctx, threadId, openai.RunRequest{
		AssistantID: assistantId,
	})
	if err != nil {
		fmt.Printf("Create run error: %v\n", err)
		return threadId
	}

	// Currently polling the status until the run finishes.
	for run.Status == openai.RunStatusQueued || run.Status == openai.RunStatusInProgress {
		run, err = client.RetrieveRun(ctx, threadId, run.ID)
		if err != nil {
			return threadId
		}
		log.Printf("Run status: %s\n", run.Status)
		time.Sleep(100 * time.Millisecond)
	}
	if run.Status != openai.RunStatusCompleted {
		log.Fatalf("run failed with status %s", run.Status)
	}

	// Fetch the latest message produced by this run.
	numMessages := 1
	messages, err := client.ListMessage(ctx, run.ThreadID, &numMessages, nil, nil, nil, &run.ID)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(messages.Messages[0].Content[0].Text.Value)
	return threadId
}
```
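
For context, here is the direction I think streaming takes: the REST API appears to accept `"stream": true` when creating a run and then sends server-sent events, with the text arriving in `thread.message.delta` chunks. Below is a rough, untested sketch of that approach using plain `net/http`; the endpoint, headers, event shape, and field names are my assumptions from the API docs rather than anything I've confirmed, and I haven't found an equivalent streaming helper for runs in go-openai. Is there a supported way to do this with the library, or is reading the SSE stream directly the right approach?

```go
// Rough sketch only (untested): create the run with "stream": true and read
// the server-sent events directly, printing text deltas as they arrive.
// The endpoint, headers, and JSON field names below are assumptions from the
// REST API docs, not confirmed go-openai behaviour.
package main

import (
	"bufio"
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
)

func streamRun(apiKey, threadId, assistantId string) error {
	payload, err := json.Marshal(map[string]any{
		"assistant_id": assistantId,
		"stream":       true,
	})
	if err != nil {
		return err
	}

	req, err := http.NewRequest(http.MethodPost,
		"https://api.openai.com/v1/threads/"+threadId+"/runs",
		bytes.NewReader(payload))
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+apiKey)
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("OpenAI-Beta", "assistants=v2")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// SSE frames look like "event: ..." / "data: {...}" lines; only the
	// "data:" lines are parsed here. Events that aren't message deltas
	// simply yield an empty delta and print nothing.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if !strings.HasPrefix(line, "data:") {
			continue
		}
		data := strings.TrimSpace(strings.TrimPrefix(line, "data:"))
		if data == "[DONE]" {
			break
		}
		var event struct {
			Delta struct {
				Content []struct {
					Text struct {
						Value string `json:"value"`
					} `json:"text"`
				} `json:"content"`
			} `json:"delta"`
		}
		if err := json.Unmarshal([]byte(data), &event); err != nil {
			continue // skip anything that doesn't parse
		}
		for _, c := range event.Delta.Content {
			fmt.Print(c.Text.Value)
		}
	}
	return scanner.Err()
}

func main() {
	if err := streamRun("your-api-key", "your-thread-id", "your-assistant-id"); err != nil {
		fmt.Println("stream error:", err)
	}
}
```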