Skip to content

Chat.SendStream does not seem to support parallel function calling #692

@junhanlin

Description

When using Chat.SendStream with parallel function calling, the SDK returns an error on the second round of the conversation after receiving multiple function calls in the first round.

Error Message:

Error 400, Message: Please ensure that function response turn comes immediately after a function call turn., Status: INVALID_ARGUMENT

Expected Behavior

Chat.SendStream should handle parallel function calls the same way as Chat.Send does, allowing the model to request multiple function calls in a single turn and accepting responses for all of them in the next turn.

Actual Behavior

  • Chat.SendStream: Fails with INVALID_ARGUMENT error when responding to parallel function calls
  • Chat.Send: Works correctly with parallel function calling

Environment details

  • Programming language: Go
  • OS: macOS 15.4
  • Language runtime version: 1.24
  • Package version: v1.45.0

Steps to reproduce

Failing Example (Chat.SendStream)
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"google.golang.org/genai"
)

// main reproduces the reported failure: Chat.SendStream returns an
// INVALID_ARGUMENT error on the round following a parallel function call.
func main() {
	ctx := context.Background()

	key := os.Getenv("GEMINI_API_KEY")
	if key == "" {
		log.Fatal("GEMINI_API_KEY environment variable not set")
	}

	client, err := genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  key,
		Backend: genai.BackendGeminiAPI,
	})
	if err != nil {
		log.Fatalf("Failed to create Gemini client: %v", err)
	}

	// Single tool the model may call; "date" is required so the model is
	// nudged toward one call per requested date (i.e. parallel calls).
	fridgeDecl := &genai.FunctionDeclaration{
		Name:        "fridgeContent",
		Description: "Get the current content of the fridge.",
		Parameters: &genai.Schema{
			Type: genai.TypeObject,
			Properties: map[string]*genai.Schema{
				"date": {
					Type:        genai.TypeString,
					Description: "The date for which to check the fridge content, in YYYY-MM-DD format. If not provided, defaults to today's date.",
				},
			},
			Required: []string{"date"},
		},
	}

	cfg := &genai.GenerateContentConfig{
		Tools: []*genai.Tool{{
			FunctionDeclarations: []*genai.FunctionDeclaration{fridgeDecl},
		}},
		ToolConfig: &genai.ToolConfig{
			FunctionCallingConfig: &genai.FunctionCallingConfig{
				Mode: genai.FunctionCallingConfigModeAuto,
			},
		},
	}

	chat, err := client.Chats.Create(ctx, "gemini-3-pro-preview", cfg, []*genai.Content{})
	if err != nil {
		log.Fatalf("Error creating chat: %v\n", err)
	}

	// Parts to send on the next round; starts with the user prompt and is
	// replaced by function responses after each tool-call round.
	pending := []*genai.Part{genai.NewPartFromText("What's in the fridge on 2026-01-01 and 2026-01-02?")}
	for round := 1; ; round++ {
		fmt.Printf("\n--- Round %d ---\n", round)

		// Collect streamed output: tool calls and any assistant text.
		var (
			calls    []*genai.FunctionCall
			streamed string
		)

		for chunk, err := range chat.SendStream(ctx, cloneParts(pending)...) {
			if err != nil {
				log.Fatalf("Error during chat stream: %v", err)
			}
			if len(chunk.Candidates) == 0 {
				continue
			}
			content := chunk.Candidates[0].Content
			if content == nil {
				continue
			}
			for _, p := range content.Parts {
				switch {
				case p.FunctionCall != nil:
					calls = append(calls, p.FunctionCall)
					fmt.Printf("Function Call: %s\n", p.FunctionCall.Name)
					fmt.Printf("  Args: %v\n", p.FunctionCall.Args)
				case p.Text != "":
					streamed += p.Text
					fmt.Printf("Assistant (Stream):  %s", p.Text)
				}
			}
		}

		if streamed != "" {
			fmt.Println()
		}

		// No tool calls means the model produced a final answer.
		if len(calls) == 0 {
			fmt.Println("Conversation complete - no more tool calls needed")
			break
		}

		// Answer every tool call (stubbed result) for the next round.
		responses := make([]*genai.Part, 0, len(calls))
		for _, call := range calls {
			responses = append(responses, genai.NewPartFromFunctionResponse(
				call.Name,
				map[string]any{"content": "pizza"},
			))
		}
		pending = responses

		// Safety check to prevent infinite loops.
		if round > 10 {
			fmt.Println("Max conversation rounds reached")
			break
		}
	}
}

// cloneParts returns a fresh slice header holding the same part pointers,
// so the chat history cannot alias the caller's slice.
func cloneParts(parts []*genai.Part) []*genai.Part {
	out := make([]*genai.Part, 0, len(parts))
	return append(out, parts...)
}

Result: Fails with error

--- Round 1 ---
Function Call: fridgeContent
  Args: map[date:2026-01-01]
Function Call: fridgeContent
  Args: map[date:2026-01-02]

--- Round 2 ---
2026/02/08 13:43:58 Error during chat stream: Error 400, Message: Please ensure that function response turn comes immediately after a function call turn., Status: INVALID_ARGUMENT, Details: []
Working Example (Chat.Send)
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"google.golang.org/genai"
)

// main is the working counterpart: the same multi-round tool-calling loop
// driven by Chat.Send, which handles parallel function calls correctly.
func main() {
	ctx := context.Background()

	token := os.Getenv("GEMINI_API_KEY")
	if token == "" {
		log.Fatal("GEMINI_API_KEY environment variable not set")
	}

	cli, err := genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  token,
		Backend: genai.BackendGeminiAPI,
	})
	if err != nil {
		log.Fatalf("Failed to create Gemini client: %v", err)
	}

	// Single tool the model may call; requiring "date" encourages one call
	// per requested date, i.e. parallel function calls.
	fridgeDecl := &genai.FunctionDeclaration{
		Name:        "fridgeContent",
		Description: "Get the current content of the fridge.",
		Parameters: &genai.Schema{
			Type: genai.TypeObject,
			Properties: map[string]*genai.Schema{
				"date": {
					Type:        genai.TypeString,
					Description: "The date for which to check the fridge content, in YYYY-MM-DD format. If not provided, defaults to today's date.",
				},
			},
			Required: []string{"date"},
		},
	}

	cfg := &genai.GenerateContentConfig{
		Tools: []*genai.Tool{{
			FunctionDeclarations: []*genai.FunctionDeclaration{fridgeDecl},
		}},
		ToolConfig: &genai.ToolConfig{
			FunctionCallingConfig: &genai.FunctionCallingConfig{
				Mode: genai.FunctionCallingConfigModeAuto,
			},
		},
	}

	sess, err := cli.Chats.Create(ctx, "gemini-3-pro-preview", cfg, []*genai.Content{})
	if err != nil {
		log.Fatalf("Error creating chat: %v\n", err)
	}

	// Parts for the next round: the user prompt first, then the function
	// responses produced after each tool-call round.
	pending := []*genai.Part{genai.NewPartFromText("What's in the fridge on 2026-01-01 and 2026-01-02?")}
	for round := 1; ; round++ {
		fmt.Printf("\n--- Round %d ---\n", round)

		resp, err := sess.Send(ctx, cloneParts(pending)...)
		if err != nil {
			log.Fatalf("Error sending message: %v\n", err)
		}

		// No tool calls means the model produced a final answer.
		calls := resp.FunctionCalls()
		if len(calls) == 0 {
			fmt.Printf("Assistant: %s\n", resp.Text())
			fmt.Println("Conversation complete - no more tool calls needed")
			break
		}

		for _, call := range calls {
			fmt.Printf("Function Call: %s\n", call.Name)
			fmt.Printf("  Args: %v\n", call.Args)
		}

		// Answer every tool call (stubbed result) for the next round.
		answers := make([]*genai.Part, 0, len(calls))
		for _, call := range calls {
			answers = append(answers, genai.NewPartFromFunctionResponse(
				call.Name,
				map[string]any{"content": "pizza"},
			))
		}
		pending = answers

		// Safety check to prevent infinite loops.
		if round > 10 {
			fmt.Println("Max conversation rounds reached")
			break
		}
	}
}

// cloneParts duplicates the slice header element by element so later
// mutation of the returned slice cannot affect the caller's slice.
func cloneParts(parts []*genai.Part) []*genai.Part {
	dup := make([]*genai.Part, len(parts))
	for i := range parts {
		dup[i] = parts[i]
	}
	return dup
}

Result: success with logs

--- Round 1 ---
Function Call: fridgeContent
  Args: map[date:2026-01-01]
Function Call: fridgeContent
  Args: map[date:2026-01-02]

--- Round 2 ---
Assistant: On both 2026-01-01 and 2026-01-02, there is pizza in the fridge.
Conversation complete - no more tool calls needed

Additional Notes

Note: The model gemini-3-pro-preview appears to be more likely to trigger parallel function calling compared to other models, which makes it useful for quickly reproducing this issue. If you have trouble reproducing with other models, try using gemini-3-pro-preview.

Metadata

Labels

api:gemini-api — Issues related to Gemini API
priority: p2 — Moderately-important priority. Fix may not be included in next release.
type: bug — Error or flaw in code with unintended results or allowing sub-optimal usage patterns.

Type

No type

Projects

No projects

Milestone

No milestone

Relationships

None yet

Development

No branches or pull requests

Issue actions