Commit 187517b

jxnl and claude[bot] authored
fix(genai): handle response_model=None for GenAI modes (#1694)
Co-authored-by: claude[bot] <209825114+claude[bot]@users.noreply.github.com>
1 parent fb4861a commit 187517b
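
In user-facing terms, the fix makes a call like the following return the provider's raw response instead of failing with "Models.generate_content() got an unexpected keyword argument 'messages'". A minimal sketch, assuming the google-genai SDK and instructor's from_genai wrapper (the model name is illustrative):

import instructor
from google import genai

# Wrap the google-genai client; GENAI_TOOLS is one of the two modes this commit touches.
client = instructor.from_genai(genai.Client(), mode=instructor.Mode.GENAI_TOOLS)

# With response_model=None the raw response is returned, and the OpenAI-style
# messages are converted to GenAI "contents" internally.
response = client.chat.completions.create(
    model="gemini-2.0-flash",  # illustrative model name
    messages=[{"role": "user", "content": "What is the capital of France?"}],
    response_model=None,
)
print(response)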

File tree

2 files changed: +84, -0 lines changed

instructor/process_response.py

Lines changed: 24 additions & 0 deletions
@@ -1110,6 +1110,30 @@ def handle_response_model(
             if system_message:
                 new_kwargs["system"] = system_message
 
+        elif mode in {Mode.GENAI_TOOLS, Mode.GENAI_STRUCTURED_OUTPUTS}:
+            # Handle GenAI mode - convert messages to contents and extract system message
+            from instructor.utils import convert_to_genai_messages, extract_genai_system_message
+
+            # Convert OpenAI-style messages to GenAI-style contents
+            new_kwargs["contents"] = convert_to_genai_messages(messages)
+
+            # Extract multimodal content for GenAI
+            new_kwargs["contents"] = extract_genai_multimodal_content(
+                new_kwargs["contents"], autodetect_images
+            )
+
+            # Handle system message for GenAI
+            if "system" not in new_kwargs:
+                system_message = extract_genai_system_message(messages)
+                if system_message:
+                    from google.genai import types
+                    new_kwargs["config"] = types.GenerateContentConfig(
+                        system_instruction=system_message
+                    )
+
+            # Remove messages since we converted to contents
+            new_kwargs.pop("messages", None)
+
         else:
             if mode in {
                 Mode.RESPONSES_TOOLS,
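
The core of the change is the message conversion: OpenAI-style chat messages become GenAI-style contents, and any system message is moved into GenerateContentConfig(system_instruction=...) instead of staying in the message list. The diff does not show convert_to_genai_messages or extract_genai_system_message themselves (they live in instructor.utils); the following is a simplified, hypothetical sketch of the shape of that conversion, not instructor's actual implementation:

# Hypothetical illustration of the conversion idea only.
def split_system_and_contents(messages: list[dict]) -> tuple[str | None, list[dict]]:
    system_parts: list[str] = []
    contents: list[dict] = []
    for msg in messages:
        if msg["role"] == "system":
            # System prompts are not regular GenAI contents; they belong in
            # GenerateContentConfig(system_instruction=...).
            system_parts.append(msg["content"])
        else:
            # google-genai accepts plain dicts shaped like types.Content.
            role = "model" if msg["role"] == "assistant" else "user"
            contents.append({"role": role, "parts": [{"text": msg["content"]}]})
    return ("\n".join(system_parts) or None), contents

system, contents = split_system_and_contents(
    [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What is the capital of France?"},
    ]
)
# system   -> "You are a helpful assistant."
# contents -> [{"role": "user", "parts": [{"text": "What is the capital of France?"}]}]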
Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@
+"""
+Test cases for GenAI client with response_model=None.
+
+This test verifies that the GenAI client properly handles the case when
+response_model is set to None, ensuring that OpenAI-style messages are
+correctly converted to GenAI-style contents.
+"""
+
+import pytest
+from instructor.mode import Mode
+
+
+@pytest.mark.parametrize("mode", [Mode.GENAI_TOOLS, Mode.GENAI_STRUCTURED_OUTPUTS])
+def test_genai_response_model_none(genai_client, mode):
+    """Test that GenAI client works with response_model=None"""
+
+    # This should not raise a "Models.generate_content() got an unexpected keyword argument 'messages'" error
+    messages = [
+        {
+            "role": "user",
+            "content": "What is the capital of France?"
+        }
+    ]
+
+    # This should work without error and return the raw response
+    response = genai_client.chat.completions.create(
+        messages=messages,
+        response_model=None,
+        mode=mode
+    )
+
+    # We expect to get back a response object, not a parsed model
+    assert response is not None
+    # The response should not be a Pydantic model since response_model=None
+    from pydantic import BaseModel
+    assert not isinstance(response, BaseModel)
+
+
+def test_genai_response_model_none_with_system_message(genai_client):
+    """Test that GenAI client works with response_model=None and system message"""
+
+    messages = [
+        {
+            "role": "system",
+            "content": "You are a helpful assistant."
+        },
+        {
+            "role": "user",
+            "content": "What is the capital of France?"
+        }
+    ]
+
+    # This should work without error and properly extract system message
+    response = genai_client.chat.completions.create(
+        messages=messages,
+        response_model=None,
+        mode=Mode.GENAI_TOOLS
+    )
+
+    assert response is not None
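
Both tests rely on a genai_client fixture that is not part of this diff. A minimal conftest sketch, assuming instructor's from_genai factory and the google-genai SDK (the fixture body, the model name, and passing model= as a default create() kwarg are all assumptions, not part of the commit):

# conftest.py - hypothetical fixture, not included in this commit
import pytest


@pytest.fixture
def genai_client():
    import instructor
    from google import genai  # requires GOOGLE_API_KEY in the environment

    # The tests call create() without a model, so the real fixture presumably
    # binds a default model; model= is assumed here to be accepted as a
    # default kwarg by from_genai.
    return instructor.from_genai(
        genai.Client(),
        mode=instructor.Mode.GENAI_TOOLS,
        model="gemini-2.0-flash",  # illustrative model name
    )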
