Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

[project]
name = "lmnr"
version = "0.7.17"
version = "0.7.18"
description = "Python SDK for Laminar"
authors = [
{ name = "lmnr.ai", email = "[email protected]" }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -206,15 +206,17 @@ def _set_request_attributes(span, args, kwargs):
contents = [contents]
for content in contents:
processed_content = process_content_union(content)
content_str = get_content(processed_content)
content_payload = get_content(processed_content)
if isinstance(content_payload, dict):
content_payload = [content_payload]

set_span_attribute(
span,
f"{gen_ai_attributes.GEN_AI_PROMPT}.{i}.content",
(
content_str
if isinstance(content_str, str)
else json_dumps(content_str)
content_payload
if isinstance(content_payload, str)
else json_dumps(content_payload)
),
)
blocks = (
Expand Down Expand Up @@ -318,20 +320,22 @@ def _set_response_attributes(span, response: types.GenerateContentResponse):
for candidate in candidates_list:
has_content = False
processed_content = process_content_union(candidate.content)
content_str = get_content(processed_content)
content_payload = get_content(processed_content)
if isinstance(content_payload, dict):
content_payload = [content_payload]

set_span_attribute(
span, f"{gen_ai_attributes.GEN_AI_COMPLETION}.{i}.role", "model"
)
if content_str:
if content_payload:
has_content = True
set_span_attribute(
span,
f"{gen_ai_attributes.GEN_AI_COMPLETION}.{i}.content",
(
content_str
if isinstance(content_str, str)
else json_dumps(content_str)
content_payload
if isinstance(content_payload, str)
else json_dumps(content_payload)
),
)
blocks = (
Expand Down
6 changes: 3 additions & 3 deletions src/lmnr/sdk/laminar.py
Original file line number Diff line number Diff line change
Expand Up @@ -434,17 +434,17 @@ def foo(span):
with Laminar.use_span(span):
with Laminar.start_as_current_span("foo_inner"):
some_function()

def bar(span):
with Laminar.use_span(span):
openai_client.chat.completions.create()

span = Laminar.start_span("outer")
foo(span)
bar(span)
# IMPORTANT: End the span manually
span.end()

# Results in:
# | outer
# | | foo
Expand Down
2 changes: 1 addition & 1 deletion src/lmnr/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from packaging import version


__version__ = "0.7.17"
__version__ = "0.7.18"
PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}"


Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
interactions:
- request:
body: '{"contents": [{"parts": [{"text": "What is the capital of France?"}], "role":
"user"}], "systemInstruction": {"parts": [{"text": "Be concise and to the point.
Use tools as much as possible."}], "role": "user"}, "generationConfig": {}}'
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate, zstd
connection:
- keep-alive
content-length:
- '234'
content-type:
- application/json
host:
- generativelanguage.googleapis.com
user-agent:
- google-genai-sdk/1.34.0 gl-python/3.13.5
x-goog-api-client:
- google-genai-sdk/1.34.0 gl-python/3.13.5
method: POST
uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash-preview-05-20:generateContent
response:
body:
string: !!binary |
H4sIAAAAAAAC/2WQXU+DMBSG7/srSK+HItsS9NaPZIlGVPxIjDHVnkEzaLE96AzZf7eFwUrsRdOe
9+3peZ+WBAH9ZJILzhAMPQtebSUI2m53mpIIEq0wlGyxZhoP3n613tlaELbuEU2ZFoZ62m48v80O
HbUqwdkrxaEc7LvBQNdCClPcAzNKOttDdpvSURWSw9aWIzJ80LWmjWE53AAym42NCWitVVVjpjYg
z1XTZYvnfTMPxUQ/2cuokJUTZZHM/nU1F/ZPUfqEPHg2IisF/roc2eVLRj0MOB1q4EA8XBQL1eQF
TgeMF2TPq0f4BNqInlV3N8c5VBZiGB8tw3XJTBHWGr4F/ITRMoyjbgiqwdRKGlhx91A9zxu2+oqu
knfc8Mf0tLn+SO4iSnbkD5aoXE83AgAA
headers:
Alt-Svc:
- h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
Content-Encoding:
- gzip
Content-Type:
- application/json; charset=UTF-8
Date:
- Tue, 14 Oct 2025 15:34:57 GMT
Server:
- scaffolding on HTTPServer2
Server-Timing:
- gfet4t7; dur=716
Transfer-Encoding:
- chunked
Vary:
- Origin
- X-Origin
- Referer
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- '0'
status:
code: 200
message: OK
version: 1
36 changes: 34 additions & 2 deletions tests/test_google_genai.py
Original file line number Diff line number Diff line change
Expand Up @@ -677,7 +677,7 @@ def test_google_genai_reasoning_tokens_with_include_thoughts(
assert span_output[1]["text"] == response.text


@pytest.mark.vcr(record_mode="once")
@pytest.mark.vcr
@pytest.mark.asyncio
async def test_google_genai_reasoning_tokens_async(span_exporter: InMemorySpanExporter):
client = Client(api_key="123")
Expand Down Expand Up @@ -726,7 +726,7 @@ async def test_google_genai_reasoning_tokens_async(span_exporter: InMemorySpanEx
)


@pytest.mark.vcr(record_mode="once")
@pytest.mark.vcr
@pytest.mark.asyncio
async def test_google_genai_reasoning_tokens_with_include_thoughts_async(
span_exporter: InMemorySpanExporter,
Expand Down Expand Up @@ -784,6 +784,38 @@ async def test_google_genai_reasoning_tokens_with_include_thoughts_async(
assert span_output[1]["text"] == response.text


@pytest.mark.vcr
def test_google_genai_string_contents(span_exporter: InMemorySpanExporter):
# The actual key was used during recording and the request/response was saved
# to the VCR cassette.
client = Client(api_key="123")
system_instruction = "Be concise and to the point. Use tools as much as possible."
response = client.models.generate_content(
model="gemini-2.5-flash-preview-05-20",
contents="What is the capital of France?",
config=types.GenerateContentConfig(
system_instruction={"text": system_instruction},
),
)
spans = span_exporter.get_finished_spans()
assert len(spans) == 1
assert spans[0].name == "gemini.generate_content"
assert spans[0].attributes["gen_ai.prompt.0.content"] == system_instruction
assert spans[0].attributes["gen_ai.prompt.0.role"] == "system"
assert json.loads(spans[0].attributes["gen_ai.prompt.1.content"]) == [
{
"type": "text",
"text": "What is the capital of France?",
}
]
assert spans[0].attributes["gen_ai.prompt.1.role"] == "user"
assert spans[0].attributes["gen_ai.completion.0.role"] == "model"

span_output = json.loads(spans[0].attributes["gen_ai.completion.0.content"])
assert span_output[0]["type"] == "text"
assert span_output[0]["text"] == response.parts[0].text


def test_google_genai_error(span_exporter: InMemorySpanExporter):
# Invalid key on purpose
client = Client(api_key="123")
Expand Down