28 changes: 0 additions & 28 deletions core/src/llm_tracekit/core/_extended_gen_ai_attributes.py
@@ -45,34 +45,6 @@
The parameters of the tool function in JSON format.
"""

GEN_AI_OPENAI_REQUEST_TOOLS_TYPE: Final = (
"gen_ai.openai.request.tools.{tool_index}.type"
)
"""
The type of the tool. Expected to be `function`.
"""

GEN_AI_OPENAI_REQUEST_TOOLS_FUNCTION_NAME: Final = (
"gen_ai.openai.request.tools.{tool_index}.function.name"
)
"""
The name of the tool function.
"""

GEN_AI_OPENAI_REQUEST_TOOLS_FUNCTION_DESCRIPTION: Final = (
"gen_ai.openai.request.tools.{tool_index}.function.description"
)
"""
The description of the tool function.
"""

GEN_AI_OPENAI_REQUEST_TOOLS_FUNCTION_PARAMETERS: Final = (
"gen_ai.openai.request.tools.{tool_index}.function.parameters"
)
"""
The parameters of the tool function in JSON format.
"""

GEN_AI_BEDROCK_AGENT_ALIAS_ID: Final = "gen_ai.bedrock.agent_alias.id"
"""
The ID of the Bedrock agent alias.
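
The deleted constants above (and their retained counterparts without the `openai` segment) are attribute-name templates: each embeds a `{tool_index}` placeholder that is filled in per tool when a request is instrumented. A minimal sketch of how such a template expands; the `gen_ai.request.tools.*` values are assumed from the README table further down, since the new constant definitions are collapsed out of this hunk:

```python
from typing import Final

# Assumed stand-ins for the renamed constants; the real definitions live in
# core/src/llm_tracekit/core/_extended_gen_ai_attributes.py and are not shown here.
GEN_AI_REQUEST_TOOLS_TYPE: Final = "gen_ai.request.tools.{tool_index}.type"
GEN_AI_REQUEST_TOOLS_FUNCTION_NAME: Final = "gen_ai.request.tools.{tool_index}.function.name"

# Each tool in a request gets its own indexed set of attribute keys.
print(GEN_AI_REQUEST_TOOLS_TYPE.format(tool_index=0))           # gen_ai.request.tools.0.type
print(GEN_AI_REQUEST_TOOLS_FUNCTION_NAME.format(tool_index=1))  # gen_ai.request.tools.1.function.name
```
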
9 changes: 4 additions & 5 deletions instrumentations/openai/README.md
@@ -95,7 +95,6 @@ response = client.chat.completions.create(
### Changes from OpenTelemetry
#### General
* The `user` parameter in the OpenAI Chat Completions API is now recorded in the span as the `gen_ai.openai.request.user` attribute (see the example below)
* The `tools` parameter in the OpenAI Chat Completions API is now recorded in the span as the `gen_ai.openai.request.tools` attributes.
* User prompts and model responses are captured as span attributes instead of log events (see [Semantic Conventions](#semantic-conventions) below)
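
As a quick illustration of the `user` bullet above; the client setup, model name, and identifier are placeholders rather than part of this README:

```python
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment
response = client.chat.completions.create(
    model="gpt-4o-mini",  # illustrative model
    messages=[{"role": "user", "content": "What is the weather in Seattle?"}],
    user="user-1234",
)
# With instrumentation enabled, the resulting span is expected to carry
#   gen_ai.openai.request.user = "user-1234"
# alongside the prompt/response content attributes described in the
# Semantic Conventions tables below.
```
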
#### For OpenAI Agents SDK
* Agent & Tool Spans: Creates dedicated spans for each agent execution and for each tool call, providing clear visibility into the agent's inner workings.
@@ -118,15 +117,15 @@ response = client.chat.completions.create(
| `gen_ai.completion.<choice_number>.tool_calls.<tool_call_number>.type` | string | Type of tool call in choice <choice_number> | `function`
| `gen_ai.completion.<choice_number>.tool_calls.<tool_call_number>.function.name` | string | The name of the function used in tool call within choice <choice_number> | `get_current_weather`
| `gen_ai.completion.<choice_number>.tool_calls.<tool_call_number>.function.arguments` | string | Arguments passed to the function used in tool call within choice <choice_number> | `{"location": "Seattle, WA"}`
| `gen_ai.request.tools.<tool_number>.type` | string | Type of tool entry in tools list | `function`
| `gen_ai.request.tools.<tool_number>.function.name` | string | The name of the function to use in tool calls | `get_current_weather`
| `gen_ai.request.tools.<tool_number>.function.description` | string | Description of the function | `Get the current weather in a given location`
| `gen_ai.request.tools.<tool_number>.function.parameters` | string | JSON describing the schema of the function parameters | `{"type": "object", "properties": {"location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"}, "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}}, "required": ["location"]}`
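
A sketch of how one entry in the request's `tools` list maps onto these indexed attributes; the tool definition follows the standard OpenAI tools shape, and the expected attribute values are shown as comments:

```python
import json

tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {"location": {"type": "string"}},
                "required": ["location"],
            },
        },
    }
]
# Expected span attributes for tool index 0 (parameters are JSON-encoded):
#   gen_ai.request.tools.0.type                 = "function"
#   gen_ai.request.tools.0.function.name        = "get_current_weather"
#   gen_ai.request.tools.0.function.description = "Get the current weather in a given location"
#   gen_ai.request.tools.0.function.parameters  = json.dumps(tools[0]["function"]["parameters"])
```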

### OpenAI specific attributes
| Attribute | Type | Description | Examples
| --------- | ---- | ----------- | --------
| `gen_ai.openai.request.user` | string | A unique identifier representing the end-user | `[email protected]`
| `gen_ai.openai.request.tools.<tool_number>.type` | string | Type of tool entry in tools list | `function`
| `gen_ai.openai.request.tools.<tool_number>.function.name` | string | The name of the function to use in tool calls | `get_current_weather`
| `gen_ai.openai.request.tools.<tool_number>.function.description` | string | Description of the function | `Get the current weather in a given location`
| `gen_ai.openai.request.tools.<tool_number>.function.parameters` | string | JSON describing the schema of the function parameters | `{"type": "object", "properties": {"location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"}, "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}}, "required": ["location"]}`

#### Function spans
These spans represent the execution of a tool (a Python function).
8 changes: 4 additions & 4 deletions instrumentations/openai/src/llm_tracekit/openai/utils.py
@@ -216,26 +216,26 @@ def get_llm_request_attributes(kwargs, client_instance, capture_content: bool):
continue

attributes[
ExtendedGenAIAttributes.GEN_AI_OPENAI_REQUEST_TOOLS_TYPE.format(
ExtendedGenAIAttributes.GEN_AI_REQUEST_TOOLS_TYPE.format(
tool_index=index
)
] = tool.get("type", "function")
function = tool.get("function")
if function is not None and isinstance(function, Mapping):
attributes[
ExtendedGenAIAttributes.GEN_AI_OPENAI_REQUEST_TOOLS_FUNCTION_NAME.format(
ExtendedGenAIAttributes.GEN_AI_REQUEST_TOOLS_FUNCTION_NAME.format(
tool_index=index
)
] = function.get("name")
attributes[
ExtendedGenAIAttributes.GEN_AI_OPENAI_REQUEST_TOOLS_FUNCTION_DESCRIPTION.format(
ExtendedGenAIAttributes.GEN_AI_REQUEST_TOOLS_FUNCTION_DESCRIPTION.format(
tool_index=index
)
] = function.get("description")
function_parameters = function.get("parameters")
if function_parameters is not None:
attributes[
ExtendedGenAIAttributes.GEN_AI_OPENAI_REQUEST_TOOLS_FUNCTION_PARAMETERS.format(
ExtendedGenAIAttributes.GEN_AI_REQUEST_TOOLS_FUNCTION_PARAMETERS.format(
tool_index=index
)
] = json.dumps(function_parameters)
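
A self-contained sketch of what the loop above produces, with the renamed `ExtendedGenAIAttributes` constants approximated by literal templates; their exact values are assumed to follow the `gen_ai.request.tools.*` naming documented in the README:

```python
import json
from collections.abc import Mapping

# Assumed literal values for the renamed constants.
TOOLS_TYPE = "gen_ai.request.tools.{tool_index}.type"
TOOLS_FUNCTION_NAME = "gen_ai.request.tools.{tool_index}.function.name"
TOOLS_FUNCTION_DESCRIPTION = "gen_ai.request.tools.{tool_index}.function.description"
TOOLS_FUNCTION_PARAMETERS = "gen_ai.request.tools.{tool_index}.function.parameters"


def flatten_tools(tools) -> dict:
    """Mirror of the loop above: one indexed attribute set per tool entry."""
    attributes = {}
    for index, tool in enumerate(tools):
        if not isinstance(tool, Mapping):
            continue
        attributes[TOOLS_TYPE.format(tool_index=index)] = tool.get("type", "function")
        function = tool.get("function")
        if isinstance(function, Mapping):
            attributes[TOOLS_FUNCTION_NAME.format(tool_index=index)] = function.get("name")
            attributes[TOOLS_FUNCTION_DESCRIPTION.format(tool_index=index)] = function.get("description")
            parameters = function.get("parameters")
            if parameters is not None:
                attributes[TOOLS_FUNCTION_PARAMETERS.format(tool_index=index)] = json.dumps(parameters)
    return attributes


print(flatten_tools([{"type": "function", "function": {"name": "get_current_weather"}}]))
# {'gen_ai.request.tools.0.type': 'function',
#  'gen_ai.request.tools.0.function.name': 'get_current_weather',
#  'gen_ai.request.tools.0.function.description': None}
```
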
@@ -146,7 +146,7 @@ async def test_async_chat_completion_bad_endpoint(span_exporter, instrument_no_c
spans = span_exporter.get_finished_spans()
assert_all_attributes(spans[0], llm_model_value, server_address="localhost")
assert 4242 == spans[0].attributes[ServerAttributes.SERVER_PORT]
assert "APIConnectionError" == spans[0].attributes[ErrorAttributes.ERROR_TYPE]
assert "APITimeoutError" == spans[0].attributes[ErrorAttributes.ERROR_TYPE]


@pytest.mark.vcr()
4 changes: 2 additions & 2 deletions instrumentations/openai/tests/test_chat_completions.py
@@ -146,7 +146,7 @@ def test_chat_completion_bad_endpoint(
spans = span_exporter.get_finished_spans()
assert_all_attributes(spans[0], llm_model_value, server_address="localhost")
assert 4242 == spans[0].attributes[ServerAttributes.SERVER_PORT]
assert "APIConnectionError" == spans[0].attributes[ErrorAttributes.ERROR_TYPE]
assert "APITimeoutError" == spans[0].attributes[ErrorAttributes.ERROR_TYPE]

metrics = metric_reader.get_metrics_data().resource_metrics
assert len(metrics) == 1
@@ -164,7 +164,7 @@ def test_chat_completion_bad_endpoint(
assert duration_metric.data.data_points[0].sum > 0
assert (
duration_metric.data.data_points[0].attributes[ErrorAttributes.ERROR_TYPE]
== "APIConnectionError"
== "APITimeoutError"
)
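
These assertions check the exception class name recorded on the span and on the duration metric. A hedged illustration of how such an error can be provoked and what class name to expect; the base URL, timeout, and model are arbitrary, and depending on how the connection fails the broader `APIConnectionError` (of which `APITimeoutError` is a subclass) may be raised instead:

```python
import openai

client = openai.OpenAI(
    base_url="http://localhost:4242/v1",  # unreachable test endpoint
    api_key="unused",
    timeout=0.1,
    max_retries=0,
)
try:
    client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": "hi"}],
    )
except openai.APIConnectionError as exc:  # APITimeoutError subclasses this
    # The instrumentation is expected to record the concrete class name,
    # e.g. "APITimeoutError", as the error.type span/metric attribute.
    print(type(exc).__qualname__)
```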

