
Commit b9ab5be

CR fixes: place tool call adding logic on the level above
1 parent f54fd6b commit b9ab5be

File tree (3 files changed: +53 -36 lines)

  • agents/agents-core/src/commonMain/kotlin/ai/koog/agents/core/dsl/extension
  • prompt
    • prompt-executor/prompt-executor-clients/prompt-executor-openai-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/openai
    • prompt-model/src/commonMain/kotlin/ai/koog/prompt/dsl


agents/agents-core/src/commonMain/kotlin/ai/koog/agents/core/dsl/extension/AIAgentNodes.kt

Lines changed: 0 additions & 21 deletions
@@ -450,21 +450,6 @@ public fun AIAgentSubgraphBuilderBase<*, *>.nodeExecuteMultipleToolsAndSendResul
         }
 
         llm.writeSession {
-            /*
-             Ensure all originating tool-call messages exist in the prompt before adding results.
-             This is important when providers concatenate tool names/args and we normalize/split them,
-             producing synthesized calls that were not part of the original prompt history.
-             */
-            val existingCallIds = prompt.messages.filterIsInstance<Message.Tool.Call>().map { it.id }.toSet()
-            val missingCalls = toolCalls.filter { it.id !in existingCallIds }
-            if (missingCalls.isNotEmpty()) {
-                appendPrompt {
-                    tool {
-                        missingCalls.forEach { call(it) }
-                    }
-                }
-            }
-
             appendPrompt {
                 tool {
                     results.forEach { result(it) }
@@ -486,14 +471,8 @@ public fun AIAgentSubgraphBuilderBase<*, *>.nodeLLMSendMultipleToolResults(
 ): AIAgentNodeDelegate<List<ReceivedToolResult>, List<Message.Response>> =
     node(name) { results ->
         llm.writeSession {
-            /*
-             Ensure corresponding tool-call messages are present before adding results.
-             */
-            val existingCallIds = prompt.messages.filterIsInstance<Message.Tool.Call>().map { it.id }.toSet()
-            val missingCalls = results.filter { it.id !in existingCallIds }
             appendPrompt {
                 tool {
-                    missingCalls.forEach { call(it.id, it.tool, it.toolArgs.toString()) }
                     results.forEach { result(it) }
                 }
             }

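Both deleted blocks implemented the same guard at the node level: collect the ids of tool-call messages already present in the prompt, then synthesize calls for any results that have no match. The sketch below isolates that pattern with simplified stand-in types (FakeCall, FakeResult, and findMissingCalls are illustrative assumptions, not Koog's actual Message API); the commit removes this duplication from the nodes because PromptBuilder.result() now performs the equivalent check itself, as shown in the last file of this diff.

// Hypothetical, simplified stand-ins for Message.Tool.Call / ReceivedToolResult,
// used only to illustrate the duplicated check that this commit removes.
data class FakeCall(val id: String?, val tool: String)
data class FakeResult(val id: String?, val tool: String, val content: String)

// The pattern both nodes used before this commit: collect the ids of calls already in
// the prompt history, then keep only the results whose originating call is missing.
fun findMissingCalls(history: List<FakeCall>, results: List<FakeResult>): List<FakeResult> {
    val existingCallIds = history.map { it.id }.toSet()
    return results.filter { it.id !in existingCallIds }
}

fun main() {
    val history = listOf(FakeCall("call-1", "search"))
    val results = listOf(
        FakeResult("call-1", "search", "ok"),
        FakeResult("call-2", "calculator", "42") // no matching call in history
    )
    // Only the "call-2" result would have triggered a synthesized call at the node level.
    println(findMissingCalls(history, results).map { it.id }) // [call-2]
}
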
prompt/prompt-executor/prompt-executor-clients/prompt-executor-openai-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/openai/OpenAILLMClient.kt

Lines changed: 24 additions & 11 deletions
@@ -56,6 +56,9 @@ import io.github.oshai.kotlinlogging.KotlinLogging
 import io.ktor.client.HttpClient
 import kotlinx.coroutines.CancellationException
 import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.async
+import kotlinx.coroutines.awaitAll
+import kotlinx.coroutines.coroutineScope
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.filterNotNull
 import kotlinx.coroutines.withContext
@@ -371,16 +374,20 @@
             is OpenAIChatParams -> super.executeMultipleChoices(prompt, model, tools)
 
             is OpenAIResponsesParams -> {
-                // Responses API does not currently expose a native "n" parameter,
-                // so we issue multiple independent responses and aggregate them.
-                // This path is required for models like gpt-5.1-codex that only
-                // support the Responses endpoint and return 404 on Chat Completions.
+                /*
+                 Responses API does not currently expose a native "n" parameter,
+                 so we issue multiple independent responses and aggregate them.
+                 This path is required for models like gpt-5.1-codex that only
+                 support the Responses endpoint and return 404 on Chat Completions.
+                 */
                 val choices = (params.numberOfChoices ?: 1).coerceAtLeast(1)
-                buildList {
-                    repeat(choices) {
-                        val response = getResponseWithResponsesAPI(prompt, params, model, tools)
-                        add(processResponsesAPIResponse(response))
-                    }
+                coroutineScope {
+                    List(choices) {
+                        async {
+                            val response = getResponseWithResponsesAPI(prompt, params, model, tools)
+                            processResponsesAPIResponse(response)
+                        }
+                    }.awaitAll()
                 }
             }
         }
@@ -769,7 +776,10 @@
                     add(InputContent.File(fileData = fileData, fileUrl = fileUrl, filename = part.fileName))
                 }
 
-                else -> throw LLMClientException(clientName, "Unsupported attachment type: $part, for model: $model with Responses API")
+                else -> throw LLMClientException(
+                    clientName,
+                    "Unsupported attachment type: $part, for model: $model with Responses API"
+                )
             }
         }
     }
@@ -811,7 +821,10 @@
                 metaInfo = metaInfo
             )
 
-            else -> throw LLMClientException(clientName, "Unexpected response from $clientName: no tool calls and no content")
+            else -> throw LLMClientException(
+                clientName,
+                "Unexpected response from $clientName: no tool calls and no content"
+            )
         }
     }
 }

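The substantive change here is the switch from a sequential buildList/repeat loop to a concurrent fan-out with coroutineScope, async, and awaitAll. A minimal standalone sketch of the same pattern (fetchChoice is a hypothetical stand-in for getResponseWithResponsesAPI plus processResponsesAPIResponse, not the client's real API) shows why this keeps choices in launch order while the independent Responses API requests run in parallel, so the wall-clock cost is roughly one request instead of numberOfChoices requests. If any request fails, coroutineScope cancels the remaining siblings and rethrows, which matches structured-concurrency expectations.

import kotlinx.coroutines.async
import kotlinx.coroutines.awaitAll
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.runBlocking

// Stand-in for one "get response + process it" round trip: an independent
// suspendable request that takes some time.
suspend fun fetchChoice(index: Int): String {
    delay(100) // simulate network latency
    return "choice-$index"
}

fun main() = runBlocking {
    val choices = 4
    // Same shape as the new code path: launch one async per requested choice,
    // then awaitAll() to aggregate. Results come back in launch order, and the
    // total wall-clock time is roughly one request, not `choices` requests.
    val responses = coroutineScope {
        List(choices) { index ->
            async { fetchChoice(index) }
        }.awaitAll()
    }
    println(responses) // [choice-0, choice-1, choice-2, choice-3]
}
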
prompt/prompt-model/src/commonMain/kotlin/ai/koog/prompt/dsl/PromptBuilder.kt

Lines changed: 29 additions & 4 deletions
@@ -259,14 +259,39 @@ public class PromptBuilder internal constructor(
          *
          * Tool results represent the output from executing a tool.
          *
+         * This method ensures that the corresponding tool call message exists in the prompt
+         * before adding the result. If the tool call is missing, it will be synthesized and
+         * added to maintain proper conversation flow.
+         *
+         * Problematic cases could potentially occur when:
+         * 1. LLM providers concatenate tool names/args and normalize/split them, producing
+         *    synthesized calls that were not part of the original prompt history
+         * 2. Tool calls with null IDs get processed separately
+         * 3. Parallel tool execution results arrive before calls are recorded in the prompt
+         *
          * @param result The tool result message to add
          */
         public fun result(result: Message.Tool.Result) {
-            this@PromptBuilder.messages
+            val existingCallIndex = this@PromptBuilder.messages
                 .indexOfLast { it is Message.Tool.Call && it.id == result.id }
-                .takeIf { it != -1 }
-                ?.let { index -> this@PromptBuilder.messages.add(index + 1, result) }
-                ?: throw IllegalStateException("Failed to add tool result: no call message with id ${result.id}")
+
+            if (existingCallIndex != -1) {
+                // Normal case: a corresponding tool call exists, so we just add its result after it
+                this@PromptBuilder.messages.add(existingCallIndex + 1, result)
+            } else {
+                // Missing tool call case: synthesize the call message so every tool result is preceded by its originating tool call
+                if (result.id != null) {
+                    val synthesizedCall = Message.Tool.Call(
+                        id = result.id,
+                        tool = result.tool,
+                        content = "Synthesized call for result",
+                        metaInfo = ResponseMetaInfo.create(clock)
+                    )
+                    this@PromptBuilder.messages.add(synthesizedCall)
+                }
+                // Add the result message at the end, after the synthetic tool call
+                this@PromptBuilder.messages.add(result)
+            }
         }
 
         /**

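Taken together with the node changes above, the observable contract of result() is now: if a matching tool call is already in the prompt, the result is inserted immediately after it; otherwise a call is synthesized (when the result carries an id) and both are appended at the end, instead of throwing IllegalStateException as before. The standalone sketch below reproduces that behavior with simplified message types (Msg, ToolCall, ToolResult, and PromptSketch are illustrative assumptions, not Koog's actual Message hierarchy or builder).

// Simplified stand-ins for Message.Tool.Call / Message.Tool.Result, used only to
// illustrate the insertion/synthesis behavior that result() now implements.
sealed interface Msg
data class ToolCall(val id: String?, val tool: String, val content: String) : Msg
data class ToolResult(val id: String?, val tool: String, val content: String) : Msg

class PromptSketch {
    val messages = mutableListOf<Msg>()

    fun call(call: ToolCall) { messages += call }

    fun result(result: ToolResult) {
        val existingCallIndex = messages.indexOfLast { it is ToolCall && it.id == result.id }
        if (existingCallIndex != -1) {
            // Normal case: insert the result right after its originating call.
            messages.add(existingCallIndex + 1, result)
        } else {
            // Missing-call case: synthesize a call first (only possible with a non-null id),
            // then append the result, so every result is preceded by a call.
            if (result.id != null) {
                messages.add(ToolCall(result.id, result.tool, "Synthesized call for result"))
            }
            messages.add(result)
        }
    }
}

fun main() {
    val prompt = PromptSketch()
    prompt.call(ToolCall("call-1", "search", "{\"query\": \"koog\"}"))
    prompt.result(ToolResult("call-2", "calculator", "42")) // no matching call: synthesized
    prompt.result(ToolResult("call-1", "search", "ok"))     // matching call: inserted after it
    prompt.messages.forEach(::println)
}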