1 change: 1 addition & 0 deletions build.gradle.kts
@@ -230,6 +230,7 @@ dependencies {
dokka(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-bedrock-client"))
dokka(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-deepseek-client"))
dokka(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-google-client"))
dokka(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-mistralai-client"))
dokka(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-ollama-client"))
dokka(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-openai-client"))
dokka(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-openai-client-base"))
@@ -24,4 +24,7 @@ internal object ApiKeyService {
val brightDataKey: String
get() = System.getenv("BRIGHT_DATA_KEY")
?: throw IllegalArgumentException("BRIGHT_DATA_KEY env is not set")

val mistralAIApiKey: String
get() = System.getenv("MISTRALAI_API_KEY") ?: throw IllegalArgumentException("MISTRALAI_API_KEY env is not set")
}
@@ -0,0 +1,42 @@
package ai.koog.agents.example.userpaystatus

import ai.koog.agents.core.tools.SimpleTool
import ai.koog.agents.core.tools.annotations.LLMDescription
import kotlinx.serialization.Serializable

private data class Payment(
val transactionId: String,
val customerId: String,
val paymentAmount: Double,
val paymentDate: String,
val paymentStatus: String
)

private val payments = listOf(
Payment("T1001", "C001", 125.50, "2021-10-05", "Paid"),
Payment("T1002", "C002", 89.99, "2021-10-06", "Unpaid"),
Payment("T1003", "C003", 120.00, "2021-10-07", "Paid"),
Payment("T1004", "C002", 54.30, "2021-10-05", "Paid"),
Payment("T1005", "C001", 210.20, "2021-10-08", "Pending")
)

class PaymentStatusTool : SimpleTool<PaymentStatusTool.Args>() {

@Serializable
data class Args(
@property:LLMDescription("The transaction id.")
val transactionId: String
)

override val description: String = "Get payment status of a transaction"

override val argsSerializer = Args.serializer()

override suspend fun doExecute(args: Args): String {
val transaction = payments.firstOrNull { it.transactionId == args.transactionId }
return when {
transaction != null -> "Current state of the payment is:\n${transaction.paymentStatus}"
else -> "Cannot find a payment status for the transaction with id ${args.transactionId}"
}
}
}
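
The tool above follows Koog's `SimpleTool` pattern: a `@Serializable` `Args` class annotated with `@LLMDescription`, a `description` the model sees when deciding whether to call the tool, and a `doExecute` body that returns the observation as a string. As a sketch of how the pattern generalizes (hypothetical, not part of this PR), a companion tool that answers date questions against the same in-memory `payments` list could look like this, assuming it lives in the same file so the private list stays accessible:

```kotlin
// Hypothetical companion tool, mirroring PaymentStatusTool above.
class PaymentDateTool : SimpleTool<PaymentDateTool.Args>() {

    @Serializable
    data class Args(
        @property:LLMDescription("The transaction id.")
        val transactionId: String
    )

    override val description: String = "Get the date a transaction was made"

    override val argsSerializer = Args.serializer()

    override suspend fun doExecute(args: Args): String {
        // Looks up the same in-memory payments list defined alongside PaymentStatusTool.
        val transaction = payments.firstOrNull { it.transactionId == args.transactionId }
        return when {
            transaction != null -> "The payment was made on ${transaction.paymentDate}"
            else -> "Cannot find a payment date for the transaction with id ${args.transactionId}"
        }
    }
}
```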
@@ -0,0 +1,25 @@
package ai.koog.agents.example.userpaystatus

import ai.koog.agents.core.agent.AIAgent
import ai.koog.agents.core.tools.ToolRegistry
import ai.koog.agents.example.ApiKeyService
import ai.koog.prompt.executor.clients.mistralai.MistralAIModels
import ai.koog.prompt.executor.llms.all.simpleMistralAIExecutor
import kotlinx.coroutines.runBlocking

fun main() = runBlocking {
val toolRegistry = ToolRegistry {
tools(listOf(PaymentStatusTool()))
}

val paymentsAgent = AIAgent(
promptExecutor = simpleMistralAIExecutor(ApiKeyService.mistralAIApiKey),
llmModel = MistralAIModels.Chat.MistralMedium31,
temperature = 0.0,
toolRegistry = toolRegistry,
maxIterations = 50,
)
val paymentStatus = paymentsAgent.run("What's the status of my payment? Transaction ID is T1001")

println("User's payment status: $paymentStatus")
}
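
With the hypothetical `PaymentDateTool` sketched earlier registered alongside `PaymentStatusTool`, the same agent setup can also answer date questions. A minimal variant of the example above (same executor, model, and registry API; only the tool list and the query change):

```kotlin
fun main() = runBlocking {
    val toolRegistry = ToolRegistry {
        // PaymentDateTool is the hypothetical tool sketched after PaymentStatusTool.
        tools(listOf(PaymentStatusTool(), PaymentDateTool()))
    }

    val paymentsAgent = AIAgent(
        promptExecutor = simpleMistralAIExecutor(ApiKeyService.mistralAIApiKey),
        llmModel = MistralAIModels.Chat.MistralMedium31,
        temperature = 0.0,
        toolRegistry = toolRegistry,
        maxIterations = 50,
    )

    val answer = paymentsAgent.run("When was transaction T1005 made, and has it been paid?")
    println(answer)
}
```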
1 change: 1 addition & 0 deletions koog-agents/build.gradle.kts
@@ -61,6 +61,7 @@ val included = setOf(
":prompt:prompt-executor:prompt-executor-clients:prompt-executor-bedrock-client",
":prompt:prompt-executor:prompt-executor-clients:prompt-executor-deepseek-client",
":prompt:prompt-executor:prompt-executor-clients:prompt-executor-google-client",
":prompt:prompt-executor:prompt-executor-clients:prompt-executor-mistralai-client",
":prompt:prompt-executor:prompt-executor-clients:prompt-executor-ollama-client",
":prompt:prompt-executor:prompt-executor-clients:prompt-executor-openai-client",
":prompt:prompt-executor:prompt-executor-clients:prompt-executor-openai-client-base",
8 changes: 7 additions & 1 deletion koog-ktor/Module.md
@@ -8,7 +8,7 @@ The `koog-ktor` module provides seamless integration between the Koog AI agents
It includes:

- A Ktor plugin for easy installation and configuration
- Support for multiple LLM providers (OpenAI, Anthropic, Google, OpenRouter, DeepSeek, Ollama)
- Support for multiple LLM providers (OpenAI, Anthropic, Google, MistralAI, OpenRouter, DeepSeek, Ollama)
- Agent configuration with tools, features, and prompt customization
- Extension functions for routes to interact with LLMs and agents
- JVM-specific support for Model Context Protocol (MCP) integration
@@ -33,6 +33,7 @@ koog:
openai.apikey: "$OPENAI_API_KEY:your-openai-api-key"
anthropic.apikey: "$ANTHROPIC_API_KEY:your-anthropic-api-key"
google.apikey: "$GOOGLE_API_KEY:your-google-api-key"
mistral.apikey: "$MISTRALAI_API_KEY:your-mistralai-api-key"
openrouter.apikey: "$OPENROUTER_API_KEY:your-openrouter-api-key"
deepseek.apikey: "$DEEPSEEK_API_KEY:your-deepseek-api-key"
ollama.enabled: "$DEBUG:false"
@@ -51,6 +52,7 @@ fun Application.module() {
anthropic(apiKey = "your-anthropic-api-key")
ollama { baseUrl = "http://localhost:11434" }
google(apiKey = "your-google-api-key")
mistral(apiKey = "your-mistral-api-key")
openRouter(apiKey = "your-openrouter-api-key")
deepSeek(apiKey = "your-deepseek-api-key")
}
@@ -173,6 +175,10 @@ koog:
apikey: "your-google-api-key"
baseUrl: "https://generativelanguage.googleapis.com"

mistral:
apikey: "your-mistral-api-key"
baseUrl: "https://api.mistral.ai"

openrouter:
apikey: "your-openrouter-api-key"
baseUrl: "https://openrouter.ai"
140 changes: 128 additions & 12 deletions koog-ktor/src/commonMain/kotlin/ai/koog/ktor/KoogAgentsConfig.kt
@@ -18,6 +18,8 @@ import ai.koog.prompt.executor.clients.deepseek.DeepSeekClientSettings
import ai.koog.prompt.executor.clients.deepseek.DeepSeekLLMClient
import ai.koog.prompt.executor.clients.google.GoogleClientSettings
import ai.koog.prompt.executor.clients.google.GoogleLLMClient
import ai.koog.prompt.executor.clients.mistralai.MistralAIClientSettings
import ai.koog.prompt.executor.clients.mistralai.MistralAILLMClient
import ai.koog.prompt.executor.clients.openai.OpenAIClientSettings
import ai.koog.prompt.executor.clients.openai.OpenAILLMClient
import ai.koog.prompt.executor.clients.openrouter.OpenRouterClientSettings
@@ -158,7 +160,7 @@ public class KoogAgentsConfig(private val scope: CoroutineScope) {
/**
* Configuration class for managing various Large Language Model (LLM) providers and their settings.
* This class allows integration with different LLM services such as OpenAI,
* Anthropic, Google, OpenRouter, DeepSeek, and Ollama.
* Anthropic, Google, MistralAI, OpenRouter, DeepSeek, and Ollama.
* Users can also define fallback configurations and custom LLM clients.
*/
public inner class LLMConfig {
@@ -193,6 +195,16 @@ public class KoogAgentsConfig(private val scope: CoroutineScope) {
this@KoogAgentsConfig.google(apiKey, configure)
}

/**
* Configures the MistralAI client for the application.
*
* @param apiKey The API key to authenticate with MistralAI services.
* @param configure A lambda to customize the `MistralAIConfig` settings such as base URL, timeouts, or HTTP client configuration.
*/
public fun mistral(apiKey: String, configure: MistralAIConfig.() -> Unit = {}) {
this@KoogAgentsConfig.mistral(apiKey, configure)
}

/**
* Configures and initializes the OpenRouter API with the provided API key and optional configuration.
*
@@ -383,7 +395,7 @@ public class KoogAgentsConfig(private val scope: CoroutineScope) {
*
* @param llmParams The parameters that define the behavior of the language model, such as temperature
* and tool selection. Defaults to an instance of `LLMParams`.
* @param buildPrompt A lambda function that is used to construct the prompt using a `PromptBuilder`.
* @param build A lambda function that is used to construct the prompt using a `PromptBuilder`.
*/
@PromptDSL
public fun prompt(
@@ -522,10 +534,6 @@ public class KoogAgentsConfig(private val scope: CoroutineScope) {
* It allows for customization of base API URL, model versions, API version, timeout settings,
* and the HTTP client used for requests. This class facilitates specifying all necessary
* parameters and settings required to interact with Anthropic's LLM services.
*
* @constructor Creates an instance of AnthropicConfig with a mandatory API key.
*
* @param apiKey The API key used for authenticating requests to the Anthropic API.
*/
public class AnthropicConfig {
/**
@@ -606,10 +614,6 @@ public class KoogAgentsConfig(private val scope: CoroutineScope) {
* integrations with the Google Generative Language API. It allows for
* specifying an API key, configuring timeouts, and setting the base URL
* used for API requests.
*
* @constructor Creates an instance of GoogleConfig with the provided API key.
*
* @param apiKey The API key required to authenticate requests to the Google Generative Language API.
*/
public class GoogleConfig {
/**
@@ -663,12 +667,98 @@ public class KoogAgentsConfig(private val scope: CoroutineScope) {
}
}

/**
* Configuration class for MistralAI integration, providing options to set
* API-specific paths, network timeouts, and base connection settings.
*/
public class MistralAIConfig {

/**
* The base URL for the MistralAI API. This property defines the endpoint that the client
* connects to for making API requests. It is used to construct the full URL for various
* API operations such as chat completions, embeddings, and moderations.
*
* The default value is set to "[MistralAIClientSettings.baseUrl]". This can be overridden for
* custom API endpoints or testing purposes by changing its value.
*/
public var baseUrl: String? = null

/**
* Represents the API path segment used for MistralAI's chat completions endpoint.
*
* This variable can be configured to specify a custom endpoint path when interacting
* with the MistralAI chat completions API. By default, it is set to [MistralAIClientSettings.chatCompletionsPath].
*/
public var chatCompletionsPath: String? = null

/**
* Specifies the API path for embedding operations in the MistralAI API.
*
* This variable determines the endpoint to be used when interacting with
* embedding-related functionalities provided by the MistralAI service.
* By default, it is set to [MistralAIClientSettings.embeddingsPath].
*
* Can be customized to target a different API path if required.
*/
public var embeddingsPath: String? = null

/**
* Represents the API path for the moderation endpoint used in MistralAI API requests.
* This is a constant value and is typically appended to the base URL when making
* requests to moderation-related services.
*
* By default, it is set to [MistralAIClientSettings.moderationPath].
*/
public val moderationPath: String? = null

/**
* A configuration property that defines timeout settings for network interactions with the MistralAI API.
* It specifies limits for request execution time, connection establishment time, and socket operation time.
* These timeout values are represented in [Duration] and provide control over handling delayed or
* unresponsive network operations.
*
* The default values for these timeouts are derived from the [ConnectionTimeoutConfig] class, but can
* be customized through the `timeouts` function in [MistralAIConfig].
*
* Used primarily when configuring a [MistralAIClientSettings] for making API requests.
*/
public var timeoutConfig: ConnectionTimeoutConfig? = null

/**
* Represents the HTTP client used for making network requests to the MistralAI API.
* This client is configurable and can be replaced or customized to meet specific requirements,
* such as adjusting timeouts, adding interceptors, or modifying base client behavior.
* The default implementation initializes with a standard [HttpClient] instance.
*/
public var httpClient: HttpClient = HttpClient()

/**
* Configures custom timeout settings for the MistralAI API client.
*
* This method allows users to specify custom timeout values by providing
* a lambda using the `TimeoutConfiguration` class. The configured timeouts
* will then be used for API requests, including request timeout, connection
* timeout, and socket timeout.
*
* @param configure A lambda with the [TimeoutConfiguration] receiver to define
* custom timeout values for request, connection, and socket operations.
*/
public fun timeouts(configure: TimeoutConfiguration.() -> Unit) {
timeoutConfig = with(TimeoutConfiguration()) {
configure()
ConnectionTimeoutConfig(
requestTimeout.inWholeMilliseconds,
connectTimeout.inWholeMilliseconds,
socketTimeout.inWholeMilliseconds
)
}
}
}

/**
* OpenRouterConfig is a configuration class for setting up the OpenRouter client.
* It manages essential parameters such as API key, base URL, connection timeout settings,
* and the HTTP client used for requests.
*
* @property apiKey The API key used for authenticating with the OpenRouter service.
*/
public class OpenRouterConfig {
/**
@@ -900,6 +990,32 @@ public class KoogAgentsConfig(private val scope: CoroutineScope) {
addLLMClient(LLMProvider.Google, client)
}

/**
* Configures and initializes a MistralAI LLM client.
*
* @param apiKey The API key used for authenticating with the MistralAI API.
* @param configure A lambda receiver to customize the MistralAI configuration such as base URL, timeout settings, and paths.
*/
internal fun mistral(apiKey: String, configure: MistralAIConfig.() -> Unit) {
val client = with(MistralAIConfig()) {
configure()
val defaults = MistralAIClientSettings()

MistralAILLMClient(
apiKey = apiKey,
settings = MistralAIClientSettings(
baseUrl = baseUrl ?: defaults.baseUrl,
chatCompletionsPath = chatCompletionsPath ?: defaults.chatCompletionsPath,
embeddingsPath = embeddingsPath ?: defaults.embeddingsPath,
moderationPath = moderationPath ?: defaults.moderationPath,
timeoutConfig = timeoutConfig ?: defaults.timeoutConfig,
),
baseClient = httpClient
)
}
addLLMClient(LLMProvider.MistralAI, client)
}

/**
* Configures and integrates an OpenRouter client into the system using the provided API key and configuration.
*
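
Putting the new configuration surface together, a sketch of enabling the Mistral provider from Ktor application code might look like the following. The `install(Koog)` entry point and the enclosing `llm { ... }` block are assumptions based on the koog-ktor module documentation rather than this diff; the `mistral`, `baseUrl`, and `timeouts` calls are the ones added above, and unset values fall back to `MistralAIClientSettings` defaults:

```kotlin
import io.ktor.server.application.Application
import io.ktor.server.application.install
import kotlin.time.Duration.Companion.seconds

fun Application.module() {
    install(Koog) {                                   // assumed plugin entry point
        llm {
            mistral(apiKey = System.getenv("MISTRALAI_API_KEY")) {
                baseUrl = "https://api.mistral.ai"    // optional override
                timeouts {
                    requestTimeout = 90.seconds
                    connectTimeout = 10.seconds
                    socketTimeout = 90.seconds
                }
            }
        }
    }
}
```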
@@ -10,18 +10,17 @@ import kotlin.time.Duration.Companion.milliseconds
/**
* Loads and configures the environment-specific settings for Koog agents based on the provided
* application configuration.
* This includes setup for OpenAI, Anthropic, Google, OpenRouter, DeepSeek,
* This includes setup for OpenAI, Anthropic, Google, MistralAI, OpenRouter, DeepSeek,
* Ollama, as well as default and fallback LLM (Large Language Model) configurations.
*
* @param envConfig The application configuration that contains environment-specific properties
* for configuring Koog agents and associated integrations.
* @return A populated instance of [KoogAgentsConfig] with the environment-specific settings applied.
*/
internal fun ApplicationEnvironment.loadAgentsConfig(scope: CoroutineScope): KoogAgentsConfig {
val koogConfig = KoogAgentsConfig(scope)
.openAI(config)
.anthropic(config)
.google(config)
.mistral(config)
.openrouter(config)
.deepSeek(config)

@@ -37,6 +36,7 @@ internal fun ApplicationEnvironment.loadAgentsConfig(scope: CoroutineScope): KoogAgentsConfig {
"openai" -> LLMProvider.OpenAI
"anthropic" -> LLMProvider.Anthropic
"google" -> LLMProvider.Google
"mistral" -> LLMProvider.MistralAI
"openrouter" -> LLMProvider.OpenRouter
"ollama" -> LLMProvider.Ollama
"deepseek" -> LLMProvider.DeepSeek
@@ -108,6 +108,14 @@ private fun KoogAgentsConfig.google(envConfig: ApplicationConfig) =
}
}

private fun KoogAgentsConfig.mistral(envConfig: ApplicationConfig) =
config(envConfig, "koog.mistral") { apiKey, baseUrlOrNull ->
mistral(apiKey) {
baseUrlOrNull?.let { baseUrl = it }
timeouts { configure("koog.mistral.timeout", envConfig) }
}
}

private fun KoogAgentsConfig.openAI(envConfig: ApplicationConfig) =
config(envConfig, "koog.openai") { apiKey, baseUrlOrNull ->
openAI(apiKey) {