Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion gradle.properties
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
pluginName=Cognotik - Open Source Agentic Power Tools
pluginRepositoryUrl=https://github.com/SimiaCryptus/Cognotik
libraryGroup=com.cognotik
libraryVersion=2.0.35
libraryVersion=2.0.36
gradleVersion=8.13
org.gradle.caching=true
org.gradle.configureondemand=false
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,5 +191,4 @@ inline fun <reified T : Any> Any.parserCast(
model = model,
parsingChatter = model,
describer = describer
).getParser().apply(this.toJson())

).getParser().apply(this.toJson())
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
package com.simiacryptus.cognotik.agents

import com.simiacryptus.cognotik.chat.model.ChatInterface
import com.simiacryptus.cognotik.describe.AbbrevWhitelistYamlDescriber
import com.simiacryptus.cognotik.describe.TypeDescriber
import com.simiacryptus.cognotik.models.ModelSchema
import com.simiacryptus.cognotik.util.JsonUtil
import com.simiacryptus.cognotik.util.LoggerFactory
import com.simiacryptus.cognotik.util.toContentList

/**
 * An agent that sends image+text prompts to a chat model and deserializes the
 * reply into a strongly-typed result object.
 *
 * The system message instructs the model to answer in JSON matching the schema
 * produced by [describer] for [resultClass]; the reply text is fetched eagerly,
 * but deserialization into [T] happens lazily on first access of the result.
 *
 * @param resultClass target type for deserialization; must be non-null (enforced in init).
 * @param exampleInstance optional example instance; defaults to instantiating
 *   [resultClass] via its no-arg constructor (NOTE(review): this requires a public
 *   no-arg constructor on every result type — confirm with callers).
 * @param prompt system prompt prepended to the JSON-schema instructions.
 * @param name agent name; defaults to the result class's simple name.
 * @param model chat interface used to obtain responses.
 * @param temperature sampling temperature for the chat request.
 * @param validation flag carried through [withModel] copies; not read in this class.
 * @param describer produces the schema description embedded in the system message;
 *   the default excludes methods from the description.
 */
open class ParsedImageAgent<T : Any>(
    var resultClass: Class<T>? = null,
    val exampleInstance: T? = resultClass?.getConstructor()?.newInstance(),
    prompt: String = "",
    name: String? = resultClass?.simpleName,
    model: ChatInterface,
    temperature: Double = 0.3,
    val validation: Boolean = true,
    open val describer: TypeDescriber = object : AbbrevWhitelistYamlDescriber(
        "com.simiacryptus", "aicoder.actions"
    ) {
        override val includeMethods: Boolean get() = false
    },
) : BaseAgent<List<ImageAndText>, ParsedResponse<T>>(
    prompt = prompt,
    name = name,
    model = model,
    temperature = temperature,
) {
    init {
        requireNotNull(resultClass) {
            "Result class is required"
        }
    }

    /**
     * Builds the chat messages: a system message carrying the prompt plus the JSON
     * schema of the result type, and a single user message with one content part per
     * image/text pair (images inlined as base64 PNG data URLs).
     */
    override fun chatMessages(questions: List<ImageAndText>) = arrayOf(
        ModelSchema.ChatMessage(
            role = ModelSchema.Role.system,
            content = """
            $prompt

            Response should be in JSON format:
            ${describer.describe(checkNotNull(resultClass) { "Result class is required" })}
            """.trimIndent().toContentList()
        ),
        ModelSchema.ChatMessage(
            role = ModelSchema.Role.user,
            content = questions.flatMap { question ->
                listOf(
                    ModelSchema.ContentPart(
                        text = question.text,
                        image_url = question.image?.let { "data:image/png;base64,${it.encodeImageToBase64()}" },
                    )
                )
            }
        )
    )

    /**
     * Response wrapper: the raw reply text is fetched eagerly at construction,
     * while JSON deserialization into [T] is deferred until [obj] is first read.
     */
    private inner class ParsedResponseImpl(vararg messages: ModelSchema.ChatMessage) :
        ParsedResponse<T>(checkNotNull(resultClass) { "Result class is required" }) {
        /** Raw text of the first choice; fails fast when the model returned no choices. */
        override val text =
            response(*messages).choices.firstOrNull()?.message?.content
                ?: throw RuntimeException("No response")

        // Lazy: parse errors surface on first access of [obj], not at construction time.
        private val _obj: T by lazy {
            JsonUtil.fromJson(unwrap(text), checkNotNull(resultClass) { "Result class is required" })
        }

        /** Strips a surrounding Markdown code fence (```json … ``` or ``` … ```), if present. */
        private fun unwrap(text: String): String {
            val trimmed = text.trim()
            return if (trimmed.startsWith("```json") && trimmed.endsWith("```")) {
                trimmed.removePrefix("```json").removeSuffix("```").trim()
            } else if (trimmed.startsWith("```") && trimmed.endsWith("```")) {
                trimmed.removePrefix("```").removeSuffix("```").trim()
            } else {
                trimmed
            }
        }

        override val obj get() = _obj
    }

    /**
     * Obtains a model response for the given messages.
     *
     * Note: only response *retrieval* can fail here — JSON parsing is lazy and any
     * parse error is raised later, when the caller first reads the parsed object.
     * The exception is logged and rethrown unchanged.
     */
    override fun respond(input: List<ImageAndText>, vararg messages: ModelSchema.ChatMessage): ParsedResponse<T> =
        try {
            ParsedResponseImpl(*messages)
        } catch (e: Exception) {
            // warn (not info): this is a real failure that propagates to the caller.
            log.warn("Failed to obtain model response", e)
            throw e
        }

    /** Returns a copy of this agent bound to a different chat model; all other settings are preserved. */
    override fun withModel(model: ChatInterface): ParsedImageAgent<T> = ParsedImageAgent(
        resultClass = resultClass,
        exampleInstance = exampleInstance,
        prompt = prompt,
        name = name,
        model = model,
        temperature = temperature,
        validation = validation,
        describer = describer,
    )

    companion object {
        private val log = LoggerFactory.getLogger(ParsedImageAgent::class.java)
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ class GeminiSdkChatClient(
log(
"<details>\n<summary>Sending request to Gemini SDK for model: ${model.modelName} (${requestID})</summary>\n\n```json\n${
toJson(config)
}\n```\n\n${
}\n```\n\nSystem Prompt:\n```\n${config?.systemInstruction()?.getOrNull()?.toString()?.indent(" ")}\n```\n\n${
contents.joinToString("\n\n") {
it.toMarkdown()
}.indent(" ")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@ package com.simiacryptus.cognotik.chat

import com.google.common.util.concurrent.ListeningScheduledExecutorService
import com.simiacryptus.cognotik.chat.model.ChatModel
import com.simiacryptus.cognotik.chat.model.OpenAIModels
import com.simiacryptus.cognotik.models.APIProvider
import com.simiacryptus.cognotik.models.ModelSchema
import com.simiacryptus.cognotik.models.LLMModel
import com.simiacryptus.cognotik.exceptions.ErrorUtil.checkError
import com.simiacryptus.cognotik.util.JsonUtil
import org.apache.hc.core5.http.HttpRequest
import java.util.concurrent.ExecutorService
import java.util.concurrent.ConcurrentHashMap

class OpenAIChatClient(
apiKey: String,
Expand Down Expand Up @@ -62,6 +64,55 @@ class OpenAIChatClient(
require(model.modelName?.isNotBlank() == true) { "Model name cannot be blank" }
require(chatRequest.model?.isNotBlank() == true) { "Chat request model must be specified" }
}
/**
 * Lists the chat models available at this endpoint.
 *
 * Results are cached per [apiBase]. Each model id reported by the API is mapped to
 * a statically-known [OpenAIModels] entry when one matches; otherwise, ids with a
 * recognized chat-model prefix ("gpt", "o1", "o3") get a synthesized placeholder
 * entry with zero pricing; all other ids are dropped.
 *
 * @return the model list, or null when the fetch fails (the error is logged).
 */
override fun getModels(): List<ChatModel>? {
    // Serve from the per-endpoint cache when available.
    modelsCache[apiBase]?.let { return it }
    return try {
        val models = fetchModels().mapNotNull { modelInfo ->
            // Prefer the statically-declared model definition when the id is known.
            OpenAIModels.values.values.firstOrNull { it.modelName == modelInfo.id }
                ?: if (modelInfo.id.startsWith("gpt") || modelInfo.id.startsWith("o1") || modelInfo.id.startsWith("o3")) {
                    // Unknown but chat-capable id: synthesize a placeholder definition.
                    ChatModel(
                        name = modelInfo.id,
                        modelName = modelInfo.id,
                        provider = APIProvider.OpenAI,
                        maxTotalTokens = 128000,
                        inputTokenPricePerK = 0.0,
                        outputTokenPricePerK = 0.0
                    )
                } else {
                    null
                }
        }
        modelsCache[apiBase] = models
        models
    } catch (e: Exception) {
        log.error("Failed to fetch OpenAI models", e)
        null
    }
}
/**
 * Fetches the raw model listing from the endpoint's `/models` route.
 *
 * @return the decoded list of model descriptors.
 * @throws Exception when the HTTP response reports an error or cannot be decoded.
 */
private fun fetchModels(): List<OpenAIModelInfo> {
    val rawBody = get("${apiBase}/models")
    checkError(rawBody)
    return JsonUtil.objectMapper()
        .readValue(rawBody, OpenAIListModelsResponse::class.java)
        .data
}
companion object {
// Class-scoped logger for model-listing failures.
private val log = com.simiacryptus.cognotik.util.LoggerFactory.getLogger(OpenAIChatClient::class.java)
// Cache of fetched model lists, shared across ALL client instances and keyed
// only by apiBase. NOTE(review): if different API keys at the same endpoint can
// see different model sets (e.g. org-scoped models), this key is too coarse — confirm.
private val modelsCache = ConcurrentHashMap<String, List<ChatModel>>()
// One entry of the /models listing. Field names mirror the OpenAI JSON payload
// (`object`, `owned_by`) for direct Jackson binding — do not rename.
data class OpenAIModelInfo(
val id: String,
val `object`: String,
val created: Long,
val owned_by: String
)
// Envelope of the /models response: {"object": "list", "data": [...]}.
data class OpenAIListModelsResponse(
val `object`: String,
val data: List<OpenAIModelInfo>
)
}



Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package com.simiacryptus.cognotik.plan

import com.simiacryptus.cognotik.chat.model.ChatInterface
import com.simiacryptus.cognotik.input.getDocumentReader
import com.simiacryptus.cognotik.input.isDocumentFile
import com.simiacryptus.cognotik.util.FileSelectionUtils
Expand All @@ -26,6 +27,12 @@ abstract class AbstractTask<T : TaskExecutionConfig, U : TaskTypeConfig>(
get() = executionConfig?.task_type
?.let { task_type -> orchestrationConfig.taskSettings.values.firstOrNull { it.task_type == task_type } as? U }

// Chat client for this task: the task-type's configured model when one is set,
// otherwise the orchestration-wide default chatter.
open val defaultChatter: ChatInterface
get() = typeConfig?.model?.let { orchestrationConfig.instance(it) } ?: orchestrationConfig.defaultChatter

// Chat client for parsing/structured-output calls; delegates to the orchestration config.
open val parsingChatter: ChatInterface
get() = orchestrationConfig.parsingChatter

enum class TaskState {
Pending,
InProgress,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,9 @@ class TaskType<out T : TaskExecutionConfig, out U : TaskTypeConfig>(
}
register(taskType)
}
registerConstructor(GenerateSpriteSheetTask.GenerateSpriteSheet) { settings, task ->
GenerateSpriteSheetTask(settings, task)
}
registerConstructor(FunctorialMappingTask.FunctorialMapping) { settings, task ->
FunctorialMappingTask(settings, task)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ class RunCodeTask(
val semaphore = Semaphore(0)
val typeConfig = typeConfig ?: throw RuntimeException()
val model = (typeConfig.model?.let { orchestrationConfig.instance(it) }
?: orchestrationConfig.defaultChatter).getChildClient(task)
?: defaultChatter).getChildClient(task)

// val taskSettings = this.orchestrationConfig.getTaskSettings(TaskType.RunCodeTask)
val taskSettings = typeConfig as? RunCodeTaskTypeConfig
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ class SelfHealingTask(
val task = task.ui.newTask()
task.ui.pool.submit {
val model = (typeConfig.model?.let { orchestrationConfig.instance(it) }
?: orchestrationConfig.defaultChatter).getChildClient(task)
?: defaultChatter).getChildClient(task)
CmdPatchApp(
root = agent.root,
settings = PatchApp.Settings(
Expand Down Expand Up @@ -140,7 +140,7 @@ class SelfHealingTask(
),
files = agent.files,
model = model,
parsingModel = orchestrationConfig.parsingChatter,
parsingModel = parsingChatter,
processor = orchestrationConfig.processor,
).also { app ->
markdownTranscript?.let { transcript ->
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ class SubPlanningTask(
// Use an agent to create a summary
val typeConfig = typeConfig ?: throw RuntimeException()
val model = (typeConfig.model?.let { orchestrationConfig.instance(it) }
?: orchestrationConfig.defaultChatter).getChildClient(task)
?: defaultChatter).getChildClient(task)

val summaryAgent = ChatAgent(
prompt = """
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ class AnalysisTask(
Ensure the information is accurate, up-to-date, and well-organized to facilitate easy understanding.
""".trimIndent(),
model = (typeConfig.model?.let<ApiChatModel, ChatInterface> { this.orchestrationConfig.instance(it) }
?: this.orchestrationConfig.defaultChatter).getChildClient(task),
?: defaultChatter).getChildClient(task),
temperature = this.orchestrationConfig.temperature,
)
val inquiryResult = if (orchestrationConfig.autoFix || typeConfig.non_interactive) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,8 +128,8 @@ DataIngest - Parse unstructured logs/text into structured data
log("## Phase 2: Pattern Discovery")
val registry = mutableListOf<PatternRegistryItem>()
val unparsedSample = sampleLines.toMutableList()
val parsingChatter = orchestrationConfig.parsingChatter.getChildClient(task)
val defaultChatter = orchestrationConfig.defaultChatter.getChildClient(task)
val parsingChatter = parsingChatter.getChildClient(task)
val defaultChatter = defaultChatter.getChildClient(task)

val discoveryTask = task.ui.newTask(false)
tabs["Discovery"] = discoveryTask.placeholder
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ ${
task.ui.pool.submit {
val chatInterface =
(typeConfig.model?.let<ApiChatModel, ChatInterface> { this.orchestrationConfig.instance(it) }
?: this.orchestrationConfig.defaultChatter).getChildClient(task)
?: this.defaultChatter).getChildClient(task)
val chatAgent = ChatAgent(
name = "FileModification",
prompt = """
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ class GeneratePresentationTask(
val newTask = task.ui.newTask(false)
val toInput = { it: String -> listOf(it) }
val ui = task.ui
val api = orchestrationConfig.defaultChatter
val api = defaultChatter

newTask.add(MarkdownUtil.renderMarkdown("## Creating Presentation: `$htmlFile`", ui = ui))

Expand Down
Loading
Loading