# ============================================================================
# Dream Server Windows -- OpenCode config helpers
# ============================================================================
# Part of: installers/windows/lib/
# Purpose: Create or update OpenCode config from the active local LLM settings.
# Requires: constants.ps1 and llm-endpoint.ps1 sourced first.
# ============================================================================

function Set-OpenCodeObjectProperty {
    <#
    .SYNOPSIS
        Sets or adds a property on an object deserialized from JSON.
    .DESCRIPTION
        If $Target already exposes a property called $Name, its value is
        overwritten in place; otherwise a new NoteProperty is attached via
        Add-Member. This lets callers upsert into PSCustomObjects produced
        by ConvertFrom-Json without caring whether the key already exists.
    .PARAMETER Target
        The object to modify (typically a [pscustomobject]).
    .PARAMETER Name
        The property name to set or create.
    .PARAMETER Value
        The value to assign; may be $null or any object.
    #>
    param(
        [Parameter(Mandatory = $true)]
        [object]$Target,
        [Parameter(Mandatory = $true)]
        [string]$Name,
        $Value
    )

    $existing = $Target.PSObject.Properties[$Name]
    if ($null -eq $existing) {
        $Target | Add-Member -NotePropertyName $Name -NotePropertyValue $Value
    } else {
        $existing.Value = $Value
    }
}

function New-WindowsOpenCodeConfigObject {
    <#
    .SYNOPSIS
        Builds a fresh OpenCode configuration object for the local
        llama-server provider.
    .DESCRIPTION
        Produces the full config structure (schema URL, default model,
        provider entry with baseURL/apiKey options, and per-model limits)
        as nested [pscustomobject]s ready for ConvertTo-Json.
    .PARAMETER LlmEndpoint
        Hashtable describing the local LLM endpoint; only .BaseUrl is read.
    .PARAMETER ModelId
        Provider-local model identifier (used in the "llama-server/<id>" refs).
    .PARAMETER ModelName
        Human-readable display name for the model.
    .PARAMETER ContextLimit
        Context-window token limit written under the model's limit block.
    #>
    param(
        [hashtable]$LlmEndpoint,
        [string]$ModelId,
        [string]$ModelName,
        [int]$ContextLimit
    )

    # Assemble from the inside out so each layer stays readable.
    $modelEntry = [pscustomobject]@{
        name  = $ModelName
        limit = [pscustomobject]@{
            context = $ContextLimit
            output  = 32768
        }
    }

    $llamaProvider = [pscustomobject]@{
        npm     = "@ai-sdk/openai-compatible"
        name    = "llama-server (local)"
        options = [pscustomobject]@{
            baseURL = $LlmEndpoint.BaseUrl
            apiKey  = "no-key"
        }
        models  = [pscustomobject]@{
            $ModelId = $modelEntry
        }
    }

    return [pscustomobject]@{
        '$schema'   = "https://opencode.ai/config.json"
        model       = "llama-server/$ModelId"
        small_model = "llama-server/$ModelId"
        provider    = [pscustomobject]@{
            'llama-server' = $llamaProvider
        }
    }
}

function Update-WindowsOpenCodeConfigObject {
    <#
    .SYNOPSIS
        Merges llama-server provider settings into an existing OpenCode
        config object, preserving unrelated user settings.
    .DESCRIPTION
        When $Config is $null a fresh config is built; otherwise each
        required nested object (provider -> llama-server -> options/models
        -> model entry -> limit) is created only if missing, and the managed
        keys are overwritten in place. Keys this installer does not manage
        are left untouched.
    .PARAMETER Config
        The existing config object (from ConvertFrom-Json), or $null.
    .PARAMETER LlmEndpoint
        Hashtable describing the local LLM endpoint; only .BaseUrl is read.
    .PARAMETER ModelId
        Provider-local model identifier.
    .PARAMETER ModelName
        Human-readable display name for the model.
    .PARAMETER ContextLimit
        Context-window token limit for the model.
    #>
    param(
        [object]$Config,
        [hashtable]$LlmEndpoint,
        [string]$ModelId,
        [string]$ModelName,
        [int]$ContextLimit
    )

    # No existing config: delegate to the from-scratch builder.
    if ($null -eq $Config) {
        return New-WindowsOpenCodeConfigObject -LlmEndpoint $LlmEndpoint -ModelId $ModelId -ModelName $ModelName -ContextLimit $ContextLimit
    }

    # Local helper: ensure $Parent.$ChildName exists and is a non-null
    # object, then return it so nested keys can be set on it. Factors out
    # the "check property / create empty object / fetch value" pattern
    # that the merge below needs at every nesting level.
    function Get-EnsuredChildObject {
        param([object]$Parent, [string]$ChildName)
        $prop = $Parent.PSObject.Properties[$ChildName]
        if (-not $prop -or $null -eq $prop.Value) {
            Set-OpenCodeObjectProperty -Target $Parent -Name $ChildName -Value ([pscustomobject]@{})
        }
        return $Parent.PSObject.Properties[$ChildName].Value
    }

    Set-OpenCodeObjectProperty -Target $Config -Name '$schema' -Value "https://opencode.ai/config.json"
    Set-OpenCodeObjectProperty -Target $Config -Name 'model' -Value "llama-server/$ModelId"
    Set-OpenCodeObjectProperty -Target $Config -Name 'small_model' -Value "llama-server/$ModelId"

    $provider      = Get-EnsuredChildObject -Parent $Config -ChildName 'provider'
    $llamaProvider = Get-EnsuredChildObject -Parent $provider -ChildName 'llama-server'

    Set-OpenCodeObjectProperty -Target $llamaProvider -Name 'npm' -Value "@ai-sdk/openai-compatible"
    Set-OpenCodeObjectProperty -Target $llamaProvider -Name 'name' -Value "llama-server (local)"

    $options = Get-EnsuredChildObject -Parent $llamaProvider -ChildName 'options'
    Set-OpenCodeObjectProperty -Target $options -Name 'baseURL' -Value $LlmEndpoint.BaseUrl
    Set-OpenCodeObjectProperty -Target $options -Name 'apiKey' -Value "no-key"

    $models     = Get-EnsuredChildObject -Parent $llamaProvider -ChildName 'models'
    $modelEntry = Get-EnsuredChildObject -Parent $models -ChildName $ModelId
    Set-OpenCodeObjectProperty -Target $modelEntry -Name 'name' -Value $ModelName

    $limit = Get-EnsuredChildObject -Parent $modelEntry -ChildName 'limit'
    Set-OpenCodeObjectProperty -Target $limit -Name 'context' -Value $ContextLimit
    Set-OpenCodeObjectProperty -Target $limit -Name 'output' -Value 32768

    return $Config
}

function Sync-WindowsOpenCodeConfig {
    <#
    .SYNOPSIS
        Writes the OpenCode config files (opencode.json and the compat
        config.json) so they point at the local llama-server endpoint.
    .DESCRIPTION
        Reads the first existing config file (if any), merges the managed
        llama-server settings into it, and writes the result to BOTH config
        paths as UTF-8 without a BOM. With -SkipIfUnavailable, does nothing
        when neither the OpenCode executable nor either config file exists.
    .PARAMETER LlmEndpoint
        Hashtable describing the local LLM endpoint; only .BaseUrl is read.
    .PARAMETER ModelId
        Provider-local model identifier.
    .PARAMETER ModelName
        Human-readable display name for the model.
    .PARAMETER ContextLimit
        Context-window token limit for the model.
    .PARAMETER ConfigDir
        Directory holding the OpenCode config files.
    .PARAMETER SkipIfUnavailable
        Skip entirely when OpenCode appears not to be installed.
    .OUTPUTS
        Hashtable with Status ("skipped"/"created"/"updated"/"regenerated"),
        both config paths, model info and base URL.
    #>
    param(
        [hashtable]$LlmEndpoint,
        [string]$ModelId,
        [string]$ModelName,
        [int]$ContextLimit,
        [string]$ConfigDir = $script:OPENCODE_CONFIG_DIR,
        [switch]$SkipIfUnavailable
    )

    $primaryPath = Join-Path $ConfigDir "opencode.json"
    $compatPath  = Join-Path $ConfigDir "config.json"

    # Result skeleton shared by every exit path; Status is filled in later.
    $result = @{
        ConfigPath       = $primaryPath
        CompatConfigPath = $compatPath
        ModelId          = $ModelId
        ModelName        = $ModelName
        BaseUrl          = $LlmEndpoint.BaseUrl
    }

    if ($SkipIfUnavailable) {
        $openCodePresent = (Test-Path $script:OPENCODE_EXE) -or
            (Test-Path $primaryPath) -or
            (Test-Path $compatPath)
        if (-not $openCodePresent) {
            $result.Status = "skipped"
            return $result
        }
    }

    New-Item -ItemType Directory -Path $ConfigDir -Force | Out-Null

    # Load the first config file that exists; an unparsable file is
    # regenerated from scratch rather than aborting the install.
    $status = "created"
    $existingConfig = $null
    foreach ($candidate in @($primaryPath, $compatPath)) {
        if (-not (Test-Path $candidate)) { continue }
        try {
            $existingConfig = Get-Content $candidate -Raw | ConvertFrom-Json -ErrorAction Stop
            $status = "updated"
        } catch {
            $status = "regenerated"
        }
        break
    }

    $merged = Update-WindowsOpenCodeConfigObject `
        -Config $existingConfig `
        -LlmEndpoint $LlmEndpoint `
        -ModelId $ModelId `
        -ModelName $ModelName `
        -ContextLimit $ContextLimit

    # UTF-8 without BOM: OpenCode (a Node app) rejects BOM-prefixed JSON.
    $json = $merged | ConvertTo-Json -Depth 12
    $utf8NoBom = New-Object System.Text.UTF8Encoding($false)
    foreach ($path in @($primaryPath, $compatPath)) {
        [System.IO.File]::WriteAllText($path, $json, $utf8NoBom)
    }

    $result.Status = $status
    return $result
}

function Sync-WindowsOpenCodeConfigFromEnv {
    <#
    .SYNOPSIS
        Derives the OpenCode config settings from the Dream Server env map
        and delegates to Sync-WindowsOpenCodeConfig.
    .DESCRIPTION
        Resolves the local LLM endpoint, model id (GGUF_FILE), model name
        (LLM_MODEL) and context limit (MAX_CONTEXT/CTX_SIZE) from the
        installed environment, falling back to the supplied defaults when
        a value is missing or unparsable.
    .PARAMETER InstallDir
        Dream Server install directory containing the env file.
    .PARAMETER ConfigDir
        Directory holding the OpenCode config files.
    .PARAMETER GpuBackend
        GPU backend hint forwarded to the endpoint resolver.
    .PARAMETER NativeBackend
        Native backend hint forwarded to the endpoint resolver.
    .PARAMETER UseLemonade
        Forwarded to the endpoint resolver.
    .PARAMETER CloudMode
        Forwarded to the endpoint resolver.
    .PARAMETER DefaultModelId
        Fallback model id when GGUF_FILE is not set.
    .PARAMETER DefaultModelName
        Fallback model name when LLM_MODEL is not set.
    .PARAMETER DefaultContextLimit
        Fallback context limit when MAX_CONTEXT/CTX_SIZE is missing or
        not an integer.
    .PARAMETER SkipIfUnavailable
        Forwarded to Sync-WindowsOpenCodeConfig.
    #>
    param(
        [string]$InstallDir = $script:DS_INSTALL_DIR,
        [string]$ConfigDir = $script:OPENCODE_CONFIG_DIR,
        [string]$GpuBackend = "",
        [string]$NativeBackend = "",
        [switch]$UseLemonade,
        [switch]$CloudMode,
        [string]$DefaultModelId = "",
        [string]$DefaultModelName = "",
        [int]$DefaultContextLimit = 16384,
        [switch]$SkipIfUnavailable
    )

    $envMap = Get-WindowsDreamEnvMap -InstallDir $InstallDir

    $endpoint = Get-WindowsLocalLlmEndpoint -InstallDir $InstallDir -EnvMap $envMap `
        -GpuBackend $GpuBackend -NativeBackend $NativeBackend `
        -UseLemonade:$UseLemonade -CloudMode:$CloudMode

    $modelId   = Get-WindowsDreamEnvValue -EnvMap $envMap -Keys @("GGUF_FILE") -Default $DefaultModelId
    $modelName = Get-WindowsDreamEnvValue -EnvMap $envMap -Keys @("LLM_MODEL") -Default $DefaultModelName

    # Context limit comes through as a string; anything non-numeric falls
    # back to the default rather than failing the sync.
    $rawContext = Get-WindowsDreamEnvValue -EnvMap $envMap -Keys @("MAX_CONTEXT", "CTX_SIZE") -Default "$DefaultContextLimit"
    $parsedContext = 0
    $contextLimit = if ([int]::TryParse($rawContext, [ref]$parsedContext)) {
        $parsedContext
    } else {
        $DefaultContextLimit
    }

    return Sync-WindowsOpenCodeConfig `
        -LlmEndpoint $endpoint `
        -ModelId $modelId `
        -ModelName $modelName `
        -ContextLimit $contextLimit `
        -ConfigDir $ConfigDir `
        -SkipIfUnavailable:$SkipIfUnavailable
}