fix: ModelPicker not snapshotted and refactor ModelPicker #1122

Open
wants to merge 1 commit into base: main
6 changes: 6 additions & 0 deletions webview-ui/src/__mocks__/lucide-react.ts
@@ -0,0 +1,6 @@
import React from "react"

export const Check = () => React.createElement("div")
export const ChevronsUpDown = () => React.createElement("div")
export const Loader = () => React.createElement("div")
export const X = () => React.createElement("div")
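
Note on the new mock: judging by the PR title, this stub exists so the refactored ModelPicker can be snapshot-tested without rendering the real lucide-react SVG icons. A minimal sketch of such a test is below, assuming the component renders without extra context providers; the file path, placeholder props, and model ids are illustrative only and are not part of this diff. Depending on the project's Jest config (roots or moduleNameMapper, not visible here), the explicit jest.mock call may or may not be required for src/__mocks__/lucide-react.ts to be picked up.

```tsx
// Hypothetical test: webview-ui/src/components/settings/__tests__/ModelPicker.test.tsx
import React from "react"
import { render } from "@testing-library/react"
import { ModelPicker } from "../ModelPicker"

// Resolves to webview-ui/src/__mocks__/lucide-react.ts (the stub added in this PR),
// so icons render as plain divs and the snapshot stays stable.
jest.mock("lucide-react")

it("matches the snapshot", () => {
	const { container } = render(
		<ModelPicker
			apiConfiguration={{}}
			defaultModelId="example-model" // placeholder, not a real default
			defaultModelInfo={{} as any} // placeholder ModelInfo
			models={{ "example-model": {} as any }}
			modelIdKey="openRouterModelId"
			modelInfoKey="openRouterModelInfo"
			serviceName="OpenRouter"
			serviceUrl="https://openrouter.ai/models"
			recommendedModel="example-model"
			setApiConfigurationField={jest.fn()}
		/>,
	)

	expect(container).toMatchSnapshot()
})
```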
190 changes: 165 additions & 25 deletions webview-ui/src/components/settings/ApiOptions.tsx
@@ -36,17 +36,13 @@ import {
import { ExtensionMessage } from "../../../../src/shared/ExtensionMessage"
import { vscode } from "../../utils/vscode"
import VSCodeButtonLink from "../common/VSCodeButtonLink"
import { OpenRouterModelPicker } from "./OpenRouterModelPicker"
import OpenAiModelPicker from "./OpenAiModelPicker"
import { GlamaModelPicker } from "./GlamaModelPicker"
import { UnboundModelPicker } from "./UnboundModelPicker"
import { ModelInfoView } from "./ModelInfoView"
import { DROPDOWN_Z_INDEX } from "./styles"
import { RequestyModelPicker } from "./RequestyModelPicker"
import { ModelPicker } from "./ModelPicker"

interface ApiOptionsProps {
uriScheme: string | undefined
apiConfiguration: ApiConfiguration | undefined
apiConfiguration: ApiConfiguration
setApiConfigurationField: <K extends keyof ApiConfiguration>(field: K, value: ApiConfiguration[K]) => void
apiErrorMessage?: string
modelIdErrorMessage?: string
@@ -64,6 +60,20 @@ const ApiOptions = ({
const [ollamaModels, setOllamaModels] = useState<string[]>([])
const [lmStudioModels, setLmStudioModels] = useState<string[]>([])
const [vsCodeLmModels, setVsCodeLmModels] = useState<vscodemodels.LanguageModelChatSelector[]>([])
const [openRouterModels, setOpenRouterModels] = useState<Record<string, ModelInfo> | null>({
[openRouterDefaultModelId]: openRouterDefaultModelInfo,
})
const [glamaModels, setGlamaModels] = useState<Record<string, ModelInfo> | null>({
[glamaDefaultModelId]: glamaDefaultModelInfo,
})
const [unboundModels, setUnboundModels] = useState<Record<string, ModelInfo> | null>({
[unboundDefaultModelId]: unboundDefaultModelInfo,
})
const [requestyModels, setRequestyModels] = useState<Record<string, ModelInfo> | null>({
[requestyDefaultModelId]: requestyDefaultModelInfo,
})
const [openAiModels, setOpenAiModels] = useState<Record<string, ModelInfo> | null>(null)

const [anthropicBaseUrlSelected, setAnthropicBaseUrlSelected] = useState(!!apiConfiguration?.anthropicBaseUrl)
const [azureApiVersionSelected, setAzureApiVersionSelected] = useState(!!apiConfiguration?.azureApiVersion)
const [openRouterBaseUrlSelected, setOpenRouterBaseUrlSelected] = useState(!!apiConfiguration?.openRouterBaseUrl)
@@ -98,22 +108,92 @@ const ApiOptions = ({
vscode.postMessage({ type: "requestLmStudioModels", text: apiConfiguration?.lmStudioBaseUrl })
} else if (selectedProvider === "vscode-lm") {
vscode.postMessage({ type: "requestVsCodeLmModels" })
} else if (selectedProvider === "openai") {
vscode.postMessage({
type: "refreshOpenAiModels",
values: {
baseUrl: apiConfiguration?.openAiBaseUrl,
apiKey: apiConfiguration?.openAiApiKey,
},
})
} else if (selectedProvider === "openrouter") {
vscode.postMessage({ type: "refreshOpenRouterModels", values: {} })
} else if (selectedProvider === "glama") {
vscode.postMessage({ type: "refreshGlamaModels", values: {} })
} else if (selectedProvider === "requesty") {
vscode.postMessage({
type: "refreshRequestyModels",
values: {
apiKey: apiConfiguration?.requestyApiKey,
},
})
}
},
250,
[selectedProvider, apiConfiguration?.ollamaBaseUrl, apiConfiguration?.lmStudioBaseUrl],
[
selectedProvider,
apiConfiguration?.ollamaBaseUrl,
apiConfiguration?.lmStudioBaseUrl,
apiConfiguration?.openAiBaseUrl,
apiConfiguration?.openAiApiKey,
apiConfiguration?.requestyApiKey,
],
)
const handleMessage = useCallback((event: MessageEvent) => {
const message: ExtensionMessage = event.data
if (message.type === "ollamaModels" && Array.isArray(message.ollamaModels)) {
const newModels = message.ollamaModels
setOllamaModels(newModels)
} else if (message.type === "lmStudioModels" && Array.isArray(message.lmStudioModels)) {
const newModels = message.lmStudioModels
setLmStudioModels(newModels)
} else if (message.type === "vsCodeLmModels" && Array.isArray(message.vsCodeLmModels)) {
const newModels = message.vsCodeLmModels
setVsCodeLmModels(newModels)
switch (message.type) {
case "ollamaModels":
{
const newModels = message.ollamaModels ?? []
setOllamaModels(newModels)
}
break
case "lmStudioModels":
{
const newModels = message.lmStudioModels ?? []
setLmStudioModels(newModels)
}
break
case "vsCodeLmModels":
{
const newModels = message.vsCodeLmModels ?? []
setVsCodeLmModels(newModels)
}
break
case "glamaModels": {
const updatedModels = message.glamaModels ?? {}
setGlamaModels({
[glamaDefaultModelId]: glamaDefaultModelInfo, // in case the extension sent a model list without the default model
...updatedModels,
})
break
}
case "openRouterModels": {
const updatedModels = message.openRouterModels ?? {}
setOpenRouterModels({
[openRouterDefaultModelId]: openRouterDefaultModelInfo, // in case the extension sent a model list without the default model
...updatedModels,
})
break
}
case "openAiModels": {
const updatedModels = message.openAiModels ?? []
setOpenAiModels(Object.fromEntries(updatedModels.map((item) => [item, openAiModelInfoSaneDefaults])))
break
}
case "unboundModels": {
const updatedModels = message.unboundModels ?? {}
setUnboundModels(updatedModels)
break
}
case "requestyModels": {
const updatedModels = message.requestyModels ?? {}
setRequestyModels({
[requestyDefaultModelId]: requestyDefaultModelInfo, // in case the extension sent a model list without the default model
...updatedModels,
})
break
}
}
}, [])
useEvent("message", handleMessage)
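
For context on the round trip handled above: each refresh*Models message posted by the debounced effect is expected to come back as the corresponding *Models message consumed by this switch. A rough extension-side sketch of one such reply path is below; the handler and fetcher names are assumptions, since the extension code is not part of this diff.

```ts
// Extension-side sketch (not in this diff): answering the webview's refresh request
// so the switch above receives an "openRouterModels" message.
import * as vscode from "vscode"

// Hypothetical fetcher; the real extension presumably caches and normalizes the model list.
declare function getOpenRouterModels(): Promise<Record<string, unknown>>

export async function onWebviewMessage(webview: vscode.Webview, message: { type: string }) {
	if (message.type === "refreshOpenRouterModels") {
		const openRouterModels = await getOpenRouterModels()
		// Shape mirrors what the handler above reads off ExtensionMessage.
		await webview.postMessage({ type: "openRouterModels", openRouterModels })
	}
}
```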
@@ -604,7 +684,17 @@ const ApiOptions = ({
placeholder="Enter API Key...">
<span style={{ fontWeight: 500 }}>API Key</span>
</VSCodeTextField>
<OpenAiModelPicker />
<ModelPicker
apiConfiguration={apiConfiguration}
modelIdKey="openAiModelId"
modelInfoKey="openAiCustomModelInfo"
serviceName="OpenAI"
serviceUrl="https://platform.openai.com"
recommendedModel="gpt-4-turbo-preview"
models={openAiModels}
setApiConfigurationField={setApiConfigurationField}
defaultModelInfo={openAiModelInfoSaneDefaults}
/>
<div style={{ display: "flex", alignItems: "center" }}>
<Checkbox
checked={apiConfiguration?.openAiStreamingEnabled ?? true}
@@ -691,7 +781,7 @@ const ApiOptions = ({
})(),
}}
title="Maximum number of tokens the model can generate in a single response"
onChange={handleInputChange("openAiCustomModelInfo", (e) => {
onInput={handleInputChange("openAiCustomModelInfo", (e) => {
const value = parseInt((e.target as HTMLInputElement).value)
return {
...(apiConfiguration?.openAiCustomModelInfo ||
@@ -738,7 +828,7 @@ const ApiOptions = ({
})(),
}}
title="Total number of tokens (input + output) the model can process in a single request"
onChange={handleInputChange("openAiCustomModelInfo", (e) => {
onInput={handleInputChange("openAiCustomModelInfo", (e) => {
const value = (e.target as HTMLInputElement).value
const parsed = parseInt(value)
return {
@@ -884,7 +974,7 @@ const ApiOptions = ({
: "var(--vscode-errorForeground)"
})(),
}}
onChange={handleInputChange("openAiCustomModelInfo", (e) => {
onInput={handleInputChange("openAiCustomModelInfo", (e) => {
const value = (e.target as HTMLInputElement).value
const parsed = parseInt(value)
return {
@@ -929,7 +1019,7 @@ const ApiOptions = ({
: "var(--vscode-errorForeground)"
})(),
}}
onChange={handleInputChange("openAiCustomModelInfo", (e) => {
onInput={handleInputChange("openAiCustomModelInfo", (e) => {
const value = (e.target as HTMLInputElement).value
const parsed = parseInt(value)
return {
@@ -1207,7 +1297,18 @@ const ApiOptions = ({
}}>
This key is stored locally and only used to make API requests from this extension.
</p>
<UnboundModelPicker />
<ModelPicker
apiConfiguration={apiConfiguration}
defaultModelId={unboundDefaultModelId}
defaultModelInfo={unboundDefaultModelInfo}
models={unboundModels}
modelInfoKey="unboundModelInfo"
modelIdKey="unboundModelId"
serviceName="Unbound"
serviceUrl="https://api.getunbound.ai/models"
recommendedModel={unboundDefaultModelId}
setApiConfigurationField={setApiConfigurationField}
/>
</div>
)}

@@ -1223,10 +1324,49 @@ const ApiOptions = ({
</p>
)}

{selectedProvider === "glama" && <GlamaModelPicker />}
{selectedProvider === "glama" && (
<ModelPicker
apiConfiguration={apiConfiguration ?? {}}
defaultModelId={glamaDefaultModelId}
defaultModelInfo={glamaDefaultModelInfo}
models={glamaModels}
modelInfoKey="glamaModelInfo"
modelIdKey="glamaModelId"
serviceName="Glama"
serviceUrl="https://glama.ai/models"
recommendedModel="anthropic/claude-3-5-sonnet"
setApiConfigurationField={setApiConfigurationField}
/>
)}

{selectedProvider === "openrouter" && <OpenRouterModelPicker />}
{selectedProvider === "requesty" && <RequestyModelPicker />}
{selectedProvider === "openrouter" && (
<ModelPicker
apiConfiguration={apiConfiguration}
setApiConfigurationField={setApiConfigurationField}
defaultModelId={openRouterDefaultModelId}
defaultModelInfo={openRouterDefaultModelInfo}
models={openRouterModels}
modelIdKey="openRouterModelId"
modelInfoKey="openRouterModelInfo"
serviceName="OpenRouter"
serviceUrl="https://openrouter.ai/models"
recommendedModel="anthropic/claude-3.5-sonnet:beta"
/>
)}
{selectedProvider === "requesty" && (
<ModelPicker
apiConfiguration={apiConfiguration}
setApiConfigurationField={setApiConfigurationField}
defaultModelId={requestyDefaultModelId}
defaultModelInfo={requestyDefaultModelInfo}
models={requestyModels}
modelIdKey="requestyModelId"
modelInfoKey="requestyModelInfo"
serviceName="Requesty"
serviceUrl="https://requesty.ai"
recommendedModel="anthropic/claude-3-5-sonnet-latest"
/>
)}

{selectedProvider !== "glama" &&
selectedProvider !== "openrouter" &&
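
For reference, the ModelPicker call sites in this file imply a props shape roughly like the following. This is inferred from the usages above rather than copied from the component (its source is not included in this diff), so optionality and exact types are assumptions.

```ts
// Inferred props for the refactored ModelPicker; prop names come from the call sites above.
import type { ApiConfiguration, ModelInfo } from "../../../../src/shared/api" // import path assumed

interface ModelPickerProps {
	apiConfiguration: ApiConfiguration
	setApiConfigurationField: <K extends keyof ApiConfiguration>(field: K, value: ApiConfiguration[K]) => void
	defaultModelId?: string // omitted at the OpenAI call site above
	defaultModelInfo: ModelInfo
	models: Record<string, ModelInfo> | null
	modelIdKey: keyof ApiConfiguration // e.g. "openRouterModelId"
	modelInfoKey: keyof ApiConfiguration // e.g. "openRouterModelInfo"
	serviceName: string
	serviceUrl: string
	recommendedModel: string
}
```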
15 changes: 0 additions & 15 deletions webview-ui/src/components/settings/GlamaModelPicker.tsx

This file was deleted.
