feat: Add OpenAI reasoning support and new model configs #1818

Open · wants to merge 2 commits into main
45 changes: 45 additions & 0 deletions src/renderer/components/ReasoningEffortSelect.tsx
@@ -0,0 +1,45 @@
import { useEffect } from 'react'
import { Select, MenuItem, Typography, Box, SelectChangeEvent } from '@mui/material'
import { useTranslation } from 'react-i18next'

export interface Props {
    value: string
    onChange: (value: string) => void
    className?: string
}

export default function ReasoningEffortSelect(props: Props) {
    const { t } = useTranslation()

    useEffect(() => {
        if (!props.value) {
            props.onChange('medium')
        }
    }, [])

    const handleChange = (event: SelectChangeEvent) => {
        props.onChange(event.target.value)
    }

    return (
        <Box sx={{ margin: '10px' }} className={props.className}>
            <Box>
                <Typography id="reasoning-effort-select" gutterBottom>
                    {t('Reasoning Effort')}
                </Typography>
            </Box>
            <Box sx={{ display: 'flex', alignItems: 'center' }}>
                <Select
                    value={props.value || 'medium'}
                    onChange={handleChange}
                    fullWidth
                    size="small"
                >
                    <MenuItem value="low">{t('Low')}</MenuItem>
                    <MenuItem value="medium">{t('Medium')}</MenuItem>
                    <MenuItem value="high">{t('High')}</MenuItem>
                </Select>
            </Box>
        </Box>
    )
}
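
A quick way to exercise the mount-time default above (a sketch only, not part of this PR; it assumes Jest plus React Testing Library are available and that the test i18n setup returns translation keys unchanged):

import { render } from '@testing-library/react'
import ReasoningEffortSelect from './ReasoningEffortSelect'

test('falls back to medium when no effort is stored', () => {
    const onChange = jest.fn()
    render(<ReasoningEffortSelect value="" onChange={onChange} />)
    // The useEffect above pushes the default back to the caller on mount
    expect(onChange).toHaveBeenCalledWith('medium')
})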
43 changes: 39 additions & 4 deletions src/renderer/packages/models/openai.ts
@@ -8,6 +8,7 @@ interface Options {
     apiPath?: string
     model: Model | 'custom-model'
     openaiCustomModel?: string
+    openaiReasoningEffort: string
     temperature: number
     topP: number
 }
@@ -48,10 +49,25 @@ export default class OpenAI extends Base {

         rawMessages = injectModelSystemPrompt(model, rawMessages)

-        if (model.startsWith('o1')) {
-            const messages = await populateO1Message(rawMessages)
-            return this.requestChatCompletionsNotStream({ model, messages }, signal, onResultChange)
+        // o1-mini and o1-preview do not support reasoning_effort, unlike the full o1 release
+        if (model.startsWith('o1-mini') || model.startsWith('o1-preview')) {
+            const messages = await populateReasoningMessage(rawMessages)
+            return this.requestChatCompletionsNotStream({
+                model,
+                messages,
+            }, signal, onResultChange)
         }

+        // https://platform.openai.com/docs/guides/reasoning
+        if (model.startsWith('o')) {
+            const messages = await populateReasoningMessage(rawMessages)
+            return this.requestChatCompletionsNotStream({
+                model,
+                messages,
+                reasoning_effort: this.options.openaiReasoningEffort,
+            }, signal, onResultChange)
+        }

         const messages = await populateGPTMessage(rawMessages)
         return this.requestChatCompletionsStream({
             messages,
@@ -184,6 +200,15 @@ export const openaiModelConfigs = {
         maxContextTokens: 128_000,
     },

+    // https://platform.openai.com/docs/models#o1
+    'o1': {
+        maxTokens: 100_000,
+        maxContextTokens: 200_000,
+    },
+    'o1-2024-12-17': {
+        maxTokens: 100_000,
+        maxContextTokens: 200_000,
+    },
     'o1-preview': {
         maxTokens: 32_768,
         maxContextTokens: 128_000,
@@ -201,6 +226,16 @@
         maxContextTokens: 128_000,
     },

+    // https://platform.openai.com/docs/models#o3-mini
+    'o3-mini': {
+        maxTokens: 100_000,
+        maxContextTokens: 200_000,
+    },
+    'o3-mini-2025-01-31': {
+        maxTokens: 100_000,
+        maxContextTokens: 200_000,
+    },
+
     'gpt-4': {
         maxTokens: 4_096,
         maxContextTokens: 8_192,
@@ -267,7 +302,7 @@ export async function populateGPTMessage(rawMessages: Message[]): Promise<OpenAIMessage[]> {
     return messages
 }

-export async function populateO1Message(rawMessages: Message[]): Promise<OpenAIMessage[]> {
+export async function populateReasoningMessage(rawMessages: Message[]): Promise<OpenAIMessage[]> {
     const messages: OpenAIMessage[] = rawMessages.map((m) => ({
         role: m.role === 'system' ? 'user' : m.role,
         content: m.content,
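
For reviewers, here is a rough sketch of the request the reasoning branch above is expected to produce. The reasoning_effort field and its low/medium/high values come from the OpenAI reasoning guide linked in the diff; the endpoint path, header handling, and everything inside requestChatCompletionsNotStream are assumptions, since Base is not part of this diff:

// Sketch only: approximates the non-streaming call for a reasoning model such as o3-mini.
async function sketchReasoningRequest(apiHost: string, apiKey: string, effort: string): Promise<string> {
    const response = await fetch(`${apiHost}/v1/chat/completions`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${apiKey}`,
        },
        body: JSON.stringify({
            model: 'o3-mini',
            messages: [{ role: 'user', content: 'How many primes are below 100?' }],
            reasoning_effort: effort, // 'low' | 'medium' | 'high', taken from settings.openaiReasoningEffort
        }),
    })
    const data = await response.json()
    return data.choices[0].message.content
}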
34 changes: 26 additions & 8 deletions src/renderer/pages/SettingDialog/OpenAISetting.tsx
@@ -2,6 +2,7 @@ import { Typography, Box } from '@mui/material'
 import { ModelSettings } from '../../../shared/types'
 import { useTranslation } from 'react-i18next'
 import { Accordion, AccordionSummary, AccordionDetails } from '../../components/Accordion'
+import ReasoningEffortSelect from '../../components/ReasoningEffortSelect'
 import TemperatureSlider from '../../components/TemperatureSlider'
 import TopPSlider from '../../components/TopPSlider'
 import PasswordTextField from '../../components/PasswordTextField'
@@ -17,6 +18,11 @@ interface ModelConfigProps {
 export default function OpenAISetting(props: ModelConfigProps) {
     const { settingsEdit, setSettingsEdit } = props
     const { t } = useTranslation()
+    const model = settingsEdit.model;
+    const isReasoningModel = model?.startsWith('o') &&
+        !model?.startsWith('o1-preview') &&
+        !model?.startsWith('o1-mini');
+
     return (
         <Box>
             <PasswordTextField
@@ -61,14 +67,26 @@ export default function OpenAISetting(props: ModelConfigProps) {
                 }
             />

-            <TemperatureSlider
-                value={settingsEdit.temperature}
-                onChange={(value) => setSettingsEdit({ ...settingsEdit, temperature: value })}
-            />
-            <TopPSlider
-                topP={settingsEdit.topP}
-                setTopP={(v) => setSettingsEdit({ ...settingsEdit, topP: v })}
-            />
+            {isReasoningModel && (
+                <ReasoningEffortSelect
+                    value={settingsEdit.openaiReasoningEffort}
+                    onChange={(value) => setSettingsEdit({ ...settingsEdit, openaiReasoningEffort: value })}
+                />
+            )}
+
+            {!model?.startsWith('o') && (
+                <>
+                    <TemperatureSlider
+                        value={settingsEdit.temperature}
+                        onChange={(value) => setSettingsEdit({ ...settingsEdit, temperature: value })}
+                    />
+                    <TopPSlider
+                        topP={settingsEdit.topP}
+                        setTopP={(v) => setSettingsEdit({ ...settingsEdit, topP: v })}
+                    />
+                </>
+            )}
+
             <MaxContextMessageCountSlider
                 value={settingsEdit.openaiMaxContextMessageCount}
                 onChange={(v) => setSettingsEdit({ ...settingsEdit, openaiMaxContextMessageCount: v })}
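
The selector gating above is purely prefix-based. Written out as a standalone helper (hypothetical, for illustration only), the intended behaviour per model family looks like this:

// Mirrors the inline isReasoningModel check in OpenAISetting.tsx.
function isReasoningModel(model?: string): boolean {
    return Boolean(model?.startsWith('o') && !model.startsWith('o1-preview') && !model.startsWith('o1-mini'))
}

isReasoningModel('o1')          // true  -> reasoning effort selector is shown
isReasoningModel('o3-mini')     // true
isReasoningModel('o1-mini')     // false -> no selector; temperature/top_p stay hidden too
isReasoningModel('o1-preview')  // false
isReasoningModel('gpt-4o')      // false -> temperature and top_p sliders are shown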
1 change: 1 addition & 0 deletions src/shared/defaults.ts
@@ -53,6 +53,7 @@ export function settings(): Settings {
         siliconCloudModel: 'THUDM/glm-4-9b-chat',

         autoGenerateTitle: true,
+        openaiReasoningEffort: 'medium',
     }
 }

1 change: 1 addition & 0 deletions src/shared/types.ts
@@ -118,6 +118,7 @@ export interface ModelSettings {
     temperature: number
     topP: number
     openaiMaxContextMessageCount: number
+    openaiReasoningEffort: string
 }

 export interface Settings extends ModelSettings {
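
One follow-up worth noting: settings persisted before this change will not contain openaiReasoningEffort. The select component already falls back to 'medium' on its own, but a normalisation step at load time would make that explicit. A hypothetical helper (not in this PR; the import paths are assumptions):

import { Settings } from './types'
import { settings as defaultSettings } from './defaults'

// Merge stored settings over the defaults so new fields such as openaiReasoningEffort are always present.
export function normalizeSettings(stored: Partial<Settings>): Settings {
    return {
        ...defaultSettings(),
        ...stored,
        openaiReasoningEffort: stored.openaiReasoningEffort || 'medium',
    }
}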