Skip to content

Commit 62a40da

Browse files
Merge pull request #25 from antropia-studio/fix-lmstudio
Fix LM Studio integration
2 parents 7cdb4f3 + ec9e115 commit 62a40da

8 files changed

Lines changed: 289 additions & 349 deletions

File tree

npm-shrinkwrap.json

Lines changed: 231 additions & 313 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,42 +20,40 @@
2020
"ai": "^4.3.16",
2121
"color-json": "^3.0.5",
2222
"configstore": "^7",
23-
"espree": "^10.3.0",
2423
"gettext-parser": "^8",
2524
"ink": "^5.2.1",
2625
"npm-check-updates": "^18.0.1",
2726
"ollama-ai-provider": "^1.2.0",
2827
"react": "^18.3.1",
2928
"ts-deepmerge": "^7.0.3",
30-
"zod": "^3.25.32"
29+
"zod": "^3.25.48"
3130
},
3231
"devDependencies": {
3332
"@commitlint/config-conventional": "^19.8.1",
3433
"@eslint/eslintrc": "^3.3.1",
35-
"@eslint/js": "^9.27.0",
34+
"@eslint/js": "^9.28.0",
3635
"@oclif/prettier-config": "^0.2.1",
3736
"@oclif/test": "^4.1.13",
3837
"@release-it/conventional-changelog": "^10.0.1",
3938
"@types/chai": "^5.2.2",
4039
"@types/configstore": "^6.0.2",
4140
"@types/espree": "^10.1.0",
4241
"@types/gettext-parser": "^8.0.0",
43-
"@types/node": "^22.15.23",
42+
"@types/node": "^22.15.29",
4443
"@types/react": "^19.1.6",
4544
"chai": "^5.2.0",
4645
"commitlint": "^19.8.1",
47-
"eslint": "^9.27.0",
48-
"eslint-config-oclif": "^6.0.62",
46+
"eslint": "^9.28.0",
47+
"eslint-config-oclif": "^6.0.65",
4948
"eslint-config-prettier": "^10.1.5",
5049
"eslint-plugin-chai-friendly": "^1.0.1",
5150
"eslint-plugin-perfectionist": "^4.13.0",
52-
"eslint-plugin-prettier": "^5.4.0",
51+
"eslint-plugin-prettier": "^5.4.1",
5352
"lefthook": "^1.11.13",
5453
"oclif": "^4.17.46",
5554
"prettier": "^3.5.3",
56-
"release-it": "^19.0.2",
55+
"release-it": "^19.0.3",
5756
"shx": "^0.4.0",
58-
"ts-morph": "^26.0.0",
5957
"tsc-alias": "^1.8.16",
6058
"tsx": "^4.19.4",
6159
"typescript": "^5.8.3",

src/lib/llm/llm.ts

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -41,13 +41,7 @@ export class Llm {
4141

4242
async translateOne(missingTranslation: MissingTranslation): Promise<FilledTranslation> {
4343
const llmService = await this.getService()
44-
const provider = await llmService.getProvider()
45-
const availableModelIds = await llmService.getAvailableModelIds()
46-
47-
invariant(availableModelIds.length > 0, 'llm:no_models_found')
48-
49-
const modelId = availableModelIds[0]
50-
const model = provider(modelId, {structuredOutputs: true})
44+
const model = await llmService.getModel()
5145

5246
const fileContents = await fs.readFile(missingTranslation.reference.filePath, 'utf-8')
5347

src/lib/llm/services/claude.ts

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,15 +2,12 @@ import {AnthropicProvider, createAnthropic} from '@ai-sdk/anthropic'
22

33
import {invariant} from '@/lib/command/invariant.js'
44
import {nonEmptyStringOrUndefined} from '@/lib/common/string.js'
5-
import {Config} from '@/lib/common/types.js'
65
import {Defaults} from '@/lib/llm/defaults.js'
76
import {LlmProvider, LlmService} from '@/lib/llm/services/llm-service.js'
87

9-
export class Claude implements LlmService {
8+
export class Claude extends LlmService {
109
private provider!: AnthropicProvider
1110

12-
constructor(private config: Config) {}
13-
1411
async getAvailableModelIds(): Promise<string[]> {
1512
invariant(this.config.llmSettings?.provider === 'claude', 'internal_error')
1613

Lines changed: 20 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,33 @@
11
import {AnthropicProvider} from '@ai-sdk/anthropic'
22
import {OpenAIProvider} from '@ai-sdk/openai'
33
import {OpenAICompatibleProvider} from '@ai-sdk/openai-compatible'
4+
import {LanguageModelV1} from 'ai'
45
import {OllamaProvider} from 'ollama-ai-provider'
56

7+
import {invariant} from '@/lib/command/invariant'
8+
import {Config} from '@/lib/common/types'
9+
610
export type LlmProvider = AnthropicProvider | OllamaProvider | OpenAICompatibleProvider | OpenAIProvider
711

812
/**
913
* Interface representing the base service for language model operations.
1014
* This class must be extended to define specific implementations for interacting
1115
* with language models.
1216
*/
13-
export interface LlmService {
14-
getAvailableModelIds(): Promise<string[]>
15-
getProvider(): Promise<LlmProvider>
17+
export abstract class LlmService {
18+
constructor(protected config: Config) {}
19+
20+
async getModel(): Promise<LanguageModelV1> {
21+
const provider = await this.getProvider()
22+
const availableModelIds = await this.getAvailableModelIds()
23+
24+
invariant(availableModelIds.length > 0, 'llm:no_models_found')
25+
26+
const modelId = availableModelIds[0]
27+
return provider(modelId, {structuredOutputs: true})
28+
}
29+
30+
protected abstract getAvailableModelIds(): Promise<string[]>
31+
32+
protected abstract getProvider(): Promise<LlmProvider>
1633
}

src/lib/llm/services/lmstudio.ts

Lines changed: 27 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
1-
import {createOpenAICompatible} from '@ai-sdk/openai-compatible'
1+
import {createOpenAICompatible, OpenAICompatibleChatLanguageModel} from '@ai-sdk/openai-compatible'
2+
import {LanguageModelV1} from 'ai'
23

34
import {invariant} from '@/lib/command/invariant.js'
4-
import {Config} from '@/lib/common/types.js'
55
import {Defaults} from '@/lib/llm/defaults.js'
66
import {LlmProvider, LlmService} from '@/lib/llm/services/llm-service.js'
77

@@ -14,12 +14,10 @@ type LmStudioModelsResponse = {
1414
object: string
1515
}
1616

17-
export class LmStudio implements LlmService {
17+
export class LmStudio extends LlmService {
1818
private modelIds!: string[]
1919
private provider!: LlmProvider
2020

21-
constructor(private config: Config) {}
22-
2321
async getAvailableModelIds(): Promise<string[]> {
2422
invariant(this.config.llmSettings?.provider === 'lmstudio', 'internal_error')
2523

@@ -32,6 +30,30 @@ export class LmStudio implements LlmService {
3230
return this.modelIds
3331
}
3432

33+
async getModel(): Promise<LanguageModelV1> {
34+
const llmSettings = this.config.llmSettings
35+
invariant(llmSettings?.provider === 'lmstudio', 'internal_error')
36+
37+
const models = await this.getAvailableModelIds()
38+
39+
invariant(models.length > 0, 'llm:no_models_found')
40+
41+
return new OpenAICompatibleChatLanguageModel(
42+
models[0],
43+
{},
44+
{
45+
defaultObjectGenerationMode: 'json',
46+
headers: () => ({}),
47+
provider: `lmstudio.chat`,
48+
supportsStructuredOutputs: true,
49+
url: ({path}) => {
50+
const url = new URL(`${llmSettings.url ?? Defaults.llmSettings.lmstudio.url}/v1${path}`)
51+
return url.toString()
52+
},
53+
},
54+
)
55+
}
56+
3557
async getProvider(): Promise<LlmProvider> {
3658
invariant(this.config.llmSettings?.provider === 'lmstudio', 'internal_error')
3759

src/lib/llm/services/ollama.ts

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import {createOllama} from 'ollama-ai-provider'
22

33
import {invariant} from '@/lib/command/invariant.js'
4-
import {Config} from '@/lib/common/types.js'
54
import {Defaults} from '@/lib/llm/defaults.js'
65
import {LlmProvider, LlmService} from '@/lib/llm/services/llm-service.js'
76

@@ -11,12 +10,10 @@ type OllamaModelsResponse = {
1110
}[]
1211
}
1312

14-
export class Ollama implements LlmService {
13+
export class Ollama extends LlmService {
1514
private modelIds!: string[]
1615
private provider!: LlmProvider
1716

18-
constructor(private config: Config) {}
19-
2017
async getAvailableModelIds(): Promise<string[]> {
2118
invariant(this.config.llmSettings?.provider === 'ollama', 'internal_error')
2219

src/lib/llm/services/openai.ts

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,15 +2,12 @@ import {createOpenAI, OpenAIProvider} from '@ai-sdk/openai'
22

33
import {invariant} from '@/lib/command/invariant.js'
44
import {nonEmptyStringOrUndefined} from '@/lib/common/string.js'
5-
import {Config} from '@/lib/common/types.js'
65
import {Defaults} from '@/lib/llm/defaults.js'
76
import {LlmProvider, LlmService} from '@/lib/llm/services/llm-service.js'
87

9-
export class OpenAi implements LlmService {
8+
export class OpenAi extends LlmService {
109
private provider!: OpenAIProvider
1110

12-
constructor(private config: Config) {}
13-
1411
async getAvailableModelIds(): Promise<string[]> {
1512
invariant(this.config.llmSettings?.provider === 'openai', 'internal_error')
1613

0 commit comments

Comments (0)