Skip to content

Commit b55bcd5

Browse files
authored
feat(engine): add DeepSeekEngine (#446)
Add DeepSeekEngine to support DeepSeek API. This includes a new DeepSeekConfig interface and updates to the engine selection logic. feat(README.md, src/commands/config.ts): Add DeepSeek support Adds support for the DeepSeek AI provider. Updates the README, config validation, and model list to include DeepSeek. This allows users to utilize DeepSeek models with the OpenCommit tool. fix(deepseek.ts): update DeepSeek API base URL to include version number v1 refactor(deepseek.ts): improve DeepSeekEngine constructor The DeepSeekEngine constructor is refactored to use the spread syntax for better readability and maintainability when merging config parameters. The baseURL is now explicitly set within the constructor. fix(README.md): remove Groq from the list of supported AI providers refactor(deepseek.ts): rename interface DeepseekConfig to DeepSeekEngineeekConfig and fix typo Revert "refactor(deepseek.ts): rename interface DeepseekConfig to DeepSeekEngineeekConfig and fix typo" This reverts commit f492367. refactor(deepseek.ts): Rename DeepseekConfig to DeepSeekConfig for consistency ✨ feat(engine): add DeepSeekEngine to support DeepSeek API ♻️ refactor(engine): improve OpenAiEngine and create a new DeepSeekEngine class to handle DeepSeek API requests. The DeepSeekEngine class inherits from OpenAiEngine and overrides the generateCommitMessage method to use the DeepSeek API. This change improves code organization and maintainability. 🐛 Fix: Correct DeepSeekEngine import and class name The import path and class name for DeepSeekEngine were incorrect, causing a runtime error. This commit corrects the import path and class name to `DeepseekEngine` to resolve the issue. Revert "🐛 Fix: Correct DeepSeekEngine import and class name" This reverts commit 738fd36. 🐛 Fix: Correct DeepSeekEngine import and class name The import path and class name for DeepSeekEngine were corrected to match the actual file and class name. This fixes a runtime error. 
Restore ./out directory to master state
1 parent 6816379 commit b55bcd5

File tree

4 files changed

+77
-5
lines changed

4 files changed

+77
-5
lines changed

README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ Create a `.env` file and add OpenCommit config variables there like this:
106106

107107
```env
108108
...
109-
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise>
109+
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek>
110110
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
111111
OCO_API_URL=<may be used to set proxy path to OpenAI api>
112112
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>

src/commands/config.ts

+12-4
Original file line numberDiff line numberDiff line change
@@ -128,6 +128,10 @@ export const MODEL_LIST = {
128128
'mistral-embed',
129129
'mistral-moderation-2411',
130130
'mistral-moderation-latest',
131+
],
132+
deepseek : [
133+
'deepseek-chat',
134+
'deepseek-reasoner',
131135
]
132136
};
133137

@@ -145,6 +149,8 @@ const getDefaultModel = (provider: string | undefined): string => {
145149
return MODEL_LIST.groq[0];
146150
case 'mistral':
147151
return MODEL_LIST.mistral[0];
152+
case 'deepseek':
153+
return MODEL_LIST.deepseek[0];
148154
default:
149155
return MODEL_LIST.openai[0];
150156
}
@@ -184,7 +190,7 @@ export const configValidators = {
184190
validateConfig(
185191
'OCO_API_KEY',
186192
value,
187-
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
193+
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
188194
);
189195

190196
return value;
@@ -307,9 +313,10 @@ export const configValidators = {
307313
'azure',
308314
'test',
309315
'flowise',
310-
'groq'
316+
'groq',
317+
'deepseek'
311318
].includes(value) || value.startsWith('ollama'),
312-
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
319+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
313320
);
314321

315322
return value;
@@ -356,7 +363,8 @@ export enum OCO_AI_PROVIDER_ENUM {
356363
FLOWISE = 'flowise',
357364
GROQ = 'groq',
358365
MISTRAL = 'mistral',
359-
MLX = 'mlx'
366+
MLX = 'mlx',
367+
DEEPSEEK = 'deepseek'
360368
}
361369

362370
export type ConfigType = {

src/engine/deepseek.ts

+60
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
import axios from 'axios';
2+
import { OpenAI } from 'openai';
3+
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
4+
import { tokenCount } from '../utils/tokenCount';
5+
import { OpenAiEngine, OpenAiConfig } from './openAI';
6+
7+
/**
 * Configuration for the DeepSeek engine.
 * Identical in shape to OpenAiConfig — DeepSeek exposes an OpenAI-compatible
 * API, so no extra fields are needed; the alias exists for call-site clarity.
 */
export interface DeepseekConfig extends OpenAiConfig {}
8+
9+
export class DeepseekEngine extends OpenAiEngine {
10+
constructor(config: DeepseekConfig) {
11+
// Call OpenAIEngine constructor with forced Deepseek baseURL
12+
super({
13+
...config,
14+
baseURL: 'https://api.deepseek.com/v1'
15+
});
16+
}
17+
18+
// Identical method from OpenAiEngine, re-implemented here
19+
public generateCommitMessage = async (
20+
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
21+
): Promise<string | null> => {
22+
const params = {
23+
model: this.config.model,
24+
messages,
25+
temperature: 0,
26+
top_p: 0.1,
27+
max_tokens: this.config.maxTokensOutput
28+
};
29+
30+
try {
31+
const REQUEST_TOKENS = messages
32+
.map((msg) => tokenCount(msg.content as string) + 4)
33+
.reduce((a, b) => a + b, 0);
34+
35+
if (
36+
REQUEST_TOKENS >
37+
this.config.maxTokensInput - this.config.maxTokensOutput
38+
)
39+
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
40+
41+
const completion = await this.client.chat.completions.create(params);
42+
43+
const message = completion.choices[0].message;
44+
45+
return message?.content;
46+
} catch (error) {
47+
const err = error as Error;
48+
if (
49+
axios.isAxiosError<{ error?: { message: string } }>(error) &&
50+
error.response?.status === 401
51+
) {
52+
const openAiError = error.response.data.error;
53+
54+
if (openAiError) throw new Error(openAiError.message);
55+
}
56+
57+
throw err;
58+
}
59+
};
60+
}

src/utils/engine.ts

+4
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ import { MistralAiEngine } from '../engine/mistral';
1010
import { TestAi, TestMockType } from '../engine/testAi';
1111
import { GroqEngine } from '../engine/groq';
1212
import { MLXEngine } from '../engine/mlx';
13+
import { DeepseekEngine } from '../engine/deepseek';
1314

1415
export function getEngine(): AiEngine {
1516
const config = getConfig();
@@ -51,6 +52,9 @@ export function getEngine(): AiEngine {
5152
case OCO_AI_PROVIDER_ENUM.MLX:
5253
return new MLXEngine(DEFAULT_CONFIG);
5354

55+
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
56+
return new DeepseekEngine(DEFAULT_CONFIG);
57+
5458
default:
5559
return new OpenAiEngine(DEFAULT_CONFIG);
5660
}

0 commit comments

Comments
 (0)