Commit 1ca9fd0

feat(engine): add DeepSeekEngine
Squashed commit history:

- feat(engine): add DeepSeekEngine to support the DeepSeek API; includes a new DeepSeekConfig interface and updates to the engine selection logic.
- feat(README.md, src/commands/config.ts): add DeepSeek support. Updates the README, config validation, and model list so users can use DeepSeek models with OpenCommit.
- fix(deepseek.ts): update the DeepSeek API base URL to include the version number (v1).
- refactor(deepseek.ts): improve the DeepSeekEngine constructor. It now uses spread syntax when merging config parameters and sets baseURL explicitly.
- fix(README.md): remove Groq from the list of supported AI providers.
- refactor(deepseek.ts): rename interface DeepseekConfig to DeepSeekEngineeekConfig and fix typo.
- Revert "refactor(deepseek.ts): rename interface DeepseekConfig to DeepSeekEngineeekConfig and fix typo". This reverts commit f492367.
- refactor(deepseek.ts): rename DeepseekConfig to DeepSeekConfig for consistency.
- ✨ feat(engine): add DeepSeekEngine to support the DeepSeek API.
- ♻️ refactor(engine): create a DeepSeekEngine class that inherits from OpenAiEngine and overrides generateCommitMessage to use the DeepSeek API, improving code organization and maintainability.
- 🐛 fix: correct the DeepSeekEngine import path and class name (now `DeepseekEngine`), which were causing a runtime error.
- Revert "🐛 Fix: Correct DeepSeekEngine import and class name". This reverts commit 738fd36.
- 🐛 fix: correct the DeepSeekEngine import path and class name to match the actual file and class, fixing a runtime error.
1 parent 6816379 commit 1ca9fd0

File tree

6 files changed, +134 -15 lines changed


README.md (+1 -1)
@@ -106,7 +106,7 @@ Create a `.env` file and add OpenCommit config variables there like this:
 
 ```env
 ...
-OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise>
+OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek>
 OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
 OCO_API_URL=<may be used to set proxy path to OpenAI api>
 OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
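With this change, pointing OpenCommit at DeepSeek works like any other provider. A minimal `.env` sketch (the `deepseek-chat` default comes from the MODEL_LIST addition below; `OCO_MODEL` is the usual model override key, shown here as an optional assumption):

```env
OCO_AI_PROVIDER=deepseek
OCO_API_KEY=<your DeepSeek API key>
OCO_MODEL=deepseek-chat
```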

out/cli.cjs (+29 -5)
@@ -49153,6 +49153,10 @@ var MODEL_LIST = {
     "mistral-embed",
     "mistral-moderation-2411",
     "mistral-moderation-latest"
+  ],
+  deepseek: [
+    "deepseek-chat",
+    "deepseek-reasoner"
   ]
 };
 var getDefaultModel = (provider) => {
@@ -49169,6 +49173,8 @@ var getDefaultModel = (provider) => {
       return MODEL_LIST.groq[0];
     case "mistral":
       return MODEL_LIST.mistral[0];
+    case "deepseek":
+      return MODEL_LIST.deepseek[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -49194,7 +49200,7 @@ var configValidators = {
     validateConfig(
       "OCO_API_KEY",
       value,
-      'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
+      'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
     );
     return value;
   },
@@ -49300,9 +49306,10 @@ var configValidators = {
         "azure",
         "test",
         "flowise",
-        "groq"
+        "groq",
+        "deepseek"
       ].includes(value) || value.startsWith("ollama"),
-      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
     );
     return value;
   },
@@ -49344,6 +49351,7 @@ var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
   OCO_AI_PROVIDER_ENUM2["GROQ"] = "groq";
   OCO_AI_PROVIDER_ENUM2["MISTRAL"] = "mistral";
   OCO_AI_PROVIDER_ENUM2["MLX"] = "mlx";
+  OCO_AI_PROVIDER_ENUM2["DEEPSEEK"] = "deepseek";
   return OCO_AI_PROVIDER_ENUM2;
 })(OCO_AI_PROVIDER_ENUM || {});
 var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -59438,8 +59446,12 @@ var OllamaEngine = class {
         this.client.getUri(this.config),
         params
       );
-      const message = response.data.message;
-      return message?.content;
+      const { message } = response.data;
+      let content = message?.content;
+      if (content && content.includes("<think>")) {
+        return content.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
+      }
+      return content;
     } catch (err) {
       const message = err.response?.data?.error ?? err.message;
       throw new Error(`Ollama provider error: ${message}`);
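The `<think>` handling added to OllamaEngine above is aimed at reasoning models (such as DeepSeek-R1 variants served through Ollama), which emit their chain of thought inside `<think>...</think>` tags before the final answer; without stripping, that reasoning would leak into the generated commit message. A self-contained sketch of the same transformation:

```ts
// Sketch: strip <think>...</think> reasoning blocks, as the OllamaEngine change above does.
const raw =
  '<think>\nThe diff adds a new engine, so...\n</think>\nfeat(engine): add DeepSeekEngine';
const clean = raw.replace(/<think>[\s\S]*?<\/think>/g, '').trim();
console.log(clean); // "feat(engine): add DeepSeekEngine"
```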
@@ -63830,6 +63842,16 @@ var MLXEngine = class {
   }
 };
 
+// src/engine/deepseek.ts
+var DeepSeekEngine = class extends OpenAiEngine {
+  constructor(config7) {
+    super({
+      ...config7,
+      baseURL: "https://api.deepseek.com/v1"
+    });
+  }
+};
+
 // src/utils/engine.ts
 function getEngine() {
   const config7 = getConfig();
@@ -63860,6 +63882,8 @@ function getEngine() {
       return new MistralAiEngine(DEFAULT_CONFIG2);
     case "mlx" /* MLX */:
       return new MLXEngine(DEFAULT_CONFIG2);
+    case "deepseek" /* DEEPSEEK */:
+      return new DeepSeekEngine(DEFAULT_CONFIG2);
     default:
       return new OpenAiEngine(DEFAULT_CONFIG2);
   }
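The bundled DeepSeekEngine above is a thin subclass because DeepSeek exposes an OpenAI-compatible chat completions API: redirecting `baseURL` is enough for the inherited OpenAI client to work unchanged. A minimal sketch of the same idea with the raw SDK (the env var name is hypothetical):

```ts
import { OpenAI } from 'openai';

// DeepSeek speaks the OpenAI wire protocol, so the stock client works
// once baseURL points at DeepSeek's endpoint.
const client = new OpenAI({
  apiKey: process.env.DEEPSEEK_API_KEY, // hypothetical variable for this sketch
  baseURL: 'https://api.deepseek.com/v1'
});
```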

out/github-action.cjs (+28 -5)
@@ -67961,6 +67961,10 @@ var MODEL_LIST = {
     "mistral-embed",
     "mistral-moderation-2411",
     "mistral-moderation-latest"
+  ],
+  deepseek: [
+    "deepseek-chat",
+    "deepseek-reasoner"
   ]
 };
 var getDefaultModel = (provider) => {
@@ -67977,6 +67981,8 @@ var getDefaultModel = (provider) => {
       return MODEL_LIST.groq[0];
     case "mistral":
       return MODEL_LIST.mistral[0];
+    case "deepseek":
+      return MODEL_LIST.deepseek[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -68002,7 +68008,7 @@ var configValidators = {
     validateConfig(
       "OCO_API_KEY",
       value,
-      'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
+      'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
     );
     return value;
   },
@@ -68108,9 +68114,10 @@ var configValidators = {
         "azure",
         "test",
         "flowise",
-        "groq"
+        "groq",
+        "deepseek"
       ].includes(value) || value.startsWith("ollama"),
-      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
     );
     return value;
   },
@@ -78233,8 +78240,12 @@ var OllamaEngine = class {
         this.client.getUri(this.config),
         params
       );
-      const message = response.data.message;
-      return message?.content;
+      const { message } = response.data;
+      let content = message?.content;
+      if (content && content.includes("<think>")) {
+        return content.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
+      }
+      return content;
     } catch (err) {
       const message = err.response?.data?.error ?? err.message;
       throw new Error(`Ollama provider error: ${message}`);
@@ -82625,6 +82636,16 @@ var MLXEngine = class {
   }
 };
 
+// src/engine/deepseek.ts
+var DeepSeekEngine = class extends OpenAiEngine {
+  constructor(config6) {
+    super({
+      ...config6,
+      baseURL: "https://api.deepseek.com/v1"
+    });
+  }
+};
+
 // src/utils/engine.ts
 function getEngine() {
   const config6 = getConfig();
@@ -82655,6 +82676,8 @@ function getEngine() {
       return new MistralAiEngine(DEFAULT_CONFIG2);
     case "mlx" /* MLX */:
       return new MLXEngine(DEFAULT_CONFIG2);
+    case "deepseek" /* DEEPSEEK */:
+      return new DeepSeekEngine(DEFAULT_CONFIG2);
     default:
       return new OpenAiEngine(DEFAULT_CONFIG2);
   }

src/commands/config.ts (+12 -4)
@@ -128,6 +128,10 @@ export const MODEL_LIST = {
     'mistral-embed',
     'mistral-moderation-2411',
     'mistral-moderation-latest',
+  ],
+  deepseek: [
+    'deepseek-chat',
+    'deepseek-reasoner',
   ]
 };
 
@@ -145,6 +149,8 @@ const getDefaultModel = (provider: string | undefined): string => {
       return MODEL_LIST.groq[0];
     case 'mistral':
       return MODEL_LIST.mistral[0];
+    case 'deepseek':
+      return MODEL_LIST.deepseek[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -184,7 +190,7 @@ export const configValidators = {
     validateConfig(
       'OCO_API_KEY',
       value,
-      'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
+      'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
     );
 
     return value;
@@ -307,9 +313,10 @@ export const configValidators = {
         'azure',
         'test',
         'flowise',
-        'groq'
+        'groq',
+        'deepseek'
       ].includes(value) || value.startsWith('ollama'),
-      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
     );
 
     return value;
@@ -356,7 +363,8 @@ export enum OCO_AI_PROVIDER_ENUM {
   FLOWISE = 'flowise',
   GROQ = 'groq',
   MISTRAL = 'mistral',
-  MLX = 'mlx'
+  MLX = 'mlx',
+  DEEPSEEK = 'deepseek'
 }
 
 export type ConfigType = {
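As a quick illustration of the selection logic above, a self-contained mirror of `getDefaultModel` (trimmed for the sketch; the real MODEL_LIST is much longer and `'gpt-4o-mini'` is only a placeholder for the openai default):

```ts
const MODEL_LIST = {
  openai: ['gpt-4o-mini'], // placeholder; the repo's list is longer
  deepseek: ['deepseek-chat', 'deepseek-reasoner']
};

const getDefaultModel = (provider?: string): string => {
  switch (provider) {
    case 'deepseek':
      return MODEL_LIST.deepseek[0];
    default:
      return MODEL_LIST.openai[0];
  }
};

console.log(getDefaultModel('deepseek')); // "deepseek-chat"
console.log(getDefaultModel(undefined)); // "gpt-4o-mini"
```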

src/engine/deepseek.ts (+60, new file)
@@ -0,0 +1,60 @@
+import axios from 'axios';
+import { OpenAI } from 'openai';
+import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
+import { tokenCount } from '../utils/tokenCount';
+import { OpenAiEngine, OpenAiConfig } from './openAI';
+
+export interface DeepseekConfig extends OpenAiConfig {}
+
+export class DeepseekEngine extends OpenAiEngine {
+  constructor(config: DeepseekConfig) {
+    // Call the OpenAiEngine constructor with the DeepSeek baseURL forced
+    super({
+      ...config,
+      baseURL: 'https://api.deepseek.com/v1'
+    });
+  }
+
+  // Identical method from OpenAiEngine, re-implemented here
+  public generateCommitMessage = async (
+    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
+  ): Promise<string | null> => {
+    const params = {
+      model: this.config.model,
+      messages,
+      temperature: 0,
+      top_p: 0.1,
+      max_tokens: this.config.maxTokensOutput
+    };
+
+    try {
+      const REQUEST_TOKENS = messages
+        .map((msg) => tokenCount(msg.content as string) + 4)
+        .reduce((a, b) => a + b, 0);
+
+      if (
+        REQUEST_TOKENS >
+        this.config.maxTokensInput - this.config.maxTokensOutput
+      )
+        throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
+
+      const completion = await this.client.chat.completions.create(params);
+
+      const message = completion.choices[0].message;
+
+      return message?.content;
+    } catch (error) {
+      const err = error as Error;
+      if (
+        axios.isAxiosError<{ error?: { message: string } }>(error) &&
+        error.response?.status === 401
+      ) {
+        const openAiError = error.response.data.error;
+
+        if (openAiError) throw new Error(openAiError.message);
+      }
+
+      throw err;
+    }
+  };
+}
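For orientation, a hedged wiring sketch of the new engine. The `model`, `maxTokensInput`, and `maxTokensOutput` fields follow how the class reads `this.config` above; `apiKey` and the empty `baseURL` are assumptions about the `OpenAiConfig` shape (the constructor overrides `baseURL` regardless). Note that the `+ 4` per message in the token check approximates the fixed per-message overhead of chat formatting.

```ts
import { DeepseekEngine } from './src/engine/deepseek';

async function demo(): Promise<void> {
  const engine = new DeepseekEngine({
    model: 'deepseek-chat', // MODEL_LIST.deepseek[0]
    apiKey: process.env.OCO_API_KEY!, // assumed field name
    baseURL: '', // overridden to https://api.deepseek.com/v1 by the constructor
    maxTokensInput: 4096,
    maxTokensOutput: 500
  });

  const message = await engine.generateCommitMessage([
    { role: 'system', content: 'Generate a conventional commit message.' },
    { role: 'user', content: 'diff --git a/src/engine/deepseek.ts ...' }
  ]);

  console.log(message);
}
```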

src/utils/engine.ts (+4)
@@ -10,6 +10,7 @@ import { MistralAiEngine } from '../engine/mistral';
 import { TestAi, TestMockType } from '../engine/testAi';
 import { GroqEngine } from '../engine/groq';
 import { MLXEngine } from '../engine/mlx';
+import { DeepseekEngine } from '../engine/deepseek';
 
 export function getEngine(): AiEngine {
   const config = getConfig();
@@ -51,6 +52,9 @@ export function getEngine(): AiEngine {
     case OCO_AI_PROVIDER_ENUM.MLX:
       return new MLXEngine(DEFAULT_CONFIG);
 
+    case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
+      return new DeepseekEngine(DEFAULT_CONFIG);
+
     default:
       return new OpenAiEngine(DEFAULT_CONFIG);
   }
