Skip to content

Commit 738fd36

Browse files
committed
πŸ› Fix: Correct DeepSeekEngine import and class name
The class was imported and instantiated under the wrong name (`DeepSeekEngine` instead of `DeepseekEngine`), causing a runtime error. This commit corrects the class name to `DeepseekEngine` in the import and at the construction sites; the import path `../engine/deepseek` itself is unchanged.
1 parent bad1fc9 commit 738fd36

File tree

3 files changed

+56
-6
lines changed

3 files changed

+56
-6
lines changed

β€Žout/cli.cjs

+27-2
Original file line numberDiff line numberDiff line change
@@ -63843,12 +63843,37 @@ var MLXEngine = class {
6384363843
};
6384463844

6384563845
// src/engine/deepseek.ts
//
// DeepSeek engine: a subclass of OpenAiEngine that points the shared client at
// DeepSeek's API host and installs a commit-message generator with fixed
// low-temperature sampling. (Reconstructed post-commit version: the diff showed
// both the old `DeepSeekEngine` and corrected `DeepseekEngine` headers; only
// the corrected one is kept. This is bundled output — the source of truth is
// src/engine/deepseek.ts.)
var DeepseekEngine = class extends OpenAiEngine {
  constructor(config7) {
    // Reuse the OpenAI engine wiring, but target DeepSeek's endpoint.
    super({
      ...config7,
      baseURL: "https://api.deepseek.com/v1"
    });
    // Generate a commit message from chat `messages`.
    // Returns the first choice's content (may be null/undefined), or throws:
    //  - Error("TOO_MUCH_TOKENS") when the prompt leaves no room for output;
    //  - Error(<api error message>) on HTTP 401 with an OpenAI-style error body;
    //  - the original error otherwise.
    this.generateCommitMessage = async (messages) => {
      const params = {
        model: this.config.model,
        messages,
        temperature: 0,
        top_p: 0.1,
        max_tokens: this.config.maxTokensOutput
      };
      try {
        // NOTE(review): +4 per message presumably accounts for per-message
        // wrapper tokens — confirm against the tokenizer used by tokenCount.
        const REQUEST_TOKENS = messages.map((msg) => tokenCount(msg.content) + 4).reduce((a4, b7) => a4 + b7, 0);
        if (REQUEST_TOKENS > this.config.maxTokensInput - this.config.maxTokensOutput)
          throw new Error("TOO_MUCH_TOKENS" /* tooMuchTokens */);
        const completion = await this.client.chat.completions.create(params);
        const message = completion.choices[0].message;
        return message?.content;
      } catch (error) {
        const err = error;
        // Surface the API's own message on authentication failures (401).
        if (axios_default.isAxiosError(error) && error.response?.status === 401) {
          const openAiError = error.response.data.error;
          if (openAiError)
            throw new Error(openAiError.message);
        }
        throw err;
      }
    };
  }
};
6385463879

@@ -63883,7 +63908,7 @@ function getEngine() {
6388363908
case "mlx" /* MLX */:
6388463909
return new MLXEngine(DEFAULT_CONFIG2);
6388563910
case "deepseek" /* DEEPSEEK */:
63886-
return new DeepSeekEngine(DEFAULT_CONFIG2);
63911+
return new DeepseekEngine(DEFAULT_CONFIG2);
6388763912
default:
6388863913
return new OpenAiEngine(DEFAULT_CONFIG2);
6388963914
}

β€Žout/github-action.cjs

+27-2
Original file line numberDiff line numberDiff line change
@@ -82637,12 +82637,37 @@ var MLXEngine = class {
8263782637
};
8263882638

8263982639
// src/engine/deepseek.ts
//
// DeepSeek engine (github-action bundle copy): subclass of OpenAiEngine that
// points the shared client at DeepSeek's API host and installs a
// commit-message generator with fixed low-temperature sampling. (Reconstructed
// post-commit version: the diff showed both the old `DeepSeekEngine` and
// corrected `DeepseekEngine` headers; only the corrected one is kept. This is
// bundled output — the source of truth is src/engine/deepseek.ts.)
var DeepseekEngine = class extends OpenAiEngine {
  constructor(config6) {
    // Reuse the OpenAI engine wiring, but target DeepSeek's endpoint.
    super({
      ...config6,
      baseURL: "https://api.deepseek.com/v1"
    });
    // Generate a commit message from chat `messages`.
    // Returns the first choice's content (may be null/undefined), or throws:
    //  - Error("TOO_MUCH_TOKENS") when the prompt leaves no room for output;
    //  - Error(<api error message>) on HTTP 401 with an OpenAI-style error body;
    //  - the original error otherwise.
    this.generateCommitMessage = async (messages) => {
      const params = {
        model: this.config.model,
        messages,
        temperature: 0,
        top_p: 0.1,
        max_tokens: this.config.maxTokensOutput
      };
      try {
        // NOTE(review): +4 per message presumably accounts for per-message
        // wrapper tokens — confirm against the tokenizer used by tokenCount.
        const REQUEST_TOKENS = messages.map((msg) => tokenCount(msg.content) + 4).reduce((a3, b3) => a3 + b3, 0);
        if (REQUEST_TOKENS > this.config.maxTokensInput - this.config.maxTokensOutput)
          throw new Error("TOO_MUCH_TOKENS" /* tooMuchTokens */);
        const completion = await this.client.chat.completions.create(params);
        const message = completion.choices[0].message;
        return message?.content;
      } catch (error) {
        const err = error;
        // Surface the API's own message on authentication failures (401).
        if (axios_default.isAxiosError(error) && error.response?.status === 401) {
          const openAiError = error.response.data.error;
          if (openAiError)
            throw new Error(openAiError.message);
        }
        throw err;
      }
    };
  }
};
8264882673

@@ -82677,7 +82702,7 @@ function getEngine() {
8267782702
case "mlx" /* MLX */:
8267882703
return new MLXEngine(DEFAULT_CONFIG2);
8267982704
case "deepseek" /* DEEPSEEK */:
82680-
return new DeepSeekEngine(DEFAULT_CONFIG2);
82705+
return new DeepseekEngine(DEFAULT_CONFIG2);
8268182706
default:
8268282707
return new OpenAiEngine(DEFAULT_CONFIG2);
8268382708
}

β€Žsrc/utils/engine.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ import { MistralAiEngine } from '../engine/mistral';
1010
import { TestAi, TestMockType } from '../engine/testAi';
1111
import { GroqEngine } from '../engine/groq';
1212
import { MLXEngine } from '../engine/mlx';
13-
import { DeepSeekEngine } from '../engine/deepseek';
13+
import { DeepseekEngine } from '../engine/deepseek';
1414

1515
export function getEngine(): AiEngine {
1616
const config = getConfig();
@@ -53,7 +53,7 @@ export function getEngine(): AiEngine {
5353
return new MLXEngine(DEFAULT_CONFIG);
5454

5555
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
56-
return new DeepSeekEngine(DEFAULT_CONFIG);
56+
return new DeepseekEngine(DEFAULT_CONFIG);
5757

5858
default:
5959
return new OpenAiEngine(DEFAULT_CONFIG);

0 commit comments

Comments
Β (0)