Skip to content

Commit e3b84c0

Browse files
committed
Restore ./out directory to master state
1 parent 1ca9fd0 commit e3b84c0

File tree

2 files changed

+10
-57
lines changed

2 files changed

+10
-57
lines changed

out/cli.cjs

+5-29
Original file line numberDiff line numberDiff line change
@@ -49153,10 +49153,6 @@ var MODEL_LIST = {
4915349153
"mistral-embed",
4915449154
"mistral-moderation-2411",
4915549155
"mistral-moderation-latest"
49156-
],
49157-
deepseek: [
49158-
"deepseek-chat",
49159-
"deepseek-reasoner"
4916049156
]
4916149157
};
4916249158
var getDefaultModel = (provider) => {
@@ -49173,8 +49169,6 @@ var getDefaultModel = (provider) => {
4917349169
return MODEL_LIST.groq[0];
4917449170
case "mistral":
4917549171
return MODEL_LIST.mistral[0];
49176-
case "deepseek":
49177-
return MODEL_LIST.deepseek[0];
4917849172
default:
4917949173
return MODEL_LIST.openai[0];
4918049174
}
@@ -49200,7 +49194,7 @@ var configValidators = {
4920049194
validateConfig(
4920149195
"OCO_API_KEY",
4920249196
value,
49203-
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
49197+
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
4920449198
);
4920549199
return value;
4920649200
},
@@ -49306,10 +49300,9 @@ var configValidators = {
4930649300
"azure",
4930749301
"test",
4930849302
"flowise",
49309-
"groq",
49310-
"deepseek"
49303+
"groq"
4931149304
].includes(value) || value.startsWith("ollama"),
49312-
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
49305+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
4931349306
);
4931449307
return value;
4931549308
},
@@ -49351,7 +49344,6 @@ var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
4935149344
OCO_AI_PROVIDER_ENUM2["GROQ"] = "groq";
4935249345
OCO_AI_PROVIDER_ENUM2["MISTRAL"] = "mistral";
4935349346
OCO_AI_PROVIDER_ENUM2["MLX"] = "mlx";
49354-
OCO_AI_PROVIDER_ENUM2["DEEPSEEK"] = "deepseek";
4935549347
return OCO_AI_PROVIDER_ENUM2;
4935649348
})(OCO_AI_PROVIDER_ENUM || {});
4935749349
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -59446,12 +59438,8 @@ var OllamaEngine = class {
5944659438
this.client.getUri(this.config),
5944759439
params
5944859440
);
59449-
const { message } = response.data;
59450-
let content = message?.content;
59451-
if (content && content.includes("<think>")) {
59452-
return content.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
59453-
}
59454-
return content;
59441+
const message = response.data.message;
59442+
return message?.content;
5945559443
} catch (err) {
5945659444
const message = err.response?.data?.error ?? err.message;
5945759445
throw new Error(`Ollama provider error: ${message}`);
@@ -63842,16 +63830,6 @@ var MLXEngine = class {
6384263830
}
6384363831
};
6384463832

63845-
// src/engine/deepseek.ts
63846-
var DeepSeekEngine = class extends OpenAiEngine {
63847-
constructor(config7) {
63848-
super({
63849-
...config7,
63850-
baseURL: "https://api.deepseek.com/v1"
63851-
});
63852-
}
63853-
};
63854-
6385563833
// src/utils/engine.ts
6385663834
function getEngine() {
6385763835
const config7 = getConfig();
@@ -63882,8 +63860,6 @@ function getEngine() {
6388263860
return new MistralAiEngine(DEFAULT_CONFIG2);
6388363861
case "mlx" /* MLX */:
6388463862
return new MLXEngine(DEFAULT_CONFIG2);
63885-
case "deepseek" /* DEEPSEEK */:
63886-
return new DeepSeekEngine(DEFAULT_CONFIG2);
6388763863
default:
6388863864
return new OpenAiEngine(DEFAULT_CONFIG2);
6388963865
}

out/github-action.cjs

+5-28
Original file line numberDiff line numberDiff line change
@@ -67961,10 +67961,6 @@ var MODEL_LIST = {
6796167961
"mistral-embed",
6796267962
"mistral-moderation-2411",
6796367963
"mistral-moderation-latest"
67964-
],
67965-
deepseek: [
67966-
"deepseek-chat",
67967-
"deepseek-reasoner"
6796867964
]
6796967965
};
6797067966
var getDefaultModel = (provider) => {
@@ -67981,8 +67977,6 @@ var getDefaultModel = (provider) => {
6798167977
return MODEL_LIST.groq[0];
6798267978
case "mistral":
6798367979
return MODEL_LIST.mistral[0];
67984-
case "deepseek":
67985-
return MODEL_LIST.deepseek[0];
6798667980
default:
6798767981
return MODEL_LIST.openai[0];
6798867982
}
@@ -68008,7 +68002,7 @@ var configValidators = {
6800868002
validateConfig(
6800968003
"OCO_API_KEY",
6801068004
value,
68011-
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
68005+
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
6801268006
);
6801368007
return value;
6801468008
},
@@ -68114,10 +68108,9 @@ var configValidators = {
6811468108
"azure",
6811568109
"test",
6811668110
"flowise",
68117-
"groq",
68118-
"deepseek"
68111+
"groq"
6811968112
].includes(value) || value.startsWith("ollama"),
68120-
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
68113+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
6812168114
);
6812268115
return value;
6812368116
},
@@ -78240,12 +78233,8 @@ var OllamaEngine = class {
7824078233
this.client.getUri(this.config),
7824178234
params
7824278235
);
78243-
const { message } = response.data;
78244-
let content = message?.content;
78245-
if (content && content.includes("<think>")) {
78246-
return content.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
78247-
}
78248-
return content;
78236+
const message = response.data.message;
78237+
return message?.content;
7824978238
} catch (err) {
7825078239
const message = err.response?.data?.error ?? err.message;
7825178240
throw new Error(`Ollama provider error: ${message}`);
@@ -82636,16 +82625,6 @@ var MLXEngine = class {
8263682625
}
8263782626
};
8263882627

82639-
// src/engine/deepseek.ts
82640-
var DeepSeekEngine = class extends OpenAiEngine {
82641-
constructor(config6) {
82642-
super({
82643-
...config6,
82644-
baseURL: "https://api.deepseek.com/v1"
82645-
});
82646-
}
82647-
};
82648-
8264982628
// src/utils/engine.ts
8265082629
function getEngine() {
8265182630
const config6 = getConfig();
@@ -82676,8 +82655,6 @@ function getEngine() {
8267682655
return new MistralAiEngine(DEFAULT_CONFIG2);
8267782656
case "mlx" /* MLX */:
8267882657
return new MLXEngine(DEFAULT_CONFIG2);
82679-
case "deepseek" /* DEEPSEEK */:
82680-
return new DeepSeekEngine(DEFAULT_CONFIG2);
8268182658
default:
8268282659
return new OpenAiEngine(DEFAULT_CONFIG2);
8268382660
}

0 commit comments

Comments (0)