Skip to content

Commit 19f6288

Browse files
committed
feat: chat 命令
1 parent c3139e4 commit 19f6288

File tree

12 files changed

+143
-60
lines changed

12 files changed

+143
-60
lines changed

.github/workflows/publish.yaml

+2-2
Original file line numberDiff line numberDiff line change
@@ -13,8 +13,8 @@ jobs:
1313
registry-url: 'https://registry.npmjs.org'
1414
- name: Install dependencies
1515
run: |
16-
npm install [email protected] -g
17-
pnpm install --frozen-lockfile
16+
corepack install
17+
corepack pnpm install --frozen-lockfile
1818
- name: Build
1919
run: npm run build
2020
- name: Publish

.prettierrc.cjs

-9
Original file line numberDiff line numberDiff line change
@@ -8,15 +8,6 @@ module.exports = {
88
bracketSameLine: true,
99
useTabs: false,
1010
plugins: [
11-
'@ianvs/prettier-plugin-sort-imports',
1211
'prettier-plugin-packagejson'
13-
],
14-
importOrder: [
15-
'<BUILTIN_MODULES>',
16-
'<THIRD_PARTY_MODULES>',
17-
'',
18-
'^@/(.*)$',
19-
'',
20-
'^[./]'
2112
]
2213
};

README.md

+6
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,12 @@ Finally, tell us the content that needs to be detected and we can proceed.
4242
$ aigc-detector detect [CONTENT]
4343
```
4444

45+
Also, you can chat with the large model using the following command:
46+
47+
```sh
48+
$ aigc-detector chat
49+
```
50+
4551
For more ways to use `aigc-detector`, please refer to the help command.
4652

4753
```sh

package.json

+2-3
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "aigc-detector",
3-
"version": "1.0.2",
3+
"version": "1.0.3",
44
"description": "Detect if content is generated by AI",
55
"keywords": [
66
"aigc",
@@ -58,7 +58,6 @@
5858
"@babel/core": "^7.24.4",
5959
"@commitlint/cli": "^19.2.2",
6060
"@commitlint/config-conventional": "^19.2.2",
61-
"@ianvs/prettier-plugin-sort-imports": "^4.2.1",
6261
"@oclif/prettier-config": "^0.2.1",
6362
"@types/inquirer": "^9.0.7",
6463
"@types/node": "^18.11.9",
@@ -77,5 +76,5 @@
7776
"engines": {
7877
"node": ">=18.0.0"
7978
},
80-
"packageManager": "pnpm@9.0.6"
79+
"packageManager": "pnpm@9.1.0"
8180
}

pnpm-lock.yaml

+7-31
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/cli/commands/chat.ts

+94
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
1+
import { AIMessage, HumanMessage } from '@langchain/core/messages';
2+
import chalk from 'chalk';
3+
import { ChatMessageHistory } from 'langchain/stores/message/in_memory';
4+
import readline from 'node:readline';
5+
6+
import type { Platform } from '../../platform';
7+
8+
import { AIGC } from '../../core';
9+
import BaseCommand from '../extends/command';
10+
11+
enum PromptRole {
12+
AI = 'ai',
13+
USER = 'user'
14+
}
15+
16+
// LangChain message constructor to instantiate for each chat role
// (AI turns become AIMessage, user turns become HumanMessage).
const promptMessageMap = {
  [PromptRole.AI]: AIMessage,
  [PromptRole.USER]: HumanMessage
};
20+
// Terminal presentation for each role: the chalk color method name used to
// tint the prefix, and the label printed inside the brackets (e.g. "[AI] ").
// `as const` keeps `color` as a literal so `chalk[color]` type-checks.
const promptRoleDisplayMap = {
  [PromptRole.AI]: {
    color: 'yellow',
    name: 'AI'
  },
  [PromptRole.USER]: {
    color: 'green',
    name: 'You'
  }
} as const;
30+
31+
const reader = readline.createInterface({
32+
input: process.stdin,
33+
output: process.stdout
34+
});
35+
36+
class ChatCommand extends BaseCommand {
37+
static args = {};
38+
39+
static description = 'Chat with the LLM';
40+
41+
static examples = [];
42+
43+
static flags = {};
44+
45+
private lastMessage = 'How can I help you today?';
46+
47+
private messages = new ChatMessageHistory();
48+
49+
async run(): Promise<void> {
50+
const config = await this.configManager.getAll();
51+
52+
if (Object.keys(config).length > 0) {
53+
const detector = new AIGC({
54+
apiKey: config.apiKey,
55+
platform: config.platform as unknown as Platform
56+
});
57+
const userDisplay = this.getDisplayContent(PromptRole.USER);
58+
59+
// eslint-disable-next-line no-constant-condition
60+
while (true) {
61+
const aiMessage = await this.addMessage(PromptRole.AI, this.lastMessage);
62+
const userMessage = await this.getUserMessage(aiMessage + `\n${userDisplay}`);
63+
const answer = await detector.chat(userMessage, await this.messages.getMessages());
64+
65+
await this.addMessage(PromptRole.USER, userMessage);
66+
this.lastMessage = answer;
67+
}
68+
} else {
69+
this.showHelp();
70+
}
71+
}
72+
73+
private async addMessage(role: PromptRole, content: string): Promise<string> {
74+
const Message = promptMessageMap[role];
75+
76+
await this.messages.addMessage(new Message(content));
77+
78+
return this.getDisplayContent(role) + content;
79+
}
80+
81+
private getDisplayContent(role: PromptRole): string {
82+
const roleDisplay = promptRoleDisplayMap[role];
83+
84+
return chalk[roleDisplay.color](`[${roleDisplay.name}] `);
85+
}
86+
87+
private getUserMessage(aiMessage: string): Promise<string> {
88+
return new Promise<string>((resolve) => {
89+
reader.question(aiMessage, resolve);
90+
});
91+
}
92+
}
93+
94+
export default ChatCommand;

src/cli/commands/detect.ts

+2-1
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,9 @@ import { Args, Flags } from '@oclif/core';
22
import chalk from 'chalk';
33
import ora from 'ora';
44

5+
import type { Platform } from '../../platform';
6+
57
import { AIGC } from '../../core';
6-
import { type Platform } from '../../platform';
78
import BaseCommand from '../extends/command';
89

910
class DetectCommand extends BaseCommand {

src/core/index.ts

+15-1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,6 @@
1+
import type { BaseMessage } from '@langchain/core/messages';
2+
3+
import { PROMPT } from '../const';
14
import { getPlatform, type Platform } from '../platform';
25
import { getEnvConfig } from './env';
36
import { getDetectResult } from './utils';
@@ -19,9 +22,20 @@ export class AIGC {
1922
this.platform = (env.platform as unknown as Platform) || options.platform;
2023
}
2124

25+
/**
 * Free-form chat with the configured LLM platform.
 *
 * @param content  the new user turn
 * @param messages prior conversation history (fed to the prompt as `messages`)
 * @returns the raw text reply from the platform
 */
public async chat(content: string, messages: BaseMessage[]) {
  return getPlatform(this.platform).invoke(
    'You are a helpful assistant. Answer all questions to the best of your ability.',
    { content, messages },
    this.apiKey
  );
}
35+
2236
/**
 * Detects whether `content` looks AI-generated: sends the detection PROMPT
 * plus the content to the configured platform and parses the raw model
 * output into a structured result.
 */
public async detect(content: string): Promise<ReturnType<typeof getDetectResult>> {
  const platform = getPlatform(this.platform);
  const raw = await platform.invoke(PROMPT, { content }, this.apiKey);

  return getDetectResult(raw);
}

src/platform/base.ts

+9-10
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
1-
import { type BaseLanguageModel } from '@langchain/core/language_models/base';
1+
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
2+
23
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts';
34
import { ChatOpenAI } from '@langchain/openai';
45
import { LLMChain } from 'langchain/chains';
56

6-
import { PROMPT } from '../const';
7+
type InvokeParameter = Parameters<InstanceType<typeof LLMChain>['invoke']>[0];
78

89
abstract class Platform {
910
protected temperature = 0.7;
@@ -20,22 +21,20 @@ abstract class Platform {
2021
});
2122
}
2223

23-
/**
 * Builds the chat prompt template: the caller-supplied system prompt
 * followed by a human message wrapping the `{content}` placeholder.
 */
protected getPrompt(prompt: string): ChatPromptTemplate {
  const system = SystemMessagePromptTemplate.fromTemplate(prompt);
  const human = HumanMessagePromptTemplate.fromTemplate('Here is what needs to be evaluated: \n{content}');

  return ChatPromptTemplate.fromMessages([system, human]);
}
2930

30-
/**
 * Runs a single LLM call: builds a chat prompt from `prompt` (the system
 * message), wires it to the platform's chat model, and invokes the chain
 * with `params` (template variables such as `content`/`messages`).
 *
 * @returns the raw text of the model's reply
 */
public async invoke(prompt: string, params: InvokeParameter, apiKey?: string): Promise<string> {
  const chain = new LLMChain({
    llm: this.getChatModel(apiKey),
    prompt: this.getPrompt(prompt)
  });

  const { text } = await chain.invoke(params);

  return text;
}

src/platform/minimax.ts

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1+
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
2+
13
import { ChatMinimax } from '@langchain/community/chat_models/minimax';
2-
import { type BaseLanguageModel } from '@langchain/core/language_models/base';
34

45
import Platform from './base';
56

src/platform/tongyi.ts

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1+
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
2+
13
import { ChatAlibabaTongyi } from '@langchain/community/chat_models/alibaba_tongyi';
2-
import { type BaseLanguageModel } from '@langchain/core/language_models/base';
34

45
import Platform from './base';
56

src/platform/zhipu.ts

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1+
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
2+
13
import { ChatZhipuAI } from '@langchain/community/chat_models/zhipuai';
2-
import { type BaseLanguageModel } from '@langchain/core/language_models/base';
34

45
import Platform from './base';
56

0 commit comments

Comments
 (0)