Skip to content
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions .npmrc

This file was deleted.

2 changes: 0 additions & 2 deletions docs/.vitepress/components.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,7 @@ declare module 'vue' {
AdsASide: typeof import('./theme/components/AdsASide.vue')['default']
Badge: typeof import('./theme/components/Badge.vue')['default']
CodeGroupItem: typeof import('./theme/components/CodeGroupItem.vue')['default']
copy: typeof import('./theme/components/StepFlow copy.vue')['default']
HomePage: typeof import('./theme/components/HomePage.vue')['default']
StepFlow: typeof import('./theme/components/StepFlow.vue')['default']
StepFlowItem: typeof import('./theme/components/StepFlowItem.vue')['default']
}
}
2 changes: 1 addition & 1 deletion docs/recipes/openai.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ czg --api-key=sk-xxxxx
1. Get DeepSeek [API Key](https://platform.deepseek.com/api_keys)
2. Run the command to configure
```sh
npx czg --api-key="sk-xxxxxx" --api-endpoint="https://api.deepseek.com" --api-model="deepseek-chat"
npx czg --api-key="sk-xxxxxx" --api-endpoint="https://api.deepseek.com" --api-model="deepseek-chat"
```
:::

Expand Down
2 changes: 1 addition & 1 deletion docs/zh/recipes/openai.md
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ czg --api-key=sk-xxxxx
1. 获取 DeepSeek [API Key](https://platform.deepseek.com/api_keys)
2. 运行命令进行配置
```sh
npx czg --api-key="sk-xxxxxx" --api-endpoint="https://api.deepseek.com" --api-model="deepseek-chat"
npx czg --api-key="sk-xxxxxx" --api-endpoint="https://api.deepseek.com" --api-model="deepseek-chat"
```
:::

Expand Down
26 changes: 2 additions & 24 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "cz-git",
"version": "1.12.0",
"private": true,
"packageManager": "pnpm@9.11.0",
"packageManager": "pnpm@10.33.2",
"description": "A better customizable and git support commitizen adapter",
"author": "Zhengqbbb <zhengqbbb@gmail.com> (https://github.com/Zhengqbbb)",
"license": "MIT",
Expand Down Expand Up @@ -69,7 +69,7 @@
"npm-run-all2": "^6.2.3",
"ora": "^8.1.0",
"pathe": "^1.1.2",
"pnpm": "^9.11.0",
"pnpm": "^10.33.2",
"rimraf": "catalog:rimraf",
"simple-git-hooks": "^2.11.1",
"ts-json-schema-generator": "^2.3.0",
Expand All @@ -78,28 +78,6 @@
"typescript": "^5.5.4",
"vitest": "^2.0.5"
},
"pnpm": {
"overrides": {
"@commitlint/config-validator": "catalog:commitlint",
"chalk": "4.1.2",
"color-convert": "2.0.1",
"import-meta-resolve": "4.1.0",
"resolve-from": "5.0.0",
"supports-color": "8.1.1"
},
"peerDependencyRules": {
"ignoreMissing": [
"@algolia/client-search",
"@types/react",
"eslint-plugin-import",
"eslint-plugin-n",
"eslint-plugin-promise",
"react",
"react-dom",
"webpack"
]
}
},
"simple-git-hooks": {
"pre-commit": "pnpm lint-staged",
"commit-msg": "pnpm commitlint --edit $1",
Expand Down
23 changes: 15 additions & 8 deletions packages/cz-git/src/generator/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,13 @@ import { style } from '@cz-git/inquirer'
import HttpsProxyAgent from 'https-proxy-agent'
import { isNodeVersionInRange, log, transformSubjectCase } from '../shared'
import type { CommitizenGitOptions } from '../shared'
import { bodyToNodeReadable, readChatCompletionStreamToSubjects } from '../shared/utils/stream'

/** Enough headroom for reasoning + short subject (legacy default was 200). */
const AI_MAX_COMPLETION_TOKENS = 4096

/** Streaming first byte and full generation can exceed the old 10s window. */
const AI_FETCH_TIMEOUT_MS = 60 * 1000

export async function fetchOpenAIMessage(options: CommitizenGitOptions, prompt: string) {
if (!options.openAIToken) {
Expand Down Expand Up @@ -33,7 +40,7 @@ export async function fetchOpenAIMessage(options: CommitizenGitOptions, prompt:
},
method: 'POST',
body: JSON.stringify(aiContext.payload),
signal: isNodeVersionInRange(18) ? AbortSignal?.timeout(10 * 1000) : undefined,
signal: isNodeVersionInRange(18) ? AbortSignal?.timeout(AI_FETCH_TIMEOUT_MS) : undefined,
})

if (
Expand All @@ -44,10 +51,11 @@ export async function fetchOpenAIMessage(options: CommitizenGitOptions, prompt:
const errorJson: any = await response.json()
throw new APIError(errorJson?.error?.message, response.status)
}
const json: any = await response.json()
return json
.choices
.map((r: any) => parseAISubject(options, aiContext.parseFn(r)))

const choiceCount = options.aiNumber || 1
const readable = bodyToNodeReadable(response.body)
const rawSubjects = await readChatCompletionStreamToSubjects(readable, choiceCount)
return rawSubjects.map(s => parseAISubject(options, s))
}
catch (err: any) {
let errorMsg = 'Fetch OpenAI API message failure.'
Expand All @@ -74,14 +82,13 @@ function useModelStrategy(options: CommitizenGitOptions, prompt: string) {
payload: {
model: options.aiModel,
messages: [{ role: 'user', content: prompt }],
stream: false,
stream: true,
top_p: 1,
temperature: 0.7,
max_tokens: 200,
max_tokens: AI_MAX_COMPLETION_TOKENS,
n: options.aiNumber || 1,
},
url: `${options.apiEndpoint}/chat/completions`,
parseFn: (res: any) => res?.message?.content,
}
}

Expand Down
3 changes: 2 additions & 1 deletion packages/cz-git/src/shared/utils/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
export * from './editor'
export * from './util'
export * from './rule'
export * from './stream'
export * from './util'
export * from './wrap'
99 changes: 99 additions & 0 deletions packages/cz-git/src/shared/utils/stream.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
/**
* @description Parse OpenAI-compatible `chat/completions` streaming (SSE) bodies
* @author Zhengqbbb <zhengqbbb@gmail.com>
* @license MIT
*/

import readline from 'node:readline'
import { Readable } from 'node:stream'
import type { ReadableStream as WebReadableStream } from 'node:stream/web'

/**
* Normalize `fetch` response body to a Node.js readable stream for `readline`.
*/
export function bodyToNodeReadable(body: unknown): NodeJS.ReadableStream {
if (body == null)
throw new Error('Response has no body')
if (typeof (body as WebReadableStream).getReader === 'function')
return Readable.fromWeb(body as WebReadableStream)
return body as NodeJS.ReadableStream
}

/**
* Append only user-visible completion tokens from `delta.content`.
* Skips reasoning / `reasoning_content` (not present on `content` in typical deltas).
*/
export function appendVisibleDelta(acc: string, delta: { content?: unknown } | undefined): string {
if (!delta)
return acc
const c = delta.content
if (c == null)
return acc
if (typeof c === 'string')
return acc + c
if (Array.isArray(c)) {
let s = acc
for (const p of c) {
if (p && typeof p === 'object' && (p as { type?: string, text?: string }).type === 'text') {
const t = (p as { text?: string }).text
if (typeof t === 'string')
s += t
}
}
return s
}
return acc
}

interface StreamChoiceChunk { index?: number, delta?: { content?: unknown } }

/**
* Read an SSE stream and return one finished string per completion choice.
* Buckets by `choices[].index` up to `choiceCount` (requested `n`).
* Returned length matches how many indices actually appeared in the stream (capped by `choiceCount`),
* mirroring non-stream `json.choices.length` when the provider returns fewer parallel completions.
*/
export async function readChatCompletionStreamToSubjects(
input: NodeJS.ReadableStream,
choiceCount: number,
): Promise<string[]> {
if (choiceCount < 1)
throw new Error('choiceCount must be at least 1')

const buffers = Array.from({ length: choiceCount }, () => '')
Comment thread
cursor[bot] marked this conversation as resolved.
let maxIndexSeen = -1
const rl = readline.createInterface({ input, crlfDelay: Infinity })

for await (const line of rl) {
const trimmed = line.trim()
if (!trimmed.startsWith('data:'))
continue
const payload = trimmed.slice(5).trim()
if (payload === '[DONE]')
continue
try {
const json = JSON.parse(payload) as {
error?: { message?: string }
choices?: StreamChoiceChunk[]
}
if (json.error)
throw new Error(json.error.message || 'OpenAI stream error')

for (const ch of json.choices ?? []) {
const idx = typeof ch.index === 'number' ? ch.index : 0
if (idx >= 0 && idx < choiceCount) {
buffers[idx] = appendVisibleDelta(buffers[idx], ch.delta)
maxIndexSeen = Math.max(maxIndexSeen, idx)
}
}
}
catch (e) {
if (e instanceof SyntaxError)
continue
throw e
}
}

const effectiveLen = maxIndexSeen < 0 ? 1 : maxIndexSeen + 1
return buffers.slice(0, effectiveLen)
Comment thread
Zhengqbbb marked this conversation as resolved.
Outdated
}
19 changes: 14 additions & 5 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

19 changes: 19 additions & 0 deletions pnpm-workspace.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,25 @@ packages:
- "docs"
- "packages/**"
- "!**/__tests__/**"
shellEmulator: true
shamefullyHoist: true
overrides:
'@commitlint/config-validator': "catalog:commitlint"
'chalk': "4.1.2"
'color-convert': "2.0.1"
'import-meta-resolve': "4.1.0"
'resolve-from': "5.0.0"
'supports-color': "8.1.1"
peerDependencyRules:
ignoreMissing:
- "@algolia/client-search"
- "@types/react"
- "eslint-plugin-import"
- "eslint-plugin-n"
- "eslint-plugin-promise"
- "react"
- "react-dom"
- "webpack"

catalog:
# cosmiconfig > 8.2.0 will lead bundle size to 10MB +
Expand Down
Loading