Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions .npmrc

This file was deleted.

2 changes: 0 additions & 2 deletions docs/.vitepress/components.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,7 @@ declare module 'vue' {
AdsASide: typeof import('./theme/components/AdsASide.vue')['default']
Badge: typeof import('./theme/components/Badge.vue')['default']
CodeGroupItem: typeof import('./theme/components/CodeGroupItem.vue')['default']
copy: typeof import('./theme/components/StepFlow copy.vue')['default']
HomePage: typeof import('./theme/components/HomePage.vue')['default']
StepFlow: typeof import('./theme/components/StepFlow.vue')['default']
StepFlowItem: typeof import('./theme/components/StepFlowItem.vue')['default']
}
}
2 changes: 1 addition & 1 deletion docs/recipes/openai.md
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ czg --api-key=sk-xxxxx
1. Get DeepSeek [API Key](https://platform.deepseek.com/api_keys)
2. Run the command to configure
```sh
npx czg --api-key="sk-xxxxxx" --api-endpoint="https://api.deepseek.com" --api-model="deepseek-chat"
npx czg --api-key="sk-xxxxxx" --api-endpoint="https://api.deepseek.com" --api-model="deepseek-chat"
```
:::

Expand Down
2 changes: 1 addition & 1 deletion docs/zh/recipes/openai.md
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ czg --api-key=sk-xxxxx
1. 获取 DeepSeek [API Key](https://platform.deepseek.com/api_keys)
2. 运行命令进行配置
```sh
npx czg --api-key="sk-xxxxxx" --api-endpoint="https://api.deepseek.com" --api-model="deepseek-chat"
npx czg --api-key="sk-xxxxxx" --api-endpoint="https://api.deepseek.com" --api-model="deepseek-chat"
```
:::

Expand Down
26 changes: 2 additions & 24 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "cz-git",
"version": "1.12.0",
"private": true,
"packageManager": "pnpm@9.11.0",
"packageManager": "pnpm@10.33.2",
"description": "A better customizable and git support commitizen adapter",
"author": "Zhengqbbb <zhengqbbb@gmail.com> (https://github.com/Zhengqbbb)",
"license": "MIT",
Expand Down Expand Up @@ -69,7 +69,7 @@
"npm-run-all2": "^6.2.3",
"ora": "^8.1.0",
"pathe": "^1.1.2",
"pnpm": "^9.11.0",
"pnpm": "^10.33.2",
"rimraf": "catalog:rimraf",
"simple-git-hooks": "^2.11.1",
"ts-json-schema-generator": "^2.3.0",
Expand All @@ -78,28 +78,6 @@
"typescript": "^5.5.4",
"vitest": "^2.0.5"
},
"pnpm": {
"overrides": {
"@commitlint/config-validator": "catalog:commitlint",
"chalk": "4.1.2",
"color-convert": "2.0.1",
"import-meta-resolve": "4.1.0",
"resolve-from": "5.0.0",
"supports-color": "8.1.1"
},
"peerDependencyRules": {
"ignoreMissing": [
"@algolia/client-search",
"@types/react",
"eslint-plugin-import",
"eslint-plugin-n",
"eslint-plugin-promise",
"react",
"react-dom",
"webpack"
]
}
},
"simple-git-hooks": {
"pre-commit": "pnpm lint-staged",
"commit-msg": "pnpm commitlint --edit $1",
Expand Down
40 changes: 40 additions & 0 deletions packages/cz-git/__tests__/stream.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import { Readable } from 'node:stream'
import { describe, expect, it } from 'vitest'
import { readChatCompletionStreamToSubjects } from '../src/shared/utils/stream'

/** Wrap a fixed body string in a Node readable, mimicking a fetch response body. */
function asStream(body: string): NodeJS.ReadableStream {
  return Readable.from([body])
}

describe('readChatCompletionStreamToSubjects', () => {
  it('parses non-stream JSON when no SSE choice deltas appear', async () => {
    const payload = {
      choices: [{ index: 0, message: { role: 'assistant', content: 'fix login redirect' } }],
    }
    const stream = asStream(JSON.stringify(payload))
    await expect(readChatCompletionStreamToSubjects(stream, 1)).resolves.toEqual(['fix login redirect'])
  })

  it('parses SSE data lines as before', async () => {
    const lines = [
      'data: {"choices":[{"index":0,"delta":{"content":"hello"}}]}',
      '',
      'data: {"choices":[{"index":0,"delta":{"content":" world"}}]}',
      '',
      'data: [DONE]',
      '',
    ]
    const subjects = await readChatCompletionStreamToSubjects(asStream(lines.join('\n')), 1)
    expect(subjects).toEqual(['hello world'])
  })

  it('throws when body has neither SSE choices nor non-stream completion', async () => {
    const attempt = readChatCompletionStreamToSubjects(asStream('not json'), 1)
    await expect(attempt).rejects.toThrow(/no streamed choice deltas/)
  })

  it('does not return a single empty subject when stream is empty', async () => {
    const attempt = readChatCompletionStreamToSubjects(asStream(''), 1)
    await expect(attempt).rejects.toThrow()
  })
})
23 changes: 15 additions & 8 deletions packages/cz-git/src/generator/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,13 @@ import { style } from '@cz-git/inquirer'
import HttpsProxyAgent from 'https-proxy-agent'
import { isNodeVersionInRange, log, transformSubjectCase } from '../shared'
import type { CommitizenGitOptions } from '../shared'
import { bodyToNodeReadable, readChatCompletionStreamToSubjects } from '../shared/utils/stream'

/** Enough headroom for reasoning + short subject (legacy default was 200). */
const AI_MAX_COMPLETION_TOKENS = 4096

/** Streaming first byte and full generation can exceed the old 10s window. */
const AI_FETCH_TIMEOUT_MS = 60 * 1000

export async function fetchOpenAIMessage(options: CommitizenGitOptions, prompt: string) {
if (!options.openAIToken) {
Expand Down Expand Up @@ -33,7 +40,7 @@ export async function fetchOpenAIMessage(options: CommitizenGitOptions, prompt:
},
method: 'POST',
body: JSON.stringify(aiContext.payload),
signal: isNodeVersionInRange(18) ? AbortSignal?.timeout(10 * 1000) : undefined,
signal: isNodeVersionInRange(18) ? AbortSignal?.timeout(AI_FETCH_TIMEOUT_MS) : undefined,
})

if (
Expand All @@ -44,10 +51,11 @@ export async function fetchOpenAIMessage(options: CommitizenGitOptions, prompt:
const errorJson: any = await response.json()
throw new APIError(errorJson?.error?.message, response.status)
}
const json: any = await response.json()
return json
.choices
.map((r: any) => parseAISubject(options, aiContext.parseFn(r)))

const choiceCount = options.aiNumber || 1
const readable = bodyToNodeReadable(response.body)
const rawSubjects = await readChatCompletionStreamToSubjects(readable, choiceCount)
return rawSubjects.map(s => parseAISubject(options, s))
}
catch (err: any) {
let errorMsg = 'Fetch OpenAI API message failure.'
Expand All @@ -74,14 +82,13 @@ function useModelStrategy(options: CommitizenGitOptions, prompt: string) {
payload: {
model: options.aiModel,
messages: [{ role: 'user', content: prompt }],
stream: false,
stream: true,
top_p: 1,
temperature: 0.7,
max_tokens: 200,
max_tokens: AI_MAX_COMPLETION_TOKENS,
n: options.aiNumber || 1,
},
url: `${options.apiEndpoint}/chat/completions`,
parseFn: (res: any) => res?.message?.content,
}
}

Expand Down
3 changes: 2 additions & 1 deletion packages/cz-git/src/shared/utils/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
export * from './editor'
export * from './util'
export * from './rule'
export * from './stream'
export * from './util'
export * from './wrap'
171 changes: 171 additions & 0 deletions packages/cz-git/src/shared/utils/stream.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
/**
* @description Parse OpenAI-compatible `chat/completions` streaming (SSE) bodies
* @author Zhengqbbb <zhengqbbb@gmail.com>
* @license MIT
*/

import { Buffer } from 'node:buffer'
import { Readable } from 'node:stream'
import type { ReadableStream as WebReadableStream } from 'node:stream/web'

/**
* Normalize `fetch` response body to a Node.js readable stream.
*/
/**
 * Normalize a `fetch` response body to a Node.js readable stream.
 * A WHATWG `ReadableStream` (detected by its `getReader` method) is converted
 * via `Readable.fromWeb`; anything else is assumed to already be Node-readable
 * and passed through unchanged.
 * @throws Error when the response carried no body at all
 */
export function bodyToNodeReadable(body: unknown): NodeJS.ReadableStream {
  if (body == null)
    throw new Error('Response has no body')
  const maybeWeb = body as WebReadableStream
  const isWebStream = typeof maybeWeb.getReader === 'function'
  return isWebStream ? Readable.fromWeb(maybeWeb) : (body as NodeJS.ReadableStream)
}

/**
* Append only user-visible completion tokens from `delta.content`.
* Skips reasoning / `reasoning_content` (not present on `content` in typical deltas).
*/
/**
 * Append only user-visible completion tokens from `delta.content` onto `acc`.
 * Handles both the plain-string content form and the array-of-parts form,
 * where only parts tagged `type: 'text'` with a string `text` contribute.
 * Reasoning channels (e.g. `reasoning_content`) are not carried on `content`
 * in typical deltas, so they are naturally skipped.
 */
export function appendVisibleDelta(acc: string, delta: { content?: unknown } | undefined): string {
  const content = delta?.content
  if (content == null)
    return acc
  if (typeof content === 'string')
    return acc + content
  if (!Array.isArray(content))
    return acc
  // Array-of-parts form: concatenate the text of each well-formed text part.
  return content.reduce<string>((out, part) => {
    if (part && typeof part === 'object' && (part as { type?: string }).type === 'text') {
      const text = (part as { text?: string }).text
      if (typeof text === 'string')
        return out + text
    }
    return out
  }, acc)
}

interface StreamChoiceChunk { index?: number, delta?: { content?: unknown } }

interface NonStreamChoice { index?: number, message?: { content?: unknown } }

/**
 * Drain a Node readable stream completely and decode the bytes as UTF-8.
 * String chunks are re-encoded to bytes first; any other chunk type is
 * stringified defensively before encoding.
 */
async function readableToUtf8String(stream: NodeJS.ReadableStream): Promise<string> {
  const collected: Buffer[] = []
  for await (const piece of stream as AsyncIterable<string | Buffer>) {
    const buf = Buffer.isBuffer(piece)
      ? piece
      : Buffer.from(typeof piece === 'string' ? piece : String(piece))
    collected.push(buf)
  }
  return Buffer.concat(collected as readonly Uint8Array[]).toString('utf8')
}

/**
* Parse a non-streaming `chat/completions` JSON body when `stream: true` was ignored.
* @returns subjects slice, or `undefined` if the body is not a usable completion object.
*/
function trySubjectsFromNonStreamCompletionJson(
body: string,
choiceCount: number,
): string[] | undefined {
const t = body.trim()
if (!t.startsWith('{'))
return undefined
let json: unknown
try {
json = JSON.parse(t)
}
catch {
return undefined
}
if (!json || typeof json !== 'object')
return undefined
const o = json as { choices?: NonStreamChoice[], error?: { message?: string } }
if (o.error)
throw new Error(o.error.message || 'OpenAI API error')
if (!Array.isArray(o.choices))
return undefined

const buffers = Array.from({ length: choiceCount }, () => '')
Comment thread
cursor[bot] marked this conversation as resolved.
let maxIndexSeen = -1
for (const ch of o.choices) {
const idx = typeof ch.index === 'number' ? ch.index : 0
if (idx >= 0 && idx < choiceCount) {
buffers[idx] = appendVisibleDelta('', { content: ch.message?.content })
maxIndexSeen = Math.max(maxIndexSeen, idx)
}
}
if (maxIndexSeen < 0)
return undefined
return buffers.slice(0, maxIndexSeen + 1)
}

/**
 * Scan SSE `data:` lines and accumulate `choices[].delta` content per choice index.
 * Non-`data:` lines and `[DONE]` sentinels are skipped; payloads that fail to
 * parse as JSON are ignored (keep-alives, partial junk); an explicit `error`
 * payload is rethrown as an Error.
 * @returns per-index buffers (length `choiceCount`) and the highest index observed (-1 if none)
 */
function collectSubjectsFromSseLines(
  body: string,
  choiceCount: number,
): { buffers: string[], maxIndexSeen: number } {
  const buffers = Array.from({ length: choiceCount }, () => '')
  let maxIndexSeen = -1
  for (const rawLine of body.split(/\r?\n/)) {
    const line = rawLine.trim()
    if (!line.startsWith('data:'))
      continue
    const payload = line.slice('data:'.length).trim()
    if (payload === '[DONE]')
      continue
    let parsed: { error?: { message?: string }, choices?: StreamChoiceChunk[] }
    try {
      parsed = JSON.parse(payload)
    }
    catch {
      // Malformed payload line: skip it rather than abort the whole stream.
      continue
    }
    if (parsed.error)
      throw new Error(parsed.error.message || 'OpenAI stream error')
    for (const choice of parsed.choices ?? []) {
      const idx = typeof choice.index === 'number' ? choice.index : 0
      if (idx < 0 || idx >= choiceCount)
        continue
      buffers[idx] = appendVisibleDelta(buffers[idx], choice.delta)
      maxIndexSeen = Math.max(maxIndexSeen, idx)
    }
  }
  return { buffers, maxIndexSeen }
}

/**
 * Read an OpenAI-style `chat/completions` response body and return one finished string per choice.
 * Primary path: SSE lines (`data: {...}`) with `choices[].delta`, bucketed by `choices[].index`
 * up to `choiceCount` (the requested `n`). The returned length is `maxSeenIndex + 1` (capped by
 * `choiceCount`), mirroring non-stream `choices.length` when fewer parallel completions appear.
 * Fallback: when no choice delta ever appears (e.g. the provider ignores `stream: true` and
 * answers with one JSON object), the full body is parsed as a non-streaming completion using
 * `choices[].message.content`.
 * @throws Error when `choiceCount < 1`, or when the body yields neither form
 */
export async function readChatCompletionStreamToSubjects(
  input: NodeJS.ReadableStream,
  choiceCount: number,
): Promise<string[]> {
  if (choiceCount < 1)
    throw new Error('choiceCount must be at least 1')

  const body = await readableToUtf8String(input)

  const sse = collectSubjectsFromSseLines(body, choiceCount)
  if (sse.maxIndexSeen >= 0)
    return sse.buffers.slice(0, sse.maxIndexSeen + 1)

  const nonStream = trySubjectsFromNonStreamCompletionJson(body, choiceCount)
  if (nonStream !== undefined)
    return nonStream

  throw new Error(
    'Chat completions response had no streamed choice deltas and is not a parseable non-streaming JSON body with choices (or choices were empty).',
  )
}
Loading
Loading