feat: 支持停止对话

This commit is contained in:
digua
2025-12-05 00:45:14 +08:00
parent ac99203075
commit 04dc2a79c1
13 changed files with 259 additions and 53 deletions
+80 -3
View File
@@ -75,6 +75,8 @@ export interface AgentConfig {
maxToolRounds?: number
/** LLM 选项 */
llmOptions?: ChatOptions
/** 中止信号,用于取消执行 */
abortSignal?: AbortSignal
}
/**
@@ -164,15 +166,24 @@ export class Agent {
private messages: ChatMessage[] = []
private toolsUsed: string[] = []
private toolRounds: number = 0
private abortSignal?: AbortSignal
constructor(context: ToolContext, config: AgentConfig = {}) {
this.context = context
this.abortSignal = config.abortSignal
this.config = {
maxToolRounds: config.maxToolRounds ?? 5,
llmOptions: config.llmOptions ?? { temperature: 0.7, maxTokens: 2048 },
}
}
/**
* 检查是否已中止
*/
private isAborted(): boolean {
return this.abortSignal?.aborted ?? false
}
/**
* 执行对话(非流式)
* @param userMessage 用户消息
@@ -180,6 +191,12 @@ export class Agent {
async execute(userMessage: string): Promise<AgentResult> {
aiLogger.info('Agent', '开始执行', { userMessage: userMessage.slice(0, 100) })
// 检查是否已中止
if (this.isAborted()) {
aiLogger.info('Agent', '执行前已中止')
return { content: '', toolsUsed: [], toolRounds: 0 }
}
// 初始化消息
this.messages = [
{ role: 'system', content: getSystemPrompt() },
@@ -194,9 +211,20 @@ export class Agent {
// 执行循环
while (this.toolRounds < this.config.maxToolRounds!) {
// 每轮开始时检查是否中止
if (this.isAborted()) {
aiLogger.info('Agent', '循环中检测到中止信号')
return {
content: '',
toolsUsed: this.toolsUsed,
toolRounds: this.toolRounds,
}
}
const response = await chat(this.messages, {
...this.config.llmOptions,
tools,
abortSignal: this.abortSignal,
})
aiLogger.info('Agent', 'LLM 响应', {
@@ -281,6 +309,13 @@ export class Agent {
async executeStream(userMessage: string, onChunk: (chunk: AgentStreamChunk) => void): Promise<AgentResult> {
aiLogger.info('Agent', '开始流式执行', { userMessage: userMessage.slice(0, 100) })
// 检查是否已中止
if (this.isAborted()) {
aiLogger.info('Agent', '执行前已中止')
onChunk({ type: 'done', isFinished: true })
return { content: '', toolsUsed: [], toolRounds: 0 }
}
// 初始化消息
this.messages = [
{ role: 'system', content: getSystemPrompt() },
@@ -294,16 +329,38 @@ export class Agent {
// 执行循环
while (this.toolRounds < this.config.maxToolRounds!) {
// 每轮开始时检查是否中止
if (this.isAborted()) {
aiLogger.info('Agent', '循环中检测到中止信号')
onChunk({ type: 'done', isFinished: true })
return {
content: finalContent,
toolsUsed: this.toolsUsed,
toolRounds: this.toolRounds,
}
}
let accumulatedContent = ''
let displayedContent = '' // 已发送给前端的内容
let toolCalls: ToolCall[] | undefined
let isBufferingToolCall = false // 是否正在缓冲 tool_call 内容
// 流式调用 LLM
// 流式调用 LLM(传入 abortSignal)
for await (const chunk of chatStream(this.messages, {
...this.config.llmOptions,
tools,
abortSignal: this.abortSignal,
})) {
// 每个 chunk 时检查是否中止
if (this.isAborted()) {
aiLogger.info('Agent', '流式过程中检测到中止信号')
onChunk({ type: 'done', isFinished: true })
return {
content: finalContent + accumulatedContent,
toolsUsed: this.toolsUsed,
toolRounds: this.toolRounds,
}
}
if (chunk.content) {
accumulatedContent += chunk.content
@@ -426,13 +483,33 @@ export class Agent {
// 超过最大轮数
aiLogger.warn('Agent', '达到最大工具调用轮数', { maxRounds: this.config.maxToolRounds })
// 检查是否已中止
if (this.isAborted()) {
aiLogger.info('Agent', '达到最大轮数时已中止')
onChunk({ type: 'done', isFinished: true })
return {
content: finalContent,
toolsUsed: this.toolsUsed,
toolRounds: this.toolRounds,
}
}
this.messages.push({
role: 'user',
content: '请根据已获取的信息给出回答,不要再调用工具。',
})
// 最后一轮不带 tools
for await (const chunk of chatStream(this.messages, this.config.llmOptions)) {
// 最后一轮不带 tools(传入 abortSignal)
for await (const chunk of chatStream(this.messages, {
...this.config.llmOptions,
abortSignal: this.abortSignal,
})) {
if (this.isAborted()) {
aiLogger.info('Agent', '最后一轮流式过程中检测到中止信号')
onChunk({ type: 'done', isFinished: true })
break
}
if (chunk.content) {
finalContent += chunk.content
onChunk({ type: 'content', content: chunk.content })
+15 -1
View File
@@ -85,6 +85,7 @@ export class DeepSeekService implements ILLMService {
Authorization: `Bearer ${this.apiKey}`,
},
body: JSON.stringify(requestBody),
signal: options?.abortSignal,
})
if (!response.ok) {
@@ -163,6 +164,7 @@ export class DeepSeekService implements ILLMService {
Authorization: `Bearer ${this.apiKey}`,
},
body: JSON.stringify(requestBody),
signal: options?.abortSignal,
})
if (!response.ok) {
@@ -183,6 +185,12 @@ export class DeepSeekService implements ILLMService {
try {
while (true) {
// 检查是否已中止
if (options?.abortSignal?.aborted) {
yield { content: '', isFinished: true, finishReason: 'stop' }
return
}
const { done, value } = await reader.read()
if (done) break
@@ -278,6 +286,13 @@ export class DeepSeekService implements ILLMService {
}
}
}
} catch (error) {
// 如果是中止错误,正常返回
if (error instanceof Error && error.name === 'AbortError') {
yield { content: '', isFinished: true, finishReason: 'stop' }
return
}
throw error
} finally {
reader.releaseLock()
}
@@ -298,4 +313,3 @@ export class DeepSeekService implements ILLMService {
}
}
}
+6 -2
View File
@@ -423,8 +423,12 @@ export async function validateApiKey(provider: LLMProvider, apiKey: string): Pro
/**
* 发送聊天请求(使用当前配置)
* 返回完整的 ChatResponse 对象,包含 finishReason 和 tool_calls
*/
export async function chat(messages: ChatMessage[], options?: ChatOptions): Promise<string> {
export async function chat(
messages: ChatMessage[],
options?: ChatOptions
): Promise<{ content: string; finishReason: string; tool_calls?: import('./types').ToolCall[] }> {
aiLogger.info('LLM', '开始非流式聊天请求', {
messagesCount: messages.length,
firstMessageRole: messages[0]?.role,
@@ -447,7 +451,7 @@ export async function chat(messages: ChatMessage[], options?: ChatOptions): Prom
finishReason: response.finishReason,
usage: response.usage,
})
return response.content
return response
} catch (error) {
aiLogger.error('LLM', '非流式请求失败', { error: String(error) })
throw error
+15
View File
@@ -98,6 +98,7 @@ export class OpenAICompatibleService implements ILLMService {
method: 'POST',
headers,
body: JSON.stringify(requestBody),
signal: options?.abortSignal,
})
if (!response.ok) {
@@ -180,6 +181,7 @@ export class OpenAICompatibleService implements ILLMService {
method: 'POST',
headers,
body: JSON.stringify(requestBody),
signal: options?.abortSignal,
})
if (!response.ok) {
@@ -198,6 +200,12 @@ export class OpenAICompatibleService implements ILLMService {
try {
while (true) {
// 检查是否已中止
if (options?.abortSignal?.aborted) {
yield { content: '', isFinished: true, finishReason: 'stop' }
return
}
const { done, value } = await reader.read()
if (done) break
@@ -287,6 +295,13 @@ export class OpenAICompatibleService implements ILLMService {
}
}
}
} catch (error) {
// 如果是中止错误,正常返回
if (error instanceof Error && error.name === 'AbortError') {
yield { content: '', isFinished: true, finishReason: 'stop' }
return
}
throw error
} finally {
reader.releaseLock()
}
+15 -1
View File
@@ -83,6 +83,7 @@ export class QwenService implements ILLMService {
Authorization: `Bearer ${this.apiKey}`,
},
body: JSON.stringify(requestBody),
signal: options?.abortSignal,
})
if (!response.ok) {
@@ -161,6 +162,7 @@ export class QwenService implements ILLMService {
Authorization: `Bearer ${this.apiKey}`,
},
body: JSON.stringify(requestBody),
signal: options?.abortSignal,
})
if (!response.ok) {
@@ -179,6 +181,12 @@ export class QwenService implements ILLMService {
try {
while (true) {
// 检查是否已中止
if (options?.abortSignal?.aborted) {
yield { content: '', isFinished: true, finishReason: 'stop' }
return
}
const { done, value } = await reader.read()
if (done) break
@@ -256,6 +264,13 @@ export class QwenService implements ILLMService {
}
}
}
} catch (error) {
// 如果是中止错误,正常返回
if (error instanceof Error && error.name === 'AbortError') {
yield { content: '', isFinished: true, finishReason: 'stop' }
return
}
throw error
} finally {
reader.releaseLock()
}
@@ -276,4 +291,3 @@ export class QwenService implements ILLMService {
}
}
}
+2
View File
@@ -39,6 +39,8 @@ export interface ChatOptions {
stream?: boolean
/** 可用的工具列表 */
tools?: ToolDefinition[]
/** 中止信号,用于取消请求 */
abortSignal?: AbortSignal
}
/**