import type { Topic, Message, Conversation, SendMessageOptions, StreamEvent, TopicFilter } from '../types/chat'
import { modelServiceManager } from './modelServiceManager'
import { mcpClientService } from './MCPClientService'

class ChatService {
  private static instance: ChatService
  private topics: Map<string, Topic> = new Map()
  private conversations: Map<string, Conversation> = new Map()
  private mcpClient = mcpClientService // shared singleton instance

  static getInstance(): ChatService {
    if (!ChatService.instance) {
      ChatService.instance = new ChatService()
    }
    return ChatService.instance
  }

  // ==================== Topic management ====================

  /**
   * Create a new topic
   */
  createTopic(name: string, options?: { description?: string; modelId?: string }): Topic {
    const topic: Topic = {
      id: this.generateId(),
      name: name || 'New conversation',
      description: options?.description,
      createdAt: new Date(),
      updatedAt: new Date(),
      messageCount: 0,
      pinned: false,
      archived: false,
      favorite: false,
      model: options?.modelId
    }

    this.topics.set(topic.id, topic)
    this.saveTopics()

    // Create the conversation that backs this topic
    const conversation: Conversation = {
      id: this.generateId(),
      topicId: topic.id,
      messages: [],
      createdAt: new Date(),
      updatedAt: new Date(),
      metadata: { model: options?.modelId }
    }

    this.conversations.set(conversation.id, conversation)
    this.saveConversations()

    return topic
  }

  /**
   * Get all topics
   */
  getTopics(filter?: TopicFilter): Topic[] {
    let topics = Array.from(this.topics.values())

    if (filter) {
      if (filter.search) {
        const search = filter.search.toLowerCase()
        topics = topics.filter(t =>
          t.name.toLowerCase().includes(search) ||
          t.description?.toLowerCase().includes(search) ||
          t.lastMessage?.toLowerCase().includes(search)
        )
      }
      if (filter.pinned !== undefined) {
        topics = topics.filter(t => t.pinned === filter.pinned)
      }
      if (filter.archived !== undefined) {
        topics = topics.filter(t => t.archived === filter.archived)
      }
      if (filter.favorite !== undefined) {
        topics = topics.filter(t => t.favorite === filter.favorite)
      }
    }

    // Sort: pinned first, then by most recently updated
    return topics.sort((a, b) => {
      if (a.pinned !== b.pinned) return a.pinned ? -1 : 1
      return b.updatedAt.getTime() - a.updatedAt.getTime()
    })
  }

  /**
   * Get a single topic
   */
  getTopic(topicId: string): Topic | undefined {
    return this.topics.get(topicId)
  }

  /**
   * Update a topic
   */
  updateTopic(topicId: string, updates: Partial<Topic>): Topic | undefined {
    const topic = this.topics.get(topicId)
    if (!topic) return undefined

    Object.assign(topic, updates, { updatedAt: new Date() })
    this.topics.set(topicId, topic)
    this.saveTopics()

    return topic
  }

  /**
   * Delete a topic
   */
  deleteTopic(topicId: string): boolean {
    const deleted = this.topics.delete(topicId)
    if (deleted) {
      // Delete the associated conversations
      for (const [convId, conv] of this.conversations) {
        if (conv.topicId === topicId) {
          this.conversations.delete(convId)
        }
      }
      this.saveTopics()
      this.saveConversations()
    }
    return deleted
  }

  /**
   * Toggle a topic's pinned state
   */
  toggleTopicPin(topicId: string): boolean {
    const topic = this.topics.get(topicId)
    if (!topic) return false

    topic.pinned = !topic.pinned
    topic.updatedAt = new Date()
    this.topics.set(topicId, topic)
    this.saveTopics()

    return topic.pinned
  }

  /**
   * Toggle a topic's favorite state
   */
  toggleTopicFavorite(topicId: string): boolean {
    const topic = this.topics.get(topicId)
    if (!topic) return false

    topic.favorite = !topic.favorite
    topic.updatedAt = new Date()
    this.topics.set(topicId, topic)
    this.saveTopics()

    return topic.favorite
  }

  /**
   * Archive a topic
   */
  archiveTopic(topicId: string): boolean {
    const topic = this.topics.get(topicId)
    if (!topic) return false

    topic.archived = true
    topic.updatedAt = new Date()
    this.topics.set(topicId, topic)
    this.saveTopics()

    return true
  }

  // ==================== Message management ====================

  /**
   * Get all messages for a topic
   */
  getMessages(topicId: string): Message[] {
    for (const conv of this.conversations.values()) {
      if (conv.topicId === topicId) {
        return conv.messages
      }
    }
    return []
  }

  /**
   * Send a message
   */
  async sendMessage(options: SendMessageOptions): Promise<Message> {
    const { topicId, content, role = 'user', model } = options

    // Find the conversation
    let conversation: Conversation | undefined
    for (const conv of this.conversations.values()) {
      if (conv.topicId === topicId) {
        conversation = conv
        break
      }
    }

    if (!conversation) {
      throw new Error('Conversation not found')
    }

    // Create the user message
    const userMessage: Message = {
      id: this.generateId(),
      role,
      content,
      status: 'success',
      timestamp: new Date()
    }

    conversation.messages.push(userMessage)
    conversation.updatedAt = new Date()

    // Update the topic
    const topic = this.topics.get(topicId)
    if (topic) {
      topic.messageCount = conversation.messages.length
      topic.lastMessage = this.getMessagePreview(content)
      topic.updatedAt = new Date()
      this.topics.set(topicId, topic)
      this.saveTopics()
    }

    this.conversations.set(conversation.id, conversation)
    this.saveConversations()

    // Non-user messages need no model call; return immediately
    if (role !== 'user') {
      return userMessage
    }

    // Create the assistant message placeholder
    const assistantMessage: Message = {
      id: this.generateId(),
      role: 'assistant',
      content: '',
      status: 'sending',
      timestamp: new Date(),
      model: model || conversation.metadata?.model
    }

    conversation.messages.push(assistantMessage)
    this.conversations.set(conversation.id, conversation)

    try {
      // Call the AI model
      const response = await this.callModel(conversation, model)

      // Update the assistant message
      assistantMessage.content = response.content
      assistantMessage.status = 'success'
      assistantMessage.tokens = response.tokens

      conversation.updatedAt = new Date()
      this.conversations.set(conversation.id, conversation)
      this.saveConversations()

      // Update the topic
      if (topic) {
        topic.messageCount = conversation.messages.length
        topic.lastMessage = this.getMessagePreview(response.content)
        topic.updatedAt = new Date()
        this.topics.set(topicId, topic)
        this.saveTopics()
      }

      return assistantMessage
    } catch (error) {
      assistantMessage.status = 'error'
      assistantMessage.error = error instanceof Error ? error.message : 'Failed to send'
      this.conversations.set(conversation.id, conversation)
      this.saveConversations()
      throw error
    }
  }

  /**
   * Send a message with streaming output
   */
  async sendMessageStream(
    options: SendMessageOptions,
    onChunk: (event: StreamEvent) => void,
    mcpServerId?: string, // optional MCP server id
    signal?: AbortSignal  // cancellation signal
  ): Promise<void> {
    const { topicId, content, role = 'user', model } = options

    // Find the conversation
    let conversation: Conversation | undefined
    for (const conv of this.conversations.values()) {
      if (conv.topicId === topicId) {
        conversation = conv
        break
      }
    }

    if (!conversation) {
      throw new Error('Conversation not found')
    }

    // Create the user message
    const userMessage: Message = {
      id: this.generateId(),
      role,
      content,
      status: 'success',
      timestamp: new Date()
    }

    conversation.messages.push(userMessage)
    conversation.updatedAt = new Date()
    this.conversations.set(conversation.id, conversation)
    this.saveConversations()

    // Update the topic (user message)
    const topic = this.topics.get(topicId)
    if (topic) {
      topic.messageCount = conversation.messages.length
      topic.lastMessage = this.getMessagePreview(content)
      topic.updatedAt = new Date()
      this.topics.set(topicId, topic)
      this.saveTopics()
    }

    // Create the assistant message
    const assistantMessage: Message = {
      id: this.generateId(),
      role: 'assistant',
      content: '',
      status: 'sending',
      timestamp: new Date(),
      model: model || conversation.metadata?.model
    }

    conversation.messages.push(assistantMessage)
    conversation.updatedAt = new Date()
    this.conversations.set(conversation.id, conversation)
    this.saveConversations()

    // Refresh the topic's message count
    if (topic) {
      topic.messageCount = conversation.messages.length
      this.topics.set(topicId, topic)
      this.saveTopics()
    }

    onChunk({ type: 'start', messageId: assistantMessage.id })

    try {
      // Call the streaming API
      await this.callModelStream(
        conversation,
        model,
        (chunk) => {
          assistantMessage.content += chunk
          conversation.updatedAt = new Date()
          this.conversations.set(conversation.id, conversation)
          this.saveConversations()

          onChunk({ type: 'delta', content: chunk, messageId: assistantMessage.id })
        },
        mcpServerId, // forward the MCP server id
        signal       // forward the cancellation signal
      )

      assistantMessage.status = 'success'
      conversation.updatedAt = new Date()
      this.conversations.set(conversation.id, conversation)
      this.saveConversations()

      onChunk({ type: 'end', messageId: assistantMessage.id })

      // Update the topic (completed)
      if (topic) {
        topic.messageCount = conversation.messages.length
        topic.lastMessage = this.getMessagePreview(assistantMessage.content)
        topic.updatedAt = new Date()
        this.topics.set(topicId, topic)
        this.saveTopics()
      }
    } catch (error) {
      // Check whether the user aborted on purpose (mirrors cherry-studio's PAUSED state)
      const isAborted = error instanceof Error && error.name === 'AbortError'

      if (isAborted) {
        // The user stopped generation: keep the partial content and mark the message as paused
        console.log('⏸️ [sendMessageStream] Generation stopped by the user, keeping partial content')
        assistantMessage.status = 'paused'
        assistantMessage.error = undefined // clear any error message
      } else {
        // Any other error
        assistantMessage.status = 'error'
        assistantMessage.error = error instanceof Error ? error.message : 'Failed to send'
      }

      conversation.updatedAt = new Date()
      this.conversations.set(conversation.id, conversation)
      this.saveConversations()

      if (isAborted) {
        onChunk({ type: 'paused', messageId: assistantMessage.id })

        // Update the topic (paused)
        if (topic) {
          topic.messageCount = conversation.messages.length
          topic.lastMessage = this.getMessagePreview(assistantMessage.content)
          topic.updatedAt = new Date()
          this.topics.set(topicId, topic)
          this.saveTopics()
        }
      } else {
        onChunk({ type: 'error', error: assistantMessage.error, messageId: assistantMessage.id })
      }
    }
  }

  /**
   * Delete a message
   */
  deleteMessage(topicId: string, messageId: string): boolean {
    for (const conv of this.conversations.values()) {
      if (conv.topicId === topicId) {
        const index = conv.messages.findIndex(m => m.id === messageId)
        if (index !== -1) {
          conv.messages.splice(index, 1)
          conv.updatedAt = new Date()
          this.conversations.set(conv.id, conv)
          this.saveConversations()

          // Update the topic
          const topic = this.topics.get(topicId)
          if (topic) {
            topic.messageCount = conv.messages.length
            if (conv.messages.length > 0) {
              const lastMsg = conv.messages[conv.messages.length - 1]
              topic.lastMessage = this.getMessagePreview(lastMsg.content)
            } else {
              topic.lastMessage = undefined
            }
            topic.updatedAt = new Date()
            this.topics.set(topicId, topic)
            this.saveTopics()
          }

          return true
        }
      }
    }
    return false
  }

  /**
   * Regenerate a message
   */
  async regenerateMessage(topicId: string, messageId: string): Promise<Message> {
    // Locate the message to regenerate
    let conversation: Conversation | undefined
    let messageIndex = -1

    for (const conv of this.conversations.values()) {
      if (conv.topicId === topicId) {
        conversation = conv
        messageIndex = conv.messages.findIndex(m => m.id === messageId)
        if (messageIndex !== -1) break
      }
    }

    if (!conversation || messageIndex === -1) {
      throw new Error('Message not found')
    }

    // Remove that message and everything after it
    conversation.messages.splice(messageIndex)

    // Find the last user message
    let lastUserMessage: Message | undefined
    for (let i = conversation.messages.length - 1; i >= 0; i--) {
      if (conversation.messages[i].role === 'user') {
        lastUserMessage = conversation.messages[i]
        break
      }
    }

    if (!lastUserMessage) {
      throw new Error('No user message found')
    }

    // Resend it
    return await this.sendMessage({
      topicId,
      content: lastUserMessage.content,
      model: conversation.metadata?.model
    })
  }

  // ==================== Private helpers ====================

  /**
   * Call the model
   */
  private async callModel(
    conversation: Conversation,
    model?: string
  ): Promise<{ content: string; tokens?: any }> {
    const callModelStartTime = performance.now()
    console.log('⏱️ [callModel] Starting', { model, messageCount: conversation.messages.length })

    // Prepare the message history
    const beforePrepare = performance.now()
    const messages = conversation.messages
      .filter(m => m.status === 'success')
      .map(m => ({ role: m.role, content: m.content }))
    const afterPrepare = performance.now()
    console.log('⏱️ [callModel] Message preparation took:', (afterPrepare - beforePrepare).toFixed(2), 'ms', 'messages after filtering:', messages.length)

    // Get connected services from modelServiceManager
    const allServices = modelServiceManager.getAllServices()
    console.log('🔍 [callModel] All services:', allServices.map(s => ({
      name: s.name,
      status: s.status,
      models: s.models?.length || 0
    })))

    const services = allServices.filter(s => s.status === 'connected')
    console.log('🔍 [callModel] Connected services:', services.length)

    if (services.length === 0) {
      console.error('❌ [callModel] No connected services!')
      console.error('📋 [callModel] Please check:')
      console.error('  1. Has a service been added under "Model Services"?')
      console.error('  2. Is the service enabled (enabled=true)?')
      console.error('  3. Does the service expose a model list?')
      console.error('  4. localStorage data:', localStorage.getItem('model-providers'))
      throw new Error('No model service available. Add and connect a service under "Model Services" first.')
    }

    let service = services[0] // default to the first available service
    let selectedModel = model || service.models?.[0] || 'default'

    // If a model was specified, try to find the service that owns it
    if (model) {
      const foundService = services.find(s => s.models && s.models.includes(model))
      if (foundService) {
        service = foundService
        selectedModel = model
      } else {
        console.warn(`⚠️ No service provides model "${model}", falling back to the default service`)
      }
    }

    console.log('🔍 [callModel] Using service:', service.name, 'model:', selectedModel)

    // Call the service
    const beforeServiceCall = performance.now()
    const result = await modelServiceManager.sendChatRequest(service.id, messages, selectedModel)
    const afterServiceCall = performance.now()
    console.log('⏱️ [callModel] Service call took:', (afterServiceCall - beforeServiceCall).toFixed(2), 'ms')

    if (!result.success) {
      throw new Error(result.error || 'Request failed')
    }

    // Parse the response
    const beforeParse = performance.now()
    const parsedContent = this.parseModelResponse(result.data)
    const afterParse = performance.now()
    console.log('⏱️ [callModel] Response parsing took:', (afterParse - beforeParse).toFixed(2), 'ms')
    console.log('⏱️ [callModel] Total callModel time:', (afterParse - callModelStartTime).toFixed(2), 'ms')

    return {
      content: parsedContent,
      tokens: result.data?.usage
    }
  }

  /**
   * Call the model with streaming output
   */
  private async callModelStream(
    conversation: Conversation,
    model: string | undefined,
    onChunk: (chunk: string) => void,
    mcpServerId?: string, // optional MCP server id
    signal?: AbortSignal  // cancellation signal
  ): Promise<void> {
    const streamStartTime = performance.now()
    console.log('⏱️ [callModelStream] Starting true streaming')

    // Fetch the MCP tool list (if an MCP server was selected)
    let tools: any[] = []
    let mcpServerName = ''
    if (mcpServerId) {
      console.log('🔧 [callModelStream] Fetching tools for MCP server:', mcpServerId)
      const mcpTools = this.mcpClient.getTools(mcpServerId)
      const serverInfo = this.mcpClient.getServerInfo(mcpServerId)
      mcpServerName = serverInfo?.name || 'mcp'
      console.log('🔧 [callModelStream] MCP server name:', mcpServerName)
      console.log('🔧 [callModelStream] Raw MCP tool list:', mcpTools)
      tools = this.convertToolsToOpenAIFormat(mcpTools, mcpServerName)
      console.log('🔧 [callModelStream] Converted tools:', tools.length, tools)
    } else {
      console.log('⚠️ [callModelStream] No MCP server selected, no tools injected')
    }

    // Prepare the message history
    let messages = conversation.messages
      .filter(m => m.status === 'success')
      .map(m => ({ role: m.role, content: m.content }))

    // If tools are available, prepend a system prompt that guides the AI to use them
    if (tools.length > 0 && messages.length > 0 && messages[0].role !== 'system') {
      const systemPrompt = this.createSystemPromptWithTools(tools, mcpServerName)
      messages = [
        { role: 'system', content: systemPrompt },
        ...messages
      ]
    }

    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')
    console.log('🎯 [callModelStream] === Full message list ===')
    console.log('  Total messages:', messages.length)
    messages.forEach((msg, idx) => {
      console.log(`  Message [${idx}]:`, {
        role: msg.role,
        content: msg.content?.substring(0, 100) + ((msg.content?.length || 0) > 100 ? '...' : ''),
        contentLength: msg.content?.length || 0
      })
    })
    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')

    // Get connected services
    const allServices = modelServiceManager.getAllServices()
    const services = allServices.filter(s => s.status === 'connected')

    if (services.length === 0) {
      throw new Error('No model service available. Add and connect a service under "Model Services" first.')
    }

    let service = services[0]
    let selectedModel = model || service.models?.[0] || 'default'

    // If a model was specified, try to find the service that owns it
    if (model) {
      console.log('🎯 [callModelStream] User-selected model:', model)
      const foundService = services.find(s => s.models && s.models.includes(model))
      if (foundService) {
        service = foundService
        selectedModel = model
        console.log('✅ [callModelStream] Matching service found:', foundService.name)
      } else {
        console.warn('⚠️ [callModelStream] No service provides this model, falling back to the default service')
      }
    } else {
      console.log('ℹ️ [callModelStream] No model specified, using the default model')
    }

    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')
    console.log('🔍 [callModelStream] Final selection:')
    console.log('  Service:', service.name, `(${service.type})`)
    console.log('  Model:', selectedModel)
    console.log('  MCP:', mcpServerId || 'none')
    console.log('  Tools:', tools.length)
    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')
    console.log('🚀 [callModelStream] === Starting the real streaming request ===')

    // Call the real streaming API
    const beforeStreamCall = performance.now()
    let chunkCount = 0
    let buffer = '' // buffer used to batch output
    const BATCH_SIZE = 3 // emit every 3 characters to enhance the streaming effect

    const result = await modelServiceManager.sendChatRequestStream(
      service.id,
      messages,
      selectedModel,
      (chunk) => {
        // Emit in near real time, batching slightly for a smoother visual effect
        chunkCount++
        if (chunkCount === 1) {
          const firstChunkTime = performance.now()
          console.log('⚡ [callModelStream] First-chunk latency:', (firstChunkTime - beforeStreamCall).toFixed(2), 'ms')
        }

        // Accumulate into the buffer
        buffer += chunk

        // Flush once the buffer reaches the batch size
        if (buffer.length >= BATCH_SIZE) {
          const output = buffer
          buffer = ''
          onChunk(output)
        }
      },
      tools.length > 0 ? tools : undefined,
      signal // forward the cancellation signal
    )

    // Flush whatever is left in the buffer
    if (buffer.length > 0) {
      onChunk(buffer)
    }

    const afterStreamCall = performance.now()
    console.log('🚀 [callModelStream] Streaming request finished, chunks received:', chunkCount)
    console.log('⏱️ [callModelStream] Streaming call took:', (afterStreamCall - beforeStreamCall).toFixed(2), 'ms')

    if (!result.success) {
      throw new Error(result.error || 'Streaming request failed')
    }

    // Handle tool calls
    console.log('🔍 [callModelStream] Checking for tool calls:', {
      hasData: !!result.data,
      hasToolCalls: !!result.data?.toolCalls,
      toolCallsCount: result.data?.toolCalls?.length || 0,
      hasMcpServerId: !!mcpServerId,
      mcpServerId,
      toolCalls: result.data?.toolCalls
    })

    if (result.data?.toolCalls && result.data.toolCalls.length > 0 && mcpServerId) {
      console.log('🔧 [callModelStream] Executing tool calls:', result.data.toolCalls.length)
      // Forward tools so the AI can keep calling other tools
      await this.executeToolCalls(conversation, result.data.toolCalls, mcpServerId, model, onChunk, tools)
    } else {
      console.log('⚠️ [callModelStream] No tool calls to execute')
    }

    const endTime = performance.now()
    console.log('⏱️ [callModelStream] Total streaming time:', (endTime - streamStartTime).toFixed(2), 'ms')
  }

  /**
   * Parse a model response
   */
  private parseModelResponse(data: any, _serviceType?: string): string {
    if (!data) return ''

    // OpenAI format
    if (data.choices && data.choices[0]?.message?.content) {
      return data.choices[0].message.content
    }

    // Claude format
    if (data.content && Array.isArray(data.content)) {
      return data.content
        .filter((c: any) => c.type === 'text')
        .map((c: any) => c.text)
        .join('')
    }

    // Gemini format
    if (data.candidates && data.candidates[0]?.content?.parts) {
      return data.candidates[0].content.parts
        .map((p: any) => p.text)
        .join('')
    }

    // Generic fallbacks
    if (typeof data === 'string') return data
    if (data.content) return data.content
    if (data.text) return data.text
    if (data.message) return data.message

    return JSON.stringify(data)
  }

  /**
   * Build a short preview of a message
   */
  private getMessagePreview(content: string, maxLength = 50): string {
    if (!content) return ''
    const text = content.replace(/\n/g, ' ').trim()
    return text.length > maxLength ? text.slice(0, maxLength) + '...' : text
  }

  /**
   * Generate a unique id
   */
  private generateId(): string {
    return `${Date.now()}_${Math.random().toString(36).slice(2, 11)}`
  }

  // ==================== Persistence ====================

  private saveTopics(): void {
    try {
      const data = Array.from(this.topics.values())
      localStorage.setItem('chat-topics', JSON.stringify(data))
    } catch (error) {
      console.error('Failed to save topics:', error)
    }
  }

  private loadTopics(): void {
    try {
      const data = localStorage.getItem('chat-topics')
      if (data) {
        const topics = JSON.parse(data) as Topic[]
        topics.forEach(topic => {
          // Restore Date objects
          topic.createdAt = new Date(topic.createdAt)
          topic.updatedAt = new Date(topic.updatedAt)
          this.topics.set(topic.id, topic)
        })
      }
    } catch (error) {
      console.error('Failed to load topics:', error)
    }
  }

  private saveConversations(): void {
    try {
      const data = Array.from(this.conversations.values())
      localStorage.setItem('chat-conversations', JSON.stringify(data))
    } catch (error) {
      console.error('Failed to save conversations:', error)
    }
  }

  private loadConversations(): void {
    try {
      const data = localStorage.getItem('chat-conversations')
      if (data) {
        const conversations = JSON.parse(data) as Conversation[]
        conversations.forEach(conv => {
          // Restore Date objects
          conv.createdAt = new Date(conv.createdAt)
          conv.updatedAt = new Date(conv.updatedAt)
          conv.messages.forEach(msg => {
            msg.timestamp = new Date(msg.timestamp)
          })
          this.conversations.set(conv.id, conv)
        })
      }
    } catch (error) {
      console.error('Failed to load conversations:', error)
    }
  }

  /**
   * Initialize the service
   */
  initialize(): void {
    this.loadTopics()
    this.loadConversations()

    // If there are no topics yet, create a default one
    if (this.topics.size === 0) {
      this.createTopic('Welcome', { description: 'Start your first conversation' })
    }
  }

  /**
   * Build a system prompt that describes the available tools
   * @param tools Tool list in OpenAI format
   * @param serverName MCP server name
   */
  private createSystemPromptWithTools(tools: any[], serverName: string): string {
    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')
    console.log('📝 [createSystemPromptWithTools] Building system prompt')
    console.log('  - Server name:', serverName)
    console.log('  - Tool count:', tools.length)
    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')

    const toolDescriptions = tools.map(tool => {
      const func = tool.function
      const params = func.parameters?.properties || {}
      const required = func.parameters?.required || []

      console.log(`  Tool: ${func.name}`)
      console.log(`    Description: ${func.description}`)

      // Describe the parameters
      const paramDesc = Object.entries(params).map(([name, schema]: [string, any]) => {
        const isRequired = required.includes(name)
        const requiredMark = isRequired ? '[required]' : '[optional]'
        return `  - ${name} ${requiredMark}: ${schema.description || schema.type}`
      }).join('\n')

      return `• ${func.name}\n  Description: ${func.description}\n  Parameters:\n${paramDesc || '  (none)'}`
    }).join('\n\n')

    const systemPrompt = `You are an intelligent assistant with access to the following tools:

${toolDescriptions}

Usage guidelines:
1. When the user asks for a task, decide which tool fits best
2. If content needs to be published (articles, notes, etc.), write complete content based on the user's intent
3. Generate all required parameters for the content: title, body, tags, and so on
4. Call the appropriate tool automatically, passing the generated content as arguments
5. Give the user friendly feedback based on the tool's execution result

Important notes:
- **The title must stay within 20 characters** (important! longer titles will cause publishing to fail)
- Keep the content high quality and suited to the platform
- Tags should be relevant and appealing
- Categories should be accurate
- If a tool call fails, give a clear explanation and a suggestion

Currently connected MCP server: ${serverName}`

    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')
    console.log('📝 [createSystemPromptWithTools] === System prompt ===')
    console.log(systemPrompt)
    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')

    return systemPrompt
  }

  /**
   * Convert MCP tools into the OpenAI function-calling format
   * @param mcpTools MCP tool list
   * @param serverName Server name, used as a prefix for tool names
   */
  private convertToolsToOpenAIFormat(mcpTools: any[], serverName: string): any[] {
    return mcpTools.map(tool => ({
      type: 'function',
      function: {
        name: `${serverName}__${tool.name}`, // prefix with the server name to avoid collisions
        description: tool.description || '',
        parameters: tool.inputSchema || {
          type: 'object',
          properties: {},
          required: []
        }
      }
    }))
  }

  /**
   * Execute tool calls and feed the results back to the AI
   */
  private async executeToolCalls(
    conversation: Conversation,
    toolCalls: any[],
    mcpServerId: string,
    model: string | undefined,
    onChunk: (chunk: string) => void,
    tools?: any[] // tool list, forwarded so the AI can keep calling tools
  ): Promise<void> {
    console.log('🔧 [executeToolCalls] Executing', toolCalls.length, 'tool call(s)')

    // Record the tool-call message for the conversation history
    const toolCallMessage = {
      role: 'assistant' as const,
      content: '',
      tool_calls: toolCalls
    }

    // Execute each tool call
    const toolResults = []
    for (const toolCall of toolCalls) {
      try {
        const fullFunctionName = toolCall.function.name
        // Tool names are encoded as serverName__toolName
        const toolName = fullFunctionName.includes('__')
          ? fullFunctionName.split('__')[1]
          : fullFunctionName
        const functionArgs = JSON.parse(toolCall.function.arguments)

        console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')
        console.log('🔧 [executeToolCalls] Tool call details:')
        console.log(`  - Full tool name: ${fullFunctionName}`)
        console.log(`  - Extracted tool name: ${toolName}`)
        console.log(`  - MCP server id: ${mcpServerId}`)
        console.log('  - Arguments:', JSON.stringify(functionArgs, null, 2))
        console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━')

        onChunk(`\n\n🔧 Calling tool: ${toolName}...\n`)

        const result = await this.mcpClient.callTool(mcpServerId, toolName, functionArgs)

        console.log(`✅ [executeToolCalls] Tool call succeeded: ${toolName}`, result)
        onChunk(`✅ Tool finished\n`)

        toolResults.push({
          tool_call_id: toolCall.id,
          role: 'tool',
          name: fullFunctionName, // keep the same name the AI used in the call
          content: JSON.stringify(result)
        })
      } catch (error) {
        console.error('❌ [executeToolCalls] Tool call failed:', error)
        const errorMsg = error instanceof Error ? error.message : 'Unknown error'
        onChunk(`❌ Tool call failed: ${errorMsg}\n`)

        toolResults.push({
          tool_call_id: toolCall.id,
          role: 'tool',
          name: toolCall.function.name,
          content: JSON.stringify({ error: errorMsg })
        })
      }
    }

    // Append the tool call and its results to the message history
    const messages = conversation.messages
      .filter(m => m.status === 'success')
      .map(m => ({ role: m.role, content: m.content }))

    messages.push(toolCallMessage as any)
    messages.push(...(toolResults as any[]))

    // Get connected services
    const allServices = modelServiceManager.getAllServices()
    const services = allServices.filter(s => s.status === 'connected')

    if (services.length === 0) {
      throw new Error('No model service available')
    }

    let service = services[0]
    let selectedModel = model || service.models?.[0] || 'default'

    if (model) {
      const foundService = services.find(s => s.models && s.models.includes(model))
      if (foundService) {
        service = foundService
        selectedModel = model
      }
    }

    // Send the tool results back to the AI to get its final reply
    console.log('🤖 [executeToolCalls] Sending tool results back to the AI')
    console.log('🔧 [executeToolCalls] Forwarding tool list:', tools?.length || 0)
    onChunk('\n\n🤖 Generating reply...\n')

    const result = await modelServiceManager.sendChatRequestStream(
      service.id,
      messages,
      selectedModel,
      onChunk,
      tools // forward the tool list so the AI can keep calling tools
    )

    // Recurse: if the AI requests more tool calls, keep executing them
    if (result.data?.toolCalls && result.data.toolCalls.length > 0) {
      console.log('🔁 [executeToolCalls] AI requested more tool calls, recursing:', result.data.toolCalls.length)
      await this.executeToolCalls(conversation, result.data.toolCalls, mcpServerId, model, onChunk, tools)
    } else {
      console.log('✅ [executeToolCalls] Tool-call chain complete')
    }
  }

  /**
   * Expose all services (for external callers)
   */
  getAllServices() {
    return modelServiceManager.getAllServices()
  }
}

export const chatService = ChatService.getInstance()
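
/*
 * Usage sketch (illustrative only, not part of this module): how a caller might drive the
 * singleton for a streamed reply. `renderDelta` and the model id are hypothetical names
 * chosen for the example; real wiring depends on the consuming UI component.
 *
 *   chatService.initialize()
 *   const topic = chatService.createTopic('Scratch pad', { modelId: 'my-model' })
 *   const controller = new AbortController()
 *   await chatService.sendMessageStream(
 *     { topicId: topic.id, content: 'Hello there' },
 *     (event) => {
 *       if (event.type === 'delta' && event.content) renderDelta(event.content)
 *       if (event.type === 'error') console.error(event.error)
 *     },
 *     undefined,         // no MCP server selected, so no tools are injected
 *     controller.signal  // calling controller.abort() marks the reply as 'paused'
 *   )
 */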