first commit

douboer
2025-10-14 14:18:20 +08:00
commit d93bc02772
66 changed files with 21393 additions and 0 deletions

src/server/LLMService.ts

@@ -0,0 +1,237 @@
import OpenAI from 'openai';
import type { LLMConfig, Tool } from '../types/index.js';
export class LLMService {
private openai?: OpenAI;
private config?: LLMConfig;
/**
* Configure the LLM service
*/
configure(config: LLMConfig): void {
this.config = config;
if (config.provider === 'openai' && config.apiKey) {
this.openai = new OpenAI({
apiKey: config.apiKey,
baseURL: config.baseUrl
});
}
}
/**
* Generate parameters from the user input and the tool's input schema
*/
async generateParameters(
userInput: string,
tool: Tool
): Promise<Record<string, any>> {
if (!this.openai || !this.config?.enabled) {
throw new Error('LLM service is not configured or not enabled');
}
const systemPrompt = this.buildParameterGenerationPrompt(tool);
try {
const response = await this.openai.chat.completions.create({
model: this.config.model,
messages: [
{ role: 'system', content: systemPrompt },
{ role: 'user', content: userInput }
],
temperature: this.config.temperature || 0.1,
max_tokens: this.config.maxTokens || 1000,
response_format: { type: 'json_object' }
});
const content = response.choices[0]?.message?.content;
if (!content) {
throw new Error('LLM returned no valid response');
}
return JSON.parse(content);
} catch (error) {
if (error instanceof Error) {
throw new Error(`LLM call failed: ${error.message}`);
}
throw new Error('LLM call failed: unknown error');
}
}
/**
* Analyze the user's intent and select an appropriate tool
*/
async analyzeIntent(
userInput: string,
availableTools: Tool[]
): Promise<{
selectedTool?: string;
confidence: number;
reasoning: string;
suggestedParameters?: Record<string, any>;
}> {
if (!this.openai || !this.config?.enabled) {
return {
confidence: 0,
reasoning: 'LLM service is not enabled; please select a tool manually'
};
}
const systemPrompt = this.buildIntentAnalysisPrompt(availableTools);
try {
const response = await this.openai.chat.completions.create({
model: this.config.model,
messages: [
{ role: 'system', content: systemPrompt },
{ role: 'user', content: userInput }
],
temperature: this.config.temperature || 0.1,
max_tokens: this.config.maxTokens || 1000,
response_format: { type: 'json_object' }
});
const content = response.choices[0]?.message?.content;
if (!content) {
return {
confidence: 0,
reasoning: 'LLM analysis failed'
};
}
return JSON.parse(content);
} catch (error) {
console.error('LLM intent analysis failed:', error);
return {
confidence: 0,
reasoning: 'An error occurred during LLM analysis'
};
}
}
/**
* Generate a conversational response
*/
async generateResponse(
userInput: string,
context?: string
): Promise<string> {
if (!this.openai || !this.config?.enabled) {
throw new Error('LLM service is not configured or not enabled');
}
const messages: Array<{ role: 'system' | 'user' | 'assistant'; content: string }> = [
{
role: 'system',
content: `You are the intelligent assistant of an MCP (Model Context Protocol) client. You can help users:
1. Understand and use the tools provided by various MCP servers
2. Analyze tool execution results and give suggestions
3. Assist with configuring MCP servers
Please reply to users in a friendly, professional tone.${context ? `\n\nCurrent context: ${context}` : ''}`
},
{ role: 'user', content: userInput }
];
try {
const response = await this.openai.chat.completions.create({
model: this.config.model,
messages,
temperature: this.config.temperature || 0.7,
max_tokens: this.config.maxTokens || 2000
});
return response.choices[0]?.message?.content || 'Sorry, I could not generate a reply.';
} catch (error) {
console.error('Failed to generate chat reply:', error);
throw new Error('Failed to generate a reply, please try again later');
}
}
/**
* Build the parameter-generation prompt
*/
private buildParameterGenerationPrompt(tool: Tool): string {
const properties = tool.inputSchema?.properties || {};
const required = tool.inputSchema?.required || [];
const propertiesDesc = Object.entries(properties)
.map(([key, prop]: [string, any]) => {
const isRequired = required.includes(key) ? ' (required)' : ' (optional)';
const typeInfo = prop.type ? `type: ${prop.type}` : '';
const enumInfo = prop.enum ? `allowed values: ${prop.enum.join(', ')}` : '';
const desc = prop.description || 'no description';
return `- ${key}${isRequired}: ${desc}${typeInfo ? ` | ${typeInfo}` : ''}${enumInfo ? ` | ${enumInfo}` : ''}`;
})
.join('\n');
return `You are a parameter-generation assistant. Based on the user's input, generate suitable parameters for the tool "${tool.name}".
Tool description: ${tool.description || 'no description'}
Parameter details:
${propertiesDesc || 'This tool takes no parameters'}
Requirements:
1. Carefully analyze the user's input and understand their real intent
2. Generate a reasonable value for every required parameter
3. Also generate appropriate values for relevant optional parameters
4. If a parameter's value cannot be determined, set it to null or a sensible default
5. Return a standard JSON object
6. Make sure the generated parameters satisfy the tool's requirements
Example output format:
{
"parameter1": "value1",
"parameter2": "value2",
"parameter3": null
}`;
}
/**
* Build the intent-analysis prompt
*/
private buildIntentAnalysisPrompt(tools: Tool[]): string {
const toolList = tools
.map(tool => `- ${tool.name}: ${tool.description || 'no description'}`)
.join('\n');
return `You are an intent-analysis assistant. Analyze the user's input and select the most suitable tool to fulfill the request.
Available tools:
${toolList || 'No tools available'}
Analysis requirements:
1. Carefully understand the user's real intent and needs
2. Select the best-matching tool from the available tools
3. Rate the confidence of the match (0-100; higher means more certain)
4. Give a detailed rationale for the selection
5. If applicable, pre-generate some suggested parameters
Return JSON in this format:
{
"selectedTool": "name of the best-matching tool, or null if none is suitable",
"confidence": 85,
"reasoning": "detailed rationale for choosing this tool, or why no suitable tool was found",
"suggestedParameters": {
"param1": "suggested parameter value",
"param2": "another parameter value"
}
}`;
}
/**
* Check whether the service is available
*/
isAvailable(): boolean {
return !!(this.openai && this.config?.enabled);
}
/**
* Get the current configuration
*/
getConfig(): LLMConfig | undefined {
return this.config;
}
}
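
A minimal usage sketch for the class above, assuming the LLMConfig and Tool shapes from src/types/index.ts; the model name, API key, and the example tool are illustrative placeholders, not values from this commit:

import { LLMService } from './LLMService.js';
import type { Tool } from '../types/index.js';

const llm = new LLMService();
// Hypothetical OpenAI-compatible configuration; substitute a real key and model.
llm.configure({
  provider: 'openai',
  model: 'gpt-4o-mini',
  apiKey: process.env.OPENAI_API_KEY,
  enabled: true,
  temperature: 0.1
});

const demoTool: Tool = { name: 'list_files', description: 'List files in a directory' };

if (llm.isAvailable()) {
  // Pick a tool for a user request, then let the LLM fill in its parameters.
  const intent = await llm.analyzeIntent('Show me the files in /tmp', [demoTool]);
  console.log(intent.selectedTool, intent.confidence, intent.reasoning);
  const params = await llm.generateParameters('Show me the files in /tmp', demoTool);
  console.log(params);
}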

src/server/MCPManager.ts

@@ -0,0 +1,279 @@
import { SmartMCPClient } from '../../../dist/smart-client.js';
import type { MCPServerConfig, ServerCapabilities, Tool, Resource, Prompt } from '../types/index.js';
import { EventEmitter } from 'events';
import { randomUUID } from 'crypto';
export class MCPManager extends EventEmitter {
private servers: Map<string, SmartMCPClient> = new Map();
private configs: Map<string, MCPServerConfig> = new Map();
/**
* Add a new MCP server
*/
async addServer(config: Omit<MCPServerConfig, 'id' | 'status'>): Promise<MCPServerConfig> {
const serverId = randomUUID();
const serverConfig: MCPServerConfig = {
...config,
id: serverId,
status: 'connecting'
};
this.configs.set(serverId, serverConfig);
this.emit('serverAdded', serverConfig);
try {
// Create the smart client
const client = new SmartMCPClient({
name: 'MCP-Client-Vue',
version: '1.0.0'
});
// Connect to the server according to its transport type
if (config.type === 'http') {
await client.connectHTTP(config.url);
} else if (config.type === 'websocket') {
await client.connectWebSocket(config.url);
} else {
throw new Error(`Unsupported transport type: ${config.type}`);
}
// Fetch the server's capabilities
const capabilities: ServerCapabilities = {
tools: client.getAvailableTools() as Tool[],
resources: client.getAvailableResources() as Resource[],
prompts: client.getAvailablePrompts() as Prompt[]
};
// Update the config status
const updatedConfig: MCPServerConfig = {
...serverConfig,
status: 'connected',
capabilities
};
this.servers.set(serverId, client);
this.configs.set(serverId, updatedConfig);
this.emit('serverStatusChanged', serverId, 'connected');
console.log(`✅ Server ${config.name} connected`);
return updatedConfig;
} catch (error) {
console.error(`❌ Server ${config.name} failed to connect:`, error);
const errorConfig: MCPServerConfig = {
...serverConfig,
status: 'error'
};
this.configs.set(serverId, errorConfig);
this.emit('serverStatusChanged', serverId, 'error');
this.emit('serverError', serverId, error);
throw error;
}
}
/**
* Remove a server
*/
async removeServer(id: string): Promise<void> {
const client = this.servers.get(id);
if (client) {
await client.disconnect();
this.servers.delete(id);
}
this.configs.delete(id);
this.emit('serverRemoved', id);
}
/**
* Update a server configuration
*/
async updateServer(id: string, updates: Partial<MCPServerConfig>): Promise<void> {
const config = this.configs.get(id);
if (!config) {
throw new Error(`Server ${id} does not exist`);
}
const updatedConfig = { ...config, ...updates };
this.configs.set(id, updatedConfig);
this.emit('serverUpdated', updatedConfig);
}
/**
* Call a tool
*/
async callTool(serverId: string, toolName: string, parameters: Record<string, any>): Promise<any> {
const client = this.servers.get(serverId);
if (!client) {
throw new Error(`Server ${serverId} is not connected`);
}
const result = await client.smartCallTool(toolName, parameters);
if (!result.success) {
throw new Error(result.error);
}
return result.content;
}
/**
* Read a resource
*/
async readResource(serverId: string, uri: string): Promise<any> {
const client = this.servers.get(serverId);
if (!client) {
throw new Error(`Server ${serverId} is not connected`);
}
const result = await client.smartReadResource(uri);
if (!result.success) {
throw new Error(result.error);
}
return result.contents;
}
/**
* Get all server configurations
*/
getServerConfigs(): MCPServerConfig[] {
return Array.from(this.configs.values());
}
/**
* Get a single server configuration
*/
getServerConfig(id: string): MCPServerConfig | undefined {
return this.configs.get(id);
}
/**
* Test a server connection
*/
async testConnection(id: string): Promise<boolean> {
const client = this.servers.get(id);
if (!client || !client.isConnected()) {
return false;
}
try {
// Try fetching the tool list to verify the connection
client.getAvailableTools();
return true;
} catch {
return false;
}
}
/**
* Refresh a server's capabilities
*/
async refreshServer(id: string): Promise<void> {
const client = this.servers.get(id);
const config = this.configs.get(id);
if (!client || !config) {
throw new Error(`Server ${id} does not exist`);
}
try {
// Re-discover capabilities
await client.discoverCapabilities();
const capabilities: ServerCapabilities = {
tools: client.getAvailableTools() as Tool[],
resources: client.getAvailableResources() as Resource[],
prompts: client.getAvailablePrompts() as Prompt[]
};
const updatedConfig = {
...config,
capabilities
};
this.configs.set(id, updatedConfig);
this.emit('serverUpdated', updatedConfig);
} catch (error) {
console.error(`Failed to refresh server ${id}:`, error);
throw error;
}
}
/**
* Enable/disable a server
*/
async toggleServer(id: string, enabled: boolean): Promise<void> {
await this.updateServer(id, { enabled });
}
/**
* Enable/disable a tool
*/
async toggleTool(serverId: string, toolName: string, enabled: boolean): Promise<void> {
const config = this.configs.get(serverId);
if (!config || !config.capabilities) {
return;
}
const tool = config.capabilities.tools.find(t => t.name === toolName);
if (tool) {
tool.enabled = enabled;
this.emit('serverUpdated', config);
}
}
/**
* Set auto-approve for a tool
*/
async toggleAutoApprove(serverId: string, toolName: string, autoApprove: boolean): Promise<void> {
const config = this.configs.get(serverId);
if (!config || !config.capabilities) {
return;
}
const tool = config.capabilities.tools.find(t => t.name === toolName);
if (tool) {
tool.autoApprove = autoApprove;
this.emit('serverUpdated', config);
}
}
/**
* Get all connected servers
*/
getConnectedServers(): MCPServerConfig[] {
return this.getServerConfigs().filter(config => config.status === 'connected');
}
/**
* Get all available tools
*/
getAllAvailableTools(): Array<{ serverId: string; serverName: string; tools: any[] }> {
return this.getConnectedServers()
.filter(server => server.capabilities?.tools.length)
.map(server => ({
serverId: server.id,
serverName: server.name,
tools: server.capabilities!.tools.filter(tool => tool.enabled !== false)
}));
}
/**
* Disconnect all servers
*/
async disconnectAll(): Promise<void> {
const disconnectPromises = Array.from(this.servers.entries()).map(async ([id, client]) => {
try {
await client.disconnect();
} catch (error) {
console.error(`Error while disconnecting server ${id}:`, error);
}
});
await Promise.all(disconnectPromises);
this.servers.clear();
this.configs.clear();
}
}
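
A short usage sketch for the manager above; the server name, URL, and tool name are made-up examples, and the connection and tool calls go through the SmartMCPClient wrapper imported at the top of this file:

import { MCPManager } from './MCPManager.js';

const manager = new MCPManager();
manager.on('serverStatusChanged', (id: string, status: string) => {
  console.log(`server ${id} is now ${status}`);
});

// Register a hypothetical HTTP MCP server and call one of its tools.
const server = await manager.addServer({
  name: 'demo-server',
  url: 'http://localhost:3000/mcp',
  type: 'http',
  enabled: true
});
const result = await manager.callTool(server.id, 'echo', { text: 'hello' });
console.log(result);

await manager.disconnectAll();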

src/server/index.ts

@@ -0,0 +1,306 @@
import express from 'express';
import cors from 'cors';
import { createServer } from 'http';
import { Server as SocketServer } from 'socket.io';
import path from 'path';
import { fileURLToPath } from 'url';
import { MCPManager } from './MCPManager.js';
import { LLMService } from './LLMService.js';
import type { MCPServerConfig, LLMConfig, APIResponse } from '../types/index.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const app = express();
const server = createServer(app);
const io = new SocketServer(server, {
cors: {
origin: "http://localhost:5173", // Vite 开发服务器
methods: ["GET", "POST"]
}
});
// Initialize services
const mcpManager = new MCPManager();
const llmService = new LLMService();
// Middleware
app.use(cors());
app.use(express.json());
app.use(express.static(path.join(__dirname, '../../web/dist')));
// API routes
// Server management
app.get('/api/servers', async (req, res) => {
try {
const servers = mcpManager.getServerConfigs();
res.json({ success: true, data: servers } as APIResponse);
} catch (error) {
console.error('Failed to fetch server list:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
} as APIResponse);
}
});
app.post('/api/servers', async (req, res) => {
try {
const serverConfig: Omit<MCPServerConfig, 'id' | 'status'> = req.body;
const result = await mcpManager.addServer(serverConfig);
res.json({ success: true, data: result } as APIResponse);
} catch (error) {
console.error('Failed to add server:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to add server'
} as APIResponse);
}
});
app.delete('/api/servers/:id', async (req, res) => {
try {
await mcpManager.removeServer(req.params.id);
res.json({ success: true } as APIResponse);
} catch (error) {
console.error('Failed to remove server:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to remove server'
} as APIResponse);
}
});
app.put('/api/servers/:id', async (req, res) => {
try {
const updates = req.body;
await mcpManager.updateServer(req.params.id, updates);
res.json({ success: true } as APIResponse);
} catch (error) {
console.error('Failed to update server:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to update server'
} as APIResponse);
}
});
app.post('/api/servers/:id/refresh', async (req, res) => {
try {
await mcpManager.refreshServer(req.params.id);
res.json({ success: true } as APIResponse);
} catch (error) {
console.error('Failed to refresh server:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to refresh server'
} as APIResponse);
}
});
// Tool calls
app.post('/api/servers/:id/tools/:toolName/call', async (req, res) => {
try {
const { id, toolName } = req.params;
const { parameters } = req.body;
const result = await mcpManager.callTool(id, toolName, parameters);
res.json({ success: true, data: result } as APIResponse);
} catch (error) {
console.error('Tool call failed:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Tool call failed'
} as APIResponse);
}
});
// Resource reads
app.get('/api/servers/:id/resources', async (req, res) => {
try {
const { id } = req.params;
const { uri } = req.query;
if (!uri || typeof uri !== 'string') {
return res.status(400).json({
success: false,
error: 'Missing resource URI'
} as APIResponse);
}
const result = await mcpManager.readResource(id, uri);
res.json({ success: true, data: result } as APIResponse);
} catch (error) {
console.error('Failed to read resource:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to read resource'
} as APIResponse);
}
});
// LLM service
app.post('/api/llm/configure', async (req, res) => {
try {
const config: LLMConfig = req.body;
llmService.configure(config);
res.json({ success: true } as APIResponse);
} catch (error) {
console.error('Failed to configure LLM:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to configure LLM'
} as APIResponse);
}
});
app.post('/api/llm/analyze-intent', async (req, res) => {
try {
const { userInput, availableTools } = req.body;
const result = await llmService.analyzeIntent(userInput, availableTools);
res.json({ success: true, data: result } as APIResponse);
} catch (error) {
console.error('Intent analysis failed:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Intent analysis failed'
} as APIResponse);
}
});
app.post('/api/llm/generate-parameters', async (req, res) => {
try {
const { userInput, tool } = req.body;
const result = await llmService.generateParameters(userInput, tool);
res.json({ success: true, data: result } as APIResponse);
} catch (error) {
console.error('Parameter generation failed:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Parameter generation failed'
} as APIResponse);
}
});
app.get('/api/llm/status', async (req, res) => {
try {
const isAvailable = llmService.isAvailable();
const config = llmService.getConfig();
res.json({
success: true,
data: {
available: isAvailable,
config: config ? { ...config, apiKey: config.apiKey ? '***' : undefined } : null
}
} as APIResponse);
} catch (error) {
console.error('Failed to get LLM status:', error);
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to get LLM status'
} as APIResponse);
}
});
// Health check
app.get('/api/health', (req, res) => {
res.json({
success: true,
data: {
status: 'healthy',
timestamp: new Date().toISOString(),
servers: mcpManager.getServerConfigs().length,
connected: mcpManager.getConnectedServers().length
}
} as APIResponse);
});
// Serve all remaining routes to the Vue app
app.get('*', (req, res) => {
res.sendFile(path.join(__dirname, '../../web/dist/index.html'));
});
// Socket.IO real-time communication
io.on('connection', (socket) => {
console.log('🔌 Client connected:', socket.id);
// Subscribe to server status changes
const statusChangeHandler = (serverId: string, status: string) => {
socket.emit('serverStatusChanged', { serverId, status });
};
const serverAddedHandler = (server: MCPServerConfig) => {
socket.emit('serverAdded', server);
};
const serverRemovedHandler = (serverId: string) => {
socket.emit('serverRemoved', serverId);
};
const serverUpdatedHandler = (server: MCPServerConfig) => {
socket.emit('serverUpdated', server);
};
mcpManager.on('serverStatusChanged', statusChangeHandler);
mcpManager.on('serverAdded', serverAddedHandler);
mcpManager.on('serverRemoved', serverRemovedHandler);
mcpManager.on('serverUpdated', serverUpdatedHandler);
// Handle tool-call requests
socket.on('callTool', async (data) => {
try {
const { serverId, toolName, parameters, requestId } = data;
const result = await mcpManager.callTool(serverId, toolName, parameters);
socket.emit('toolCallResult', {
success: true,
data: result,
requestId
});
} catch (error) {
socket.emit('toolCallResult', {
success: false,
error: error instanceof Error ? error.message : 'Tool call failed',
requestId: data.requestId
});
}
});
socket.on('disconnect', () => {
console.log('🔌 Client disconnected:', socket.id);
// Clean up event listeners
mcpManager.off('serverStatusChanged', statusChangeHandler);
mcpManager.off('serverAdded', serverAddedHandler);
mcpManager.off('serverRemoved', serverRemovedHandler);
mcpManager.off('serverUpdated', serverUpdatedHandler);
});
});
// Start the server
const PORT = process.env.PORT || 3100;
server.listen(PORT, () => {
console.log(`🚀 MCP Vue client server started: http://localhost:${PORT}`);
console.log(`📱 Frontend dev server: http://localhost:5173`);
console.log(`🔧 API endpoint: http://localhost:${PORT}/api`);
});
// Graceful shutdown
const gracefulShutdown = async () => {
console.log('🛑 Shutting down server...');
try {
await mcpManager.disconnectAll();
console.log('✅ All MCP connections closed');
} catch (error) {
console.error('❌ Error while closing MCP connections:', error);
}
server.close(() => {
console.log('✅ HTTP server closed');
process.exit(0);
});
};
process.on('SIGINT', gracefulShutdown);
process.on('SIGTERM', gracefulShutdown);
export { mcpManager, llmService };
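
For reference, a small client-side sketch against the HTTP API registered above; the endpoint paths and response shape follow the routes in this file, while the port, model, and API key values are placeholders:

// Assumes the server above is listening on its default port 3100.
const base = 'http://localhost:3100';

// Enable the LLM service with a hypothetical OpenAI-compatible config.
await fetch(`${base}/api/llm/configure`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ provider: 'openai', model: 'gpt-4o-mini', apiKey: 'sk-placeholder', enabled: true })
});

// Check overall health and how many MCP servers are connected.
const health = await (await fetch(`${base}/api/health`)).json();
console.log(health.data.status, health.data.connected);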

src/types/index.ts

@@ -0,0 +1,105 @@
export interface MCPServerConfig {
id: string;
name: string;
version?: string;
url: string;
type: 'http' | 'websocket' | 'sse';
enabled: boolean;
description?: string;
status: 'connected' | 'disconnected' | 'connecting' | 'error';
capabilities?: ServerCapabilities;
settings?: {
autoConnect?: boolean;
retryAttempts?: number;
timeout?: number;
};
}
export interface ServerCapabilities {
tools: Tool[];
resources: Resource[];
prompts: Prompt[];
}
export interface Tool {
name: string;
description?: string;
inputSchema?: {
type: 'object';
properties?: Record<string, any>;
required?: string[];
};
enabled?: boolean;
autoApprove?: boolean;
}
export interface ToolParameter {
type: string;
description?: string;
enum?: string[];
default?: any;
format?: string;
}
export interface Resource {
uri: string;
name?: string;
description?: string;
mimeType?: string;
}
export interface Prompt {
name: string;
description?: string;
arguments?: Array<{
name: string;
type?: string;
description?: string;
required?: boolean;
}>;
}
export interface LLMConfig {
provider: 'openai' | 'claude' | 'ollama' | 'custom';
model: string;
apiKey?: string;
baseUrl?: string;
enabled: boolean;
temperature?: number;
maxTokens?: number;
}
export interface ChatMessage {
id: string;
role: 'user' | 'assistant' | 'system';
content: string;
timestamp: Date;
toolCalls?: ToolCall[];
serverId?: string;
}
export interface ToolCall {
id: string;
toolName: string;
serverId: string;
parameters: Record<string, any>;
result?: any;
error?: string;
status: 'pending' | 'success' | 'error';
}
export interface AppConfig {
servers: MCPServerConfig[];
llm: LLMConfig;
ui: {
theme: 'light' | 'dark' | 'auto';
language: 'zh-CN' | 'en-US';
compactMode: boolean;
};
}
export interface APIResponse<T = any> {
success: boolean;
data?: T;
error?: string;
}