diff --git a/mcp_center/mcp_config/change.py b/mcp_center/mcp_config/change.py new file mode 100644 index 0000000000000000000000000000000000000000..cec514e9fe5c2639d12ac419344b60e2f460b896 --- /dev/null +++ b/mcp_center/mcp_config/change.py @@ -0,0 +1,67 @@ +import json +import toml + +def json_to_toml(json_data, toml_file_path, top_level_key="data"): + """ + 将JSON数据转换为TOML格式并写入文件 + + 参数: + json_data: JSON数据,可以是字典、列表或JSON字符串 + toml_file_path: 输出的TOML文件路径 + top_level_key: 当输入为列表时,用于包装列表的顶级键名 + """ + try: + # 如果输入是JSON字符串,则先解析为Python对象 + if isinstance(json_data, str): + data = json.loads(json_data) + else: + data = json_data + + # TOML不支持顶级列表,需要包装在字典中 + if isinstance(data, list): + data = {top_level_key: data} + + # 将数据转换为TOML格式并写入文件 + with open(toml_file_path, 'w', encoding='utf-8') as f: + toml.dump(data, f) + + print(f"成功将JSON数据转换为TOML并写入文件: {toml_file_path}") + return True + + except json.JSONDecodeError as e: + print(f"JSON解析错误: {e}") + except Exception as e: + print(f"转换过程中发生错误: {e}") + return False + +if __name__ == "__main__": + # 示例JSON数据(列表形式) + sample_json = [ + { + "appType":"agent", + "name":"hce运维助手", + "description":"hce运维助手,用于诊断hce环境和执行shell命令", + "mcpPath":[ + "remote_info_mcp", + "shell_generator_mcp" + ], + "published":True + } + ] + + # 转换并写入TOML文件 + # 对于列表数据,指定一个顶级键名(如"applications")使其符合TOML格式要求 + json_to_toml(sample_json, "mcp_to_app_config.toml", "applications") + + # 测试字典类型的JSON数据 + dict_json = { + "name": "测试", + "version": "1.0.0", + "features": ["简单", "易用"] + } + json_to_toml(dict_json, "from_dict.toml") + + # 测试JSON字符串 + json_str = '{"name": "字符串测试", "version": "2.0.0"}' + json_to_toml(json_str, "from_string.toml") + \ No newline at end of file diff --git a/mcp_center/mcp_config/mcp_server_mcp/config.json b/mcp_center/mcp_config/mcp_server_mcp/config.json new file mode 100644 index 0000000000000000000000000000000000000000..627e246ccf0dc556bd0b8e571dbe594c610dfc85 --- /dev/null +++ b/mcp_center/mcp_config/mcp_server_mcp/config.json @@ -0,0 +1,15 @@ +{ + "mcpServers": { + "mcp_server": { + "headers": {}, + "autoApprove": [], + "autoInstall": false, + "timeout": 60, + "url": "http://127.0.0.1:12555/sse" + } + }, + "name": "mcptool集成管理工具", + "overview": "定制化的配置自己tool", + "description": "定制化的配置自己tool", + "mcpType": "sse" +} \ No newline at end of file diff --git a/mcp_center/mcp_config/mcp_to_app_config.toml b/mcp_center/mcp_config/mcp_to_app_config.toml new file mode 100644 index 0000000000000000000000000000000000000000..4f4d7a2218768865b43a18e1da3182f6505e759f --- /dev/null +++ b/mcp_center/mcp_config/mcp_to_app_config.toml @@ -0,0 +1,9 @@ +[[applications]] +appType = "agent" +name = "OE-智能运维助手" +description = "提供通用系统运维能力,含网络监控、性能分析、硬件信息查询、存储管理等功能" +mcpPath = [ + "remote_info_mcp", + "mcp_server_mcp", +] +published = true diff --git a/mcp_center/mcp_config/remote_info_mcp/config.json b/mcp_center/mcp_config/remote_info_mcp/config.json index b52fd80d54a4c2258d01056062b12f81bddbcbe4..cf7eb8ce68cf411c945db585b3291be2fd73583e 100644 --- a/mcp_center/mcp_config/remote_info_mcp/config.json +++ b/mcp_center/mcp_config/remote_info_mcp/config.json @@ -1,9 +1,15 @@ { + "mcpServers": { + "mcp_server": { + "headers": {}, + "autoApprove": [], + "autoInstall": false, + "timeout": 60, + "url": "http://127.0.0.1:12100/sse" + } + }, "name": "端侧信息收集工具", "overview": "端侧信息收集工具", "description": "端侧信息收集工具", - "mcpType": "sse", - "config": { - "url": "http://127.0.0.1:12100/sse" - } + "mcpType": "sse" } \ No newline at end of file diff --git a/oe-cli-mcp-server/.gitignore
b/mcp_center/servers/oe-cli-mcp-server/.gitignore similarity index 100% rename from oe-cli-mcp-server/.gitignore rename to mcp_center/servers/oe-cli-mcp-server/.gitignore diff --git a/oe-cli-mcp-server/README.en.md b/mcp_center/servers/oe-cli-mcp-server/README.en.md similarity index 100% rename from oe-cli-mcp-server/README.en.md rename to mcp_center/servers/oe-cli-mcp-server/README.en.md diff --git a/oe-cli-mcp-server/README.md b/mcp_center/servers/oe-cli-mcp-server/README.md similarity index 92% rename from oe-cli-mcp-server/README.md rename to mcp_center/servers/oe-cli-mcp-server/README.md index 029f872ca685ab1fb7abda847b2365faa2df7ec1..4dc845de5dd83d39d30d3e8cf4a322465da79cd7 100644 --- a/oe-cli-mcp-server/README.md +++ b/mcp_center/servers/oe-cli-mcp-server/README.md @@ -17,7 +17,7 @@ Gitee 是 OSCHINA 推出的基于 Git 的代码托管平台(同时支持 SVN #### 使用说明 -1. export PYTHONPATH=/home/tsn/oe-cli-mcp-server:$PYTHONPATH +1. export PYTHONPATH=/usr/lib/euler-copilot-framework/mcp_center/servers/oe-cli-mcp-server/:$PYTHONPATH 2. xxxx 3. xxxx diff --git a/oe-cli-mcp-server/client/client.py b/mcp_center/servers/oe-cli-mcp-server/client/client.py similarity index 95% rename from oe-cli-mcp-server/client/client.py rename to mcp_center/servers/oe-cli-mcp-server/client/client.py index 69e390c4259cde2b7ff48e49c2dfa80f1aa56c62..ed2b13ffcfa4a883eef1dcb49efdf8bb49694036 100644 --- a/oe-cli-mcp-server/client/client.py +++ b/mcp_center/servers/oe-cli-mcp-server/client/client.py @@ -4,11 +4,13 @@ import asyncio import logging from contextlib import AsyncExitStack -from typing import Union +from typing import TYPE_CHECKING, Union +from pydantic import BaseModel, Field from enum import Enum -from mcp import ClientSession +from mcp import ClientSession, StdioServerParameters from mcp.client.sse import sse_client -from numpy.f2py.crackfortran import reset_global_f2py_vars +from mcp.client.stdio import stdio_client + logger = logging.getLogger(__name__) @@ -32,7 +34,7 @@ class MCPClient: self.status = MCPStatus.UNINITIALIZED async def _main_loop( - self + self ) -> None: """ 创建MCP Client @@ -122,7 +124,6 @@ async def main() -> None: headers = {} client = MCPClient(url, headers) await client.init() - #result = await client.call_tool("list_knowledge_bases", {}) result = await client.call_tool("nvidia_smi_status", {}) print(result) await client.stop() diff --git a/mcp_center/servers/oe-cli-mcp-server/mcp-server.service b/mcp_center/servers/oe-cli-mcp-server/mcp-server.service new file mode 100644 index 0000000000000000000000000000000000000000..9e36464cb0a6e31e29f646babb948600819b0789 --- /dev/null +++ b/mcp_center/servers/oe-cli-mcp-server/mcp-server.service @@ -0,0 +1,33 @@ +[Unit] +Description=MCP Tool Registration Service +After=network.target +After=multi-user.target +# 新增:确保文件系统就绪(避免工作目录未挂载) +RequiresMountsFor=/home/tsn/oe-cli-mcp-server + +[Service] +User=root +Group=root +# 工作目录:必须是项目根目录(已正确配置,确保 server.py 中相对路径生效) +WorkingDirectory=/usr/lib/euler-copilot-framework/mcp_center/servers/oe-cli-mcp-server/ + +Environment="PATH=/usr/lib/euler-copilot-framework/mcp_center/servers/oe-cli-mcp-server/venv/global/bin:$PATH" + +ExecStart=/usr/lib/euler-copilot-framework/mcp_center/servers/oe-cli-mcp-server/venv/global/bin/python /usr/lib/euler-copilot-framework/mcp_center/servers/oe-cli-mcp-server/mcp_server/server.py + +# 原有合理配置保留 +Restart=always +RestartSec=5 +KillMode=control-group +Environment="LANGUAGE=zh" +Environment="LOG_LEVEL=INFO" +StandardOutput=journal+console +StandardError=journal+console + +# 新增:限制服务资源(可选,避免占用过多CPU/内存) 
+LimitCPU=4 +LimitMEMLOCK=infinity +LimitNOFILE=65535 + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/oe-cli-mcp-server/mcp_server/MCP_TOOLS.json b/mcp_center/servers/oe-cli-mcp-server/mcp_server/MCP_TOOLS.json similarity index 100% rename from oe-cli-mcp-server/mcp_server/MCP_TOOLS.json rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/MCP_TOOLS.json diff --git a/oe-cli-mcp-server/mcp_server/cli.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/cli.py similarity index 100% rename from oe-cli-mcp-server/mcp_server/cli.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/cli.py diff --git a/oe-cli-mcp-server/mcp_server/cli/__init__.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/cli/__init__.py similarity index 100% rename from oe-cli-mcp-server/mcp_server/cli/__init__.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/cli/__init__.py diff --git a/oe-cli-mcp-server/mcp_server/cli/handle.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/cli/handle.py similarity index 74% rename from oe-cli-mcp-server/mcp_server/cli/handle.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/cli/handle.py index ef2838aaca5286c91f435d6825d44571166b6bb4..2a52caa1d6a63e81db61a0da80176197b96c7fad 100644 --- a/oe-cli-mcp-server/mcp_server/cli/handle.py +++ b/mcp_center/servers/oe-cli-mcp-server/mcp_server/cli/handle.py @@ -2,16 +2,43 @@ import logging import os import subprocess import toml +import requests # 新增:导入 requests 库(用于 HTTP 调用) from mcp_tools.tool_type import ToolType from util.get_project_root import get_project_root from util.test_llm_valid import is_llm_config_valid -from .socket import send_socket_request + +# 新增:FastAPI 服务地址(和 api_server.py 配置一致,端口 12556) +FASTAPI_BASE_URL = "http://127.0.0.1:12556" # 路径配置(直接硬编码,简化) PUBLIC_CONFIG_PATH = os.path.join(get_project_root(), "config/public_config.toml") logger = logging.getLogger(__name__) +# 新增:替代 send_socket_request 的 HTTP 调用函数 +def send_http_request(action: str, params: dict = None): + """调用 FastAPI 接口(替代原 Socket 调用)""" + try: + if action == "add": + url = f"{FASTAPI_BASE_URL}/tool/add" + response = requests.post(url, params=params) + elif action == "remove": + url = f"{FASTAPI_BASE_URL}/tool/remove" + response = requests.post(url, params=params) + elif action == "list": + url = f"{FASTAPI_BASE_URL}/tool/list" + response = requests.get(url) + elif action == "init": + url = f"{FASTAPI_BASE_URL}/tool/init" + response = requests.post(url) + else: + return {"success": False, "message": f"不支持的操作:{action}"} + + response.raise_for_status() # 抛出 HTTP 错误(如 404、500) + return response.json() + except requests.exceptions.RequestException as e: + return {"success": False, "message": f"接口调用失败:{str(e)}"} + # -------------------------- 工具包操作 -------------------------- def handle_add(pkg_input): """处理 -add 命令""" @@ -26,7 +53,8 @@ def handle_add(pkg_input): print(f"❌ 不支持的包类型:{pkg_input}") raise SystemExit(1) - result = send_socket_request("add", params) + # 替换:send_socket_request → send_http_request + result = send_http_request("add", params) print(f"✅ {result['message']}" if result["success"] else f"❌ {result['message']}") return result["success"] @@ -37,25 +65,29 @@ def handle_remove(pkg_input): params = {"type": "system" if pkg_input in type_map else "custom", "value": type_map.get(pkg_input, pkg_input)} - result = send_socket_request("remove", params) + # 替换:send_socket_request → send_http_request + result = send_http_request("remove", params) print(f"✅ {result['message']}" if 
result["success"] else f"❌ {result['message']}") return result["success"] def handle_tool(): """处理 -tool 命令""" - result = send_socket_request("list") + # 替换:send_socket_request → send_http_request + result = send_http_request("list") if not result["success"]: print(f"❌ {result['message']}") return False - print(f"\n📋 当前已加载工具包(共{result['total']}个):") - for pkg, funcs in result["pkg_funcs"].items(): + # 注意:FastAPI 接口返回的结构是 data.pkg_funcs 和 data.total_packages(需调整) + print(f"\n📋 当前已加载工具包(共{result['data']['total_packages']}个):") + for pkg, funcs in result["data"]["pkg_funcs"].items(): print(f"- {pkg}:{len(funcs)}个工具 → {', '.join(funcs)}") return True def handle_init(): """处理 -init 命令""" - result = send_socket_request("init") + # 替换:send_socket_request → send_http_request + result = send_http_request("init") print(f"✅ {result['message']}" if result["success"] else f"❌ {result['message']}") return result["success"] diff --git a/oe-cli-mcp-server/mcp_server/cli/parse_args.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/cli/parse_args.py similarity index 100% rename from oe-cli-mcp-server/mcp_server/cli/parse_args.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/cli/parse_args.py diff --git a/oe-cli-mcp-server/mcp_server/dependency.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/dependency.py similarity index 100% rename from oe-cli-mcp-server/mcp_server/dependency.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/dependency.py diff --git a/oe-cli-mcp-server/mcp_server/manager/manager.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/manager/manager.py similarity index 100% rename from oe-cli-mcp-server/mcp_server/manager/manager.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/manager/manager.py diff --git a/oe-cli-mcp-server/mcp_server/manager/package_loader.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/manager/package_loader.py similarity index 100% rename from oe-cli-mcp-server/mcp_server/manager/package_loader.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/manager/package_loader.py diff --git a/oe-cli-mcp-server/mcp_server/manager/package_unloader.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/manager/package_unloader.py similarity index 100% rename from oe-cli-mcp-server/mcp_server/manager/package_unloader.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/manager/package_unloader.py diff --git a/oe-cli-mcp-server/mcp_server/manager/tool_repository.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/manager/tool_repository.py similarity index 100% rename from oe-cli-mcp-server/mcp_server/manager/tool_repository.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/manager/tool_repository.py diff --git a/oe-cli-mcp-server/mcp_server/mcp_manager.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/mcp_manager.py similarity index 53% rename from oe-cli-mcp-server/mcp_server/mcp_manager.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/mcp_manager.py index 50ade9fe70712fd9f124bfa4a7b0f7fe86d10e3b..62fd8b64a82ef07291081c2cf3017ff6a2b746eb 100644 --- a/oe-cli-mcp-server/mcp_server/mcp_manager.py +++ b/mcp_center/servers/oe-cli-mcp-server/mcp_server/mcp_manager.py @@ -1,18 +1,18 @@ # mcp_server/server.py -import json import os.path -import threading from functools import wraps -from typing import Dict, Any, Optional from mcp.server import FastMCP from config.base_config_loader import BaseConfig from mcp_server.manager.manager import ToolManager, logger from mcp_tools.tool_type import ToolType 
from util.get_project_root import get_project_root from util.zip_tool_util import unzip_tool +import threading +import uvicorn +from fastapi import FastAPI # -------------------------- 导入独立的 FastAPI 启动函数 -------------------------- -from mcp_server.api_server import start_fastapi_server + PUBLIC_CONFIG_PATH = os.path.join(get_project_root(), "config/public_config.toml") PERSIST_FILE = os.path.join(get_project_root(), "data/tool_state.json") @@ -49,6 +49,8 @@ class McpServer(ToolManager): self.port = port self.language = BaseConfig().get_config().public_config.language self.PERSIST_FILE = PERSIST_FILE + self.fastapi_app = None # 存储 FastAPI 实例 + self.fastapi_thread = None # 存储 FastAPI 线程 # -------------------------- 原有核心业务方法不变 -------------------------- def _mcp_register(self, packages=None): @@ -138,14 +140,110 @@ class McpServer(ToolManager): self._reset() self.start() - # -------------------------- 简化 start 方法:调用独立的 FastAPI 启动函数 -------------------------- def start(self): - # 1. 启动独立的 FastAPI 服务(线程不阻塞) - start_fastapi_server(host="0.0.0.0", port=8003) - # 2. 启动 FastMCP 主服务(原有逻辑不变) + + # 1. 先初始化 MCP 核心逻辑(确保 self.list_packages 等方法可用) self._reset() + logger.info(f"MCP 实例初始化完成,已加载 {len(self.list_packages())} 个工具包") + + # 2. 启动 FastAPI 服务(直接调用实例方法,绑定 self) + self._start_fastapi(host="0.0.0.0", port=12556) + + # 3. 最后启动 FastMCP 主服务(阻塞主线程) + logger.info("启动 FastMCP 主服务...") self.mcp.run(transport='sse') + # -------------------------- 简化 start 方法:调用独立的 FastAPI 启动函数 -------------------------- + + def _create_fastapi_app(self): + """创建 FastAPI 应用(内部方法,绑定当前实例 self)""" + app = FastAPI(title="MCP Tool API", version="1.0") + + # -------------------------- FastAPI 接口(完全对齐 Socket 版本逻辑)-------------------------- + @app.get("/tool/list", summary="查询所有已加载工具包") + def list_tools(): + """查询工具包(直接用 self 调用 list_packages/list_funcs,与 Socket list 逻辑一致)""" + try: + # 直接用 self 调用实例方法(和 Socket 版本 _exec_socket_action 的 list 逻辑完全一致) + pkg_funcs = {} + for pkg in self.list_packages(): + pkg_funcs[pkg] = self.list_funcs(pkg) + return { + "success": True, + "data": { + "pkg_funcs": pkg_funcs, + "total_packages": len(pkg_funcs) # 适配 handle.py 预期的返回结构 + } + } + except Exception as e: + logger.error(f"查询工具包失败:{str(e)}", exc_info=True) + return {"success": False, "message": f"查询失败:{str(e)}"} + + @app.post("/tool/add", summary="添加工具包") + def add_tool(type: str, value: str): + """添加系统/自定义工具包(与 Socket add 逻辑完全一致)""" + try: + # 完全对齐 _exec_socket_action 的 add 逻辑 + if type == "system": + self.load(ToolType(value)) # 系统包:按 ToolType 加载 + else: # custom 类型 + self.load(value) # 自定义包:按 zip 路径加载 + return {"success": True, "message": f"新增{value}成功"} + except Exception as e: + logger.error(f"添加工具包 {value} 失败:{str(e)}", exc_info=True) + return {"success": False, "message": f"添加失败:{str(e)}"} + + @app.post("/tool/remove", summary="删除工具包") + def remove_tool(type: str, value: str): + """删除工具包(与 Socket remove 逻辑完全一致,重启后生效)""" + try: + # 完全对齐 _exec_socket_action 的 remove 逻辑 + if type == "system": + self.remove(ToolType(value)) # 系统包:按 ToolType 删除 + else: # custom 类型 + self.remove(value) # 自定义包:按包名/路径删除 + return {"success": True, "message": f"删除{value}成功,重启后生效"} + except Exception as e: + logger.error(f"删除工具包 {value} 失败:{str(e)}", exc_info=True) + return {"success": False, "message": f"删除失败:{str(e)}"} + + @app.post("/tool/init", summary="初始化工具包") + def init_tool(): + """初始化工具包(与 Socket init 逻辑完全一致,仅保留基础运维包)""" + try: + self.init() # 直接调用实例的 init 方法(和 Socket 版本一致) + return {"success": True, "message": "初始化成功(仅保留基础运维包)"} + except Exception as e: + 
logger.error(f"初始化工具包失败:{str(e)}", exc_info=True) + return {"success": False, "message": f"初始化失败:{str(e)}"} + + return app + + def _start_fastapi(self, host: str = "0.0.0.0", port: int = 12556): + """启动 FastAPI 服务(实例方法,直接绑定 self)""" + # 1. 创建 FastAPI 应用(绑定当前实例) + self.fastapi_app = self._create_fastapi_app() + logger.info(f"FastAPI 应用创建完成,接口文档:http://{host}:{port}/docs") + + # 2. 定义线程内运行的服务逻辑 + def run_server(): + try: + uvicorn.run( + self.fastapi_app, + host=host, + port=port, + log_level="warning", + access_log=False + ) + except Exception as e: + logger.error(f"FastAPI 服务启动失败:{str(e)}", exc_info=True) + + # 3. 启动独立线程(不阻塞主服务) + self.fastapi_thread = threading.Thread(target=run_server, daemon=True) + self.fastapi_thread.start() + logger.info(f"FastAPI 服务线程启动成功(线程ID:{self.fastapi_thread.ident})") + + # -------------------------- 启动服务(原有逻辑不变)-------------------------- if __name__ == "__main__": config = BaseConfig().get_config().public_config diff --git a/oe-cli-mcp-server/mcp_server/server.py b/mcp_center/servers/oe-cli-mcp-server/mcp_server/server.py old mode 100755 new mode 100644 similarity index 100% rename from oe-cli-mcp-server/mcp_server/server.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_server/server.py diff --git a/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/base.py b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/base.py new file mode 100644 index 0000000000000000000000000000000000000000..136b114245fc9ffc9110460eb193ca459b64b823 --- /dev/null +++ b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/base.py @@ -0,0 +1,158 @@ +import subprocess +from typing import Any, Dict, Optional +import paramiko +from config.base_config_loader import LanguageEnum + + +def _format_gpu_info(raw_info: Dict[str, Any], + host: str, include_processes: bool, lang: Optional[LanguageEnum]) -> Dict[str, Any]: + """格式化输出(双语适配描述)""" + no_process_msg = "未开启进程查询(需设置include_processes=True)" if lang == LanguageEnum.ZH else "Process query not enabled (set include_processes=True)" + return { + "host": host, + "gpu_count": len(raw_info["gpu"]), + "gpu_details": raw_info["gpu"], + "include_processes": include_processes, + "process_details": raw_info["processes"] if include_processes else no_process_msg + } + +def _get_local_gpu_status(gpu_index: Optional[int], include_processes: bool, lang: Optional[LanguageEnum]) -> Dict[str, Any]: + """本地GPU查询(双语错误提示)""" + try: + # 构建基础查询命令 + cmd = "/usr/bin/nvidia-smi --query-gpu=index,name,memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits" + if gpu_index is not None: + cmd += f" -i {gpu_index}" + + # 执行本地命令 + result = subprocess.run( + cmd, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" + ) + + # 解析GPU信息 + gpu_info = [] + for line in result.stdout.strip().split('\n'): + if not line: + continue + try: + idx, name, mem_used, mem_total, gpu_util = line.split(', ') + gpu_info.append({ + "index": int(idx), + "name": name.strip(), + "memory_used": int(mem_used), + "memory_total": int(mem_total), + "gpu_utilization": int(gpu_util) + }) + except ValueError: + # 解析行数据失败(双语提示) + warn_msg = f"跳过无效的GPU信息行: {line}" if lang == LanguageEnum.ZH else f"Skipping invalid GPU info line: {line}" + print(warn_msg) # 或使用logger + continue + + # 处理进程信息 + proc_info = [] + if include_processes: + proc_cmd = "/usr/bin/nvidia-smi --query-compute-apps=pid,name,used_memory --format=csv,noheader,nounits" + try: + proc_result = subprocess.run( + proc_cmd, shell=True, 
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" + ) + for line in proc_result.stdout.strip().split('\n'): + if not line: + continue + try: + pid, proc_name, used_mem = line.split(', ') + proc_info.append({ + "pid": int(pid), + "name": proc_name.strip(), + "used_memory": int(used_mem) + }) + except ValueError: + # 进程信息解析失败(双语提示) + warn_msg = f"跳过无效的进程信息行: {line}" if lang == LanguageEnum.ZH else f"Skipping invalid process info line: {line}" + print(warn_msg) + continue + except subprocess.CalledProcessError as e: + # 进程查询命令执行失败(双语提示) + err_msg = f"查询GPU进程信息失败: {e.stderr}" if lang == LanguageEnum.ZH else f"Failed to query GPU process info: {e.stderr}" + raise RuntimeError(err_msg) + + return {"gpu": gpu_info, "processes": proc_info} + + except FileNotFoundError: + # 未找到nvidia-smi命令(双语提示) + err_msg = "未找到nvidia-smi命令,请确认已安装NVIDIA驱动" if lang == LanguageEnum.ZH else "/usr/bin/nvidia-smi command not found, please ensure NVIDIA driver is installed" + raise RuntimeError(err_msg) + except subprocess.CalledProcessError as e: + # GPU基础查询失败(双语提示) + err_msg = f"执行GPU查询命令失败: {e.stderr}" if lang == LanguageEnum.ZH else f"Failed to execute GPU query command: {e.stderr}" + raise RuntimeError(err_msg) + + +def _get_remote_gpu_status_via_ssh(ssh: paramiko.SSHClient, gpu_index: Optional[int], + include_processes: bool, lang: Optional[LanguageEnum]) -> Dict[str, Any]: + """远程GPU查询(双语错误提示)""" + # 1. 查询GPU基础信息 + cmd = "/usr/bin/nvidia-smi --query-gpu=index,name,memory.used,memory.total,utilization.gpu --format=csv,noheader,nounits" + if gpu_index is not None: + cmd += f" -i {gpu_index}" + + stdin, stdout, stderr = ssh.exec_command(cmd) + exit_status = stdout.channel.recv_exit_status() + error = stderr.read().decode("utf-8").strip() + + if exit_status != 0: + # 远程命令执行失败(双语提示) + if "command not found" in error: + err_msg = "远程主机未找到nvidia-smi命令,可能未安装NVIDIA驱动" if lang == LanguageEnum.ZH else "/usr/bin/nvidia-smi command not found on remote host, possibly no NVIDIA driver installed" + else: + err_msg = f"远程GPU查询命令执行失败: {error}" if lang == LanguageEnum.ZH else f"Remote GPU query command failed: {error}" + raise RuntimeError(err_msg) + + # 解析GPU信息 + gpu_info = [] + for line in stdout.read().decode("utf-8").strip().split('\n'): + if not line: + continue + try: + idx, name, mem_used, mem_total, gpu_util = line.split(', ') + gpu_info.append({ + "index": int(idx), + "name": name.strip(), + "memory_used": int(mem_used), + "memory_total": int(mem_total), + "gpu_utilization": int(gpu_util) + }) + except ValueError: + warn_msg = f"跳过远程无效的GPU信息行: {line}" if lang == LanguageEnum.ZH else f"Skipping invalid remote GPU info line: {line}" + print(warn_msg) + continue + + # 2. 
查询进程信息(按需) + proc_info = [] + if include_processes: + proc_cmd = "/usr/bin/nvidia-smi --query-compute-apps=pid,name,used_memory --format=csv,noheader,nounits" + stdin_proc, stdout_proc, stderr_proc = ssh.exec_command(proc_cmd) + exit_status_proc = stdout_proc.channel.recv_exit_status() + error_proc = stderr_proc.read().decode("utf-8").strip() + + if exit_status_proc != 0: + err_msg = f"远程GPU进程查询失败: {error_proc}" if lang == LanguageEnum.ZH else f"Remote GPU process query failed: {error_proc}" + raise RuntimeError(err_msg) + + for line in stdout_proc.read().decode("utf-8").strip().split('\n'): + if not line: + continue + try: + pid, proc_name, used_mem = line.split(', ') + proc_info.append({ + "pid": int(pid), + "name": proc_name.strip(), + "used_memory": int(used_mem) + }) + except ValueError: + warn_msg = f"跳过远程无效的进程信息行: {line}" if lang == LanguageEnum.ZH else f"Skipping invalid remote process info line: {line}" + print(warn_msg) + continue + + return {"gpu": gpu_info, "processes": proc_info} \ No newline at end of file diff --git a/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/config.json b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/config.json new file mode 100644 index 0000000000000000000000000000000000000000..d6a9ef52bdde199a26d61bbde7f41eb6684f8446 --- /dev/null +++ b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/config.json @@ -0,0 +1,8 @@ +{ + "tools": { + "nvidia_smi_status": { + "zh": "使用nvidia-smi获取本地或远程服务器的GPU状态信息(远程需提供SSH信息)。返回GPU的利用率、显存使用量、温度等关键指标。\n支持本地和远程的GPU状态查询工具\n\n本地查询:不填host、username、password即可\n远程查询:必须提供host、username、password(port可选,默认22)\n\n1. 输入值如下:\n - host:远程主机IP或hostname,不填则查询本地\n - port:SSH端口,默认22\n - username:SSH用户名,远程查询时必填\n - password:SSH密码,远程查询时必填\n - gpu_index:GPU索引(0-based,可选,不填则查询所有GPU)\n - include_processes:是否包含占用GPU的进程信息(默认False)\n\n2. 返回值为包含查询结果的字典\n - success:布尔值,表示查询是否成功\n - message:字符串,描述查询结果(成功信息或错误原因)\n - data:字典,包含GPU状态详细信息\n - host:查询的主机(本地为\"localhost\")\n - gpus:列表,每个元素为GPU信息字典\n - index:GPU索引(整数)\n - name:GPU型号名称\n - utilization_gpu:GPU利用率(百分比)\n - utilization_memory:显存利用率(百分比)\n - temperature:温度(摄氏度)\n - memory_total:总显存(MB)\n - memory_used:已用显存(MB)\n - memory_free:空闲显存(MB)\n - processes:占用进程列表(仅当include_processes=True时返回)\n - pid:进程ID\n - name:进程名称\n - memory_used:进程占用显存(MB)", + "en": "GPU status query tool supporting local and remote servers using nvidia-smi\n\nLocal query: Leave host, username, password empty\nRemote query: Must provide host, username, password (port is optional, default 22)\n\n1. Input values are as follows:\n - host: Remote host IP or hostname, leave empty for local query\n - port: SSH port, default 22\n - username: SSH username, required for remote query\n - password: SSH password, required for remote query\n - gpu_index: GPU index (0-based, optional, all GPUs if not specified)\n - include_processes: Whether to include GPU-using processes (default False)\n\n2. 
Return value is a dictionary containing query results\n - success: Boolean, indicating whether the query was successful\n - message: String, describing the query result (success information or error reason)\n - data: Dictionary, containing detailed GPU status information\n - host: Queried host (\"localhost\" for local)\n - gpus: List, each element is a GPU information dictionary\n - index: GPU index (integer)\n - name: GPU model name\n - utilization_gpu: GPU utilization (percentage)\n - utilization_memory: Memory utilization (percentage)\n - temperature: Temperature (celsius)\n - memory_total: Total memory (MB)\n - memory_used: Used memory (MB)\n - memory_free: Free memory (MB)\n - processes: List of using processes (returned only if include_processes=True)\n - pid: Process ID\n - name: Process name\n - memory_used: Memory used by process (MB)" + } + } +} \ No newline at end of file diff --git a/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/deps.toml b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/deps.toml new file mode 100644 index 0000000000000000000000000000000000000000..debad42b1de55bd82c816e9a5dfe31161b38d706 --- /dev/null +++ b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/deps.toml @@ -0,0 +1,23 @@ +# deps.toml 简化版(仅保留 Python+系统依赖,文档2-64节核心需求) +# 说明:用于自定义mcptool,定义运行必需的系统级工具和Python库 + + +# ===================== 1. 系统依赖(system_deps)===================== +# 用途:安装mcptool依赖的系统级工具(需通过yum/apt安装) +# 格式:{依赖名 = ["openEuler/CentOS安装命令(yum)", "Ubuntu/Debian安装命令(apt)"]} +# 备注:无需手动验证是否已安装,venv_util.py会自动检查并跳过已安装项 + +[system] + +# 示例3:若mcptool是智算调优类(如GPU监控工具) +#nvidia_driver = [ +# "yum install -y nvidia-driver-latest-dkms" # openEuler 安装NVIDIA驱动 +#] + +# ===================== 2. Python依赖(pip_deps)===================== +# 用途:安装mcptool依赖的Python库(会安装到mcp虚拟环境,文档2-144节) +# 格式:{依赖名 = "版本约束"}(版本约束可选,如"==2.31.0"或">=2.25.0") + +[pip] +# 示例1:基础网络请求(若mcptool需调用远程API) +requests = "==2.31.0" # 固定版本,避免版本冲突 \ No newline at end of file diff --git a/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/tool.py b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/tool.py new file mode 100644 index 0000000000000000000000000000000000000000..f92dec77cb1aadade56a40eaef5de4d55961532b --- /dev/null +++ b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/AI_tools/nvidia_tool/tool.py @@ -0,0 +1,82 @@ +from typing import Any, Dict, Optional, Union +import paramiko +from config.base_config_loader import BaseConfig, LanguageEnum + +from mcp_tools.AI_tools.nvidia_tool.base import _format_gpu_info, _get_local_gpu_status, _get_remote_gpu_status_via_ssh + + +async def nvidia_smi_status( + host: Union[str, None] = None, + gpu_index: Optional[int] = None, + include_processes: bool = False, + lang: Optional[LanguageEnum] = LanguageEnum.ZH, + config: Optional[Any] = None + ) -> Dict[str, Any]: + """获取GPU状态信息""" + result = { + "success": False, + "message": "", + "data": {} + } + + # 1. 
本地查询分支(host为空) + if host is None: + try: + raw_info = _get_local_gpu_status(gpu_index, include_processes, lang) + formatted_data = _format_gpu_info(raw_info, "localhost", include_processes, lang) + + result["success"] = True + result["message"] = "成功获取本地主机的GPU状态信息" if lang == LanguageEnum.ZH else "Successfully obtained GPU status information for the local host" + result["data"] = formatted_data + return result + except Exception as e: + error_msg = f"获取本地GPU状态信息失败: {str(e)}" if lang == LanguageEnum.ZH else f"Failed to obtain local GPU status information: {str(e)}" + result["message"] = error_msg + return result + + # 2. 远程查询分支(host不为空) + else: + for host_config in BaseConfig().get_config().public_config.remote_hosts: + if host == host_config.name or host == host_config.host: + try: + # 建立SSH连接 + ssh = paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + ssh.connect( + hostname=host_config.host, + port=host_config.port, + username=host_config.username, + password=host_config.password + ) + + # 远程查询GPU状态 + raw_info = _get_remote_gpu_status_via_ssh(ssh, gpu_index, include_processes, lang) + ssh.close() + + # 格式化结果 + formatted_data = _format_gpu_info(raw_info, host_config.host, include_processes, lang) + result["success"] = True + result["message"] = f"成功获取远程主机 {host_config.host} 的GPU状态信息" if lang == LanguageEnum.ZH else f"Successfully obtained GPU status information for remote host {host_config.host}" + result["data"] = formatted_data + return result + + except paramiko.AuthenticationException: + # 认证失败(双语提示) + if 'ssh' in locals(): + ssh.close() + err_msg = "SSH认证失败,请检查用户名和密码" if lang == LanguageEnum.ZH else "SSH authentication failed, please check username and password" + result["message"] = err_msg + return result + except Exception as e: + # 其他远程执行异常(双语提示) + if 'ssh' in locals(): + ssh.close() + err_msg = f"远程主机 {host_config.host} 查询异常: {str(e)}" if lang == LanguageEnum.ZH else f"Remote host {host_config.host} query error: {str(e)}" + result["message"] = err_msg + return result + + # 未匹配到远程主机(双语异常) + if lang == LanguageEnum.ZH: + raise ValueError(f"未找到远程主机配置: {host}") + else: + raise ValueError(f"Remote host configuration not found: {host}") \ No newline at end of file diff --git a/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/base.py b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/base.py similarity index 100% rename from oe-cli-mcp-server/mcp_tools/base_tools/file_tools/base.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/base.py diff --git a/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/config.json b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/config.json similarity index 100% rename from oe-cli-mcp-server/mcp_tools/base_tools/file_tools/config.json rename to mcp_center/servers/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/config.json diff --git a/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/deps.toml b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/deps.toml similarity index 100% rename from oe-cli-mcp-server/mcp_tools/base_tools/file_tools/deps.toml rename to mcp_center/servers/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/deps.toml diff --git a/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/tool.py b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/tool.py similarity index 100% rename from oe-cli-mcp-server/mcp_tools/base_tools/file_tools/tool.py rename to 
mcp_center/servers/oe-cli-mcp-server/mcp_tools/base_tools/file_tools/tool.py diff --git a/oe-cli-mcp-server/mcp_tools/tool_type.py b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/tool_type.py similarity index 88% rename from oe-cli-mcp-server/mcp_tools/tool_type.py rename to mcp_center/servers/oe-cli-mcp-server/mcp_tools/tool_type.py index 0d90d873e5bfb6507fbea8c8faac1d013b51bf0d..cafb249cb61c9be9d51660cc9f1923034eb98ff2 100644 --- a/oe-cli-mcp-server/mcp_tools/tool_type.py +++ b/mcp_center/servers/oe-cli-mcp-server/mcp_tools/tool_type.py @@ -7,3 +7,4 @@ class ToolType(Enum): AI = "AI_tools" MIRROR = "mirror_tools" CAL = "cal_tools" + RAG = "rag_tools" diff --git a/oe-cli-mcp-server/requirements.txt b/mcp_center/servers/oe-cli-mcp-server/requirements.txt similarity index 80% rename from oe-cli-mcp-server/requirements.txt rename to mcp_center/servers/oe-cli-mcp-server/requirements.txt index 23a5e665ae7db56b7a49bb31f9941fe89c12a674..d6df13ee848c6e598c0156cd6378d8b275603c58 100644 --- a/oe-cli-mcp-server/requirements.txt +++ b/mcp_center/servers/oe-cli-mcp-server/requirements.txt @@ -6,4 +6,5 @@ toml==0.10.2 mcp==1.9.4 scp==0.15.0 fastapi>=0.122.0 -uvicorn>=0.38.0 \ No newline at end of file +uvicorn>=0.38.0 +requests==2.31.0 \ No newline at end of file diff --git a/oe-cli-mcp-server/run.sh b/mcp_center/servers/oe-cli-mcp-server/run.sh similarity index 37% rename from oe-cli-mcp-server/run.sh rename to mcp_center/servers/oe-cli-mcp-server/run.sh index a368a26557f4f1954099e5b11208817df634c410..0c9a3d82bfd7056c098a425a2b5df91a14c0b874 100755 --- a/oe-cli-mcp-server/run.sh +++ b/mcp_center/servers/oe-cli-mcp-server/run.sh @@ -2,12 +2,14 @@ cp mcp-server.service /etc/systemd/system/ source venv/global/bin/activate +pip install --upgrade pip +pip install -r /usr/lib/euler-copilot-framework/mcp_center/servers/oe-cli-mcp-server/requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple systemctl daemon-reload systemctl enable mcp-server.service systemctl start mcp-server.service systemctl status mcp-server -chmod +x /home/tsn/oe-cli-mcp-server/mcp_server/cli.py +chmod +x /usr/lib/euler-copilot-framework/mcp_center/servers/oe-cli-mcp-server/mcp_server/cli.py rm -f /usr/local/bin/mcp-server -sudo ln -s /home/tsn/oe-cli-mcp-server/mcp_server/cli.py /usr/local/bin/mcp-server \ No newline at end of file +sudo ln -s /usr/lib/euler-copilot-framework/mcp_center/servers/oe-cli-mcp-server/mcp_server/cli.py /usr/local/bin/mcp-server \ No newline at end of file diff --git a/oe-cli-mcp-server/setup.py b/mcp_center/servers/oe-cli-mcp-server/setup.py similarity index 100% rename from oe-cli-mcp-server/setup.py rename to mcp_center/servers/oe-cli-mcp-server/setup.py diff --git a/oe-cli-mcp-server/util/get_project_root.py b/mcp_center/servers/oe-cli-mcp-server/util/get_project_root.py similarity index 100% rename from oe-cli-mcp-server/util/get_project_root.py rename to mcp_center/servers/oe-cli-mcp-server/util/get_project_root.py diff --git a/oe-cli-mcp-server/util/get_type.py b/mcp_center/servers/oe-cli-mcp-server/util/get_type.py similarity index 100% rename from oe-cli-mcp-server/util/get_type.py rename to mcp_center/servers/oe-cli-mcp-server/util/get_type.py diff --git a/oe-cli-mcp-server/util/test_llm_valid.py b/mcp_center/servers/oe-cli-mcp-server/util/test_llm_valid.py similarity index 100% rename from oe-cli-mcp-server/util/test_llm_valid.py rename to mcp_center/servers/oe-cli-mcp-server/util/test_llm_valid.py diff --git a/oe-cli-mcp-server/util/tool_package_file_check.py 
b/mcp_center/servers/oe-cli-mcp-server/util/tool_package_file_check.py similarity index 100% rename from oe-cli-mcp-server/util/tool_package_file_check.py rename to mcp_center/servers/oe-cli-mcp-server/util/tool_package_file_check.py diff --git a/oe-cli-mcp-server/util/venv_util.py b/mcp_center/servers/oe-cli-mcp-server/util/venv_util.py similarity index 100% rename from oe-cli-mcp-server/util/venv_util.py rename to mcp_center/servers/oe-cli-mcp-server/util/venv_util.py diff --git a/oe-cli-mcp-server/util/zip_tool_util.py b/mcp_center/servers/oe-cli-mcp-server/util/zip_tool_util.py similarity index 98% rename from oe-cli-mcp-server/util/zip_tool_util.py rename to mcp_center/servers/oe-cli-mcp-server/util/zip_tool_util.py index 4bf9643b52f5b63c2d52c7e9fdcfe5d9ccef1597..d363288af7404e923eb1bddc4d056e5c1a7d50de 100644 --- a/oe-cli-mcp-server/util/zip_tool_util.py +++ b/mcp_center/servers/oe-cli-mcp-server/util/zip_tool_util.py @@ -114,5 +114,5 @@ if __name__ == "__main__": logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s") # 测试:解压 custom_gpu_tool.zip,最终路径为 mcp_tools/personal_tools/custom_gpu_tool/xxx - result = unzip_tool(zip_path="/home/tsn/cli_mcp_server/test_tool.zip") + result = unzip_tool(zip_path="/home/tsn/cli_mcp_server/nvidia_tool.zip") print(f"解压结果:{'成功' if result else '失败'}") \ No newline at end of file diff --git a/oe-cli-mcp-server/mcp-server.service b/mcp_center/service/mcp-server.service similarity index 100% rename from oe-cli-mcp-server/mcp-server.service rename to mcp_center/service/mcp-server.service diff --git a/oe-cli-mcp-server/mcp_server/api_server.py b/oe-cli-mcp-server/mcp_server/api_server.py deleted file mode 100644 index fe4f8ab7ef507f0be38fe62eb41e8badff3eb721..0000000000000000000000000000000000000000 --- a/oe-cli-mcp-server/mcp_server/api_server.py +++ /dev/null @@ -1,149 +0,0 @@ -# mcp_server/api_server.py -from fastapi import FastAPI, HTTPException, Query -from pydantic import BaseModel -import uvicorn -import threading -import logging - -# 导入单例 McpServer(用于调用核心业务逻辑) -from mcp_server.server import McpServer -from mcp_tools.tool_type import ToolType - -# 日志配置(与主服务保持一致) -logger = logging.getLogger("McpApiServer") - -# -------------------------- FastAPI 初始化 -------------------------- -def create_fastapi_app() -> FastAPI: - """创建 FastAPI 实例并定义接口(独立于 McpServer 类)""" - app = FastAPI( - title="MCP Tool Manager API", - description="用于管理 MCP 工具包的 HTTP 接口(替代原 Socket 服务)", - version="1.0.0" - ) - - # -------------------------- 数据模型(参数校验)-------------------------- - class AddToolRequest(BaseModel): - type: str = Query(..., description="工具类型:system(系统包)/ custom(自定义包)") - value: str = Query(..., description="系统包填 ToolType 值,自定义包填 zip 路径或包名") - - class RemoveToolRequest(BaseModel): - type: str = Query(..., description="工具类型:system(系统包)/ custom(自定义包)") - value: str = Query(..., description="系统包填 ToolType 值,自定义包填包名") - - # -------------------------- 核心:获取 McpServer 单例实例 -------------------------- - def get_mcp_server() -> McpServer: - """获取 McpServer 单例(确保与主服务使用同一个实例)""" - try: - return McpServer._instance # 直接获取单例(依赖原 singleton 装饰器的 _instance 属性) - except AttributeError: - raise HTTPException(status_code=503, detail="MCP 主服务未初始化") - - # -------------------------- API 接口定义 -------------------------- - @app.post("/tool/add", summary="新增工具包") - def add_tool(type: str = Query(...), value: str = Query(...)): - """新增工具包(system/custom 类型)""" - mcp_server = get_mcp_server() - try: - if type == "system": - # 转换为 ToolType 枚举 - try: - tool_type = 
ToolType(value) - except ValueError: - raise HTTPException(status_code=400, detail=f"不支持的系统工具类型:{value}(参考 ToolType 枚举)") - mcp_server.load(tool_type) - elif type == "custom": - mcp_server.load(value) - else: - raise HTTPException(status_code=400, detail="type 只能是 system 或 custom") - return {"success": True, "message": f"新增 {value} 成功"} - except HTTPException: - raise # 直接抛出已定义的 HTTP 异常 - except Exception as e: - logger.error(f"新增工具包失败:{str(e)}") - raise HTTPException(status_code=500, detail=f"新增失败:{str(e)}") - - @app.post("/tool/remove", summary="删除工具包") - def remove_tool(type: str = Query(...), value: str = Query(...)): - """删除工具包(system/custom 类型)""" - mcp_server = get_mcp_server() - try: - if type == "system": - try: - tool_type = ToolType(value) - except ValueError: - raise HTTPException(status_code=400, detail=f"不支持的系统工具类型:{value}") - mcp_server.remove(tool_type) - elif type == "custom": - mcp_server.remove(value) - else: - raise HTTPException(status_code=400, detail="type 只能是 system 或 custom") - return {"success": True, "message": f"删除 {value} 成功,重启后生效"} - except HTTPException: - raise - except Exception as e: - logger.error(f"删除工具包失败:{str(e)}") - raise HTTPException(status_code=500, detail=f"删除失败:{str(e)}") - - @app.get("/tool/list", summary="查询所有已加载工具包") - def list_tools(): - """查询所有工具包及对应的函数""" - mcp_server = get_mcp_server() - try: - pkg_funcs = {} - for pkg in mcp_server.list_packages(): - pkg_funcs[pkg] = mcp_server.list_funcs(pkg) - return { - "success": True, - "data": { - "pkg_funcs": pkg_funcs, - "total_packages": len(pkg_funcs) - } - } - except Exception as e: - logger.error(f"查询工具包失败:{str(e)}") - raise HTTPException(status_code=500, detail=f"查询失败:{str(e)}") - - @app.post("/tool/init", summary="初始化系统(仅保留基础运维包)") - def init_system(): - """初始化系统,卸载所有包并仅保留基础运维包""" - mcp_server = get_mcp_server() - try: - mcp_server.init() - return {"success": True, "message": "初始化成功(仅保留基础运维包)"} - except Exception as e: - logger.error(f"初始化系统失败:{str(e)}") - raise HTTPException(status_code=500, detail=f"初始化失败:{str(e)}") - - @app.post("/tool/restart", summary="重启 MCP 服务") - def restart_service(): - """重启 MCP 服务(重新加载所有工具包)""" - mcp_server = get_mcp_server() - try: - mcp_server.restart() - return {"success": True, "message": "服务重启成功"} - except Exception as e: - logger.error(f"重启服务失败:{str(e)}") - raise HTTPException(status_code=500, detail=f"重启失败:{str(e)}") - - return app - -# -------------------------- 启动 FastAPI 服务(独立线程)-------------------------- -def start_fastapi_server(host: str = "0.0.0.0", port: int = 8003): - """在独立线程中启动 FastAPI 服务(不阻塞主服务)""" - app = create_fastapi_app() - logger.info(f"FastAPI 服务启动中:http://{host}:{port}") - logger.info(f"接口文档地址:http://{host}:{port}/docs") - - def run_server(): - uvicorn.run( - app, - host=host, - port=port, - log_level="warning", # 仅输出警告以上日志 - access_log=False # 关闭访问日志(避免刷屏) - ) - - # 启动独立线程(daemon=True:主进程退出时,API 服务也退出) - api_thread = threading.Thread(target=run_server, daemon=True) - api_thread.start() - return api_thread \ No newline at end of file diff --git a/oe-cli-mcp-server/mcp_server/cli/socket.py b/oe-cli-mcp-server/mcp_server/cli/socket.py deleted file mode 100644 index 6dd683d0b2d544a8cdff4af75dee38017a6504e8..0000000000000000000000000000000000000000 --- a/oe-cli-mcp-server/mcp_server/cli/socket.py +++ /dev/null @@ -1,32 +0,0 @@ -import socket -import json -import logging -from typing import Dict, Any - -# Socket配置(直接硬编码,简化配置) -SOCKET_HOST = "127.0.0.1" -SOCKET_PORT = 8003 -SOCKET_TIMEOUT = 5 - -logger = logging.getLogger(__name__) - -def 
send_socket_request(action: str, params: Dict[str, Any] = None) -> Dict[str, Any]: - """发送Socket请求到服务端(极简封装,失败直接返回错误)""" - params = params or {} - request = json.dumps({"action": action, "params": params}, ensure_ascii=False).encode("utf-8") - - try: - client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - client.settimeout(SOCKET_TIMEOUT) - client.connect((SOCKET_HOST, SOCKET_PORT)) - client.send(request) - response = json.loads(client.recv(4096).decode("utf-8")) - client.close() - return response - except socket.timeout: - return {"success": False, "message": "连接服务超时"} - except ConnectionRefusedError: - return {"success": False, "message": "服务未启动,请先执行 -start"} - except Exception as e: - logger.error(f"Socket通信失败:{str(e)}") - return {"success": False, "message": f"通信失败:{str(e)}"} \ No newline at end of file diff --git a/oe-cli-mcp-server/util/test.py b/oe-cli-mcp-server/util/test.py deleted file mode 100644 index 413065913871131a0eda78255c36f4bff11cf309..0000000000000000000000000000000000000000 --- a/oe-cli-mcp-server/util/test.py +++ /dev/null @@ -1,3 +0,0 @@ -import os - -print(os.path.basename("/home/tsn/oe-cli-mcp-server/mcp_tools/personal_tools/test_tool/base.py")) \ No newline at end of file
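Editor's note (illustrative, not part of the patch): this change replaces the Socket-based CLI transport (the deleted mcp_server/cli/socket.py on port 8003) with a FastAPI HTTP API that mcp_manager.py serves on 127.0.0.1:12556 and that cli/handle.py consumes via send_http_request. The sketch below shows how those endpoints could be exercised directly with the requests library that the patch adds to requirements.txt; the package value "AI_tools" is just one ToolType value from mcp_tools/tool_type.py and stands in for whichever system or custom package is actually wanted.

```python
# Minimal sketch of driving the new tool-management HTTP API.
# Assumes the MCP server (and its embedded FastAPI app on port 12556) is running locally.
import requests

BASE_URL = "http://127.0.0.1:12556"

# List currently loaded tool packages (GET /tool/list).
resp = requests.get(f"{BASE_URL}/tool/list").json()
if resp["success"]:
    for pkg, funcs in resp["data"]["pkg_funcs"].items():
        print(f"{pkg}: {', '.join(funcs)}")
else:
    print(resp["message"])

# Add a system tool package; parameters are sent as query params,
# matching send_http_request in cli/handle.py (POST /tool/add).
resp = requests.post(f"{BASE_URL}/tool/add",
                     params={"type": "system", "value": "AI_tools"}).json()
print(resp["message"])

# Remove it again; per the API, removal takes effect after a service restart (POST /tool/remove).
resp = requests.post(f"{BASE_URL}/tool/remove",
                     params={"type": "system", "value": "AI_tools"}).json()
print(resp["message"])
```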