From d7fd343b362f8df8ab0d0a8fda82bfcab79bb662 Mon Sep 17 00:00:00 2001
From: cui-gaoleng <562344211@qq.com>
Date: Wed, 17 Dec 2025 16:00:11 +0800
Subject: [PATCH 1/2] =?UTF-8?q?=E4=BF=AE=E6=94=B9ignorance?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index c62e334e..84a874ae 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,4 @@ logs
 .ruff_cache/
 config
 uv.lock
+!mcp_center/config
--
Gitee

From 65b044aca25a278959e2c132689579e4c2cd2d98 Mon Sep 17 00:00:00 2001
From: cui-gaoleng <562344211@qq.com>
Date: Wed, 17 Dec 2025 16:00:41 +0800
Subject: [PATCH 2/2] mcp_center/config

---
 .../config/private/mcp_server/config.toml | 21 +++++
 .../private/mcp_server/config_loader.py | 32 +++++++
 mcp_center/config/private/rag/config.toml | 0
 .../config/private/rag/config_loader.py | 26 ++++++
 .../config/public/base_config_loader.py | 87 +++++++++++++++++++
 mcp_center/config/public/public_config.toml | 17 ++++
 6 files changed, 183 insertions(+)
 create mode 100644 mcp_center/config/private/mcp_server/config.toml
 create mode 100644 mcp_center/config/private/mcp_server/config_loader.py
 create mode 100644 mcp_center/config/private/rag/config.toml
 create mode 100644 mcp_center/config/private/rag/config_loader.py
 create mode 100644 mcp_center/config/public/base_config_loader.py
 create mode 100644 mcp_center/config/public/public_config.toml

diff --git a/mcp_center/config/private/mcp_server/config.toml b/mcp_center/config/private/mcp_server/config.toml
new file mode 100644
index 00000000..ce814405
--- /dev/null
+++ b/mcp_center/config/private/mcp_server/config.toml
@@ -0,0 +1,21 @@
+# MCP server private configuration file
+# Language setting: zh (Chinese) or en (English)
+language = "zh"
+# Server settings
+host = "0.0.0.0"
+port = 12555
+fastapi_port = 12556
+# LLM settings
+llm_remote = ""
+llm_model = ""
+llm_api_key = ""
+max_tokens = 8192
+temperature = 0.7
+# Remote host list
+[[remote_hosts]]
+name = "本机"
+os_type = 
"openEuler"
+host = ""
+port = 22
+username = "root"
+password = ""
\ No newline at end of file
diff --git a/mcp_center/config/private/mcp_server/config_loader.py b/mcp_center/config/private/mcp_server/config_loader.py
new file mode 100644
index 00000000..5c1c6929
--- /dev/null
+++ b/mcp_center/config/private/mcp_server/config_loader.py
@@ -0,0 +1,32 @@
+# Copyright (c) Huawei Technologies Co., Ltd. 2023-2025. All rights reserved.
+from config.public.base_config_loader import BaseConfig, project_root
+import os
+from pydantic import BaseModel, Field
+import toml
+
+
+class McpServerConfigModel(BaseModel):
+    """Private configuration model for the MCP server."""
+    port: int = Field(default=12555, description="MCP服务端口")
+    fastapi_port: int = Field(default=12556, description="fastapi服务端口")
+    llm_remote: str = Field(default="", description="LLM远程主机地址")
+    llm_model: str = Field(default="gpt-3.5-turbo", description="LLM模型名称")
+    llm_api_key: str = Field(default="", description="LLM API Key")
+    max_tokens: int = Field(default=2048, description="LLM最大Token数")
+    temperature: float = Field(default=0.7, description="LLM温度参数")
+
+class McpServerConfig(BaseConfig):
+    """Reads the public config, then overlays the MCP-server private config."""
+
+    def __init__(self) -> None:
+        """Load public config (via BaseConfig) and then the private config."""
+        super().__init__()
+        self.load_private_config()
+
+    def load_private_config(self) -> None:
+        """Load the private config from $MCP_SERVER_CONFIG or the default path under the project root."""
+        config_file = os.getenv("MCP_SERVER_CONFIG")
+        if config_file is None:
+            config_file = os.path.join(project_root, "config", "private", "mcp_server", "config.toml")
+        self._config.private_config = McpServerConfigModel.model_validate(toml.load(config_file))
+
diff --git a/mcp_center/config/private/rag/config.toml b/mcp_center/config/private/rag/config.toml
new file mode 100644
index 00000000..e69de29b
diff --git a/mcp_center/config/private/rag/config_loader.py b/mcp_center/config/private/rag/config_loader.py
new file mode 100644
index 00000000..8981173d
--- /dev/null
+++ b/mcp_center/config/private/rag/config_loader.py
@@ -0,0 +1,26 @@
+# Copyright (c) Huawei Technologies Co.,
Ltd. 2023-2025. All rights reserved.
+from config.public.base_config_loader import BaseConfig, project_root
+import os
+from pydantic import BaseModel, Field
+import toml
+
+
+class RemoteInfoConfigModel(BaseModel):
+    """Private configuration model for the RAG service."""
+    port: int = Field(default=12311, description="MCP服务端口")
+
+
+class RemoteInfoConfig(BaseConfig):
+    """Reads the public config, then overlays the RAG private config."""
+
+    def __init__(self) -> None:
+        """Load public config (via BaseConfig) and then the private config."""
+        super().__init__()
+        self.load_private_config()
+
+    def load_private_config(self) -> None:
+        """Load the private config from $RAG_CONFIG or the default path.
+        Anchored to project_root (was CWD-relative, breaking runs from other directories)."""
+        config_file = os.getenv("RAG_CONFIG")
+        if config_file is None:
+            config_file = os.path.join(project_root, "config", "private", "rag", "config.toml")
+        self._config.private_config = RemoteInfoConfigModel.model_validate(toml.load(config_file))
diff --git a/mcp_center/config/public/base_config_loader.py b/mcp_center/config/public/base_config_loader.py
new file mode 100644
index 00000000..54106237
--- /dev/null
+++ b/mcp_center/config/public/base_config_loader.py
@@ -0,0 +1,87 @@
+# Copyright (c) Huawei Technologies Co., Ltd. 2023-2025. All rights reserved.
+"""Configuration file handling module."""
+import toml
+from enum import Enum
+from typing import Any
+from pydantic import BaseModel, Field
+from pathlib import Path
+from copy import deepcopy
+import sys
+import os
+# Two levels up from this file is the project root.
+project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
+# 2.
添加到模块搜索路径(用于 import 模块)
+sys.path.append(project_root)
+can_import = True
+try:
+    from apps.schemas.config import ConfigModel as FrameworkConfigModel
+except ImportError:
+    can_import = False
+
+# Absolute imports can be used from here on.
+
+
+class LanguageEnum(str, Enum):
+    """Supported languages."""
+    ZH = "zh"
+    EN = "en"
+
+
+class RemoteConfigModel(BaseModel):
+    """Connection details for one remote host."""
+    name: str = Field(..., description="远程主机名称")
+    os_type: str = Field(..., description="远程主机操作系统类型")
+    host: str = Field(..., description="远程主机地址")
+    port: int = Field(..., description="远程主机端口")
+    username: str = Field(..., description="远程主机用户名")
+    password: str = Field(..., description="远程主机密码")
+
+
+class PublicConfigModel(BaseModel):
+    """Configuration shared by every service."""
+    language: LanguageEnum = Field(default=LanguageEnum.ZH, description="语言")
+    remote_hosts: list[RemoteConfigModel] = Field(default_factory=list, description="远程主机列表")
+    llm_remote: str = Field(default="https://dashscope.aliyuncs.com/compatible-mode/v1", description="LLM远程主机地址")
+    llm_model: str = Field(default="qwen3-coder-480b-a35b-instruct", description="LLM模型名称")
+    llm_api_key: str = Field(default="", description="LLM API Key")
+    max_tokens: int = Field(default=8192, description="LLM最大Token数")
+    temperature: float = Field(default=0.7, description="LLM温度参数")
+
+
+class ConfigModel(BaseModel):
+    """Top-level container: shared public config plus service-specific private config."""
+    public_config: PublicConfigModel = Field(default_factory=PublicConfigModel, description="公共配置")
+    private_config: Any = Field(default=None, description="私有配置")
+
+
+class BaseConfig:
+    """Base class that reads the public config file; subclasses add a private config."""
+
+    def __init__(self) -> None:
+        """Read the public config; if the framework config is importable, its LLM settings take precedence."""
+        config_file = os.path.join(project_root, "config", "public", "public_config.toml")
+        self._config = ConfigModel()
+        self._config.public_config = PublicConfigModel.model_validate(toml.load(config_file))
+        framework_config_file = os.getenv("CONFIG")
+        if framework_config_file is None:
+            if can_import:
+                framework_config_file = os.path.join(project_root, "..", "config", "config.toml")
+        if framework_config_file and
os.path.exists(framework_config_file):
+            framework_config = FrameworkConfigModel.model_validate(toml.load(framework_config_file))
+            self._config.public_config.llm_remote = framework_config.llm.endpoint
+            self._config.public_config.llm_model = framework_config.llm.model
+            self._config.public_config.llm_api_key = framework_config.llm.key
+            self._config.public_config.max_tokens = framework_config.llm.max_tokens
+            self._config.public_config.temperature = framework_config.llm.temperature
+    def load_private_config(self) -> None:
+        """Hook for subclasses: load the service-specific private config."""
+        pass
+
+    def get_config(self) -> ConfigModel:
+        """Return a deep copy of the loaded configuration (callers cannot mutate shared state)."""
+        return deepcopy(self._config)
+
+    def update_config(self) -> None:  # re-read public config; discards any loaded private config
+        config_file = os.path.join(project_root, "config", "public", "public_config.toml")
+        self._config = ConfigModel()
+        self._config.public_config = PublicConfigModel.model_validate(toml.load(config_file))
\ No newline at end of file
diff --git a/mcp_center/config/public/public_config.toml b/mcp_center/config/public/public_config.toml
new file mode 100644
index 00000000..eb842e54
--- /dev/null
+++ b/mcp_center/config/public/public_config.toml
@@ -0,0 +1,17 @@
+# Public configuration file
+# Language setting: zh (Chinese) or en (English)
+language = "zh"
+# LLM settings
+llm_remote = ""
+llm_model = ""
+llm_api_key = ""
+max_tokens = 8192
+temperature = 0.7
+# Remote host list
+[[remote_hosts]]
+name = "本机"
+os_type = "openEuler"
+host = ""
+port = 22
+username = "root"
+password = ""
\ No newline at end of file
--
Gitee