diff --git a/.gitignore b/.gitignore
index c62e334e6e0f521a7f889c54a758a5956a99c0ea..84a874ae19cae5e8f1be0c20667f77bb4581c2bc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,4 @@ logs
 .ruff_cache/
 config
 uv.lock
+!mcp_center/config
diff --git a/mcp_center/config/private/mcp_server/config.toml b/mcp_center/config/private/mcp_server/config.toml
new file mode 100644
index 0000000000000000000000000000000000000000..ce81440556782e623c986731422fd681cd2cec9c
--- /dev/null
+++ b/mcp_center/config/private/mcp_server/config.toml
@@ -0,0 +1,21 @@
+# Private configuration for the MCP server
+# Language setting: zh (Chinese) or en (English)
+language = "zh"
+# Server settings
+host = "0.0.0.0"
+port = 12555
+fastapi_port = 12556
+# LLM settings
+llm_remote = ""
+llm_model = ""
+llm_api_key = ""
+max_tokens = 8192
+temperature = 0.7
+# Remote host list
+[[remote_hosts]]
+name = "本机"
+os_type = "openEuler"
+host = ""
+port = 22
+username = "root"
+password = ""
diff --git a/mcp_center/config/private/mcp_server/config_loader.py b/mcp_center/config/private/mcp_server/config_loader.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c1c6929cc54514db42e82ef8d6c3a8c4b527966
--- /dev/null
+++ b/mcp_center/config/private/mcp_server/config_loader.py
@@ -0,0 +1,32 @@
+# Copyright (c) Huawei Technologies Co., Ltd. 2023-2025. All rights reserved.
+import os
+import toml
+from pydantic import BaseModel, Field
+from config.public.base_config_loader import BaseConfig, project_root
+
+
+class McpServerConfigModel(BaseModel):
+    """Private configuration model for the MCP server."""
+    port: int = Field(default=12555, description="MCP服务端口")
+    fastapi_port: int = Field(default=12556, description="fastapi服务端口")
+    llm_remote: str = Field(default="", description="LLM远程主机地址")
+    llm_model: str = Field(default="gpt-3.5-turbo", description="LLM模型名称")
+    llm_api_key: str = Field(default="", description="LLM API Key")
+    max_tokens: int = Field(default=2048, description="LLM最大Token数")
+    temperature: float = Field(default=0.7, description="LLM温度参数")
+
+class McpServerConfig(BaseConfig):
+    """Loads the MCP server's private config on top of the public config."""
+
+    def __init__(self) -> None:
+        """Load the public config, then the private config."""
+        super().__init__()
+        self.load_private_config()
+
+    def load_private_config(self) -> None:
+        """Load the private config file (path overridable via MCP_SERVER_CONFIG)."""
+        config_file = os.getenv("MCP_SERVER_CONFIG")
+        if config_file is None:
+            config_file = os.path.join(project_root, "config", "private", "mcp_server", "config.toml")
+        self._config.private_config = McpServerConfigModel.model_validate(toml.load(config_file))
+
diff --git a/mcp_center/config/private/rag/config.toml b/mcp_center/config/private/rag/config.toml
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/mcp_center/config/private/rag/config_loader.py b/mcp_center/config/private/rag/config_loader.py
new file mode 100644
index 0000000000000000000000000000000000000000..8981173d2fb12576dd480d15c9658755d1576fb9
--- /dev/null
+++ b/mcp_center/config/private/rag/config_loader.py
@@ -0,0 +1,26 @@
+# Copyright (c) Huawei Technologies Co., Ltd. 2023-2025. All rights reserved.
+import os
+import toml
+from pydantic import BaseModel, Field
+from config.public.base_config_loader import BaseConfig, project_root
+
+
+class RemoteInfoConfigModel(BaseModel):
+    """Private configuration model for the RAG service."""
+    port: int = Field(default=12311, description="MCP服务端口")
+
+
+class RemoteInfoConfig(BaseConfig):
+    """Loads the RAG service's private config on top of the public config."""
+
+    def __init__(self) -> None:
+        """Load the public config, then the private config."""
+        super().__init__()
+        self.load_private_config()
+
+    def load_private_config(self) -> None:
+        """Load the private config file (path overridable via RAG_CONFIG)."""
+        config_file = os.getenv("RAG_CONFIG")
+        if config_file is None:
+            config_file = os.path.join(project_root, "config", "private", "rag", "config.toml")
+        self._config.private_config = RemoteInfoConfigModel.model_validate(toml.load(config_file))
diff --git a/mcp_center/config/public/base_config_loader.py b/mcp_center/config/public/base_config_loader.py
new file mode 100644
index 0000000000000000000000000000000000000000..54106237d97c6dbc4b2fdf13c17b4f16f183b00c
--- /dev/null
+++ b/mcp_center/config/public/base_config_loader.py
@@ -0,0 +1,89 @@
+# Copyright (c) Huawei Technologies Co., Ltd. 2023-2025. All rights reserved.
+"""Configuration loading utilities shared by the MCP-center services."""
+import toml
+from enum import Enum
+from typing import Any
+from pydantic import BaseModel, Field
+from pathlib import Path
+from copy import deepcopy
+import sys
+import os
+# The project root is two directory levels above this file.
+project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
+# Put the project root on sys.path so sibling packages can be imported.
+sys.path.append(project_root)
+can_import = True
+try:
+    from apps.schemas.config import ConfigModel as FrameworkConfigModel
+except ImportError:
+    # The framework schema is optional; fall back to local config only.
+    can_import = False
+
+
+class LanguageEnum(str, Enum):
+    """Supported languages."""
+    ZH = "zh"
+    EN = "en"
+
+
+class RemoteConfigModel(BaseModel):
+    """Connection settings for a single remote host."""
+    name: str = Field(..., description="远程主机名称")
+    os_type: str = Field(..., description="远程主机操作系统类型")
+    host: str = Field(..., description="远程主机地址")
+    port: int = Field(..., description="远程主机端口")
+    username: str = Field(..., description="远程主机用户名")
+    password: str = Field(..., description="远程主机密码")
+
+
+class PublicConfigModel(BaseModel):
+    """Configuration shared by every service."""
+    language: LanguageEnum = Field(default=LanguageEnum.ZH, description="语言")
+    remote_hosts: list[RemoteConfigModel] = Field(default=[], description="远程主机列表")
+    llm_remote: str = Field(default="https://dashscope.aliyuncs.com/compatible-mode/v1", description="LLM远程主机地址")
+    llm_model: str = Field(default="qwen3-coder-480b-a35b-instruct", description="LLM模型名称")
+    llm_api_key: str = Field(default="", description="LLM API Key")
+    max_tokens: int = Field(default=8192, description="LLM最大Token数")
+    temperature: float = Field(default=0.7, description="LLM温度参数")
+
+
+class ConfigModel(BaseModel):
+    """Combined configuration: shared public part plus service private part."""
+    public_config: PublicConfigModel = Field(default=PublicConfigModel(), description="公共配置")
+    private_config: Any = Field(default=None, description="私有配置")
+
+
+class BaseConfig:
+    """Loads and exposes the configuration files."""
+
+    def __init__(self) -> None:
+        """Load the public config; overlay framework LLM settings when available."""
+        config_file = os.path.join(project_root, "config", "public", "public_config.toml")
+        self._config = ConfigModel()
+        self._config.public_config = PublicConfigModel.model_validate(toml.load(config_file))
+        framework_config_file = os.getenv("CONFIG")
+        if framework_config_file is None:
+            if can_import:
+                framework_config_file = os.path.join(project_root, "..", "config", "config.toml")
+        # Only dereference FrameworkConfigModel when its import succeeded.
+        if can_import and framework_config_file and os.path.exists(framework_config_file):
+            framework_config = FrameworkConfigModel.model_validate(toml.load(framework_config_file))
+            self._config.public_config.llm_remote = framework_config.llm.endpoint
+            self._config.public_config.llm_model = framework_config.llm.model
+            self._config.public_config.llm_api_key = framework_config.llm.key
+            self._config.public_config.max_tokens = framework_config.llm.max_tokens
+            self._config.public_config.temperature = framework_config.llm.temperature
+
+    def load_private_config(self) -> None:
+        """Hook for subclasses to load their private config file."""
+        pass
+
+    def get_config(self) -> ConfigModel:
+        """Return a deep copy of the current configuration."""
+        return deepcopy(self._config)
+
+    def update_config(self) -> None:
+        """Re-read the public config file from disk."""
+        config_file = os.path.join(project_root, "config", "public", "public_config.toml")
+        self._config = ConfigModel()
+        self._config.public_config = PublicConfigModel.model_validate(toml.load(config_file))
diff --git a/mcp_center/config/public/public_config.toml b/mcp_center/config/public/public_config.toml
new file mode 100644
index 0000000000000000000000000000000000000000..eb842e548cd40fb39ca2170b1e061c0de90e6495
--- /dev/null
+++ b/mcp_center/config/public/public_config.toml
@@ -0,0 +1,17 @@
+# Public configuration file
+# Language setting: zh (Chinese) or en (English)
+language = "zh"
+# LLM settings
+llm_remote = ""
+llm_model = ""
+llm_api_key = ""
+max_tokens = 8192
+temperature = 0.7
+# Remote host list
+[[remote_hosts]]
+name = "本机"
+os_type = "openEuler"
+host = ""
+port = 22
+username = "root"
+password = ""