diff --git a/apps/models/app.py b/apps/models/app.py
index 2b60d89b38dafb0a720f133911c9a3fed7a64f2c..0aa122f67336f58fb843a82fa8e65d861dbc290c 100644
--- a/apps/models/app.py
+++ b/apps/models/app.py
@@ -36,7 +36,7 @@ class App(Base):
     """Application name"""
     description: Mapped[str] = mapped_column(String(2000), nullable=False)
     """Application description"""
-    authorId: Mapped[str] = mapped_column(String(50), ForeignKey("framework_user.id"), nullable=False)  # noqa: N815
+    authorId: Mapped[str] = mapped_column(String(50), nullable=False)  # noqa: N815
     """Application author ID"""
     appType: Mapped[AppType] = mapped_column(Enum(AppType), nullable=False)  # noqa: N815
     """Application type"""
diff --git a/apps/models/mcp.py b/apps/models/mcp.py
index 0b3a43f1661794f522355f849e071b4cd49d78d2..f6d0c9c8606cd3622529ef1f479273fb07efefc5 100644
--- a/apps/models/mcp.py
+++ b/apps/models/mcp.py
@@ -40,7 +40,7 @@ class MCPInfo(Base):
     """MCP overview"""
     description: Mapped[str] = mapped_column(Text, nullable=False)
     """MCP description"""
-    authorId: Mapped[str] = mapped_column(String(50), ForeignKey("framework_user.id"), nullable=False)  # noqa: N815
+    authorId: Mapped[str] = mapped_column(String(50), nullable=False)  # noqa: N815
     """MCP creator ID"""
     id: Mapped[str] = mapped_column(String(255), primary_key=True)
     """MCP ID"""
diff --git a/apps/models/service.py b/apps/models/service.py
index f347bad4871aa8356ff04c3b737c74085b15d53b..564161789f4aa1c4aa1c7d77da365737e545da66 100644
--- a/apps/models/service.py
+++ b/apps/models/service.py
@@ -20,7 +20,7 @@ class Service(Base):
     """Plugin name"""
     description: Mapped[str] = mapped_column(String(2000), nullable=False)
     """Plugin description"""
-    authorId: Mapped[str] = mapped_column(String(50), ForeignKey("framework_user.id"), nullable=False)  # noqa: N815
+    authorId: Mapped[str] = mapped_column(String(50), nullable=False)  # noqa: N815
     """Plugin author ID"""
     id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
     """Plugin ID"""
diff --git a/apps/scheduler/mcp/select.py b/apps/scheduler/mcp/select.py
index f2f3f5d4f0298f17540b006043037493e1195eb8..11412f39b796ea0822fe5b2d9b9c48001c13daaf 100644
--- a/apps/scheduler/mcp/select.py
+++ b/apps/scheduler/mcp/select.py
@@ -77,6 +77,11 @@ class MCPSelector(MCPNodeBase):

     async def select_top_tool(self, query: str, mcp_list: list[str], top_n: int = 10) -> list[MCPTools]:
         """Select the most suitable tools"""
+        # Check whether embedding has been initialized
+        if embedding.MCPToolVector is None:
+            logger.warning("[MCPSelector] Embedding not initialized, returning an empty list")
+            return []
+
         query_embedding = await embedding.get_embedding([query])
         async with postgres.session() as session:
             tool_vecs = await session.scalars(
diff --git a/apps/scheduler/pool/loader/call.py b/apps/scheduler/pool/loader/call.py
index 57ec5544d8590feb297413a45c7e31c732b4937e..1dc93f306bdfec368eba877ef63a7832272a500b 100644
--- a/apps/scheduler/pool/loader/call.py
+++ b/apps/scheduler/pool/loader/call.py
@@ -44,7 +44,7 @@ class CallLoader:
         """Insert data into the database"""
         # Clear old data
         async with postgres.session() as session:
-            if await _table_exists(embedding.NodePoolVector.__tablename__):
+            if embedding.NodePoolVector is not None and await _table_exists(embedding.NodePoolVector.__tablename__):
                 await session.execute(
                     delete(embedding.NodePoolVector).where(embedding.NodePoolVector.serviceId == None),  # noqa: E711
                 )
@@ -70,6 +70,13 @@ class CallLoader:

     async def _add_vector_to_db(self, call_metadata: dict[str, CallInfo]) -> None:
         """Store vectorized data in the database"""
+        # Check whether NodePoolVector has been initialized
+        if embedding.NodePoolVector is None:
+            _logger.warning(
+                "[CallLoader] Embedding not initialized, skipping vector data insertion",
+            )
+            return
+
         # Check whether the table exists
         if not await _table_exists(embedding.NodePoolVector.__tablename__):
             _logger.warning(
diff --git a/apps/scheduler/pool/loader/flow.py b/apps/scheduler/pool/loader/flow.py
index ca7215a7c49d57048aa60626dc9c990a89c5235b..695bec2655bc49074b2b0a03457e6aedaf92606b 100644
--- a/apps/scheduler/pool/loader/flow.py
+++ b/apps/scheduler/pool/loader/flow.py
@@ -211,9 +211,10 @@ class FlowLoader:
                     FlowInfo.id == flow_id,
                 ),
             ))
-            await session.execute(
-                delete(embedding.FlowPoolVector).where(embedding.FlowPoolVector.id == flow_id),
-            )
+            if embedding.FlowPoolVector is not None:
+                await session.execute(
+                    delete(embedding.FlowPoolVector).where(embedding.FlowPoolVector.id == flow_id),
+                )
             await session.commit()
             return
         logger.warning("[FlowLoader] Flow file does not exist or is not a file: %s", flow_path)
@@ -257,6 +258,11 @@ class FlowLoader:

     async def _update_vector(self, app_id: uuid.UUID) -> None:
         """Re-vectorize all flows of the given App"""
+        # Check whether embedding has been initialized
+        if embedding.FlowPoolVector is None:
+            logger.warning("[FlowLoader] Embedding not initialized, skipping vectorization")
+            return
+
         # Load all flows of this App from the database
         async with postgres.session() as session:
             flows_query = select(FlowInfo).where(FlowInfo.appId == app_id)
@@ -288,6 +294,11 @@ class FlowLoader:
     @staticmethod
     async def set_vector() -> None:
         """Store the vectorized data of all flows in the database"""
+        # Check whether embedding has been initialized
+        if embedding.FlowPoolVector is None:
+            logger.warning("[FlowLoader] Embedding not initialized, skipping vectorization")
+            return
+
         flows = await FlowLoader._load_all_flows()

         # Update vector data for each flow
diff --git a/apps/scheduler/pool/loader/mcp.py b/apps/scheduler/pool/loader/mcp.py
index 999fccbea494511ef56e7baa43049d5321847619..0199bae1bcfc19827a31e1e8760c2dde4e8f532e 100644
--- a/apps/scheduler/pool/loader/mcp.py
+++ b/apps/scheduler/pool/loader/mcp.py
@@ -664,12 +664,14 @@ class MCPLoader:

         async with postgres.session() as session:
             for mcp_id in deleted_mcp_list:
-                await session.execute(
-                    delete(embedding.MCPVector).where(embedding.MCPVector.id == mcp_id),
-                )
-                await session.execute(
-                    delete(embedding.MCPToolVector).where(embedding.MCPToolVector.mcpId == mcp_id),
-                )
+                if embedding.MCPVector is not None:
+                    await session.execute(
+                        delete(embedding.MCPVector).where(embedding.MCPVector.id == mcp_id),
+                    )
+                if embedding.MCPToolVector is not None:
+                    await session.execute(
+                        delete(embedding.MCPToolVector).where(embedding.MCPToolVector.mcpId == mcp_id),
+                    )
             await session.commit()

         logger.info("[MCPLoader] Cleared invalid MCP vector data from the database")
diff --git a/apps/scheduler/pool/loader/service.py b/apps/scheduler/pool/loader/service.py
index 4a877009884c4176903ef37b9e61bc4215c77d7a..afe8ee387ba50985358117f017b0fc466d3606c7 100644
--- a/apps/scheduler/pool/loader/service.py
+++ b/apps/scheduler/pool/loader/service.py
@@ -112,14 +112,16 @@ class ServiceLoader:
             await session.execute(delete(ServiceACL).where(ServiceACL.serviceId == service_id))
             await session.execute(delete(ServiceHashes).where(ServiceHashes.serviceId == service_id))

-            await session.execute(
-                delete(embedding.ServicePoolVector).where(embedding.ServicePoolVector.id == service_id),
-            )
-            await session.execute(
-                delete(
-                    embedding.NodePoolVector,
-                ).where(embedding.NodePoolVector.serviceId == service_id),
-            )
+            if embedding.ServicePoolVector is not None:
+                await session.execute(
+                    delete(embedding.ServicePoolVector).where(embedding.ServicePoolVector.id == service_id),
+                )
+            if embedding.NodePoolVector is not None:
+                await session.execute(
+                    delete(
+                        embedding.NodePoolVector,
+                    ).where(embedding.NodePoolVector.serviceId == service_id),
+                )
             await session.commit()

         if not is_reload:
@@ -187,6 +189,13 @@ class ServiceLoader:
         service_description: str,
     ) -> None:
         """Update vector data"""
+        # Check whether embedding has been initialized
+        if embedding.ServicePoolVector is None or embedding.NodePoolVector is None:
+            logger.warning(
+                "[ServiceLoader] Embedding not initialized, skipping vector data update",
+            )
+            return
+
         # Check whether the table exists
         if not await _table_exists(embedding.ServicePoolVector.__tablename__):
             logger.warning(
diff --git a/data/semantics/service/test_service/metadata.yaml b/data/semantics/service/68e83905-a6fc-4acb-992c-550159e7b33e/metadata.yaml
similarity index 100%
rename from data/semantics/service/test_service/metadata.yaml
rename to data/semantics/service/68e83905-a6fc-4acb-992c-550159e7b33e/metadata.yaml
diff --git a/data/semantics/service/test_service/openapi/api.yaml b/data/semantics/service/68e83905-a6fc-4acb-992c-550159e7b33e/openapi/api.yaml
similarity index 100%
rename from data/semantics/service/test_service/openapi/api.yaml
rename to data/semantics/service/68e83905-a6fc-4acb-992c-550159e7b33e/openapi/api.yaml
diff --git a/data/sysagent.service b/data/sysagent.service
index 4f893444dffb621d71cda5f6c8002d03ceeca099..ca5a8e5999d7334c2c5789ca3fb27e0ba87f04ec 100644
--- a/data/sysagent.service
+++ b/data/sysagent.service
@@ -5,7 +5,7 @@ User=root
 WorkingDirectory=/usr/lib/sysagent
 Environment="PYTHONPATH=/usr/lib/sysagent"
 Environment="CONFIG=/etc/sysagent/config.toml"
-ExecStart=/usr/bin/python3 apps/main.py
+ExecStart=/usr/bin/python3 -m apps.main
 ExecReload=/bin/kill -HUP $MAINPID
 Restart=on-failure
 RestartSec=10