diff --git a/backend/agents/create_agent_info.py b/backend/agents/create_agent_info.py index faed9ce79..bc4031e0a 100644 --- a/backend/agents/create_agent_info.py +++ b/backend/agents/create_agent_info.py @@ -1,10 +1,10 @@ import threading import logging +from typing import List from urllib.parse import urljoin from datetime import datetime from jinja2 import Template, StrictUndefined -from smolagents.utils import BASE_BUILTIN_MODULES from nexent.core.utils.observer import MessageObserver from nexent.core.agents.agent_model import AgentRunInfo, ModelConfig, AgentConfig, ToolConfig from nexent.memory.memory_service import search_memory_in_levels @@ -27,11 +27,119 @@ from utils.prompt_template_utils import get_agent_prompt_template from utils.config_utils import tenant_config_manager, get_model_name_from_config from consts.const import LOCAL_MCP_SERVER, MODEL_CONFIG_MAPPING, LANGUAGE, DATA_PROCESS_SERVICE +import re logger = logging.getLogger("create_agent_info") logger.setLevel(logging.DEBUG) +def _get_skills_for_template( + agent_id: int, + tenant_id: str, + version_no: int = 0 +) -> List[dict]: + """Get skills list for prompt template injection. + + Args: + agent_id: Agent ID + tenant_id: Tenant ID + version_no: Version number + + Returns: + List of skill dicts with name and description + """ + try: + from services.skill_service import SkillService + skill_service = SkillService() + enabled_skills = skill_service.get_enabled_skills_for_agent( + agent_id=agent_id, + tenant_id=tenant_id, + version_no=version_no + ) + return [ + {"name": s.get("name", ""), "description": s.get("description", "")} + for s in enabled_skills + ] + except Exception as e: + logger.warning(f"Failed to get skills for template: {e}") + return [] + + +def _get_skill_script_tools( + agent_id: int, + tenant_id: str, + version_no: int = 0 +) -> List[ToolConfig]: + """Get tool config for skill script execution and skill reading. 
+ + Args: + agent_id: Agent ID for filtering available skills in error messages. + tenant_id: Tenant ID for filtering available skills in error messages. + version_no: Version number for filtering available skills. + + Returns: + List of ToolConfig for skill execution and reading tools + """ + from consts.const import CONTAINER_SKILLS_PATH + + skill_context = { + "agent_id": agent_id, + "tenant_id": tenant_id, + "version_no": version_no, + } + + try: + return [ + ToolConfig( + class_name="RunSkillScriptTool", + name="run_skill_script", + description="Execute a skill script with given parameters. Use this to run Python or shell scripts that are part of a skill.", + inputs='{"skill_name": "str", "script_path": "str", "params": "dict"}', + output_type="string", + params={"local_skills_dir": CONTAINER_SKILLS_PATH}, + source="builtin", + usage="builtin", + metadata=skill_context, + ), + ToolConfig( + class_name="ReadSkillMdTool", + name="read_skill_md", + description="Read skill execution guide and optional additional files. Always reads SKILL.md first, then optionally reads additional files.", + inputs='{"skill_name": "str", "additional_files": "list[str]"}', + output_type="string", + params={"local_skills_dir": CONTAINER_SKILLS_PATH}, + source="builtin", + usage="builtin", + metadata=skill_context, + ), + ToolConfig( + class_name="ReadSkillConfigTool", + name="read_skill_config", + description="Read the config.yaml file from a skill directory. Returns JSON containing configuration variables needed for skill workflows.", + inputs='{"skill_name": "str"}', + output_type="string", + params={"local_skills_dir": CONTAINER_SKILLS_PATH}, + source="builtin", + usage="builtin", + metadata=skill_context, + ), + ToolConfig( + class_name="WriteSkillFileTool", + name="write_skill_file", + description="Write content to a file within a skill directory. 
Creates parent directories if they do not exist.", + inputs='{"skill_name": "str", "file_path": "str", "content": "str"}', + output_type="string", + params={"local_skills_dir": CONTAINER_SKILLS_PATH}, + source="builtin", + usage="builtin", + metadata=skill_context, + ) + ] + except Exception as e: + logger.warning(f"Failed to load skill script tool: {e}") + return [] + + async def create_model_config_list(tenant_id): records = get_model_records({"model_type": "llm"}, tenant_id) model_list = [] @@ -169,22 +277,26 @@ async def create_agent_config( logger.error(f"Failed to build knowledge base summary: {e}") # Assemble system_prompt - if duty_prompt or constraint_prompt or few_shots_prompt: - system_prompt = Template(prompt_template["system_prompt"], undefined=StrictUndefined).render({ - "duty": duty_prompt, - "constraint": constraint_prompt, - "few_shots": few_shots_prompt, - "tools": {tool.name: tool for tool in tool_list}, - "managed_agents": {agent.name: agent for agent in managed_agents}, - "authorized_imports": str(BASE_BUILTIN_MODULES), - "APP_NAME": app_name, - "APP_DESCRIPTION": app_description, - "memory_list": memory_list, - "knowledge_base_summary": knowledge_base_summary, - "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S") - }) - else: - system_prompt = agent_info.get("prompt", "") + # Get skills list for prompt template + skills = _get_skills_for_template(agent_id, tenant_id, version_no) + + render_kwargs = { + "duty": duty_prompt, + "constraint": constraint_prompt, + "few_shots": few_shots_prompt, + "tools": {tool.name: tool for tool in tool_list}, + "skills": skills, + "managed_agents": {agent.name: agent for agent in managed_agents}, + "APP_NAME": app_name, + "APP_DESCRIPTION": app_description, + "memory_list": memory_list, + "knowledge_base_summary": knowledge_base_summary, + "time": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "user_id": user_id, + } + system_prompt = Template(prompt_template["system_prompt"], 
undefined=StrictUndefined).render(render_kwargs) + + _print_prompt_with_token_count(system_prompt, agent_id, "BEFORE_INJECTION") if agent_info.get("model_id") is not None: model_info = get_model_by_model_id(agent_info.get("model_id")) @@ -197,9 +309,10 @@ async def create_agent_config( prompt_templates=await prepare_prompt_templates( is_manager=len(managed_agents) > 0, system_prompt=system_prompt, - language=language + language=language, + agent_id=agent_id ), - tools=tool_list, + tools=tool_list + _get_skill_script_tools(agent_id, tenant_id, version_no), max_steps=agent_info.get("max_steps", 10), model_name=model_name, provide_run_summary=agent_info.get("provide_run_summary", False), @@ -296,7 +409,12 @@ async def discover_langchain_tools(): return langchain_tools -async def prepare_prompt_templates(is_manager: bool, system_prompt: str, language: str = 'zh'): +async def prepare_prompt_templates( + is_manager: bool, + system_prompt: str, + language: str = 'zh', + agent_id: int = None, +): """ Prepare prompt templates, support multiple languages @@ -304,15 +422,33 @@ async def prepare_prompt_templates(is_manager: bool, system_prompt: str, languag is_manager: Whether it is a manager mode system_prompt: System prompt content language: Language code ('zh' or 'en') + agent_id: Agent ID for fetching skill instances Returns: dict: Prompt template configuration """ prompt_templates = get_agent_prompt_template(is_manager, language) prompt_templates["system_prompt"] = system_prompt + + # Print final prompt with all injections + _print_prompt_with_token_count(prompt_templates["system_prompt"], agent_id, "FINAL_PROMPT") + return prompt_templates +def _print_prompt_with_token_count(prompt: str, agent_id: int = None, stage: str = "PROMPT"): + """Print prompt content and estimate token count using tiktoken.""" + try: + import tiktoken + encoding = tiktoken.get_encoding("cl100k_base") + token_count = len(encoding.encode(prompt)) + logger.info(f"[Skill Debug][{stage}] Agent 
{agent_id} token count: {token_count}") + logger.info(f"[Skill Debug][{stage}] Agent {agent_id} prompt:\n{prompt}") + except Exception as e: + logger.warning(f"[Skill Debug][{stage}] Failed to count tokens: {e}") + logger.info(f"[Skill Debug][{stage}] Agent {agent_id} prompt:\n{prompt}") + + async def join_minio_file_description_to_query(minio_files, query): final_query = query if minio_files and isinstance(minio_files, list): diff --git a/backend/apps/agent_app.py b/backend/apps/agent_app.py index a42d11b53..595569050 100644 --- a/backend/apps/agent_app.py +++ b/backend/apps/agent_app.py @@ -20,7 +20,8 @@ run_agent_stream, stop_agent_tasks, get_agent_call_relationship_impl, - clear_agent_new_mark_impl + clear_agent_new_mark_impl, + get_agent_by_name_impl, ) from services.agent_version_service import ( publish_version_impl, @@ -100,6 +101,27 @@ async def search_agent_info_api( status_code=HTTPStatus.INTERNAL_SERVER_ERROR, detail="Agent search info error.") +@agent_config_router.get("/by-name/{agent_name}") +async def get_agent_by_name_api( + agent_name: str, + tenant_id: Optional[str] = Query( + None, description="Tenant ID for filtering (uses auth if not provided)"), + authorization: Optional[str] = Header(None) +): + """ + Look up an agent by name and return its agent_id and highest published version_no. 
+ """ + try: + _, auth_tenant_id = get_current_user_id(authorization) + effective_tenant_id = tenant_id or auth_tenant_id + result = get_agent_by_name_impl(agent_name, effective_tenant_id) + return JSONResponse(status_code=HTTPStatus.OK, content=result) + except Exception as e: + logger.error(f"Agent by name lookup error: {str(e)}") + raise HTTPException( + status_code=HTTPStatus.INTERNAL_SERVER_ERROR, detail="Agent not found.") + + @agent_config_router.get("/get_creating_sub_agent_id") async def get_creating_sub_agent_info_api(authorization: Optional[str] = Header(None)): """ diff --git a/backend/apps/config_app.py b/backend/apps/config_app.py index 58e2b008b..ec1db6e7a 100644 --- a/backend/apps/config_app.py +++ b/backend/apps/config_app.py @@ -14,6 +14,7 @@ from apps.model_managment_app import router as model_manager_router from apps.prompt_app import router as prompt_router from apps.remote_mcp_app import router as remote_mcp_router +from apps.skill_app import router as skill_router from apps.tenant_config_app import router as tenant_config_router from apps.tool_config_app import router as tool_config_router from apps.user_management_app import router as user_management_router @@ -52,6 +53,7 @@ app.include_router(summary_router) app.include_router(prompt_router) +app.include_router(skill_router) app.include_router(tenant_config_router) app.include_router(remote_mcp_router) app.include_router(tenant_router) diff --git a/backend/apps/skill_app.py b/backend/apps/skill_app.py new file mode 100644 index 000000000..8bf19e8b7 --- /dev/null +++ b/backend/apps/skill_app.py @@ -0,0 +1,540 @@ +"""Skill management HTTP endpoints.""" + +import logging +import os +import re +from typing import Any, Dict, List, Optional + +from fastapi import APIRouter, HTTPException, Query, UploadFile, File, Form, Header +from starlette.responses import JSONResponse +from pydantic import BaseModel + +from consts.exceptions import SkillException, UnauthorizedError +from services.skill_service 
import SkillService +from consts.model import SkillInstanceInfoRequest +from utils.auth_utils import get_current_user_id + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/skills", tags=["skills"]) + + +class SkillCreateRequest(BaseModel): + """Request model for creating a skill.""" + name: str + description: str + content: str + tool_ids: Optional[List[int]] = [] # Use tool_id list, link to ag_tool_info_t + tool_names: Optional[List[str]] = [] # Alternative: use tool name list, will be converted to tool_ids + tags: Optional[List[str]] = [] + source: Optional[str] = "custom" # official, custom, partner + params: Optional[Dict[str, Any]] = None # Skill config (JSON object) + + +class SkillUpdateRequest(BaseModel): + """Request model for updating a skill.""" + description: Optional[str] = None + content: Optional[str] = None + tool_ids: Optional[List[int]] = None # Use tool_id list + tool_names: Optional[List[str]] = None # Alternative: use tool name list, will be converted to tool_ids + tags: Optional[List[str]] = None + source: Optional[str] = None + params: Optional[Dict[str, Any]] = None + + +class SkillResponse(BaseModel): + """Response model for skill data.""" + skill_id: int + name: str + description: str + content: str + tool_ids: List[int] + tags: List[str] + source: str + params: Optional[Dict[str, Any]] = None + created_by: Optional[str] = None + create_time: Optional[str] = None + updated_by: Optional[str] = None + update_time: Optional[str] = None + + +# List routes first (no path parameters) +@router.get("") +async def list_skills() -> JSONResponse: + """List all available skills.""" + try: + service = SkillService() + skills = service.list_skills() + return JSONResponse(content={"skills": skills}) + except SkillException as e: + raise HTTPException(status_code=500, detail=str(e)) + except Exception as e: + logger.error(f"Error listing skills: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +# POST 
routes +@router.post("") +async def create_skill( + request: SkillCreateRequest, + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """Create a new skill (JSON format).""" + try: + user_id, tenant_id = get_current_user_id(authorization) + service = SkillService() + + # Convert tool_names to tool_ids if provided + tool_ids = request.tool_ids or [] + if request.tool_names: + tool_ids = service.repository.get_tool_ids_by_names(request.tool_names, tenant_id) + + skill_data = { + "name": request.name, + "description": request.description, + "content": request.content, + "tool_ids": tool_ids, + "tags": request.tags, + "source": request.source, + "params": request.params, + } + skill = service.create_skill(skill_data, user_id=user_id) + return JSONResponse(content=skill, status_code=201) + except UnauthorizedError as e: + raise HTTPException(status_code=401, detail=str(e)) + except SkillException as e: + error_msg = str(e).lower() + if "already exists" in error_msg: + raise HTTPException(status_code=409, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error creating skill: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.post("/upload") +async def create_skill_from_file( + file: UploadFile = File(..., description="SKILL.md file or ZIP archive"), + skill_name: Optional[str] = Form(None, description="Optional skill name override"), + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """Create a skill from file upload. 
+ + Supports two formats: + - Single SKILL.md file: Extracts metadata and saves directly + - ZIP archive: Contains SKILL.md plus scripts/assets folders + """ + try: + user_id, tenant_id = get_current_user_id(authorization) + service = SkillService() + + content = await file.read() + + file_type = "auto" + if file.filename: + if file.filename.endswith(".zip"): + file_type = "zip" + elif file.filename.endswith(".md"): + file_type = "md" + + skill = service.create_skill_from_file( + file_content=content, + skill_name=skill_name, + file_type=file_type, + user_id=user_id, + tenant_id=tenant_id + ) + return JSONResponse(content=skill, status_code=201) + except UnauthorizedError as e: + raise HTTPException(status_code=401, detail=str(e)) + except SkillException as e: + error_msg = str(e).lower() + if "already exists" in error_msg: + raise HTTPException(status_code=409, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error creating skill from file: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +# Routes with path parameters +@router.get("/{skill_name}/files") +async def get_skill_file_tree(skill_name: str) -> JSONResponse: + """Get file tree structure of a skill.""" + try: + service = SkillService() + tree = service.get_skill_file_tree(skill_name) + if not tree: + raise HTTPException(status_code=404, detail=f"Skill not found: {skill_name}") + return JSONResponse(content=tree) + except HTTPException: + raise + except SkillException as e: + raise HTTPException(status_code=500, detail=str(e)) + except Exception as e: + logger.error(f"Error getting skill file tree: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get("/{skill_name}/files/{file_path:path}") +async def get_skill_file_content( + skill_name: str, + file_path: str +) -> JSONResponse: + """Get content of a specific file within a skill. 
+ + Args: + skill_name: Name of the skill + file_path: Relative path to the file within the skill directory + """ + try: + service = SkillService() + content = service.get_skill_file_content(skill_name, file_path) + if content is None: + raise HTTPException(status_code=404, detail=f"File not found: {file_path}") + return JSONResponse(content={"content": content}) + except HTTPException: + raise + except SkillException as e: + raise HTTPException(status_code=500, detail=str(e)) + except Exception as e: + logger.error(f"Error getting skill file content: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.put("/{skill_name}/upload") +async def update_skill_from_file( + skill_name: str, + file: UploadFile = File(..., description="SKILL.md file or ZIP archive"), + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """Update a skill from file upload. + + Supports both SKILL.md and ZIP formats. + """ + try: + user_id, tenant_id = get_current_user_id(authorization) + service = SkillService() + + content = await file.read() + + file_type = "auto" + if file.filename: + if file.filename.endswith(".zip"): + file_type = "zip" + elif file.filename.endswith(".md"): + file_type = "md" + + skill = service.update_skill_from_file( + skill_name=skill_name, + file_content=content, + file_type=file_type, + user_id=user_id, + tenant_id=tenant_id + ) + return JSONResponse(content=skill) + except UnauthorizedError as e: + raise HTTPException(status_code=401, detail=str(e)) + except SkillException as e: + if "not found" in str(e).lower(): + raise HTTPException(status_code=404, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error updating skill from file: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +# ============== Skill Instance APIs ============== + +@router.get("/instance") +async def get_skill_instance( + agent_id: int = Query(..., 
description="Agent ID"), + skill_id: int = Query(..., description="Skill ID"), + version_no: int = Query(0, description="Version number (0 for draft)"), + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """Get a specific skill instance for an agent.""" + try: + _, tenant_id = get_current_user_id(authorization) + + service = SkillService() + instance = service.get_skill_instance( + agent_id=agent_id, + skill_id=skill_id, + tenant_id=tenant_id, + version_no=version_no + ) + + if not instance: + raise HTTPException( + status_code=404, + detail=f"Skill instance not found for agent {agent_id} and skill {skill_id}" + ) + + # Enrich with skill info from ag_skill_info_t (skill_name, skill_description, skill_content, params) + skill = service.get_skill_by_id(skill_id) + if skill: + instance["skill_name"] = skill.get("name") + instance["skill_description"] = skill.get("description", "") + instance["skill_content"] = skill.get("content", "") + instance["skill_params"] = skill.get("params") or {} + + return JSONResponse(content=instance) + except UnauthorizedError as e: + raise HTTPException(status_code=401, detail=str(e)) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error getting skill instance: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.post("/instance/update") +async def update_skill_instance( + request: SkillInstanceInfoRequest, + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """Create or update a skill instance for a specific agent. + + This allows customizing skill content for a specific agent without + modifying the global skill definition. 
+ """ + try: + user_id, tenant_id = get_current_user_id(authorization) + + # Validate skill exists + service = SkillService() + skill = service.get_skill_by_id(request.skill_id) + if not skill: + raise HTTPException(status_code=404, detail=f"Skill with ID {request.skill_id} not found") + + # Create or update skill instance + instance = service.create_or_update_skill_instance( + skill_info=request, + tenant_id=tenant_id, + user_id=user_id, + version_no=request.version_no + ) + + return JSONResponse(content={"message": "Skill instance updated", "instance": instance}) + except UnauthorizedError as e: + raise HTTPException(status_code=401, detail=str(e)) + except HTTPException: + raise + except SkillException as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error updating skill instance: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get("/instance/list") +async def list_skill_instances( + agent_id: int = Query(..., description="Agent ID to query skill instances"), + version_no: int = Query(0, description="Version number (0 for draft)"), + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """List all skill instances for a specific agent.""" + try: + _, tenant_id = get_current_user_id(authorization) + + service = SkillService() + + instances = service.list_skill_instances( + agent_id=agent_id, + tenant_id=tenant_id, + version_no=version_no + ) + + # Enrich with skill info from ag_skill_info_t (skill_name, skill_description, skill_content, params) + for instance in instances: + skill = service.get_skill_by_id(instance.get("skill_id")) + if skill: + instance["skill_name"] = skill.get("name") + instance["skill_description"] = skill.get("description", "") + instance["skill_content"] = skill.get("content", "") + instance["skill_params"] = skill.get("params") or {} + + return JSONResponse(content={"instances": instances}) + except UnauthorizedError as e: + raise 
HTTPException(status_code=401, detail=str(e)) + except Exception as e: + logger.error(f"Error listing skill instances: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get("/{skill_name}") +async def get_skill(skill_name: str) -> JSONResponse: + """Get a specific skill by name.""" + try: + service = SkillService() + skill = service.get_skill(skill_name) + if not skill: + raise HTTPException(status_code=404, detail=f"Skill not found: {skill_name}") + return JSONResponse(content=skill) + except HTTPException: + raise + except SkillException as e: + raise HTTPException(status_code=500, detail=str(e)) + except Exception as e: + logger.error(f"Error getting skill {skill_name}: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.put("/{skill_name}") +async def update_skill( + skill_name: str, + request: SkillUpdateRequest, + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """Update an existing skill. + + Audit field updated_by is set from the authenticated user only; it is not read from the JSON body. 
+ """ + try: + user_id, tenant_id = get_current_user_id(authorization) + service = SkillService() + update_data = {} + if request.description is not None: + update_data["description"] = request.description + if request.content is not None: + update_data["content"] = request.content + if request.tool_ids is not None: + # Convert tool_names to tool_ids if tool_names provided, else use tool_ids directly + if request.tool_names: + update_data["tool_ids"] = service.repository.get_tool_ids_by_names(request.tool_names, tenant_id) + else: + update_data["tool_ids"] = request.tool_ids + elif request.tool_names is not None: + # Only tool_names provided, convert to tool_ids + update_data["tool_ids"] = service.repository.get_tool_ids_by_names(request.tool_names, tenant_id) + if request.tags is not None: + update_data["tags"] = request.tags + if request.source is not None: + update_data["source"] = request.source + if request.params is not None: + update_data["params"] = request.params + + if not update_data: + raise HTTPException(status_code=400, detail="No fields to update") + + skill = service.update_skill(skill_name, update_data, user_id=user_id) + return JSONResponse(content=skill) + except UnauthorizedError as e: + raise HTTPException(status_code=401, detail=str(e)) + except SkillException as e: + if "not found" in str(e).lower(): + raise HTTPException(status_code=404, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error updating skill {skill_name}: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/{skill_name}") +async def delete_skill( + skill_name: str, + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """Delete a skill.""" + try: + user_id, _ = get_current_user_id(authorization) + service = SkillService() + service.delete_skill(skill_name, user_id=user_id) + return JSONResponse(content={"message": f"Skill 
{skill_name} deleted successfully"}) + except UnauthorizedError as e: + raise HTTPException(status_code=401, detail=str(e)) + except SkillException as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error deleting skill {skill_name}: {e}") + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/{skill_name}/files/{file_path:path}") +async def delete_skill_file( + skill_name: str, + file_path: str, + authorization: Optional[str] = Header(None) +) -> JSONResponse: + """Delete a specific file within a skill directory. + + Args: + skill_name: Name of the skill + file_path: Relative path to the file within the skill directory + """ + try: + _, _ = get_current_user_id(authorization) + service = SkillService() + + # Validate skill_name so it cannot be used for path traversal + if not skill_name: + raise HTTPException(status_code=400, detail="Invalid skill name") + if os.sep in skill_name or "/" in skill_name or ".." in skill_name: + raise HTTPException(status_code=400, detail="Invalid skill name") + + # Read config to get temp_filename for validation + config_content = service.get_skill_file_content(skill_name, "config.yaml") + if config_content is None: + raise HTTPException(status_code=404, detail="Config file not found") + + # Parse config to get temp_filename + import yaml + config = yaml.safe_load(config_content) + temp_filename = config.get("temp_filename", "") + + # Get the base directory for the skill + local_dir = os.path.join(service.skill_manager.local_skills_dir, skill_name) + + # Check for path traversal patterns in the raw file_path BEFORE any normalization + # This catches attempts like ../../etc/passwd or /etc/passwd + normalized_for_check = os.path.normpath(file_path) + if ".." 
in file_path or file_path.startswith("/") or (os.sep in file_path and file_path.startswith(os.sep)): + # Additional check: ensure the normalized path doesn't escape local_dir + abs_local_dir = os.path.abspath(local_dir) + abs_full_path = os.path.abspath(os.path.join(local_dir, normalized_for_check)) + try: + common = os.path.commonpath([abs_local_dir, abs_full_path]) + if common != abs_local_dir: + raise HTTPException(status_code=400, detail="Invalid file path: path traversal detected") + except ValueError: + raise HTTPException(status_code=400, detail="Invalid file path: path traversal detected") + + # Normalize the requested file path - use basename to strip directory components + safe_file_path = os.path.basename(os.path.normpath(file_path)) + + # Build full path and validate it stays within local_dir + full_path = os.path.normpath(os.path.join(local_dir, safe_file_path)) + abs_local_dir = os.path.abspath(local_dir) + abs_full_path = os.path.abspath(full_path) + + # Check for path traversal: abs_full_path should be within abs_local_dir + try: + common = os.path.commonpath([abs_local_dir, abs_full_path]) + if common != abs_local_dir: + raise HTTPException(status_code=400, detail="Invalid file path: path traversal detected") + except ValueError: + # Different drives on Windows + raise HTTPException(status_code=400, detail="Invalid file path: path traversal detected") + + # Validate the filename matches temp_filename + if not temp_filename or safe_file_path != temp_filename: + raise HTTPException(status_code=400, detail="Can only delete temp_filename files") + + # Check if file exists + if not os.path.exists(full_path): + raise HTTPException(status_code=404, detail=f"File not found: {safe_file_path}") + + os.remove(full_path) + logger.info(f"Deleted skill file: {full_path}") + + return JSONResponse(content={"message": f"File {safe_file_path} deleted successfully"}) + except UnauthorizedError as e: + raise HTTPException(status_code=401, detail=str(e)) + except 
HTTPException: + raise + except Exception as e: + logger.error(f"Error deleting skill file {skill_name}/{file_path}: {e}") + raise HTTPException(status_code=500, detail=str(e)) diff --git a/backend/consts/const.py b/backend/consts/const.py index 324e352f2..5bfd012ff 100644 --- a/backend/consts/const.py +++ b/backend/consts/const.py @@ -35,6 +35,9 @@ class VectorDatabaseType(str, Enum): UPLOAD_FOLDER = os.getenv('UPLOAD_FOLDER', 'uploads') ROOT_DIR = os.getenv("ROOT_DIR") +# Container-internal skills storage path +CONTAINER_SKILLS_PATH = os.getenv("SKILLS_PATH") + # Preview Configuration FILE_PREVIEW_SIZE_LIMIT = 100 * 1024 * 1024 # 100MB diff --git a/backend/consts/exceptions.py b/backend/consts/exceptions.py index 369c24aab..c4e01e5bb 100644 --- a/backend/consts/exceptions.py +++ b/backend/consts/exceptions.py @@ -195,6 +195,11 @@ class DataMateConnectionError(Exception): pass +class SkillException(Exception): + """Raised when skill operations fail.""" + pass + + # ==================== Legacy Aliases (same as above, for compatibility) ==================== # These are additional aliases that map to the same simple exception classes above. # They provide backward compatibility for code that uses these names. 
diff --git a/backend/consts/model.py b/backend/consts/model.py index 9e0b69d0f..2728d95ca 100644 --- a/backend/consts/model.py +++ b/backend/consts/model.py @@ -131,7 +131,6 @@ class GlobalConfig(BaseModel): class AgentRequest(BaseModel): query: str conversation_id: Optional[int] = None - is_set: Optional[bool] = False history: Optional[List[Dict]] = None # Complete list of attachment information minio_files: Optional[List[Dict[str, Any]]] = None @@ -277,6 +276,7 @@ class AgentInfoRequest(BaseModel): business_logic_model_name: Optional[str] = None business_logic_model_id: Optional[int] = None enabled_tool_ids: Optional[List[int]] = None + enabled_skill_ids: Optional[List[int]] = None related_agent_ids: Optional[List[int]] = None group_ids: Optional[List[int]] = None ingroup_permission: Optional[str] = None @@ -295,6 +295,18 @@ class ToolInstanceInfoRequest(BaseModel): version_no: int = 0 +class SkillInstanceInfoRequest(BaseModel): + """Request model for skill instance update. + + Note: skill_description and skill_content are no longer accepted. + These fields are now retrieved from ag_skill_info_t table. 
+ """ + skill_id: int + agent_id: int + enabled: bool = True + version_no: int = 0 + + class ToolInstanceSearchRequest(BaseModel): tool_id: int agent_id: int @@ -304,6 +316,7 @@ class ToolSourceEnum(Enum): LOCAL = "local" MCP = "mcp" LANGCHAIN = "langchain" + BUILTIN = "builtin" class ToolInfo(BaseModel): diff --git a/backend/database/agent_version_db.py b/backend/database/agent_version_db.py index b2877bdb1..4df0158a8 100644 --- a/backend/database/agent_version_db.py +++ b/backend/database/agent_version_db.py @@ -3,7 +3,7 @@ from sqlalchemy import select, insert, update, func from database.client import get_db_session, as_dict -from database.db_models import AgentInfo, ToolInstance, AgentRelation, AgentVersion +from database.db_models import AgentInfo, ToolInstance, AgentRelation, AgentVersion, SkillInstance logger = logging.getLogger("agent_version_db") @@ -370,6 +370,34 @@ def delete_relation_snapshot( return result.rowcount +def delete_skill_snapshot( + agent_id: int, + tenant_id: str, + version_no: int, + deleted_by: str = None, +) -> int: + """ + Delete all skill instance snapshots for a version (used when deleting a version) + Returns: number of rows affected + """ + with get_db_session() as session: + values = {'delete_flag': 'Y'} + if deleted_by: + values['updated_by'] = deleted_by + values['update_time'] = func.now() + result = session.execute( + update(SkillInstance) + .where( + SkillInstance.agent_id == agent_id, + SkillInstance.tenant_id == tenant_id, + SkillInstance.version_no == version_no, + SkillInstance.delete_flag == 'N', + ) + .values(**values) + ) + return result.rowcount + + def get_next_version_no( agent_id: int, tenant_id: str, @@ -410,4 +438,34 @@ def delete_version( ) rows_affected = result.rowcount logger.info(f"Delete version result: rows_affected={rows_affected} for agent_id={agent_id}, tenant_id={tenant_id}, version_no={version_no}") - return rows_affected \ No newline at end of file + return rows_affected + + +# ============== Skill 
Instance Snapshot Functions ============== + +def query_skill_instances_snapshot( + agent_id: int, + tenant_id: str, + version_no: int, +) -> List[dict]: + """ + Query skill instances snapshot for a specific version. + """ + with get_db_session() as session: + skills = session.query(SkillInstance).filter( + SkillInstance.agent_id == agent_id, + SkillInstance.tenant_id == tenant_id, + SkillInstance.version_no == version_no, + SkillInstance.delete_flag == 'N', + ).all() + return [as_dict(s) for s in skills] + + +def insert_skill_snapshot( + skill_data: dict, +) -> None: + """ + Insert skill instance snapshot. + """ + with get_db_session() as session: + session.execute(insert(SkillInstance).values(**skill_data)) \ No newline at end of file diff --git a/backend/database/client.py b/backend/database/client.py index 37e5dba03..7f54532bf 100644 --- a/backend/database/client.py +++ b/backend/database/client.py @@ -268,10 +268,19 @@ def get_db_session(db_session=None): def as_dict(obj): + from datetime import datetime # Handle SQLAlchemy ORM objects (both TableBase and other DeclarativeBase subclasses) if hasattr(obj, '__class__') and hasattr(obj.__class__, '__mapper__'): - return {c.key: getattr(obj, c.key) for c in class_mapper(obj.__class__).columns} + result = {} + for c in class_mapper(obj.__class__).columns: + value = getattr(obj, c.key) + # Convert datetime to ISO format string for JSON serialization + if isinstance(value, datetime): + result[c.key] = value.isoformat() + else: + result[c.key] = value + return result # noinspection PyProtectedMember return dict(obj._mapping) diff --git a/backend/database/db_models.py b/backend/database/db_models.py index 80dcc87eb..a1b28334c 100644 --- a/backend/database/db_models.py +++ b/backend/database/db_models.py @@ -512,3 +512,58 @@ class UserTokenUsageLog(TableBase): call_function_name = Column(String(100), doc="API function name being called") related_id = Column(Integer, doc="Related resource ID (e.g., conversation_id)") 
meta_data = Column(JSONB, doc="Additional metadata for this usage log entry, stored as JSON") + + +class SkillInfo(TableBase): + """ + Skill information table - stores skill metadata and content. + """ + __tablename__ = "ag_skill_info_t" + __table_args__ = {"schema": SCHEMA} + + skill_id = Column(Integer, Sequence("ag_skill_info_t_skill_id_seq", schema=SCHEMA), + primary_key=True, nullable=False, autoincrement=True, doc="Skill ID") + skill_name = Column(String(100), nullable=False, unique=True, doc="Unique skill name") + skill_description = Column(String(1000), doc="Skill description") + skill_tags = Column(JSON, doc="Skill tags as JSON array") + skill_content = Column(Text, doc="Skill content in markdown format") + params = Column(JSON, doc="Skill configuration parameters as JSON object") + source = Column(String(30), nullable=False, default="official", + doc="Skill source: official, custom, etc.") + + +class SkillToolRelation(TableBase): + """ + Skill-Tool relation table - many-to-many relationship between skills and tools. + """ + __tablename__ = "ag_skill_tools_rel_t" + __table_args__ = {"schema": SCHEMA} + + rel_id = Column(Integer, Sequence("ag_skill_tools_rel_t_rel_id_seq", schema=SCHEMA), + primary_key=True, nullable=False, autoincrement=True, doc="Relation ID") + skill_id = Column(Integer, nullable=False, doc="Foreign key to ag_skill_info_t.skill_id") + tool_id = Column(Integer, nullable=False, doc="Foreign key to ag_tool_info_t.tool_id") + + +class SkillInstance(TableBase): + """ + Skill instance table - stores per-agent skill configuration. + Similar to ToolInstance, stores skill settings for each agent version. + Note: skill_description and skill_content removed - these are now retrieved from ag_skill_info_t. 
+ """ + __tablename__ = "ag_skill_instance_t" + __table_args__ = {"schema": SCHEMA} + + skill_instance_id = Column( + Integer, + Sequence("ag_skill_instance_t_skill_instance_id_seq", schema=SCHEMA), + primary_key=True, + nullable=False, + doc="Skill instance ID" + ) + skill_id = Column(Integer, nullable=False, doc="Foreign key to ag_skill_info_t.skill_id") + agent_id = Column(Integer, nullable=False, doc="Agent ID") + user_id = Column(String(100), doc="User ID") + tenant_id = Column(String(100), doc="Tenant ID") + enabled = Column(Boolean, default=True, doc="Whether this skill is enabled for the agent") + version_no = Column(Integer, default=0, primary_key=True, nullable=False, doc="Version number. 0 = draft/editing state, >=1 = published snapshot") diff --git a/backend/database/skill_db.py b/backend/database/skill_db.py new file mode 100644 index 000000000..a6a483af4 --- /dev/null +++ b/backend/database/skill_db.py @@ -0,0 +1,447 @@ +"""Skill instance and skill info database operations.""" + +import json +import logging +from datetime import datetime +from typing import Any, Dict, List, Optional + +from sqlalchemy import update as sa_update + +from database.client import get_db_session, filter_property, as_dict +from database.db_models import SkillInfo, SkillToolRelation, SkillInstance, ToolInfo +from utils.skill_params_utils import strip_params_comments_for_db + +logger = logging.getLogger(__name__) + + +def _params_value_for_db(raw: Any) -> Any: + """Strip UI/YAML comment metadata, then JSON round-trip for the DB JSON column.""" + if raw is None: + return None + stripped = strip_params_comments_for_db(raw) + return json.loads(json.dumps(stripped, default=str)) + + +def create_or_update_skill_by_skill_info(skill_info, tenant_id: str, user_id: str, version_no: int = 0): + """ + Create or update a SkillInstance in the database. + Default version_no=0 operates on the draft version. 
+ + Args: + skill_info: Dictionary or object containing skill instance information + tenant_id: Tenant ID for filtering, mandatory + user_id: User ID for updating (will be set as the last updater) + version_no: Version number to filter. Default 0 = draft/editing state + + Returns: + Created or updated SkillInstance object + """ + skill_info_dict = skill_info.__dict__ if hasattr(skill_info, '__dict__') else skill_info + skill_info_dict = skill_info_dict.copy() + skill_info_dict.setdefault("tenant_id", tenant_id) + skill_info_dict.setdefault("user_id", user_id) + skill_info_dict.setdefault("version_no", version_no) + skill_info_dict.setdefault("created_by", user_id) + skill_info_dict.setdefault("updated_by", user_id) + + with get_db_session() as session: + query = session.query(SkillInstance).filter( + SkillInstance.tenant_id == tenant_id, + SkillInstance.agent_id == skill_info_dict.get('agent_id'), + SkillInstance.delete_flag != 'Y', + SkillInstance.skill_id == skill_info_dict.get('skill_id'), + SkillInstance.version_no == version_no + ) + skill_instance = query.first() + + if skill_instance: + for key, value in skill_info_dict.items(): + if hasattr(skill_instance, key): + setattr(skill_instance, key, value) + else: + new_skill_instance = SkillInstance( + **filter_property(skill_info_dict, SkillInstance)) + session.add(new_skill_instance) + session.flush() + skill_instance = new_skill_instance + + return as_dict(skill_instance) + + +def query_skill_instances_by_agent_id(agent_id: int, tenant_id: str, version_no: int = 0): + """Query all SkillInstance for an agent (regardless of enabled status).""" + with get_db_session() as session: + query = session.query(SkillInstance).filter( + SkillInstance.tenant_id == tenant_id, + SkillInstance.agent_id == agent_id, + SkillInstance.version_no == version_no, + SkillInstance.delete_flag != 'Y') + skill_instances = query.all() + return [as_dict(skill_instance) for skill_instance in skill_instances] + + +def 
query_enabled_skill_instances(agent_id: int, tenant_id: str, version_no: int = 0): + """Query enabled SkillInstance in the database.""" + with get_db_session() as session: + query = session.query(SkillInstance).filter( + SkillInstance.tenant_id == tenant_id, + SkillInstance.version_no == version_no, + SkillInstance.delete_flag != 'Y', + SkillInstance.enabled, + SkillInstance.agent_id == agent_id) + skill_instances = query.all() + return [as_dict(skill_instance) for skill_instance in skill_instances] + + +def query_skill_instance_by_id(agent_id: int, skill_id: int, tenant_id: str, version_no: int = 0): + """Query SkillInstance in the database by agent_id and skill_id.""" + with get_db_session() as session: + query = session.query(SkillInstance).filter( + SkillInstance.tenant_id == tenant_id, + SkillInstance.agent_id == agent_id, + SkillInstance.skill_id == skill_id, + SkillInstance.version_no == version_no, + SkillInstance.delete_flag != 'Y') + skill_instance = query.first() + if skill_instance: + return as_dict(skill_instance) + else: + return None + + +def search_skills_for_agent(agent_id: int, tenant_id: str, version_no: int = 0): + """Query enabled skills for an agent with skill content from SkillInstance.""" + with get_db_session() as session: + query = session.query(SkillInstance).filter( + SkillInstance.agent_id == agent_id, + SkillInstance.tenant_id == tenant_id, + SkillInstance.version_no == version_no, + SkillInstance.delete_flag != 'Y', + SkillInstance.enabled + ) + + skill_instances = query.all() + return [as_dict(skill_instance) for skill_instance in skill_instances] + + +def delete_skills_by_agent_id(agent_id: int, tenant_id: str, user_id: str, version_no: int = 0): + """Delete all skill instances for an agent.""" + with get_db_session() as session: + session.query(SkillInstance).filter( + SkillInstance.agent_id == agent_id, + SkillInstance.tenant_id == tenant_id, + SkillInstance.version_no == version_no + ).update({ + SkillInstance.delete_flag: 'Y', 
'updated_by': user_id + }) + + +def delete_skill_instances_by_skill_id(skill_id: int, user_id: str): + """Soft delete all skill instances for a specific skill. + + This is called when a skill is deleted to clean up associated skill instances. + + Args: + skill_id: ID of the skill to delete instances for + user_id: User ID for the updated_by field + """ + with get_db_session() as session: + session.query(SkillInstance).filter( + SkillInstance.skill_id == skill_id, + SkillInstance.delete_flag != 'Y' + ).update({ + SkillInstance.delete_flag: 'Y', + 'updated_by': user_id + }) + + +# ============== SkillInfo Repository Functions ============== + + +def _get_tool_ids(session, skill_id: int) -> List[int]: + """Get tool IDs for a skill.""" + relations = session.query(SkillToolRelation).filter( + SkillToolRelation.skill_id == skill_id + ).all() + return [r.tool_id for r in relations] + + +def _to_dict(skill: SkillInfo) -> Dict[str, Any]: + """Convert SkillInfo to dict.""" + return { + "skill_id": skill.skill_id, + "name": skill.skill_name, + "description": skill.skill_description, + "tags": skill.skill_tags or [], + "content": skill.skill_content or "", + "params": skill.params if skill.params is not None else {}, + "source": skill.source, + "created_by": skill.created_by, + "create_time": skill.create_time.isoformat() if skill.create_time else None, + "updated_by": skill.updated_by, + "update_time": skill.update_time.isoformat() if skill.update_time else None, + } + + +def list_skills() -> List[Dict[str, Any]]: + """List all skills from database.""" + with get_db_session() as session: + skills = session.query(SkillInfo).filter( + SkillInfo.delete_flag != 'Y' + ).all() + results = [] + for s in skills: + result = _to_dict(s) + result["tool_ids"] = _get_tool_ids(session, s.skill_id) + results.append(result) + return results + + +def get_skill_by_name(skill_name: str) -> Optional[Dict[str, Any]]: + """Get skill by name.""" + with get_db_session() as session: + skill = 
session.query(SkillInfo).filter( + SkillInfo.skill_name == skill_name, + SkillInfo.delete_flag != 'Y' + ).first() + if skill: + result = _to_dict(skill) + result["tool_ids"] = _get_tool_ids(session, skill.skill_id) + return result + return None + + +def get_skill_by_id(skill_id: int) -> Optional[Dict[str, Any]]: + """Get skill by ID.""" + with get_db_session() as session: + skill = session.query(SkillInfo).filter( + SkillInfo.skill_id == skill_id, + SkillInfo.delete_flag != 'Y' + ).first() + if skill: + result = _to_dict(skill) + result["tool_ids"] = _get_tool_ids(session, skill.skill_id) + return result + return None + + +def create_skill(skill_data: Dict[str, Any]) -> Dict[str, Any]: + """Create a new skill.""" + with get_db_session() as session: + skill = SkillInfo( + skill_name=skill_data["name"], + skill_description=skill_data.get("description", ""), + skill_tags=skill_data.get("tags", []), + skill_content=skill_data.get("content", ""), + params=_params_value_for_db(skill_data.get("params")), + source=skill_data.get("source", "custom"), + created_by=skill_data.get("created_by"), + create_time=datetime.now(), + updated_by=skill_data.get("updated_by"), + update_time=datetime.now(), + ) + session.add(skill) + session.flush() + + skill_id = skill.skill_id + + tool_ids = skill_data.get("tool_ids", []) + if tool_ids: + for tool_id in tool_ids: + rel = SkillToolRelation( + skill_id=skill_id, + tool_id=tool_id, + create_time=datetime.now() + ) + session.add(rel) + + session.commit() + + result = _to_dict(skill) + result["tool_ids"] = tool_ids + return result + + +def update_skill( + skill_name: str, + skill_data: Dict[str, Any], + updated_by: Optional[str] = None, +) -> Dict[str, Any]: + """Update an existing skill. + + Args: + skill_name: Skill name (unique key). + skill_data: Business fields to update (description, content, tags, source, params, tool_ids). + updated_by: Actor user id from server-side auth; never taken from the HTTP request body. 
+ + Notes: + Uses a single Core UPDATE for ag_skill_info_t columns. Mixing ORM attribute assignment + with session.execute(update()) can let autoflush emit an UPDATE that overwrites JSON + params with stale in-memory values, so we avoid ORM writes for this row. + """ + with get_db_session() as session: + skill = session.query(SkillInfo).filter( + SkillInfo.skill_name == skill_name, + SkillInfo.delete_flag != "Y", + ).first() + + if not skill: + raise ValueError(f"Skill not found: {skill_name}") + + skill_id = skill.skill_id + now = datetime.now() + row_values: Dict[str, Any] = {"update_time": now} + if updated_by: + row_values["updated_by"] = updated_by + + if "description" in skill_data: + row_values["skill_description"] = skill_data["description"] + if "content" in skill_data: + row_values["skill_content"] = skill_data["content"] + if "tags" in skill_data: + row_values["skill_tags"] = skill_data["tags"] + if "source" in skill_data: + row_values["source"] = skill_data["source"] + if "params" in skill_data: + row_values["params"] = _params_value_for_db(skill_data["params"]) + + session.execute( + sa_update(SkillInfo) + .where( + SkillInfo.skill_id == skill_id, + SkillInfo.delete_flag != "Y", + ) + .values(**row_values) + ) + + if "tool_ids" in skill_data: + session.query(SkillToolRelation).filter( + SkillToolRelation.skill_id == skill_id + ).delete() + + for tool_id in skill_data["tool_ids"]: + rel = SkillToolRelation( + skill_id=skill_id, + tool_id=tool_id, + create_time=datetime.now() + ) + session.add(rel) + + session.commit() + + refreshed = session.query(SkillInfo).filter( + SkillInfo.skill_id == skill_id, + SkillInfo.delete_flag != "Y", + ).first() + if not refreshed: + raise ValueError(f"Skill not found after update: {skill_name}") + + result = _to_dict(refreshed) + result["tool_ids"] = skill_data.get( + "tool_ids", + _get_tool_ids(session, skill_id), + ) + return result + + +def delete_skill(skill_name: str, updated_by: Optional[str] = None) -> bool: + 
"""Soft delete a skill (mark as deleted). + + Args: + skill_name: Name of the skill to delete + updated_by: User ID of the user performing the delete + + Returns: + True if deleted successfully + """ + with get_db_session() as session: + skill = session.query(SkillInfo).filter( + SkillInfo.skill_name == skill_name + ).first() + + if not skill: + return False + + skill_id = skill.skill_id + skill.delete_flag = 'Y' + skill.update_time = datetime.now() + if updated_by: + skill.updated_by = updated_by + + session.query(SkillInstance).filter( + SkillInstance.skill_id == skill_id, + SkillInstance.delete_flag != 'Y' + ).update({ + SkillInstance.delete_flag: 'Y', + 'updated_by': updated_by + }) + + session.commit() + return True + + +def get_tool_names_by_ids(session, tool_ids: List[int]) -> List[str]: + """Get tool names from tool IDs.""" + if not tool_ids: + return [] + tools = session.query(ToolInfo.name).filter( + ToolInfo.tool_id.in_(tool_ids) + ).all() + return [t.name for t in tools] + + +def get_tool_ids_by_names(tool_names: List[str], tenant_id: str) -> List[int]: + """Get tool IDs from tool names. + + Args: + tool_names: List of tool names + tenant_id: Tenant ID + + Returns: + List of tool IDs + """ + if not tool_names: + return [] + with get_db_session() as session: + tools = session.query(ToolInfo.tool_id).filter( + ToolInfo.name.in_(tool_names), + ToolInfo.delete_flag != 'Y', + ToolInfo.author == tenant_id + ).all() + return [t.tool_id for t in tools] + + +def get_tool_names_by_skill_name(skill_name: str) -> List[str]: + """Get tool names for a skill by skill name. 
+ + Args: + skill_name: Name of the skill + + Returns: + List of tool names + """ + with get_db_session() as session: + skill = session.query(SkillInfo).filter( + SkillInfo.skill_name == skill_name, + SkillInfo.delete_flag != 'Y' + ).first() + if not skill: + return [] + tool_ids = _get_tool_ids(session, skill.skill_id) + return get_tool_names_by_ids(session, tool_ids) + + +def get_skill_with_tool_names(skill_name: str) -> Optional[Dict[str, Any]]: + """Get skill with tool names included.""" + with get_db_session() as session: + skill = session.query(SkillInfo).filter( + SkillInfo.skill_name == skill_name, + SkillInfo.delete_flag != 'Y' + ).first() + if skill: + result = _to_dict(skill) + tool_ids = _get_tool_ids(session, skill.skill_id) + result["tool_ids"] = tool_ids + result["allowed_tools"] = get_tool_names_by_ids(session, tool_ids) + return result + return None diff --git a/backend/prompts/managed_system_prompt_template_en.yaml b/backend/prompts/managed_system_prompt_template_en.yaml index 9c3a2799c..82fc4d982 100644 --- a/backend/prompts/managed_system_prompt_template_en.yaml +++ b/backend/prompts/managed_system_prompt_template_en.yaml @@ -119,11 +119,10 @@ system_prompt: |- 4. Use tool input parameters correctly, use keyword arguments, not dictionary format; 5. Avoid making too many tool calls in one round of conversation, as this will make the output format unpredictable; 6. Only call tools when needed, do not repeat calls with the same parameters; - 7. Only import from the following modules: {{authorized_imports}}; - 8. Use variable names to save function call results. In each intermediate step, you can use "print()" to save any important information you need. Saved information persists between code executions. The content printed by print() should be treated as a string, do not perform dictionary-related operations such as .get(), [] etc., to avoid type errors; - 9. Avoid using **if**, **for**, and other logic in example code, only call tools. 
Each action in the example is a deterministic event. If there are different conditions, you should provide examples for different conditions; - 10. Use keyword arguments for tool calls, such as: tool_name(param1="value1", param2="value2"); - 11. Don't give up! You are responsible for solving the task, not providing solution directions. + 7. Use variable names to save function call results. In each intermediate step, you can use "print()" to save any important information you need. Saved information persists between code executions. The content printed by print() should be treated as a string, do not perform dictionary-related operations such as .get(), [] etc., to avoid type errors; + 8. Avoid using **if**, **for**, and other logic in example code, only call tools. Each action in the example is a deterministic event. If there are different conditions, you should provide examples for different conditions; + 9. Use keyword arguments for tool calls, such as: tool_name(param1="value1", param2="value2"); + 10. Don't give up! You are responsible for solving the task, not providing solution directions. 
### Example Templates {{ few_shots }} diff --git a/backend/prompts/managed_system_prompt_template_zh.yaml b/backend/prompts/managed_system_prompt_template_zh.yaml index b89dcc405..c8f3e393a 100644 --- a/backend/prompts/managed_system_prompt_template_zh.yaml +++ b/backend/prompts/managed_system_prompt_template_zh.yaml @@ -1,152 +1,412 @@ system_prompt: |- + ### 基本信息 - 你是{{APP_NAME}},{{APP_DESCRIPTION}},现在是{{time|default('当前时间')}} + + 你是{{APP_NAME}},{{APP_DESCRIPTION}},现在是{{time|default('当前时间')}},用户ID为{{user_id}} + + {%- if memory_list and memory_list|length > 0 %} + ### 上下文记忆 + 基于之前的交互记录,以下是按作用域和重要程度排序的最相关记忆: + + {%- set level_order = ['tenant', 'user_agent', 'user', 'agent'] %} + {%- set memory_by_level = memory_list|groupby('memory_level') %} + {%- for level in level_order %} + {%- for group_level, memories in memory_by_level %} + {%- if group_level == level %} + + **{{ level|title }} 层级记忆:** + {%- for item in memories %} + - {{ item.memory }} `({{ "%.2f"|format(item.score|float) }})` + {%- endfor %} + {%- endif %} + {%- endfor %} + {%- endfor %} + + **记忆使用准则:** + 1. **冲突处理优先级**:当记忆信息存在矛盾时,严格按以下顺序处理: + - **最优**:在上述列表中位置靠前的记忆具有优先权 + - **次优**:当前对话内容与记忆直接冲突时,以当前对话为准 + - **次优**:相关度分数越高,表示记忆越可信 + + 2. **记忆整合最佳实践**: + - 自然地将相关记忆融入回答中,避免显式使用"根据记忆"、"根据上下文"或"根据交互记忆"等语言 + - 利用记忆信息调整回答的语调、方式和技术深度以适应用户 + - 让记忆指导您对用户偏好和上下文的理解 + + 3. **级别特定说明**: + - **tenant(租户级)**:组织层面的约束和政策(不可违背) + - **user_agent(用户-代理级)**:特定用户在代理中的交互模式和既定工作流程 + - **user(用户级)**:用户的个人偏好、技能水平和历史上下文 + - **agent(代理级)**:您的既定行为模式和能力特征,通常对所有用户共享(重要性最低) + {%- endif %} + + ### 核心职责 + {{ duty }} - + + + 请注意,你应该遵守以下原则: + 法律合规:严格遵守服务地区的所有法律法规; + 政治中立:不讨论任何国家的政治体制、领导人评价或敏感历史事件; + 安全防护:不响应涉及武器制造、危险行为、隐私窃取等内容的请求; + 伦理准则:拒绝仇恨言论、歧视性内容及任何违反普世价值观的请求。 + + + {%- if skills and skills|length > 0 %} + + ### 可用技能 + + + + 你拥有以下技能(Skills)。技能是预定义的专业能力模块,包含详细执行指南和可选的附加脚本。 + + + + + + {%- for skill in skills %} + + + + {{ skill.name }} + + {{ skill.description }} + + + + {%- endfor %} + + + + + + **技能使用流程**: + + 1. 
收到用户请求后,首先审视 `` 中每个技能的 description,判断是否有匹配的技能。 + + 2. **加载技能**:根据不同场景选择读取方式: + + - **首次加载**:调用 `read_skill_md("skill_name")` 读取技能的完整执行指南(默认读取 SKILL.md) + + - **精确读取**:如只需特定文件(如示例、参考文档),可指定 additional_files: + + ``` + + skill_content = read_skill_md("skill_name", ["examples.md", "reference/api_doc"]) + + print(skill_content) + + ``` + + 注意:当 additional_files 非空时,默认不再自动读取 SKILL.md,如需同时读取请显式指定。 + + - **加载技能配置**:如果技能需要读取配置变量,可先调用 `read_skill_config("skill_name")` 读取配置字符串,通过 `json.loads` 方法转化为配置字典,再从中获取所需值: + ``` + import json + config = json.loads(read_skill_config("skill_name")) + # 返回示例: {"key_a": {"key2": "value2"}, "others": {...}} + value = config["key1"]["key2"] + print(value) + ``` + + 3. **遵循技能指南**:技能内容注入后,严格按其中的步骤执行。不要跳过技能指南中的步骤,也不要用自行编写的代码替代技能定义的流程。 + + 4. **执行技能脚本**:如果技能指南中引用了附加脚本(形如 ``),使用以下格式调用: + + 代码: + + ``` + + # 参数使用 -- 前缀传递命令行参数 + # 布尔参数传 True 即可(如 --wait) + # 列表参数会自动展开(如 --names ["vm1", "vm2"] -> --names vm1 vm2) + result = run_skill_script("skill_name", "script_path", {"--param1": "value1", "--flag": True}) + + print(result) + + ``` + + 注意:只执行技能指南中明确声明的脚本路径,绝不自行构造脚本路径。 + + 5. **整合输出**:根据技能指南要求的输出格式,结合脚本执行结果生成最终回答。 + + 6. **引用场景处理**:当技能内容中出现引用标记或需要引用其他文件时,需要识别并再次调用 read_skill_md: + + - **引用模板识别**:注意技能内容中形如 `` 或自然语言式的引用声明(如"详见 examples.md"、"请参考 reference/api_doc") + + - **自动补全**:发现引用后,尝试读取被引用的文件获取更多信息 + + - **示例**: + + ``` + + # 技能内容提示"请参考 examples.md 获取详细示例" + + additional_info = read_skill_md("skill_name", ["examples.md"]) + + print(additional_info) + + ``` + + {%- endif %} + + + ### 执行流程 + 要解决任务,你必须通过一系列步骤向前规划,以'思考:'、'代码:'和'观察结果:'序列的循环进行: + 1. 思考: + - 确定需要使用哪些工具获取信息或行动 + {%- if memory_list and memory_list|length > 0 %} + - 合理参考之前交互中的上下文记忆信息 + {%- endif %} + - 解释你的决策逻辑和预期结果 + + 2. 代码: + - 用简单的Python编写代码 + - 遵循python代码规范和python语法 + - 根据格式规范正确调用工具 + - 考虑到代码执行与展示用户代码的区别,使用'代码:\n```\n'开头,并以'```'表达运行代码,使用'代码:\n```\n'开头,并以'```'表达展示代码 + - 注意运行的代码不会被用户看到,所以如果用户需要看到代码,你需要使用'代码:\n```\n'开头,并以'```'表达展示代码。 + 3. 
观察结果: + - 查看代码执行结果 - + + 在思考结束后,当你认为可以回答用户问题,那么可以不生成代码,直接生成最终回答给到用户并停止循环。 - + + + 生成最终回答时,你需要遵循以下规范: + 1. **Markdown格式要求**: + - 使用标准Markdown语法格式化输出,支持标题、列表、表格、代码块、链接等 + - 展示图片和视频使用链接方式,不需要外套代码块,格式:[链接文本](URL),图片格式:![alt文本](图片URL),视频格式: + - 段落之间使用单个空行分隔,避免多个连续空行 + - 数学公式使用标准Markdown格式:行内公式用 $公式$,块级公式用 $$公式$$ - + + + 2. **引用标记规范**(仅在使用了检索工具时): + - 引用标记格式必须严格为:`[[字母+数字]]`,例如:`[[a1]]`、`[[b2]]`、`[[c3]]` + - 字母部分必须是单个小写字母(a-e),数字部分必须是整数 + - 引用标记的字母和数字必须与检索工具的检索结果一一对应 + - 引用标记应紧跟在相关信息或句子之后,通常放在句末或段落末尾 + - 多个引用标记可以连续使用,例如:`[[a1]][[b2]]` + - **重要**:仅添加引用标记,不要添加链接、参考文献列表等多余内容 + - 如果检索结果中没有匹配的引用,则不显示该引用标记 - + + + 3. **格式细节要求**: + - 避免在Markdown中使用HTML标签,优先使用Markdown原生语法 + - 代码块中的代码应保持原始格式,不要添加额外的转义字符 + - 若未使用检索工具,则不添加任何引用标记 - + + + 注意最后生成的回答要语义连贯,信息清晰,可读性高。 - + + + ### 可用资源 + {%- if tools and tools.values() | list %} + - 你只能使用以下工具,不得使用任何其他工具: + {%- for tool in tools.values() %} + - {{ tool.name }}: {{ tool.description }} + 接受输入: {{tool.inputs}} + 返回输出类型: {{tool.output_type}} + {%- endfor %} + + {%- if knowledge_base_summary %} + - knowledge_base_search工具只能使用以下知识库索引,请根据用户问题选择最相关的一个或多个知识库索引: + {{ knowledge_base_summary }} + {%- endif %} + {%- else %} + - 当前没有可用的工具 + {%- endif %} - + + + + {%- if skills and skills|length > 0 %} + + - 你拥有上述 `` 中列出的技能。技能中引用的脚本通过 `run_skill_script()` 函数调用,该函数由平台提供,不需要导入。 + + + + ### 技能使用要求 + + 1. **技能优先**:如果用户请求匹配了某个技能的 description,必须先调用 `read_skill_md()` 加载技能指南,再按指南执行。不得跳过技能自行编写代码解决。 + + 2. **忠实执行**:读取技能内容后,严格按技能指南中的步骤操作。不要自行修改流程、跳过步骤或用通用代码替代技能定义的流程。 + + 3. **脚本调用规范**:只使用 `run_skill_script` 工具执行技能指南中明确要求的脚本。传入的 `skill_name` 和 `script_path` 必须与技能指南中的声明完全一致,不要自行拼接或猜测路径。 + + 4. **失败回退**:如果 `read_skill_md` 返回错误或 `run_skill_script` 执行失败,向用户说明情况,并尝试用通用推理模式提供替代方案。 + + 5. **技能组合**:如果一个任务需要多个技能配合,按逻辑依赖顺序依次加载和执行,前一个技能的输出可作为后一个技能的输入。 + + + {%- else %} + + - 当前没有可用的技能 + + {%- endif %} + + ### 资源使用要求 + {{ constraint }} + ### python代码规范 + 1. 
如果认为是需要执行的代码,代码内容以'代码:\n```\n'开头,并以'```'标识符结尾。如果是不需要执行仅用于展示的代码,代码内容以'代码:\n```\n'开头,并以'```'标识符结尾,其中语言类型例如python、java、javascript等; + 2. 只使用已定义的变量,变量将在多次调用之间持续保持; + 3. 使用“print()”函数让下一次的模型调用看到对应变量信息; + 4. 正确使用工具的入参,使用关键字参数,不要用字典形式; + 5. 避免在一轮对话中进行过多的工具调用,这会导致输出格式难以预测; + 6. 只在需要时调用工具,不重复相同参数的调用; - 7. 只能从以下模块导入:{{authorized_imports}}; - 8. 使用变量名保存函数调用结果,在每个中间步骤中,您可以使用“print()”来保存您需要的任何重要信息。被保存的信息在代码执行之间保持。print()输出的内容应被视为字符串,不要对其进行字典相关操作如.get()、[]等,避免类型错误; + + 7. 使用变量名保存函数调用结果,在每个中间步骤中,您可以使用“print()”来保存您需要的任何重要信息。被保存的信息在代码执行之间保持。print()输出的内容应被视为字符串,不要对其进行字典相关操作如.get()、[]等,避免类型错误; + 9. 示例中的代码避免出现**if**、**for**等逻辑,仅调用工具,示例中的每一次的行动都是确定事件。如果有不同的条件,你应该给出不同条件下的示例; + 10. 工具调用使用关键字参数,如:tool_name(param1="value1", param2="value2"); + 11. 不要放弃!你负责解决任务,而不是提供解决方向。 + ### 示例模板 + {{ few_shots }} + 现在开始!如果你正确解决任务,你将获得100万美元的奖励。 managed_agent: + task: |- + 你是一个名为'{{name}}'的助手。 + 你的管理者给你提交了这个任务。 + --- + 任务: + {{task}} + --- + 你正在帮助你的管理者解决一个更大的任务:所以确保不要提供一行答案,而是提供尽可能多的信息,让他们清楚地理解答案。 + 即使你的任务解决不成功,也请返回尽可能多的上下文,这样你的管理者可以根据这个反馈采取行动。 + report: |- + {{final_answer}} planning: + initial_plan: |- update_plan_pre_messages: |- @@ -155,6 +415,7 @@ planning: final_answer: + pre_messages: |- - post_messages: |- \ No newline at end of file + post_messages: |- diff --git a/backend/prompts/manager_system_prompt_template_en.yaml b/backend/prompts/manager_system_prompt_template_en.yaml index 8da048bfe..aa9e9fc80 100644 --- a/backend/prompts/manager_system_prompt_template_en.yaml +++ b/backend/prompts/manager_system_prompt_template_en.yaml @@ -147,12 +147,11 @@ system_prompt: |- 4. Use tool/agent input parameters correctly, use keyword arguments, not dictionary format; 5. Avoid making too many tool/agent calls in one round of conversation, as this will make the output format unpredictable; 6. Only call tools/agents when needed, do not repeat calls with the same parameters; - 7. Only import from the following modules: {{authorized_imports}}; - 8. Use variable names to save function call results. 
In each intermediate step, you can use "print()" to save any important information you need. The saved information persists between code executions. The content printed by print() should be treated as a string, do not perform dictionary-related operations such as .get(), [] etc., to avoid type errors; - 9. Avoid **if**, **for** and other logic in example code, only call tools/agents. Each action in the example is a deterministic event. If there are different conditions, you should provide examples under different conditions; - 10. Tool calls use keyword arguments, such as: tool_name(param1="value1", param2="value2"); - 11. Agent calls must use task parameter, such as: agent_name(task="task description"); - 12. Don't give up! You are responsible for solving the task, not providing solution directions. + 7. Use variable names to save function call results. In each intermediate step, you can use "print()" to save any important information you need. The saved information persists between code executions. The content printed by print() should be treated as a string, do not perform dictionary-related operations such as .get(), [] etc., to avoid type errors; + 8. Avoid **if**, **for** and other logic in example code, only call tools/agents. Each action in the example is a deterministic event. If there are different conditions, you should provide examples under different conditions; + 9. Tool calls use keyword arguments, such as: tool_name(param1="value1", param2="value2"); + 10. Agent calls must use task parameter, such as: agent_name(task="task description"); + 11. Don't give up! You are responsible for solving the task, not providing solution directions. 
### Example Templates {{ few_shots }} diff --git a/backend/prompts/manager_system_prompt_template_zh.yaml b/backend/prompts/manager_system_prompt_template_zh.yaml index 8effcd54a..3829c1439 100644 --- a/backend/prompts/manager_system_prompt_template_zh.yaml +++ b/backend/prompts/manager_system_prompt_template_zh.yaml @@ -1,6 +1,6 @@ system_prompt: |- ### 基本信息 - 你是{{APP_NAME}},{{APP_DESCRIPTION}}, 现在是{{time|default('当前时间')}} + 你是{{APP_NAME}},{{APP_DESCRIPTION}},现在是{{time|default('当前时间')}},用户ID为{{user_id}} {%- if memory_list and memory_list|length > 0 %} ### 上下文记忆 @@ -40,13 +40,73 @@ system_prompt: |- ### 核心职责 {{ duty }} - + 请注意,你应该遵守以下原则: 法律合规:严格遵守服务地区的所有法律法规; 政治中立:不讨论任何国家的政治体制、领导人评价或敏感历史事件; 安全防护:不响应涉及武器制造、危险行为、隐私窃取等内容的请求; 伦理准则:拒绝仇恨言论、歧视性内容及任何违反普世价值观的请求。 + {%- if skills and skills|length > 0 %} + ### 可用技能 + + 你拥有以下技能(Skills)。技能是预定义的专业能力模块,包含详细执行指南和可选的附加脚本。 + + + {%- for skill in skills %} + + {{ skill.name }} + {{ skill.description }} + + {%- endfor %} + + + **技能使用流程**: + 1. 收到用户请求后,首先审视 `` 中每个技能的 description,判断是否有匹配的技能。 + 2. **加载技能**:根据不同场景选择读取方式: + - **首次加载**:调用 `read_skill_md("skill_name")` 读取技能的完整执行指南(默认读取 SKILL.md) + - **精确读取**:如只需特定文件(如示例、参考文档),可指定 additional_files: + ``` + skill_content = read_skill_md("skill_name", ["examples.md", "reference/api_doc"]) + print(skill_content) + ``` + 注意:当 additional_files 非空时,默认不再自动读取 SKILL.md,如需同时读取请显式指定。 + + - **加载技能配置**:如果技能需要读取配置变量,可先调用 `read_skill_config("skill_name")` 读取配置字符串,通过 `json.loads` 方法转化为配置字典,再从中获取所需值: + ``` + import json + config = json.loads(read_skill_config("skill_name")) + # 返回示例: {"key_a": {"key2": "value2"}, "others": {...}} + value = config["key1"]["key2"] + print(value) + ``` + + 3. **遵循技能指南**:技能内容注入后,严格按其中的步骤执行。不要跳过技能指南中的步骤,也不要用自行编写的代码替代技能定义的流程。 + + 4. 
**执行技能脚本**:如果技能指南中引用了附加脚本(形如 ``),使用以下格式调用: + 代码: + ``` + # 参数使用 -- 前缀传递命令行参数 + # 布尔参数传 True 即可(如 --wait) + # 列表参数会自动展开(如 --names ["vm1", "vm2"] -> --names vm1 vm2) + result = run_skill_script("skill_name", "script_path", {"--param1": "value1", "--flag": True}) + print(result) + ``` + 注意:只执行技能指南中明确声明的脚本路径,绝不自行构造脚本路径。 + + 5. **整合输出**:根据技能指南要求的输出格式,结合脚本执行结果生成最终回答。 + + 6. **引用场景处理**:当技能内容中出现引用标记或需要引用其他文件时,需要识别并再次调用 read_skill_md: + - **引用模板识别**:注意技能内容中形如 `` 或自然语言式的引用声明(如"详见 examples.md"、"请参考 reference/api_doc") + - **自动补全**:发现引用后,尝试读取被引用的文件获取更多信息 + - **示例**: + ``` + # 技能内容提示"请参考 examples.md 获取详细示例" + additional_info = read_skill_md("skill_name", ["examples.md"]) + print(additional_info) + ``` + {%- endif %} + ### 执行流程 要解决任务,你必须通过一系列步骤向前规划,以'思考:'、'代码:'和'观察结果:'序列的循环进行: @@ -68,16 +128,16 @@ system_prompt: |- 3. 观察结果: - 查看代码执行结果 - 根据结果决定下一步行动 - + 在思考结束后,当你认为可以回答用户问题,那么可以不生成代码,直接生成最终回答给到用户并停止循环。 - + 生成最终回答时,你需要遵循以下规范: 1. Markdown格式要求: - 使用标准Markdown语法格式化输出,支持标题、列表、表格、代码块、链接等 - 展示图片和视频使用链接方式,不需要外套代码块,格式:[链接文本](URL),图片格式:![alt文本](图片URL),视频格式: - 段落之间使用单个空行分隔,避免多个连续空行 - 数学公式使用标准Markdown格式:行内公式用 $公式$,块级公式用 $$公式$$ - + 2. 引用标记规范(仅在使用了检索工具时): - 引用标记格式必须严格为:`[[字母+数字]]`,例如:`[[a1]]`、`[[b2]]`、`[[c3]]` - 字母部分必须是单个小写字母(a-e),数字部分必须是整数 @@ -86,12 +146,12 @@ system_prompt: |- - 多个引用标记可以连续使用,例如:`[[a1]][[b2]]` - **重要**:仅添加引用标记,不要添加链接、参考文献列表等多余内容 - 如果检索结果中没有匹配的引用,则不显示该引用标记 - + 3. 格式细节要求: - 避免在Markdown中使用HTML标签,优先使用Markdown原生语法 - 代码块中的代码应保持原始格式,不要添加额外的转义字符 - 若未使用检索工具,则不添加任何引用标记 - + ### 可用资源 你只能使用以下资源,不得使用任何其他工具或助手: @@ -136,10 +196,24 @@ system_prompt: |- {%- else %} - 当前没有可用的助手 {%- endif %} - + + 3. 技能 + {%- if skills and skills|length > 0 %} + - 你拥有上述 `` 中列出的技能。技能中引用的脚本通过 `run_skill_script()` 函数调用,该函数由平台提供,不需要导入。 + + ### 技能使用要求 + 1. **技能优先**:如果用户请求匹配了某个技能的 description,必须先调用 `read_skill_md()` 加载技能指南,再按指南执行。不得跳过技能自行编写代码解决。 + 2. **忠实执行**:读取技能内容后,严格按技能指南中的步骤操作。不要自行修改流程、跳过步骤或用通用代码替代技能定义的流程。 + 3. 
**脚本调用规范**:只使用 `run_skill_script` 工具执行技能指南中明确要求的脚本。传入的 `skill_name` 和 `script_path` 必须与技能指南中的声明完全一致,不要自行拼接或猜测路径。 + 4. **失败回退**:如果 `read_skill_md` 返回错误或 `run_skill_script` 执行失败,向用户说明情况,并尝试用通用推理模式提供替代方案。 + 5. **技能组合**:如果一个任务需要多个技能配合,按逻辑依赖顺序依次加载和执行,前一个技能的输出可作为后一个技能的输入。 + {%- else %} + - 当前没有可用的技能 + {%- endif %} + ### 资源使用要求 {{ constraint }} - + ### python代码规范 1. 如果认为是需要执行的代码,代码内容以'代码:\n```\n'开头,并以'```'标识符结尾。如果是不需要执行仅用于展示的代码,代码内容以'代码:\n```\n'开头,并以'```'标识符结尾,其中语言类型例如python、java、javascript等; 2. 只使用已定义的变量,变量将在多次调用之间持续保持; @@ -147,8 +221,7 @@ system_prompt: |- 4. 正确使用工具/助手的入参,使用关键字参数,不要用字典形式; 5. 避免在一轮对话中进行过多的工具/助手调用,这会导致输出格式难以预测; 6. 只在需要时调用工具/助手,不重复相同参数的调用; - 7. 只能从以下模块导入:{{authorized_imports}}; - 8. 使用变量名保存函数调用结果,在每个中间步骤中,您可以使用“print()”来保存您需要的任何重要信息。被保存的信息在代码执行之间保持。print()输出的内容应被视为字符串,不要对其进行字典相关操作如.get()、[]等,避免类型错误; + 7. 使用变量名保存函数调用结果,在每个中间步骤中,您可以使用“print()”来保存您需要的任何重要信息。被保存的信息在代码执行之间保持。print()输出的内容应被视为字符串,不要对其进行字典相关操作如.get()、[]等,避免类型错误; 9. 示例中的代码避免出现**if**、**for**等逻辑,仅调用工具/助手,示例中的每一次的行动都是确定事件。如果有不同的条件,你应该给出不同条件下的示例; 10. 工具调用使用关键字参数,如:tool_name(param1="value1", param2="value2"); 11. 助手调用必须使用task参数,如:assistant_name(task="任务描述"); @@ -177,7 +250,7 @@ managed_agent: planning: initial_plan: |- - + update_plan_pre_messages: |- update_plan_post_messages: |- @@ -186,4 +259,4 @@ planning: final_answer: pre_messages: |- - post_messages: |- \ No newline at end of file + post_messages: |- diff --git a/backend/prompts/utils/prompt_generate_en.yaml b/backend/prompts/utils/prompt_generate_en.yaml index 499d3c4ba..7f55becd3 100644 --- a/backend/prompts/utils/prompt_generate_en.yaml +++ b/backend/prompts/utils/prompt_generate_en.yaml @@ -68,14 +68,13 @@ FEW_SHOTS_SYSTEM_PROMPT: |- 4. Use tool/assistant input parameters correctly, use keyword arguments, not dictionary format; 5. Avoid making too many tool calls in one round of conversation, as this will make the output format unpredictable; 6. Only call tools/assistants when needed, do not repeat calls with the same parameters; - 7. 
Only import from the following modules: {{authorized_imports}}; - 8. Use variable names to save function call results. In each intermediate step, you can use "print()" to save any important information you need. Saved information persists between code executions; - 9. Avoid **if**, **for** and other logic in example code, only call tools/assistants. Each action in examples should be a determined event. If there are different conditions, you should provide examples for different conditions; - 10. Tool calls use keyword arguments, such as: tool_name(param1="value1", param2="value2"); - 11. Assistant calls must use "task" as the parameter name, such as: assistant_name(task="task description"). + 7. Use variable names to save function call results. In each intermediate step, you can use "print()" to save any important information you need. Saved information persists between code executions; + 8. Avoid **if**, **for** and other logic in example code, only call tools/assistants. Each action in examples should be a determined event. If there are different conditions, you should provide examples for different conditions; + 9. Tool calls use keyword arguments, such as: tool_name(param1="value1", param2="value2"); + 10. Assistant calls must use "task" as the parameter name, such as: assistant_name(task="task description"). ### Compliant Examples: - Task 1: "Introduce the Oriental Pearl Tower" + Task 1: "Introduce the Oriental Pearl Tower"+ Think: I will first use the knowledge_base_search tool to find if there is relevant information in the local knowledge base. Code: diff --git a/backend/prompts/utils/prompt_generate_zh.yaml b/backend/prompts/utils/prompt_generate_zh.yaml index bc7122bdf..d513bc860 100644 --- a/backend/prompts/utils/prompt_generate_zh.yaml +++ b/backend/prompts/utils/prompt_generate_zh.yaml @@ -67,8 +67,7 @@ FEW_SHOTS_SYSTEM_PROMPT: |- 4. 正确使用工具/助手的入参,使用关键字参数,不要用字典形式; 5. 避免在一轮对话中进行过多的工具调用,这会导致输出格式难以预测; 6. 只在需要时调用工具/助手,不重复相同参数的调用; - 7. 
只能从以下模块导入:{{authorized_imports}}; - 8. 使用变量名保存函数调用结果,在每个中间步骤中,您可以使用“print()”来保存您需要的任何重要信息。被保存的信息在代码执行之间保持; + 7. 使用变量名保存函数调用结果,在每个中间步骤中,您可以使用“print()”来保存您需要的任何重要信息。被保存的信息在代码执行之间保持; 9. 示例中的代码避免出现**if**、**for**等逻辑,仅调用工具/助手,示例中的每一次的行动都是确定事件。如果有不同的条件,你应该给出不同条件下的示例; 10. 工具调用使用关键字参数,如:tool_name(param1="value1", param2="value2"); 11. 助手调用必须使用"task"作为参数名,如:assistant_name(task="任务描述")。 diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 65e27107a..04b94589c 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -12,11 +12,12 @@ dependencies = [ "supabase>=2.18.1", "websocket-client>=1.8.0", "pyyaml>=6.0.2", + "ruamel-yaml==0.19.1", "redis>=5.0.0", "fastmcp==2.12.0", "langchain>=0.3.26", "scikit-learn>=1.0.0", - "numpy>=1.24.0" + "numpy>=1.24.0", ] [project.optional-dependencies] diff --git a/backend/services/agent_service.py b/backend/services/agent_service.py index c4a1de3ec..f7ac4bbd7 100644 --- a/backend/services/agent_service.py +++ b/backend/services/agent_service.py @@ -27,6 +27,7 @@ ExportAndImportAgentInfo, ExportAndImportDataFormat, MCPInfo, + SkillInstanceInfoRequest, ToolInstanceInfoRequest, ToolSourceEnum, ModelConnectStatusEnum ) @@ -57,6 +58,8 @@ query_tool_instances_by_agent_id, search_tools_for_sub_agent ) +from database import skill_db +from database.agent_version_db import query_version_list from database.group_db import query_group_ids_by_user from database.user_tenant_db import get_user_tenant_by_user_id from utils.str_utils import convert_list_to_string, convert_string_to_list @@ -613,12 +616,9 @@ async def _stream_agent_chunks( except Exception as run_exc: logger.error(f"Agent run error: {str(run_exc)}") # Emit an error chunk and terminate the stream immediately - try: - error_payload = json.dumps( - {"type": "error", "content": str(run_exc)}, ensure_ascii=False) - yield f"data: {error_payload}\n\n" - finally: - return + error_payload = json.dumps( + {"type": "error", "content": str(run_exc)}, ensure_ascii=False) + yield 
f"data: {error_payload}\n\n" finally: # Persist assistant messages for non-debug runs if not agent_request.is_debug: @@ -880,6 +880,55 @@ async def update_agent_info_impl(request: AgentInfoRequest, authorization: str = logger.error(f"Failed to update agent tools: {str(e)}") raise ValueError(f"Failed to update agent tools: {str(e)}") + # Handle enabled skills saving when provided + try: + if request.enabled_skill_ids is not None and agent_id is not None: + enabled_set = set(request.enabled_skill_ids) + # Query existing skill instances for this agent + existing_instances = skill_db.query_skill_instances_by_agent_id( + agent_id, tenant_id) + + # Handle unselected skill (already exist instance) -> enabled=False + for instance in existing_instances: + inst_skill_id = instance.get("skill_id") + if inst_skill_id is not None and inst_skill_id not in enabled_set: + skill_db.create_or_update_skill_by_skill_info( + skill_info=SkillInstanceInfoRequest( + skill_id=inst_skill_id, + agent_id=agent_id, + skill_description=instance.get("skill_description"), + skill_content=instance.get("skill_content"), + enabled=False + ), + tenant_id=tenant_id, + user_id=user_id + ) + + # Handle selected skill -> enabled=True (create or update) + for skill_id in enabled_set: + # Keep existing skill_description and skill_content if any + existing_instance = next( + (inst for inst in existing_instances + if inst.get("skill_id") == skill_id), + None + ) + skill_description = (existing_instance or {}).get("skill_description") + skill_content = (existing_instance or {}).get("skill_content") + skill_db.create_or_update_skill_by_skill_info( + skill_info=SkillInstanceInfoRequest( + skill_id=skill_id, + agent_id=agent_id, + skill_description=skill_description, + skill_content=skill_content, + enabled=True, + ), + tenant_id=tenant_id, + user_id=user_id + ) + except Exception as e: + logger.error(f"Failed to update agent skills: {str(e)}") + raise ValueError(f"Failed to update agent skills: {str(e)}") + # 
Handle related agents saving when provided try: if request.related_agent_ids is not None and agent_id is not None: @@ -930,6 +979,7 @@ async def delete_agent_impl(agent_id: int, tenant_id: str, user_id: str): delete_agent_by_id(agent_id, tenant_id, user_id) delete_agent_relationship(agent_id, tenant_id, user_id) delete_tools_by_agent_id(agent_id, tenant_id, user_id) + skill_db.delete_skills_by_agent_id(agent_id, tenant_id, user_id) # Clean up all memory data related to the agent await clear_agent_memory(agent_id, tenant_id, user_id) @@ -1953,6 +2003,26 @@ async def get_agent_id_by_name(agent_name: str, tenant_id: str) -> int: raise Exception("agent not found") +def get_agent_by_name_impl(agent_name: str, tenant_id: str) -> dict: + """ + Resolve agent id and latest published version by agent name. + + Returns: + dict with agent_id and latest_version_no (may be None) + """ + if not agent_name: + raise Exception("agent_name required") + try: + agent_id = search_agent_id_by_agent_name(agent_name, tenant_id) + versions = query_version_list(agent_id, tenant_id) + latest_version = versions[0]["version_no"] if versions else None + return {"agent_id": agent_id, "latest_version_no": latest_version} + except Exception as _: + logger.error( + f"Failed to find agent '{agent_name}' in tenant {tenant_id}") + raise Exception("agent not found") + + def delete_related_agent_impl(parent_agent_id: int, child_agent_id: int, tenant_id: str): """ Delete the relationship between a parent agent and its child agent diff --git a/backend/services/agent_version_service.py b/backend/services/agent_version_service.py index 554b3a6d1..be0b6a564 100644 --- a/backend/services/agent_version_service.py +++ b/backend/services/agent_version_service.py @@ -17,9 +17,11 @@ insert_agent_snapshot, insert_tool_snapshot, insert_relation_snapshot, + insert_skill_snapshot, delete_agent_snapshot, delete_tool_snapshot, delete_relation_snapshot, + delete_skill_snapshot, get_next_version_no, delete_version, 
SOURCE_TYPE_NORMAL, @@ -94,6 +96,22 @@ def publish_version_impl( _remove_audit_fields_for_insert(rel_snapshot) insert_relation_snapshot(rel_snapshot) + # Get skill instances from draft (version_no=0) + from database import skill_db as skill_db_module + skills_draft = skill_db_module.query_skill_instances_by_agent_id( + agent_id=agent_id, + tenant_id=tenant_id, + version_no=0 + ) + + # Insert skill instance snapshots + for skill in skills_draft: + skill_snapshot = skill.copy() + skill_snapshot.pop('version_no', None) + skill_snapshot['version_no'] = new_version_no + _remove_audit_fields_for_insert(skill_snapshot) + insert_skill_snapshot(skill_snapshot) + # Create version metadata version_data = { 'tenant_id': tenant_id, @@ -154,7 +172,7 @@ def get_version_detail_impl( ) -> dict: """ Get version detail including snapshot data, structured like agent info. - Returns agent info with tools, sub_agents, availability, etc. + Returns agent info with tools, sub_agents, skills, availability, etc. """ result: Dict[str, Any] = {} @@ -193,6 +211,16 @@ def get_version_detail_impl( # Extract sub_agent_id_list from relations result['sub_agent_id_list'] = [r['selected_agent_id'] for r in relations_snapshot] + # Get skill instances for this version (from ag_skill_instance_t with version_no) + from database import skill_db as skill_db_module + skills_snapshot = skill_db_module.query_skill_instances_by_agent_id( + agent_id=agent_id, + tenant_id=tenant_id, + version_no=version_no + ) + # Add enabled skills to result + result['skills'] = [s for s in skills_snapshot if s.get('enabled', True)] + # Get model name from model_id if result.get('model_id') is not None and result['model_id'] != 0: model_info = get_model_by_model_id(result['model_id']) @@ -379,7 +407,7 @@ def delete_version_impl( ) -> dict: """ Soft delete a version by setting delete_flag='Y' - Also soft deletes all related snapshot data (agent, tools, relations) for this version + Also soft deletes all related snapshot data 
(agent, tools, relations, skills) for this version """ # Check if version exists version = search_version_by_version_no(agent_id, tenant_id, version_no) @@ -431,6 +459,14 @@ def delete_version_impl( deleted_by=user_id, ) + # 4. Delete skill instance snapshots + delete_skill_snapshot( + agent_id=agent_id, + tenant_id=tenant_id, + version_no=version_no, + deleted_by=user_id, + ) + logger.info(f"Successfully deleted version {version_no} and all related snapshots for agent_id={agent_id}, tenant_id={tenant_id}") return {"message": f"Version {version_no} deleted successfully"} @@ -549,6 +585,17 @@ def compare_versions_impl( 'value_b': sub_agents_b_count, }) + # Compare skills count + skills_a_count = len(version_a.get('skills', [])) + skills_b_count = len(version_b.get('skills', [])) + if skills_a_count != skills_b_count: + differences.append({ + 'field': 'skills_count', + 'label': 'Skills Count', + 'value_a': skills_a_count, + 'value_b': skills_b_count, + }) + return { 'version_a': version_a, 'version_b': version_b, @@ -565,6 +612,8 @@ def _get_version_detail_or_draft( Get version detail for published versions, or draft data for version 0. Returns structured agent info similar to get_version_detail_impl. 
""" + from database import skill_db as skill_db_module + result: Dict[str, Any] = {} if version_no == 0: @@ -581,6 +630,15 @@ def _get_version_detail_or_draft( # Add tools (only enabled tools) result['tools'] = [t for t in tools_draft if t.get('enabled', True)] result['sub_agent_id_list'] = [r['selected_agent_id'] for r in relations_draft] + + # Get draft skill instances (version_no=0) + skills_draft = skill_db_module.query_skill_instances_by_agent_id( + agent_id=agent_id, + tenant_id=tenant_id, + version_no=0 + ) + result['skills'] = [s for s in skills_draft if s.get('enabled', True)] + result['version'] = { 'version_name': 'Draft', 'version_status': 'DRAFT', @@ -589,7 +647,7 @@ def _get_version_detail_or_draft( 'source_version_no': 0, } else: - # Get published version detail + # Get published version detail (already includes skills from get_version_detail_impl) result = get_version_detail_impl(agent_id, tenant_id, version_no) # Get model name from model_id diff --git a/backend/services/skill_service.py b/backend/services/skill_service.py new file mode 100644 index 000000000..cf47b4df4 --- /dev/null +++ b/backend/services/skill_service.py @@ -0,0 +1,1445 @@ +"""Skill management service.""" + +import io +import json +import logging +import os +from typing import Any, Dict, List, Optional, Union + +import yaml + +from nexent.skills import SkillManager +from nexent.skills.skill_loader import SkillLoader +from consts.const import CONTAINER_SKILLS_PATH, ROOT_DIR +from consts.exceptions import SkillException +from database import skill_db +from database.db_models import SkillInfo + +logger = logging.getLogger(__name__) + +_skill_manager: Optional[SkillManager] = None + + +def _normalize_zip_entry_path(name: str) -> str: + """Normalize a ZIP member path for comparison (slashes, strip ./).""" + norm = name.replace("\\", "/").strip() + while norm.startswith("./"): + norm = norm[2:] + return norm + + +def _find_zip_member_config_yaml( + file_list: List[str], + 
preferred_skill_root: Optional[str] = None, +) -> Optional[str]: + """Return the ZIP entry path for .../config/config.yaml (any depth; filename case-insensitive). + + If preferred_skill_root is set (usually the folder containing SKILL.md, e.g. zip root + ``my_skill/SKILL.md`` -> ``my_skill``), prefer ``/config/config.yaml``. + """ + suffix = "/config/config.yaml" + root_only = "config/config.yaml" + candidates: List[str] = [] + for name in file_list: + if name.endswith("/"): + continue + norm = _normalize_zip_entry_path(name) + if not norm: + continue + nlow = norm.lower() + if nlow == root_only or nlow.endswith(suffix): + candidates.append(name) + + if not candidates: + return None + + if preferred_skill_root: + pref = _normalize_zip_entry_path(preferred_skill_root) + if pref: + pref_low = pref.lower() + expected_suffix = f"{pref_low}/config/config.yaml" + for name in candidates: + if _normalize_zip_entry_path(name).lower() == expected_suffix: + return name + for name in candidates: + n = _normalize_zip_entry_path(name).lower() + if n.startswith(pref_low + "/"): + return name + + return candidates[0] + + +def _params_dict_to_storable(data: Dict[str, Any]) -> Dict[str, Any]: + """Ensure params are JSON-serializable for the database JSON column.""" + try: + return json.loads(json.dumps(data, default=str)) + except (TypeError, ValueError) as exc: + raise SkillException( + f"params from config/config.yaml cannot be stored: {exc}" + ) from exc + + +def _comment_text_from_token(tok: Any) -> Optional[str]: + """Normalize a ruamel CommentToken (or similar) to tooltip text after ``#``.""" + if tok is None: + return None + val = getattr(tok, "value", None) + if isinstance(val, str): + s = val.strip() + if s.startswith("#"): + return s[1:].strip() + return None + + +def _tuple_slot2(tok_container: Any) -> Any: + """Return ruamel per-key tuple slot index 2 (EOL / before-next-key comment token).""" + if not tok_container or len(tok_container) <= 2: + return None + return 
tok_container[2] + + +def _is_before_next_sibling_comment_token(tok: Any) -> bool: + """True if token is a comment line placed *above the next key* (starts with newline in ruamel).""" + if tok is None: + return False + val = getattr(tok, "value", None) + return isinstance(val, str) and val.startswith("\n") + + +def _flatten_ca_comment_to_text(comment_field: Any) -> Optional[str]: + """Join ``#`` lines from ``ca.comment`` (block header above first key in map or first list item).""" + if not comment_field: + return None + parts: List[str] = [] + if isinstance(comment_field, list): + for part in comment_field: + if part is None: + continue + if isinstance(part, list): + for tok in part: + t = _comment_text_from_token(tok) + if t: + parts.append(t) + else: + t = _comment_text_from_token(part) + if t: + parts.append(t) + if not parts: + return None + return " ".join(parts) + + +def _comment_from_map_block_header(cm: Any) -> Optional[str]: + """Lines above the first key in this ``CommentedMap`` (``ca.comment``).""" + ca = getattr(cm, "ca", None) + if not ca or not ca.comment: + return None + return _flatten_ca_comment_to_text(ca.comment) + + +def _tooltip_for_commented_map_key(cm: Any, ordered_keys: List[Any], index: int, key: Any) -> Optional[str]: + """Collect tooltip text: block header, line-above key, and same-line EOL ``#`` for one mapping key.""" + tips: List[str] = [] + if index == 0: + h = _comment_from_map_block_header(cm) + if h: + tips.append(h) + if index > 0: + prev_k = ordered_keys[index - 1] + ca = getattr(cm, "ca", None) + if ca and ca.items: + prev_tup = ca.items.get(prev_k) + tok = _tuple_slot2(prev_tup) if prev_tup else None + if _is_before_next_sibling_comment_token(tok): + t = _comment_text_from_token(tok) + if t: + tips.append(t) + ca = getattr(cm, "ca", None) + if ca and ca.items: + tup = ca.items.get(key) + tok = _tuple_slot2(tup) if tup else None + if tok is not None and not _is_before_next_sibling_comment_token(tok): + t = 
_comment_text_from_token(tok) + if t: + tips.append(t) + if not tips: + return None + return " ".join(tips) + + +def _tooltip_for_commented_seq_index(seq: Any, index: int) -> Optional[str]: + """Same rules as maps: ``ca.comment`` for item 0; slot 0 on previous item for 'line above next'.""" + tips: List[str] = [] + if index == 0: + ca = getattr(seq, "ca", None) + if ca and ca.comment: + h = _flatten_ca_comment_to_text(ca.comment) + if h: + tips.append(h) + if index > 0: + ca = getattr(seq, "ca", None) + if ca and ca.items: + prev_tup = ca.items.get(index - 1) + if prev_tup and len(prev_tup) > 0 and prev_tup[0] is not None: + tok = prev_tup[0] + if _is_before_next_sibling_comment_token(tok): + t = _comment_text_from_token(tok) + if t: + tips.append(t) + ca = getattr(seq, "ca", None) + if ca and ca.items: + tup = ca.items.get(index) + if tup: + tok = _tuple_slot2(tup) + if tok is not None and not _is_before_next_sibling_comment_token(tok): + t = _comment_text_from_token(tok) + if t: + tips.append(t) + if not tips: + return None + return " ".join(tips) + + +def _apply_inline_comment_to_scalar(val: Any, comment: Optional[str]) -> Any: + """Append `` # comment`` to scalars so the UI can show tooltips (same as frontend convention).""" + if not comment: + return val + if isinstance(val, str): + return f"{val} # {comment}" + if isinstance(val, (dict, list)): + return val + try: + encoded = json.dumps(val, ensure_ascii=False) + except (TypeError, ValueError): + encoded = str(val) + return f"{encoded} # {comment}" + + +def _commented_tree_to_plain(node: Any) -> Any: + """Turn ruamel CommentedMap/Seq into plain dict/list. + + YAML ``#`` comments are merged only into **scalar** values as ``value # tip`` (same as the UI). + Block / line-above-key comments attached to **mapping or list values** are not persisted (no ``_comment`` keys). 
+ """ + from ruamel.yaml.comments import CommentedMap, CommentedSeq + + if isinstance(node, CommentedMap): + ordered_keys = list(node.keys()) + out: Dict[str, Any] = {} + for i, k in enumerate(ordered_keys): + v = node[k] + plain_v = _commented_tree_to_plain(v) + tip = _tooltip_for_commented_map_key(node, ordered_keys, i, k) + if tip is not None and not isinstance(plain_v, (dict, list)): + plain_v = _apply_inline_comment_to_scalar(plain_v, tip) + out[k] = plain_v + return out + if isinstance(node, CommentedSeq): + out_list: List[Any] = [] + for i, v in enumerate(node): + plain_v = _commented_tree_to_plain(v) + tip = _tooltip_for_commented_seq_index(node, i) + if tip is not None and not isinstance(plain_v, (dict, list)): + plain_v = _apply_inline_comment_to_scalar(plain_v, tip) + out_list.append(plain_v) + return out_list + return node + + +def _parse_yaml_with_ruamel_merge_eol_comments(text: str) -> Dict[str, Any]: + """Parse YAML with ruamel; merge ``#`` into scalar values only (``value # tip`` for the UI). + + Does not inject ``_comment`` into nested objects; non-scalar-adjacent YAML comments are dropped. + """ + from ruamel.yaml import YAML + from ruamel.yaml.comments import CommentedMap + + # Round-trip loader preserves ``CommentedMap`` and comment tokens; ``safe`` returns plain dict. 
+ y = YAML(typ="rt") + try: + root = y.load(text) + except Exception as exc: + raise SkillException( + f"Invalid YAML in config/config.yaml: {exc}" + ) from exc + if root is None: + return {} + if isinstance(root, CommentedMap): + plain = _commented_tree_to_plain(root) + elif isinstance(root, dict): + plain = root + else: + raise SkillException( + "config/config.yaml must contain a JSON or YAML object (mapping), not a list or scalar" + ) + if not isinstance(plain, dict): + raise SkillException( + "config/config.yaml must contain a JSON or YAML object (mapping), not a list or scalar" + ) + return _params_dict_to_storable(plain) + + +def _parse_yaml_fallback_pyyaml(text: str) -> Dict[str, Any]: + """Parse YAML with PyYAML (comments are dropped).""" + try: + data = yaml.safe_load(text) + except yaml.YAMLError as exc: + raise SkillException( + f"Invalid JSON or YAML in config/config.yaml: {exc}" + ) from exc + if data is None: + return {} + if not isinstance(data, dict): + raise SkillException( + "config/config.yaml must contain a JSON or YAML object (mapping), not a list or scalar" + ) + return _params_dict_to_storable(data) + + +def _parse_skill_params_from_config_bytes(raw: bytes) -> Dict[str, Any]: + """Parse JSON or YAML from config/config.yaml bytes (DB upload path; scalar ``#`` tips merged when possible).""" + text = raw.decode("utf-8-sig").strip() + if not text: + return {} + try: + data = json.loads(text) + except json.JSONDecodeError: + try: + return _parse_yaml_with_ruamel_merge_eol_comments(text) + except ImportError: + logger.warning("ruamel.yaml not installed; YAML comments will be dropped on parse") + return _parse_yaml_fallback_pyyaml(text) + except SkillException: + raise + except Exception as exc: + logger.warning( + "ruamel YAML parse failed (%s); falling back to PyYAML", + exc, + ) + return _parse_yaml_fallback_pyyaml(text) + else: + if not isinstance(data, dict): + raise SkillException( + "config/config.yaml must contain a JSON or YAML object 
(mapping), not a list or scalar" + ) + return _params_dict_to_storable(data) + + +def _read_params_from_zip_config_yaml( + zip_bytes: bytes, + preferred_skill_root: Optional[str] = None, +) -> Optional[Dict[str, Any]]: + """If the archive contains config/config.yaml, read and parse it into params; else None.""" + import zipfile + + zip_stream = io.BytesIO(zip_bytes) + with zipfile.ZipFile(zip_stream, "r") as zf: + member = _find_zip_member_config_yaml( + zf.namelist(), + preferred_skill_root=preferred_skill_root, + ) + if not member: + return None + raw = zf.read(member) + params = _parse_skill_params_from_config_bytes(raw) + logger.info("Loaded skill params from ZIP member %s", member) + return params + + +def _local_skill_config_yaml_path(skill_name: str, local_skills_dir: str) -> str: + """Absolute path to //config/config.yaml.""" + return os.path.join(local_skills_dir, skill_name, "config", "config.yaml") + + +def _write_skill_params_to_local_config_yaml( + skill_name: str, + params: Dict[str, Any], + local_skills_dir: str, +) -> None: + """Write params to config/config.yaml; scalar ``value # tip`` strings round-trip as YAML comments above keys.""" + from utils.skill_params_utils import params_dict_to_roundtrip_yaml_text + + if not local_skills_dir: + return + config_dir = os.path.join(local_skills_dir, skill_name, "config") + os.makedirs(config_dir, exist_ok=True) + path = _local_skill_config_yaml_path(skill_name, local_skills_dir) + text = params_dict_to_roundtrip_yaml_text(params) + with open(path, "w", encoding="utf-8") as f: + f.write(text) + logger.info("Wrote skill params to %s", path) + + +def _remove_local_skill_config_yaml(skill_name: str, local_skills_dir: str) -> None: + """Remove config/config.yaml when params are cleared in the database.""" + if not local_skills_dir: + return + path = _local_skill_config_yaml_path(skill_name, local_skills_dir) + if os.path.isfile(path): + os.remove(path) + logger.info("Removed %s (params cleared in DB)", path) + + 
+def get_skill_manager() -> SkillManager: + """Get or create the global SkillManager instance.""" + global _skill_manager + if _skill_manager is None: + _skill_manager = SkillManager(CONTAINER_SKILLS_PATH) + return _skill_manager + + +class SkillService: + """Skill management service for backend operations.""" + + def __init__(self, skill_manager: Optional[SkillManager] = None): + """Initialize SkillService. + + Args: + skill_manager: Optional SkillManager instance, uses global if not provided + """ + self.skill_manager = skill_manager or get_skill_manager() + + def _resolve_local_skills_dir_for_overlay(self) -> Optional[str]: + """Directory where skill folders live: ``SKILLS_PATH``, else ``ROOT_DIR/skills`` if present.""" + d = self.skill_manager.local_skills_dir or CONTAINER_SKILLS_PATH + if d: + return str(d).rstrip(os.sep) or None + if ROOT_DIR: + candidate = os.path.join(ROOT_DIR, "skills") + if os.path.isdir(candidate): + return candidate + return None + + def _overlay_params_from_local_config_yaml(self, skill: Dict[str, Any]) -> Dict[str, Any]: + """Prefer ``//config/config.yaml`` for ``params`` in API responses. + + The database stores comment-free JSON (no legacy ``_comment`` keys, no `` # `` suffixes). + On-disk YAML may use ``#`` lines; when the file exists, parse with ruamel (inline tips + on scalars only) and use for ``params``; otherwise use DB. 
+ """ + out = dict(skill) + local_dir = self._resolve_local_skills_dir_for_overlay() + if not local_dir: + return out + name = out.get("name") + if not name: + return out + path = _local_skill_config_yaml_path(name, local_dir) + if not os.path.isfile(path): + return out + try: + with open(path, "rb") as f: + raw = f.read() + out["params"] = _parse_skill_params_from_config_bytes(raw) + logger.info("Using local config.yaml params (scalar inline comment tooltips) for skill %s", name) + except Exception as exc: + logger.warning( + "Could not use local config.yaml for skill %s params (using DB): %s", + name, + exc, + ) + return out + + def list_skills(self, tenant_id: Optional[str] = None) -> List[Dict[str, Any]]: + """List all skills for tenant. + + Args: + tenant_id: Tenant ID (reserved for future multi-tenant support) + + Returns: + List of skill info dicts + """ + try: + skills = skill_db.list_skills() + return [self._overlay_params_from_local_config_yaml(s) for s in skills] + except Exception as e: + logger.error(f"Error listing skills: {e}") + raise SkillException(f"Failed to list skills: {str(e)}") from e + + def get_skill(self, skill_name: str, tenant_id: Optional[str] = None) -> Optional[Dict[str, Any]]: + """Get a specific skill. + + Args: + skill_name: Name of the skill + tenant_id: Tenant ID (reserved for future multi-tenant support) + + Returns: + Skill dict or None if not found + """ + try: + skill = skill_db.get_skill_by_name(skill_name) + if skill: + return self._overlay_params_from_local_config_yaml(skill) + return None + except Exception as e: + logger.error(f"Error getting skill {skill_name}: {e}") + raise SkillException(f"Failed to get skill: {str(e)}") from e + + def get_skill_by_id(self, skill_id: int) -> Optional[Dict[str, Any]]: + """Get a specific skill by ID. 
+ + Args: + skill_id: ID of the skill + + Returns: + Skill dict or None if not found + """ + try: + skill = skill_db.get_skill_by_id(skill_id) + if skill: + return self._overlay_params_from_local_config_yaml(skill) + return None + except Exception as e: + logger.error(f"Error getting skill by ID {skill_id}: {e}") + raise SkillException(f"Failed to get skill: {str(e)}") from e + + def create_skill( + self, + skill_data: Dict[str, Any], + tenant_id: Optional[str] = None, + user_id: Optional[str] = None + ) -> Dict[str, Any]: + """Create a new skill. + + Args: + skill_data: Skill data including name, description, content, etc. + tenant_id: Tenant ID (reserved for future multi-tenant support) + user_id: User ID of the creator + + Returns: + Created skill dict + + Raises: + SkillException: If skill already exists locally or in database (409) + """ + skill_name = skill_data.get("name") + if not skill_name: + raise SkillException("Skill name is required") + + # Check if skill already exists in database + existing = skill_db.get_skill_by_name(skill_name) + if existing: + raise SkillException(f"Skill '{skill_name}' already exists") + + # Check if skill directory already exists locally + resolved = self._resolve_local_skills_dir_for_overlay() + if resolved and os.path.exists(os.path.join(resolved, skill_name)): + raise SkillException(f"Skill '{skill_name}' already exists locally") + + # Set created_by and updated_by if user_id is provided + if user_id: + skill_data["created_by"] = user_id + skill_data["updated_by"] = user_id + + try: + # Create database record first + result = skill_db.create_skill(skill_data) + + # Create local skill file (SKILL.md) + self.skill_manager.save_skill(skill_data) + + # Mirror DB params to config/config.yaml when present (same layout as ZIP uploads). 
+ if self.skill_manager.local_skills_dir and skill_data.get("params") is not None: + try: + _write_skill_params_to_local_config_yaml( + skill_name, + _params_dict_to_storable(skill_data["params"]), + self.skill_manager.local_skills_dir, + ) + except Exception as exc: + logger.warning( + "Local config/config.yaml write failed after create for %s: %s", + skill_name, + exc, + ) + + logger.info(f"Created skill '{skill_name}' with local files") + return self._overlay_params_from_local_config_yaml(result) + except SkillException: + raise + except Exception as e: + logger.error(f"Error creating skill: {e}") + raise SkillException(f"Failed to create skill: {str(e)}") from e + + def create_skill_from_file( + self, + file_content: Union[bytes, str, io.BytesIO], + skill_name: Optional[str] = None, + file_type: str = "auto", + tenant_id: Optional[str] = None, + user_id: Optional[str] = None + ) -> Dict[str, Any]: + """Create a skill from file content. + + Supports two formats: + 1. Single SKILL.md file - extracts metadata and saves directly + 2. 
ZIP archive - extracts SKILL.md and all other files/scripts + + Args: + file_content: File content as bytes, string, or BytesIO + skill_name: Optional skill name (extracted from ZIP if not provided) + file_type: File type hint - "md", "zip", or "auto" (detect) + tenant_id: Tenant ID (reserved for future multi-tenant support) + user_id: User ID of the creator + + Returns: + Created skill dict + """ + content_bytes: bytes + if isinstance(file_content, str): + content_bytes = file_content.encode("utf-8") + elif isinstance(file_content, io.BytesIO): + content_bytes = file_content.getvalue() + else: + content_bytes = file_content + + if file_type == "auto": + if content_bytes.startswith(b"PK"): + file_type = "zip" + else: + file_type = "md" + + if file_type == "zip": + return self._create_skill_from_zip(content_bytes, skill_name, user_id, tenant_id) + else: + return self._create_skill_from_md(content_bytes, skill_name, user_id, tenant_id) + + def _create_skill_from_md( + self, + content_bytes: bytes, + skill_name: Optional[str] = None, + user_id: Optional[str] = None, + tenant_id: Optional[str] = None + ) -> Dict[str, Any]: + """Create skill from SKILL.md content.""" + content_str = content_bytes.decode("utf-8") + + try: + skill_data = SkillLoader.parse(content_str) + except ValueError as e: + raise SkillException(f"Invalid SKILL.md format: {e}") + + name = skill_name or skill_data.get("name") + if not name: + raise SkillException("Skill name is required") + + # Check if skill already exists in database + existing = skill_db.get_skill_by_name(name) + if existing: + raise SkillException(f"Skill '{name}' already exists") + + # Convert allowed_tools (from SKILL.md) to tool_ids for database + allowed_tools = skill_data.get("allowed_tools", []) + tool_ids = [] + if allowed_tools: + tool_ids = skill_db.get_tool_ids_by_names(allowed_tools, tenant_id) + + skill_dict = { + "name": name, + "description": skill_data.get("description", ""), + "content": skill_data.get("content", 
""), + "tags": skill_data.get("tags", []), + "source": "custom", + "tool_ids": tool_ids, + "allowed-tools": allowed_tools, # Preserve for local file sync + } + + # Set created_by and updated_by if user_id is provided + if user_id: + skill_dict["created_by"] = user_id + skill_dict["updated_by"] = user_id + + result = skill_db.create_skill(skill_dict) + + # Write SKILL.md to local storage + self.skill_manager.save_skill(skill_dict) + + return self._overlay_params_from_local_config_yaml(result) + + def _create_skill_from_zip( + self, + zip_bytes: bytes, + skill_name: Optional[str] = None, + user_id: Optional[str] = None, + tenant_id: Optional[str] = None + ) -> Dict[str, Any]: + """Create skill from ZIP archive (for file storage, content extracted from SKILL.md). + + Priority for skill_name: + 1. Parameter skill_name + 2. Root directory SKILL.md (top-level skill_name field) + 3. Subdirectory name containing SKILL.md + """ + import zipfile + + zip_stream = io.BytesIO(zip_bytes) + + try: + with zipfile.ZipFile(zip_stream, "r") as zf: + file_list = zf.namelist() + except zipfile.BadZipFile: + raise SkillException("Invalid ZIP archive") + + zip_stream.seek(0) + + skill_md_path: Optional[str] = None + detected_skill_name: Optional[str] = None + + # First: Check for SKILL.md at root level + for file_path in file_list: + if file_path.endswith("/"): + continue + normalized_path = file_path.replace("\\", "/") + parts = normalized_path.split("/") + # Root level SKILL.md (only 1 part) + if len(parts) == 1 and parts[0].lower() == "skill.md": + skill_md_path = file_path + break + + # Second: If not found at root, check subdirectory + if not skill_md_path: + for file_path in file_list: + if file_path.endswith("/"): + continue + normalized_path = file_path.replace("\\", "/") + parts = normalized_path.split("/") + if len(parts) >= 2 and parts[-1].lower() == "skill.md": + skill_md_path = file_path + detected_skill_name = parts[0] + break + + if not skill_md_path: + raise 
SkillException("SKILL.md not found in ZIP archive")
+
+        with zipfile.ZipFile(zip_stream, "r") as zf:
+            skill_content = zf.read(skill_md_path).decode("utf-8")
+
+        try:
+            skill_data = SkillLoader.parse(skill_content)
+        except ValueError as e:
+            raise SkillException(f"Invalid SKILL.md in ZIP: {e}")
+
+        # Resolve skill name: explicit parameter > ZIP folder name > SKILL.md metadata
+        name = skill_name or detected_skill_name or skill_data.get("name")
+
+        if not name:
+            raise SkillException("Skill name is required")
+
+        # Check if skill already exists in database (after the name is fully resolved)
+        existing = skill_db.get_skill_by_name(name)
+        if existing:
+            raise SkillException(f"Skill '{name}' already exists")
+
+        # Convert allowed_tools (from SKILL.md) to tool_ids for database
+        allowed_tools = skill_data.get("allowed_tools", [])
+        tool_ids = []
+        if allowed_tools:
+            tool_ids = skill_db.get_tool_ids_by_names(allowed_tools, tenant_id)
+
+        skill_dict = {
+            "name": name,
+            "description": skill_data.get("description", ""),
+            "content": skill_data.get("content", ""),
+            "tags": skill_data.get("tags", []),
+            "source": "custom",
+            "tool_ids": tool_ids,
+            "allowed-tools": allowed_tools,  # Preserve for local file sync
+        }
+
+        preferred_root = detected_skill_name or name
+        params_from_zip = _read_params_from_zip_config_yaml(
+            zip_bytes,
+            preferred_skill_root=preferred_root,
+        )
+        if params_from_zip is not None:
+            skill_dict["params"] = params_from_zip
+
+        # Set created_by and updated_by if user_id is provided
+        if user_id:
+            skill_dict["created_by"] = user_id
+            skill_dict["updated_by"] = user_id
+
+        result = skill_db.create_skill(skill_dict)
+
+        # Save SKILL.md to local storage
+        self.skill_manager.save_skill(skill_dict)
+
+        self._upload_zip_files(zip_bytes, name, detected_skill_name)
+
+        return self._overlay_params_from_local_config_yaml(result)
+
+    def _delete_local_skill_files(self, skill_name: str) -> None:
+        """Delete all files within a 
skill's local directory, preserving the directory itself. + + Args: + skill_name: Name of the skill whose local files should be deleted. + """ + import shutil + + local_dir = os.path.join(self.skill_manager.local_skills_dir, skill_name) + logger.info("Starting deletion of local files for skill '%s' from '%s'", skill_name, local_dir) + + if not os.path.isdir(local_dir): + logger.info("Local skill directory does not exist, nothing to delete: %s", local_dir) + return + try: + items = os.listdir(local_dir) + logger.info("Found %d items to delete in '%s'", len(items), local_dir) + + for item in items: + item_path = os.path.join(local_dir, item) + if item_path.endswith("/"): + continue + if os.path.isdir(item_path): + shutil.rmtree(item_path) + logger.debug("Deleted directory: %s", item_path) + else: + os.remove(item_path) + logger.debug("Deleted file: %s", item_path) + logger.info("Successfully deleted all local files for skill '%s'", skill_name) + except Exception as e: + logger.error("Failed to delete local files for skill '%s': %s", skill_name, e) + + def _upload_zip_files( + self, + zip_bytes: bytes, + skill_name: str, + original_folder_name: Optional[str] = None + ) -> None: + """Extract ZIP files to local storage only. 
+ + Args: + zip_bytes: ZIP archive content + skill_name: Target skill name (for local directory) + original_folder_name: Original folder name in ZIP (if different from skill_name) + """ + import zipfile + + zip_stream = io.BytesIO(zip_bytes) + + # Determine if folder renaming is needed + needs_rename = ( + original_folder_name is not None + and original_folder_name != skill_name + ) + + logger.info( + "Starting ZIP extraction for skill '%s': needs_rename=%s, original_folder='%s'", + skill_name, needs_rename, original_folder_name + ) + + try: + with zipfile.ZipFile(zip_stream, "r") as zf: + file_list = zf.namelist() + logger.info("ZIP contains %d entries for skill '%s'", len(file_list), skill_name) + + extracted_count = 0 + for file_path in file_list: + if file_path.endswith("/"): + continue + + normalized_path = file_path.replace("\\", "/") + parts = normalized_path.split("/") + + # Calculate target relative path + if needs_rename and len(parts) >= 2 and parts[0] == original_folder_name: + # Replace original folder name with skill_name + relative_path = parts[0].replace(original_folder_name, skill_name) + "/" + "/".join(parts[1:]) + elif len(parts) >= 2: + relative_path = "/".join(parts[1:]) + else: + relative_path = normalized_path + + if not relative_path: + continue + + file_data = zf.read(file_path) + + local_dir = os.path.join(self.skill_manager.local_skills_dir, skill_name) + local_path = os.path.join(local_dir, relative_path) + os.makedirs(os.path.dirname(local_path), exist_ok=True) + with open(local_path, "wb") as f: + f.write(file_data) + extracted_count += 1 + logger.debug("Extracted file '%s' -> '%s'", file_path, local_path) + + logger.info( + "Completed ZIP extraction for skill '%s': %d files extracted to '%s'", + skill_name, extracted_count, self.skill_manager.local_skills_dir + ) + except Exception as e: + logger.error("Failed to extract ZIP files for skill '%s': %s", skill_name, e) + raise + + def update_skill_from_file( + self, + skill_name: str, + 
file_content: Union[bytes, str, io.BytesIO], + file_type: str = "auto", + tenant_id: Optional[str] = None, + user_id: Optional[str] = None + ) -> Dict[str, Any]: + """Update an existing skill from file content. + + Args: + skill_name: Name of the skill to update + file_content: File content as bytes, string, or BytesIO + file_type: File type hint - "md", "zip", or "auto" (detect) + tenant_id: Tenant ID (reserved for future multi-tenant support) + user_id: User ID of the updater + + Returns: + Updated skill dict + """ + existing = skill_db.get_skill_by_name(skill_name) + if not existing: + raise SkillException(f"Skill not found: {skill_name}") + + content_bytes: bytes + if isinstance(file_content, str): + content_bytes = file_content.encode("utf-8") + elif isinstance(file_content, io.BytesIO): + content_bytes = file_content.getvalue() + else: + content_bytes = file_content + + if file_type == "auto": + if content_bytes.startswith(b"PK"): + file_type = "zip" + else: + file_type = "md" + + if file_type == "zip": + return self._update_skill_from_zip(content_bytes, skill_name, user_id, tenant_id) + else: + return self._update_skill_from_md(content_bytes, skill_name, user_id, tenant_id) + + def _update_skill_from_md( + self, + content_bytes: bytes, + skill_name: str, + user_id: Optional[str] = None, + tenant_id: Optional[str] = None + ) -> Dict[str, Any]: + """Update skill from SKILL.md content.""" + content_str = content_bytes.decode("utf-8") + + try: + skill_data = SkillLoader.parse(content_str) + except ValueError as e: + raise SkillException(f"Invalid SKILL.md format: {e}") + + # Get allowed-tools from parsed content and try to map to tool_ids + allowed_tools = skill_data.get("allowed_tools", []) + tool_ids = [] + if allowed_tools: + tool_ids = skill_db.get_tool_ids_by_names(allowed_tools, tenant_id) + + skill_dict = { + "description": skill_data.get("description", ""), + "content": skill_data.get("content", ""), + "tags": skill_data.get("tags", []), + "tool_ids": 
tool_ids, + } + + result = skill_db.update_skill( + skill_name, skill_dict, updated_by=user_id or None + ) + + # Clean up existing local files before writing new ones + self._delete_local_skill_files(skill_name) + + # Update local storage with new SKILL.md (preserve allowed-tools) + skill_dict["name"] = skill_name + skill_dict["allowed-tools"] = allowed_tools + self.skill_manager.save_skill(skill_dict) + + return self._overlay_params_from_local_config_yaml(result) + + def _update_skill_from_zip( + self, + zip_bytes: bytes, + skill_name: str, + user_id: Optional[str] = None, + tenant_id: Optional[str] = None, + ) -> Dict[str, Any]: + """Update skill from ZIP archive.""" + existing = skill_db.get_skill_by_name(skill_name) + if not existing: + raise SkillException(f"Skill not found: {skill_name}") + + import zipfile + + zip_stream = io.BytesIO(zip_bytes) + + skill_md_path = None + original_folder_name = None + + with zipfile.ZipFile(zip_stream, "r") as zf: + file_list = zf.namelist() + + for file_path in file_list: + normalized_path = file_path.replace("\\", "/") + if normalized_path.lower().endswith("skill.md"): + parts = normalized_path.split("/") + if len(parts) >= 2: + skill_md_path = file_path + original_folder_name = parts[0] + break + + skill_content = None + if skill_md_path: + skill_content = zf.read(skill_md_path).decode("utf-8") + + # Reset stream position before _upload_zip_files reads it + zip_stream.seek(0) + + preferred_root = original_folder_name or skill_name + params_from_zip = _read_params_from_zip_config_yaml( + zip_bytes, + preferred_skill_root=preferred_root, + ) + + skill_dict = {} + allowed_tools = [] + if skill_content: + try: + skill_data = SkillLoader.parse(skill_content) + allowed_tools = skill_data.get("allowed_tools", []) + # Try to map allowed_tools to tool_ids for database + tool_ids = [] + if allowed_tools: + tool_ids = skill_db.get_tool_ids_by_names(allowed_tools, tenant_id) + skill_dict = { + "description": 
skill_data.get("description", ""), + "content": skill_data.get("content", ""), + "tags": skill_data.get("tags", []), + "tool_ids": tool_ids, + } + except ValueError as e: + logger.warning(f"Could not parse SKILL.md from ZIP: {e}") + + if params_from_zip is not None: + skill_dict["params"] = params_from_zip + + result = skill_db.update_skill( + skill_name, skill_dict, updated_by=user_id or None + ) + + # Clean up existing local files before writing new ones + self._delete_local_skill_files(skill_name) + + # Update SKILL.md in local storage (preserve allowed-tools) + skill_dict["name"] = skill_name + skill_dict["allowed-tools"] = allowed_tools + self.skill_manager.save_skill(skill_dict) + + # Update other files in local storage + self._upload_zip_files(zip_bytes, skill_name, original_folder_name) + + return self._overlay_params_from_local_config_yaml(result) + + def update_skill( + self, + skill_name: str, + skill_data: Dict[str, Any], + tenant_id: Optional[str] = None, + user_id: Optional[str] = None + ) -> Dict[str, Any]: + """Update an existing skill. + + Args: + skill_name: Name of the skill to update + skill_data: Business fields from the application layer (no audit fields). + tenant_id: Tenant ID (reserved for future multi-tenant support) + user_id: Updater id from server-side auth (JWT / session); sets DB updated_by. + + Returns: + Updated skill dict + """ + try: + existing = skill_db.get_skill_by_name(skill_name) + if not existing: + raise SkillException(f"Skill not found: {skill_name}") + + result = skill_db.update_skill( + skill_name, skill_data, updated_by=user_id or None + ) + + # Keep config/config.yaml in sync when params are updated (matches ZIP import path). 
+ if CONTAINER_SKILLS_PATH and "params" in skill_data: + try: + raw_params = skill_data["params"] + if raw_params is None: + _remove_local_skill_config_yaml(skill_name, CONTAINER_SKILLS_PATH) + else: + _write_skill_params_to_local_config_yaml( + skill_name, + _params_dict_to_storable(raw_params), + CONTAINER_SKILLS_PATH, + ) + except Exception as exc: + logger.warning( + "Local config/config.yaml sync failed after params update for %s: %s", + skill_name, + exc, + ) + + # Optional: sync SKILL.md on disk when SKILLS_PATH is configured (DB is source of truth). + if not CONTAINER_SKILLS_PATH: + logger.warning( + "SKILLS_PATH is not set; skipped local SKILL.md sync after DB update for %s", + skill_name, + ) + return self._overlay_params_from_local_config_yaml(result) + + try: + allowed_tools = skill_db.get_tool_names_by_skill_name(skill_name) + local_skill_dict = { + "name": skill_name, + "description": skill_data.get("description", existing.get("description", "")), + "content": skill_data.get("content", existing.get("content", "")), + "tags": skill_data.get("tags", existing.get("tags", [])), + "allowed-tools": allowed_tools, + } + self.skill_manager.save_skill(local_skill_dict) + except Exception as exc: + logger.warning( + "Local SKILL.md sync failed after DB update for %s: %s", + skill_name, + exc, + ) + + return self._overlay_params_from_local_config_yaml(result) + except SkillException: + raise + except Exception as e: + logger.error(f"Error updating skill {skill_name}: {e}") + raise SkillException(f"Failed to update skill: {str(e)}") from e + + def delete_skill( + self, + skill_name: str, + user_id: Optional[str] = None + ) -> bool: + """Delete a skill. 
+ + Args: + skill_name: Name of the skill to delete + tenant_id: Tenant ID (reserved for future multi-tenant support) + user_id: User ID of the user performing the delete + + Returns: + True if deleted successfully + """ + try: + # Delete local skill files from filesystem + skill_dir = os.path.join(self.skill_manager.local_skills_dir, skill_name) + if os.path.exists(skill_dir): + import shutil + shutil.rmtree(skill_dir) + logger.info(f"Deleted skill directory: {skill_dir}") + + # Delete from database (soft delete with updated_by) + return skill_db.delete_skill(skill_name, updated_by=user_id) + except Exception as e: + logger.error(f"Error deleting skill {skill_name}: {e}") + raise SkillException(f"Failed to delete skill: {str(e)}") from e + + + def get_enabled_skills_for_agent( + self, + agent_id: int, + tenant_id: str, + version_no: int = 0 + ) -> List[Dict[str, Any]]: + """Get enabled skills for a specific agent from SkillInstance table. + + Args: + agent_id: Agent ID + tenant_id: Tenant ID + version_no: Version number for fetching skill instances + + Returns: + List of enabled skill dicts + """ + try: + enabled_skills = skill_db.search_skills_for_agent( + agent_id=agent_id, + tenant_id=tenant_id, + version_no=version_no + ) + + result = [] + for skill_instance in enabled_skills: + skill_id = skill_instance.get("skill_id") + skill = skill_db.get_skill_by_id(skill_id) + if skill: + # Get skill info from ag_skill_info_t (repository returns keys: name, description, content) + merged = { + "skill_id": skill_id, + "name": skill.get("name"), + "description": skill.get("description", ""), + "content": skill.get("content", ""), + "enabled": skill_instance.get("enabled", True), + "tool_ids": skill.get("tool_ids", []), + } + result.append(merged) + + return result + except Exception as e: + logger.error(f"Error getting enabled skills for agent: {e}") + raise SkillException(f"Failed to get enabled skills: {str(e)}") from e + + def load_skill_directory(self, skill_name: str) 
-> Optional[Dict[str, Any]]: + """Load entire skill directory including scripts. + + Args: + skill_name: Name of the skill + + Returns: + Dict with skill metadata and local directory path, or None if not found + """ + try: + return self.skill_manager.load_skill_directory(skill_name) + except Exception as e: + logger.error(f"Error loading skill directory {skill_name}: {e}") + raise SkillException(f"Failed to load skill directory: {str(e)}") from e + + def get_skill_scripts(self, skill_name: str) -> List[str]: + """Get list of executable scripts in skill. + + Args: + skill_name: Name of the skill + + Returns: + List of script file paths + """ + try: + return self.skill_manager.get_skill_scripts(skill_name) + except Exception as e: + logger.error(f"Error getting skill scripts {skill_name}: {e}") + raise SkillException(f"Failed to get skill scripts: {str(e)}") from e + + def build_skills_summary( + self, + available_skills: Optional[List[str]] = None, + agent_id: Optional[int] = None, + tenant_id: Optional[str] = None, + version_no: int = 0 + ) -> str: + """Build skills summary with whitelist filter for prompt injection. + + Args: + available_skills: Optional whitelist of skill names to include. + If provided, only skills in this list will be included. 
+            agent_id: Agent ID for fetching skill instances
+            tenant_id: Tenant ID for fetching skill instances
+            version_no: Version number for fetching skill instances
+
+        Returns:
+            XML-formatted skills summary
+        """
+        try:
+            skills_to_include = []
+
+            if agent_id and tenant_id:
+                # Get skills from SkillInstance table
+                agent_skills = skill_db.search_skills_for_agent(
+                    agent_id=agent_id,
+                    tenant_id=tenant_id,
+                    version_no=version_no
+                )
+
+                for skill_instance in agent_skills:
+                    skill_id = skill_instance.get("skill_id")
+                    skill = skill_db.get_skill_by_id(skill_id)
+                    if skill:
+                        if available_skills is not None and skill.get("name") not in available_skills:
+                            continue
+                        # Get skill info from ag_skill_info_t (repository returns keys: name, description)
+                        skills_to_include.append({
+                            "name": skill.get("name"),
+                            "description": skill.get("description", ""),
+                        })
+            else:
+                # Fallback: use all skills
+                all_skills = skill_db.list_skills()
+                skills_to_include = all_skills
+                if available_skills is not None:
+                    available_set = set(available_skills)
+                    skills_to_include = [s for s in all_skills if s.get("name") in available_set]
+
+            if not skills_to_include:
+                return ""
+
+            def escape_xml(s: str) -> str:
+                if s is None:
+                    return ""
+                # Escape XML special characters; '&' must be replaced first.
+                return str(s).replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
+
+            lines = ["<skills>"]
+            for skill in skills_to_include:
+                name = escape_xml(skill.get("name", ""))
+                description = escape_xml(skill.get("description", ""))
+
+                lines.append(f'  <skill>')
+                lines.append(f'    <name>{name}</name>')
+                lines.append(f'    <description>{description}</description>')
+                lines.append(f'  </skill>')
+
+            lines.append("</skills>")
+
+            return "\n".join(lines)
+        except Exception as e:
+            logger.error(f"Error building skills summary: {e}")
+            raise SkillException(f"Failed to build skills summary: {str(e)}") from e
+
+    def get_skill_content(self, skill_name: str, tenant_id: Optional[str] = None) -> str:
+        """Get skill content for runtime loading. 
+ + Args: + skill_name: Name of the skill to load + tenant_id: Tenant ID (reserved for future multi-tenant support) + + Returns: + Skill content in markdown format + """ + try: + skill = skill_db.get_skill_by_name(skill_name) + return skill.get("content", "") if skill else "" + except Exception as e: + logger.error(f"Error getting skill content {skill_name}: {e}") + raise SkillException(f"Failed to get skill content: {str(e)}") from e + + def get_skill_file_tree( + self, + skill_name: str, + tenant_id: Optional[str] = None + ) -> Optional[Dict[str, Any]]: + """Get file tree structure of a skill. + + Args: + skill_name: Name of the skill + tenant_id: Tenant ID (reserved for future multi-tenant support) + + Returns: + Dict with file tree structure, or None if not found + """ + try: + return self.skill_manager.get_skill_file_tree(skill_name) + except Exception as e: + logger.error(f"Error getting skill file tree: {e}") + raise SkillException(f"Failed to get skill file tree: {str(e)}") from e + + def get_skill_file_content( + self, + skill_name: str, + file_path: str, + tenant_id: Optional[str] = None + ) -> Optional[str]: + """Get content of a specific file within a skill. 
+ + Args: + skill_name: Name of the skill + file_path: Relative path to the file within the skill directory + tenant_id: Tenant ID (reserved for future multi-tenant support) + + Returns: + File content as string, or None if file not found + """ + try: + local_dir = os.path.join(self.skill_manager.local_skills_dir, skill_name) + full_path = os.path.join(local_dir, file_path) + + if not os.path.exists(full_path): + logger.warning(f"File not found: {full_path}") + return None + + with open(full_path, "r", encoding="utf-8") as f: + return f.read() + except Exception as e: + logger.error(f"Error reading skill file {skill_name}/{file_path}: {e}") + raise SkillException(f"Failed to read skill file: {str(e)}") from e + + # ============== Skill Instance Methods ============== + + def create_or_update_skill_instance( + self, + skill_info, + tenant_id: str, + user_id: str, + version_no: int = 0 + ): + """Create or update a skill instance for an agent. + + Args: + skill_info: Skill instance information (SkillInstanceInfoRequest or dict) + tenant_id: Tenant ID + user_id: User ID (will be set as created_by/updated_by) + version_no: Version number (default 0 for draft) + + Returns: + Created or updated skill instance dict + """ + from database import skill_db as skill_db_module + return skill_db_module.create_or_update_skill_by_skill_info( + skill_info=skill_info, + tenant_id=tenant_id, + user_id=user_id, + version_no=version_no + ) + + def list_skill_instances( + self, + agent_id: int, + tenant_id: str, + version_no: int = 0 + ) -> List[Dict[str, Any]]: + """List all skill instances for an agent. 
+ + Args: + agent_id: Agent ID + tenant_id: Tenant ID + version_no: Version number (default 0 for draft) + + Returns: + List of skill instance dicts + """ + from database import skill_db as skill_db_module + return skill_db_module.query_skill_instances_by_agent_id( + agent_id=agent_id, + tenant_id=tenant_id, + version_no=version_no + ) + + def get_skill_instance( + self, + agent_id: int, + skill_id: int, + tenant_id: str, + version_no: int = 0 + ) -> Optional[Dict[str, Any]]: + """Get a specific skill instance for an agent. + + Args: + agent_id: Agent ID + skill_id: Skill ID + tenant_id: Tenant ID + version_no: Version number (default 0 for draft) + + Returns: + Skill instance dict or None if not found + """ + from database import skill_db as skill_db_module + return skill_db_module.query_skill_instance_by_id( + agent_id=agent_id, + skill_id=skill_id, + tenant_id=tenant_id, + version_no=version_no + ) diff --git a/backend/utils/skill_params_utils.py b/backend/utils/skill_params_utils.py new file mode 100644 index 000000000..404e16ccb --- /dev/null +++ b/backend/utils/skill_params_utils.py @@ -0,0 +1,127 @@ +"""Skill ``params`` helpers: DB storage without UI/YAML comment metadata, round-trip YAML for disk.""" + +from __future__ import annotations + +import json +import logging +import re +from io import StringIO +from typing import Any, Dict, List, Optional, Tuple + +logger = logging.getLogger(__name__) + + +def split_string_inline_comment(s: str) -> Tuple[str, Optional[str]]: + """Split ``value # comment`` at the first `` # `` (same rule as the frontend SkillList).""" + idx = s.find(" # ") + if idx == -1: + return s, None + return s[:idx].rstrip(), s[idx + 3 :].strip() or None + + +def strip_params_comments_for_db(obj: Any) -> Any: + """Remove legacy ``_comment`` keys and trailing `` # `` suffixes from strings for JSON/DB storage.""" + if isinstance(obj, str): + display, _tip = split_string_inline_comment(obj) + return display + if isinstance(obj, list): + return 
[strip_params_comments_for_db(x) for x in obj] + if isinstance(obj, dict): + out: Dict[str, Any] = {} + for k, v in obj.items(): + if k == "_comment": + continue + out[k] = strip_params_comments_for_db(v) + return out + return obj + + +def _coerce_scalar_display(display: str) -> Any: + """Best-effort restore numbers/bools from merged string form (e.g. after stripping `` # ``).""" + s = display.strip() + if s == "": + return display + try: + return json.loads(s) + except (json.JSONDecodeError, TypeError, ValueError): + pass + if re.fullmatch(r"-?\d+", s): + return int(s) + if re.fullmatch(r"-?\d+\.\d+", s): + return float(s) + low = s.lower() + if low in ("true", "false"): + return low == "true" + return display + + +def _scalar_to_node_and_tip(v: Any) -> Tuple[Any, Optional[str]]: + """Return (typed value, optional comment text) for YAML emission.""" + if isinstance(v, str): + display, tip = split_string_inline_comment(v) + return _coerce_scalar_display(display), tip + return v, None + + +def _dict_to_commented_map(d: Dict[str, Any]) -> Any: + """Build ruamel ``CommentedMap``; only scalar ``value # tip`` strings become YAML block comments above keys.""" + from ruamel.yaml.comments import CommentedMap + + cm = CommentedMap() + for k, v in d.items(): + if k == "_comment": + continue + if isinstance(v, dict): + inner_clean = {kk: vv for kk, vv in v.items() if kk != "_comment"} + cm[k] = _dict_to_commented_map(inner_clean) + elif isinstance(v, list): + cm[k] = _list_to_commented_seq(v) + else: + val, tip = _scalar_to_node_and_tip(v) + cm[k] = val + if tip: + cm.yaml_set_comment_before_after_key(k, before=tip + "\n") + return cm + + +def _list_to_commented_seq(items: List[Any]) -> Any: + from ruamel.yaml.comments import CommentedSeq + + seq = CommentedSeq() + for item in items: + if isinstance(item, dict): + seq.append(_dict_to_commented_map(item)) + elif isinstance(item, list): + seq.append(_list_to_commented_seq(item)) + else: + val, _ = _scalar_to_node_and_tip(item) 
+ seq.append(val) + return seq + + +def params_dict_to_roundtrip_yaml_text(params: Dict[str, Any]) -> str: + """Serialize params to YAML with comments restored (ruamel round-trip). Falls back to PyYAML.""" + try: + from ruamel.yaml import YAML + + cm = _dict_to_commented_map(params) + y = YAML(typ="rt") + y.indent(mapping=2, sequence=4, offset=2) + buf = StringIO() + y.dump(cm, buf) + return buf.getvalue() + except Exception as exc: + logger.warning( + "ruamel round-trip YAML failed (%s); falling back to plain yaml.dump", + exc, + ) + import yaml as pyyaml + + clean = strip_params_comments_for_db(params) + return pyyaml.dump( + clean, + allow_unicode=True, + sort_keys=False, + default_flow_style=False, + width=float("inf"), + ) diff --git a/docker/.env.example b/docker/.env.example index 677ccb7c7..d03cf6113 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -142,6 +142,8 @@ QUEUES=process_q,forward_q WORKER_NAME= WORKER_CONCURRENCY=4 +# Skills Configuration +SKILLS_PATH=/mnt/nexent/skills # Telemetry and Monitoring Configuration ENABLE_TELEMETRY=false diff --git a/docker/init.sql b/docker/init.sql index 02e99632c..75e9a818f 100644 --- a/docker/init.sql +++ b/docker/init.sql @@ -651,47 +651,6 @@ BEFORE UPDATE ON "nexent"."memory_user_config_t" FOR EACH ROW EXECUTE FUNCTION "update_memory_user_config_update_time"(); --- Create partner mapping id table -CREATE TABLE IF NOT EXISTS "nexent"."partner_mapping_id_t" ( - "mapping_id" serial PRIMARY KEY NOT NULL, - "external_id" varchar(100) COLLATE "pg_catalog"."default", - "internal_id" int4, - "mapping_type" varchar(30) COLLATE "pg_catalog"."default", - "tenant_id" varchar(100) COLLATE "pg_catalog"."default", - "user_id" varchar(100) COLLATE "pg_catalog"."default", - "create_time" timestamp(6) DEFAULT CURRENT_TIMESTAMP, - "update_time" timestamp(6) DEFAULT CURRENT_TIMESTAMP, - "created_by" varchar(100) COLLATE "pg_catalog"."default", - "updated_by" varchar(100) COLLATE "pg_catalog"."default", - "delete_flag" 
varchar(1) COLLATE "pg_catalog"."default" DEFAULT 'N'::character varying -); - -ALTER TABLE "nexent"."partner_mapping_id_t" OWNER TO "root"; - -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."mapping_id" IS 'ID'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."external_id" IS 'The external id given by the outer partner'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."internal_id" IS 'The internal id of the other database table'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."mapping_type" IS 'Type of the external - internal mapping, value set: CONVERSATION'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."tenant_id" IS 'Tenant ID'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."user_id" IS 'User ID'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."create_time" IS 'Creation time'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."update_time" IS 'Update time'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."created_by" IS 'Creator'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."updated_by" IS 'Updater'; -COMMENT ON COLUMN "nexent"."partner_mapping_id_t"."delete_flag" IS 'Whether it is deleted. Optional values: Y/N'; - -CREATE OR REPLACE FUNCTION "update_partner_mapping_update_time"() -RETURNS TRIGGER AS $$ -BEGIN - NEW.update_time = CURRENT_TIMESTAMP; - RETURN NEW; -END; -$$ LANGUAGE plpgsql; - -CREATE TRIGGER "update_partner_mapping_update_time_trigger" -BEFORE UPDATE ON "nexent"."partner_mapping_id_t" -FOR EACH ROW -EXECUTE FUNCTION "update_partner_mapping_update_time"(); -- 1. 
Create tenant_invitation_code_t table for invitation codes CREATE TABLE IF NOT EXISTS nexent.tenant_invitation_code_t ( @@ -1049,3 +1008,162 @@ COMMENT ON COLUMN nexent.ag_tenant_agent_version_t.create_time IS 'Version creat COMMENT ON COLUMN nexent.ag_tenant_agent_version_t.updated_by IS 'Last user who updated this version'; COMMENT ON COLUMN nexent.ag_tenant_agent_version_t.update_time IS 'Last update timestamp'; COMMENT ON COLUMN nexent.ag_tenant_agent_version_t.delete_flag IS 'Soft delete flag: Y/N'; + +-- Create the user_token_info_t table in the nexent schema +CREATE TABLE IF NOT EXISTS nexent.user_token_info_t ( + token_id SERIAL4 PRIMARY KEY NOT NULL, + access_key VARCHAR(100) NOT NULL, + user_id VARCHAR(100) NOT NULL, + create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + created_by VARCHAR(100), + updated_by VARCHAR(100), + delete_flag VARCHAR(1) DEFAULT 'N' +); + +ALTER TABLE "user_token_info_t" OWNER TO "root"; + +-- Add comment to the table +COMMENT ON TABLE nexent.user_token_info_t IS 'User token (AK/SK) information table'; + +-- Add comments to the columns +COMMENT ON COLUMN nexent.user_token_info_t.token_id IS 'Token ID, unique primary key'; +COMMENT ON COLUMN nexent.user_token_info_t.access_key IS 'Access Key (AK)'; +COMMENT ON COLUMN nexent.user_token_info_t.user_id IS 'User ID who owns this token'; +COMMENT ON COLUMN nexent.user_token_info_t.create_time IS 'Creation time, audit field'; +COMMENT ON COLUMN nexent.user_token_info_t.update_time IS 'Update time, audit field'; +COMMENT ON COLUMN nexent.user_token_info_t.created_by IS 'Creator ID, audit field'; +COMMENT ON COLUMN nexent.user_token_info_t.updated_by IS 'Last updater ID, audit field'; +COMMENT ON COLUMN nexent.user_token_info_t.delete_flag IS 'Soft delete flag, Y means deleted'; + + +-- Create the user_token_usage_log_t table in the nexent schema +CREATE TABLE IF NOT EXISTS nexent.user_token_usage_log_t ( 
+ token_usage_id SERIAL4 PRIMARY KEY NOT NULL, + token_id INT4 NOT NULL, + call_function_name VARCHAR(100), + related_id INT4, + meta_data JSONB, + create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + created_by VARCHAR(100), + updated_by VARCHAR(100), + delete_flag VARCHAR(1) DEFAULT 'N' +); + +ALTER TABLE "user_token_usage_log_t" OWNER TO "root"; + +-- Add comment to the table +COMMENT ON TABLE nexent.user_token_usage_log_t IS 'User token usage log table'; + +-- Add comments to the columns +COMMENT ON COLUMN nexent.user_token_usage_log_t.token_usage_id IS 'Token usage log ID, unique primary key'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.token_id IS 'Foreign key to user_token_info_t.token_id'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.call_function_name IS 'API function name being called'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.related_id IS 'Related resource ID (e.g., conversation_id)'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.meta_data IS 'Additional metadata for this usage log entry, stored as JSON'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.create_time IS 'Creation time, audit field'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.update_time IS 'Update time, audit field'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.created_by IS 'Creator ID, audit field'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.updated_by IS 'Last updater ID, audit field'; +COMMENT ON COLUMN nexent.user_token_usage_log_t.delete_flag IS 'Soft delete flag, Y means deleted'; + +-- Create the ag_skill_info_t table in the nexent schema +CREATE TABLE IF NOT EXISTS nexent.ag_skill_info_t ( + skill_id SERIAL4 PRIMARY KEY NOT NULL, + skill_name VARCHAR(100) NOT NULL, + skill_description VARCHAR(1000), + skill_tags JSON, + skill_content TEXT, + params JSON, + source VARCHAR(30) DEFAULT 'official', + created_by VARCHAR(100), + create_time TIMESTAMP WITHOUT TIME 
ZONE DEFAULT CURRENT_TIMESTAMP, + updated_by VARCHAR(100), + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + delete_flag VARCHAR(1) DEFAULT 'N' +); + +ALTER TABLE "ag_skill_info_t" OWNER TO "root"; + +-- Add comment to the table +COMMENT ON TABLE nexent.ag_skill_info_t IS 'Skill information table for managing custom skills'; + +-- Add comments to the columns +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_id IS 'Skill ID, unique primary key'; +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_name IS 'Skill name, globally unique'; +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_description IS 'Skill description text'; +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_tags IS 'Skill tags stored as JSON array'; +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_content IS 'Skill content or prompt text'; +COMMENT ON COLUMN nexent.ag_skill_info_t.params IS 'Skill configuration parameters stored as JSON object'; +COMMENT ON COLUMN nexent.ag_skill_info_t.source IS 'Skill source: official, custom, or partner'; +COMMENT ON COLUMN nexent.ag_skill_info_t.created_by IS 'Creator ID'; +COMMENT ON COLUMN nexent.ag_skill_info_t.create_time IS 'Creation timestamp'; +COMMENT ON COLUMN nexent.ag_skill_info_t.updated_by IS 'Last updater ID'; +COMMENT ON COLUMN nexent.ag_skill_info_t.update_time IS 'Last update timestamp'; +COMMENT ON COLUMN nexent.ag_skill_info_t.delete_flag IS 'Whether it is deleted. 
Optional values: Y/N'; + +-- Create the ag_skill_tools_rel_t table in the nexent schema +CREATE TABLE IF NOT EXISTS nexent.ag_skill_tools_rel_t ( + rel_id SERIAL4 PRIMARY KEY NOT NULL, + skill_id INTEGER, + tool_id INTEGER, + created_by VARCHAR(100), + create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_by VARCHAR(100), + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + delete_flag VARCHAR(1) DEFAULT 'N' +); + +ALTER TABLE "ag_skill_tools_rel_t" OWNER TO "root"; + +-- Add comment to the table +COMMENT ON TABLE nexent.ag_skill_tools_rel_t IS 'Skill-tool relationship table for many-to-many mapping'; + +-- Add comments to the columns +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.rel_id IS 'Relationship ID, unique primary key'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.skill_id IS 'Foreign key to ag_skill_info_t.skill_id'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.tool_id IS 'Tool ID from ag_tool_info_t'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.created_by IS 'Creator ID'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.create_time IS 'Creation timestamp'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.updated_by IS 'Last updater ID'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.update_time IS 'Last update timestamp'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.delete_flag IS 'Whether it is deleted. 
Optional values: Y/N'; + +-- Create the ag_skill_instance_t table in the nexent schema +-- Stores skill instance configuration per agent version +-- Note: skill_description and skill_content fields removed, now retrieved from ag_skill_info_t +CREATE TABLE IF NOT EXISTS nexent.ag_skill_instance_t ( + skill_instance_id SERIAL4 NOT NULL, + skill_id INTEGER NOT NULL, + agent_id INTEGER NOT NULL, + user_id VARCHAR(100), + tenant_id VARCHAR(100), + enabled BOOLEAN DEFAULT TRUE, + version_no INTEGER DEFAULT 0 NOT NULL, + created_by VARCHAR(100), + create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_by VARCHAR(100), + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + delete_flag VARCHAR(1) DEFAULT 'N', + CONSTRAINT ag_skill_instance_t_pkey PRIMARY KEY (skill_instance_id, version_no) +); + +ALTER TABLE "ag_skill_instance_t" OWNER TO "root"; + +-- Add comment to the table +COMMENT ON TABLE nexent.ag_skill_instance_t IS 'Skill instance configuration table - stores per-agent skill settings'; + +-- Add comments to the columns +COMMENT ON COLUMN nexent.ag_skill_instance_t.skill_instance_id IS 'Skill instance ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.skill_id IS 'Foreign key to ag_skill_info_t.skill_id'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.agent_id IS 'Agent ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.user_id IS 'User ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.tenant_id IS 'Tenant ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.enabled IS 'Whether this skill is enabled for the agent'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.version_no IS 'Version number. 
0 = draft/editing state, >=1 = published snapshot'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.created_by IS 'Creator ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.create_time IS 'Creation timestamp'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.updated_by IS 'Last updater ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.update_time IS 'Last update timestamp'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.delete_flag IS 'Whether it is deleted. Optional values: Y/N'; diff --git a/docker/sql/v1.8.1_0306_add_user_token_info.sql b/docker/sql/v1.8.1_0306_add_user_token_info.sql index 040530334..402cf4bab 100644 --- a/docker/sql/v1.8.1_0306_add_user_token_info.sql +++ b/docker/sql/v1.8.1_0306_add_user_token_info.sql @@ -32,34 +32,6 @@ COMMENT ON COLUMN nexent.user_token_info_t.created_by IS 'Creator ID, audit fiel COMMENT ON COLUMN nexent.user_token_info_t.updated_by IS 'Last updater ID, audit field'; COMMENT ON COLUMN nexent.user_token_info_t.delete_flag IS 'Soft delete flag, Y means deleted'; --- Create unique index on access_key to ensure uniqueness -CREATE UNIQUE INDEX IF NOT EXISTS idx_user_token_info_access_key ON nexent.user_token_info_t(access_key) WHERE delete_flag = 'N'; - --- Create index on user_id for query performance -CREATE INDEX IF NOT EXISTS idx_user_token_info_user_id ON nexent.user_token_info_t(user_id) WHERE delete_flag = 'N'; - --- Create a function to update the update_time column -CREATE OR REPLACE FUNCTION update_user_token_info_update_time() -RETURNS TRIGGER AS $$ -BEGIN - NEW.update_time = CURRENT_TIMESTAMP; - RETURN NEW; -END; -$$ LANGUAGE plpgsql; - --- Add comment to the function -COMMENT ON FUNCTION update_user_token_info_update_time() IS 'Function to update the update_time column when a record in user_token_info_t is updated'; - --- Create a trigger to call the function before each update -DROP TRIGGER IF EXISTS update_user_token_info_update_time_trigger ON nexent.user_token_info_t; -CREATE TRIGGER 
update_user_token_info_update_time_trigger -BEFORE UPDATE ON nexent.user_token_info_t -FOR EACH ROW -EXECUTE FUNCTION update_user_token_info_update_time(); - --- Add comment to the trigger -COMMENT ON TRIGGER update_user_token_info_update_time_trigger ON nexent.user_token_info_t IS 'Trigger to call update_user_token_info_update_time function before each update on user_token_info_t table'; - -- Create the user_token_usage_log_t table in the nexent schema CREATE TABLE IF NOT EXISTS nexent.user_token_usage_log_t ( @@ -92,20 +64,6 @@ COMMENT ON COLUMN nexent.user_token_usage_log_t.created_by IS 'Creator ID, audit COMMENT ON COLUMN nexent.user_token_usage_log_t.updated_by IS 'Last updater ID, audit field'; COMMENT ON COLUMN nexent.user_token_usage_log_t.delete_flag IS 'Soft delete flag, Y means deleted'; --- Create index on token_id for query performance -CREATE INDEX IF NOT EXISTS idx_user_token_usage_log_token_id ON nexent.user_token_usage_log_t(token_id); - --- Create index on call_function_name for query performance -CREATE INDEX IF NOT EXISTS idx_user_token_usage_log_function_name ON nexent.user_token_usage_log_t(call_function_name); - --- Add foreign key constraint -ALTER TABLE nexent.user_token_usage_log_t -ADD CONSTRAINT fk_user_token_usage_log_token_id -FOREIGN KEY (token_id) -REFERENCES nexent.user_token_info_t(token_id) -ON DELETE CASCADE; - - -- Migration: Remove partner_mapping_id_t table for northbound conversation ID mapping -- Date: 2026-03-10 -- Description: Remove the external-internal conversation ID mapping table as northbound APIs now use internal conversation IDs directly diff --git a/docker/sql/v2.0.0_0314_add_context_skill_t.sql b/docker/sql/v2.0.0_0314_add_context_skill_t.sql new file mode 100644 index 000000000..5fd23c97e --- /dev/null +++ b/docker/sql/v2.0.0_0314_add_context_skill_t.sql @@ -0,0 +1,105 @@ +-- Migration: Add ag_skill_info_t, ag_skill_tools_rel_t, and ag_skill_instance_t tables +-- Date: 2026-03-14 +-- Description: Create skill 
management tables with skill content, tags, and tool relationships + +SET search_path TO nexent; + +-- Create the ag_skill_info_t table in the nexent schema +CREATE TABLE IF NOT EXISTS nexent.ag_skill_info_t ( + skill_id SERIAL4 PRIMARY KEY NOT NULL, + skill_name VARCHAR(100) NOT NULL, + skill_description VARCHAR(1000), + skill_tags JSON, + skill_content TEXT, + params JSON, + source VARCHAR(30) DEFAULT 'official', + created_by VARCHAR(100), + create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_by VARCHAR(100), + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + delete_flag VARCHAR(1) DEFAULT 'N' +); + +ALTER TABLE "ag_skill_info_t" OWNER TO "root"; + +-- Add comment to the table +COMMENT ON TABLE nexent.ag_skill_info_t IS 'Skill information table for managing custom skills'; + +-- Add comments to the columns +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_id IS 'Skill ID, unique primary key'; +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_name IS 'Skill name, globally unique'; +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_description IS 'Skill description text'; +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_tags IS 'Skill tags stored as JSON array'; +COMMENT ON COLUMN nexent.ag_skill_info_t.skill_content IS 'Skill content or prompt text'; +COMMENT ON COLUMN nexent.ag_skill_info_t.params IS 'Skill configuration parameters stored as JSON object'; +COMMENT ON COLUMN nexent.ag_skill_info_t.source IS 'Skill source: official, custom, or partner'; +COMMENT ON COLUMN nexent.ag_skill_info_t.created_by IS 'Creator ID'; +COMMENT ON COLUMN nexent.ag_skill_info_t.create_time IS 'Creation timestamp'; +COMMENT ON COLUMN nexent.ag_skill_info_t.updated_by IS 'Last updater ID'; +COMMENT ON COLUMN nexent.ag_skill_info_t.update_time IS 'Last update timestamp'; +COMMENT ON COLUMN nexent.ag_skill_info_t.delete_flag IS 'Whether it is deleted. 
Optional values: Y/N'; + +-- Create the ag_skill_tools_rel_t table in the nexent schema +CREATE TABLE IF NOT EXISTS nexent.ag_skill_tools_rel_t ( + rel_id SERIAL4 PRIMARY KEY NOT NULL, + skill_id INTEGER, + tool_id INTEGER, + created_by VARCHAR(100), + create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_by VARCHAR(100), + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + delete_flag VARCHAR(1) DEFAULT 'N' +); + +ALTER TABLE "ag_skill_tools_rel_t" OWNER TO "root"; + +-- Add comment to the table +COMMENT ON TABLE nexent.ag_skill_tools_rel_t IS 'Skill-tool relationship table for many-to-many mapping'; + +-- Add comments to the columns +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.rel_id IS 'Relationship ID, unique primary key'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.skill_id IS 'Foreign key to ag_skill_info_t.skill_id'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.tool_id IS 'Tool ID from ag_tool_info_t'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.created_by IS 'Creator ID'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.create_time IS 'Creation timestamp'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.updated_by IS 'Last updater ID'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.update_time IS 'Last update timestamp'; +COMMENT ON COLUMN nexent.ag_skill_tools_rel_t.delete_flag IS 'Whether it is deleted. 
Optional values: Y/N'; + +-- Create the ag_skill_instance_t table in the nexent schema +-- Stores skill instance configuration per agent version +-- Note: skill_description and skill_content fields removed, now retrieved from ag_skill_info_t +CREATE TABLE IF NOT EXISTS nexent.ag_skill_instance_t ( + skill_instance_id SERIAL4 NOT NULL, + skill_id INTEGER NOT NULL, + agent_id INTEGER NOT NULL, + user_id VARCHAR(100), + tenant_id VARCHAR(100), + enabled BOOLEAN DEFAULT TRUE, + version_no INTEGER DEFAULT 0 NOT NULL, + created_by VARCHAR(100), + create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + updated_by VARCHAR(100), + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT CURRENT_TIMESTAMP, + delete_flag VARCHAR(1) DEFAULT 'N', + CONSTRAINT ag_skill_instance_t_pkey PRIMARY KEY (skill_instance_id, version_no) +); + +ALTER TABLE "ag_skill_instance_t" OWNER TO "root"; + +-- Add comment to the table +COMMENT ON TABLE nexent.ag_skill_instance_t IS 'Skill instance configuration table - stores per-agent skill settings'; + +-- Add comments to the columns +COMMENT ON COLUMN nexent.ag_skill_instance_t.skill_instance_id IS 'Skill instance ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.skill_id IS 'Foreign key to ag_skill_info_t.skill_id'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.agent_id IS 'Agent ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.user_id IS 'User ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.tenant_id IS 'Tenant ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.enabled IS 'Whether this skill is enabled for the agent'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.version_no IS 'Version number. 
0 = draft/editing state, >=1 = published snapshot'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.created_by IS 'Creator ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.create_time IS 'Creation timestamp'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.updated_by IS 'Last updater ID'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.update_time IS 'Last update timestamp'; +COMMENT ON COLUMN nexent.ag_skill_instance_t.delete_flag IS 'Whether it is deleted. Optional values: Y/N'; diff --git a/frontend/app/[locale]/agents/components/AgentConfigComp.tsx b/frontend/app/[locale]/agents/components/AgentConfigComp.tsx index cb321f32c..aac48a09c 100644 --- a/frontend/app/[locale]/agents/components/AgentConfigComp.tsx +++ b/frontend/app/[locale]/agents/components/AgentConfigComp.tsx @@ -1,17 +1,21 @@ "use client"; -import { useState, useCallback } from "react"; +import { useState, useCallback, useEffect } from "react"; import { useTranslation } from "react-i18next"; import { App, Button, Row, Col, Flex, Tooltip, Badge, Divider } from "antd"; import CollaborativeAgent from "./agentConfig/CollaborativeAgent"; import ToolManagement from "./agentConfig/ToolManagement"; +import SkillManagement from "./agentConfig/SkillManagement"; +import SkillBuildModal from "./agentConfig/SkillBuildModal"; import { updateToolList } from "@/services/mcpService"; import { useAgentConfigStore } from "@/stores/agentConfigStore"; import { useToolList } from "@/hooks/agent/useToolList"; +import { useSkillList } from "@/hooks/agent/useSkillList"; +import { useAgentSkillInstances } from "@/hooks/agent/useAgentSkillInstances"; import McpConfigModal from "./agentConfig/McpConfigModal"; -import { RefreshCw, Lightbulb, Plug } from "lucide-react"; +import { RefreshCw, Lightbulb, Plug, BlocksIcon } from "lucide-react"; interface AgentConfigCompProps {} @@ -21,14 +25,26 @@ export default function AgentConfigComp({}: AgentConfigCompProps) { // Get state from store const currentAgentId = 
useAgentConfigStore((state) => state.currentAgentId); - const isCreatingMode = useAgentConfigStore((state) => state.isCreatingMode); const [isMcpModalOpen, setIsMcpModalOpen] = useState(false); + const [isSkillModalOpen, setIsSkillModalOpen] = useState(false); const [isRefreshing, setIsRefreshing] = useState(false); + const [isRefreshingSkill, setIsRefreshingSkill] = useState(false); - // Use tool list hook for data management const { groupedTools, invalidate } = useToolList(); + const { groupedSkills, invalidate: invalidateSkills } = useSkillList(); + const { skillInstances, invalidate: invalidateSkillInstances } = useAgentSkillInstances( + currentAgentId ?? null + ); + const setInitialSkills = useAgentConfigStore((state) => state.setInitialSkills); + + // Load skill instances when agent changes + useEffect(() => { + if (currentAgentId && skillInstances.length > 0) { + setInitialSkills(skillInstances); + } + }, [currentAgentId, skillInstances, setInitialSkills]); const handleRefreshTools = useCallback(async () => { setIsRefreshing(true); @@ -49,52 +65,58 @@ export default function AgentConfigComp({}: AgentConfigCompProps) { } }, [invalidate]); + const handleRefreshSkills = useCallback(async () => { + setIsRefreshingSkill(true); + try { + invalidateSkills(); + invalidateSkillInstances(); + message.success(t("skillManagement.message.refreshSuccess")); + } catch (error) { + message.error(t("skillManagement.message.refreshFailed")); + } finally { + setIsRefreshingSkill(false); + } + }, [invalidateSkills, invalidateSkillInstances]); + + const handleSkillBuildSuccess = useCallback(() => { + invalidateSkills(); + if (currentAgentId) { + invalidateSkillInstances(); + } + }, [invalidateSkills, invalidateSkillInstances, currentAgentId]); + return ( <> {/* Import handled by Ant Design Upload (no hidden input required) */} - + -

- {t("businessLogic.config.title")} -

+

{t("businessLogic.config.title")}

- + -

- {t("toolPool.title")} -

+

{t("toolPool.title")}

- {t("toolPool.tooltip.functionGuide")} - - } + title={
{t("toolPool.tooltip.functionGuide")}
} color="#ffffff" styles={{ root: { backgroundColor: "#ffffff", border: "1px solid #e5e7eb", borderRadius: "6px", - boxShadow: - "0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06)", + boxShadow: "0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06)", maxWidth: "800px", minWidth: "700px", width: "fit-content", @@ -134,7 +156,7 @@ export default function AgentConfigComp({}: AgentConfigCompProps) { - + + + + + +

{t("skillPool.title")}

+
+ + + + + + + +
+ + + + + + + +
- setIsMcpModalOpen(false)} + setIsMcpModalOpen(false)} /> + + setIsSkillModalOpen(false)} + onSuccess={handleSkillBuildSuccess} /> ); diff --git a/frontend/app/[locale]/agents/components/agentConfig/SkillBuildModal.tsx b/frontend/app/[locale]/agents/components/agentConfig/SkillBuildModal.tsx new file mode 100644 index 000000000..46307a0d2 --- /dev/null +++ b/frontend/app/[locale]/agents/components/agentConfig/SkillBuildModal.tsx @@ -0,0 +1,916 @@ +"use client"; + +import { useState, useEffect, useMemo, useRef } from "react"; +import { useTranslation } from "react-i18next"; +import ReactMarkdown from "react-markdown"; +import { + Modal, + Tabs, + Form, + Input, + Button, + AutoComplete, + Select, + message, + Flex, + Progress, + Row, + Col, + Spin, +} from "antd"; +import { + Upload as UploadIcon, + Send, + Trash2, + MessagesSquare, + HardDriveUpload, +} from "lucide-react"; +import { getAgentByName } from "@/services/agentConfigService"; +import { conversationService } from "@/services/conversationService"; +import { extractSkillInfo } from "@/lib/skillFileUtils"; +import { + MAX_RECENT_SKILLS, + THINKING_STEPS_ZH, + type SkillFormData, + type ChatMessage, +} from "@/types/skill"; +import { + fetchSkillsList, + submitSkillForm, + submitSkillFromFile, + processSkillStream, + deleteSkillCreatorTempFile, + findSkillByName, + searchSkillsByName as searchSkillsByNameUtil, + type SkillListItem, +} from "@/services/skillService"; +import log from "@/lib/logger"; + +const { TextArea } = Input; + +interface SkillBuildModalProps { + isOpen: boolean; + onCancel: () => void; + onSuccess: () => void; +} + +export default function SkillBuildModal({ + isOpen, + onCancel, + onSuccess, +}: SkillBuildModalProps) { + const { t } = useTranslation("common"); + const [form] = Form.useForm(); + // TODO: [FEATURE] Re-enable interactive skill creation tab + // Reason: Interactive tab depends on skill_creator agent which may not be available in all deployments + // When to re-enable: + // 
1. Ensure skill_creator agent is properly configured and deployed + // 2. Verify conversationService works correctly with the agent + // 3. Test the full chat-to-form workflow end-to-end + // 4. Remove this TODO and restore the interactive tab in tabItems + const [activeTab, setActiveTab] = useState("upload"); + const [isSubmitting, setIsSubmitting] = useState(false); + const [allSkills, setAllSkills] = useState([]); + const [searchResults, setSearchResults] = useState([]); + const [selectedSkillName, setSelectedSkillName] = useState(""); + const [uploadFile, setUploadFile] = useState(null); + const [uploadExtractedSkillName, setUploadExtractedSkillName] = useState(""); + const [uploadExtractingName, setUploadExtractingName] = useState(false); + + // Interactive creation state + const [chatMessages, setChatMessages] = useState([]); + const [chatInput, setChatInput] = useState(""); + const [isChatLoading, setIsChatLoading] = useState(false); + const [thinkingStep, setThinkingStep] = useState(0); + const [thinkingDescription, setThinkingDescription] = useState(""); + const [isThinkingVisible, setIsThinkingVisible] = useState(false); + const [interactiveSkillName, setInteractiveSkillName] = useState(""); + const chatContainerRef = useRef(null); + + // skill_creator agent state (cached after first lookup) + const [skillCreatorAgentId, setSkillCreatorAgentId] = useState(null); + const skillCreatorAgentIdRef = useRef(null); + + // Track if component is mounted to prevent state updates after unmount + const isMountedRef = useRef(true); + + // Name input dropdown control + const [isNameDropdownOpen, setIsNameDropdownOpen] = useState(false); + const [isTagsFocused, setIsTagsFocused] = useState(false); + + // Create/Update mode detection + const [isCreateMode, setIsCreateMode] = useState(true); + + // Recent skills (sorted by update_time descending, take top 5) + const recentSkills = useMemo(() => { + return [...allSkills] + .filter((s) => s.update_time) + .sort((a, b) => { 
+ const timeA = new Date(a.update_time!).getTime(); + const timeB = new Date(b.update_time!).getTime(); + return timeB - timeA; + }) + .slice(0, MAX_RECENT_SKILLS); + }, [allSkills]); + + useEffect(() => { + if (!isOpen) return; + let cancelled = false; + fetchSkillsList() + .then((list) => { + if (!cancelled) setAllSkills(list); + }) + .catch((err) => { + log.error("Failed to load skills for SkillBuildModal", err); + }); + return () => { + cancelled = true; + }; + }, [isOpen]); + + // TODO: [FEATURE] Update setActiveTab("upload") when interactive tab is re-enabled + useEffect(() => { + if (!isOpen) { + form.resetFields(); + setActiveTab("upload"); + setSelectedSkillName(""); + setUploadFile(null); + setSearchResults([]); + setChatMessages([]); + setChatInput(""); + setInteractiveSkillName(""); + setIsNameDropdownOpen(false); + setIsTagsFocused(false); + setIsCreateMode(true); + setUploadExtractingName(false); + setUploadExtractedSkillName(""); + setSkillCreatorAgentId(null); + skillCreatorAgentIdRef.current = null; + setThinkingStep(0); + setThinkingDescription(""); + setIsThinkingVisible(false); + } + }, [isOpen, form]); + + // Track component mount status for async callback safety + useEffect(() => { + isMountedRef.current = true; + return () => { + isMountedRef.current = false; + }; + }, []); + + // Detect create/update mode when skill name changes + useEffect(() => { + const nameValue = interactiveSkillName.trim(); + if (nameValue) { + const matchedSkill = findSkillByName(nameValue, allSkills); + setIsCreateMode(!matchedSkill); + if (matchedSkill) { + setSelectedSkillName(matchedSkill.name); + form.setFieldsValue({ + description: matchedSkill.description || "", + source: matchedSkill.source || "自定义", + content: matchedSkill.content || "", + }); + } + } else { + setIsCreateMode(true); + setSelectedSkillName(""); + } + }, [interactiveSkillName, allSkills, form]); + + // Detect create/update mode when extracted skill name changes (upload tab) + const 
[uploadIsCreateMode, setUploadIsCreateMode] = useState(true); + useEffect(() => { + const nameValue = uploadExtractedSkillName.trim(); + if (nameValue) { + const matched = findSkillByName(nameValue, allSkills); + setUploadIsCreateMode(!matched); + } else { + setUploadIsCreateMode(true); + } + }, [uploadExtractedSkillName, allSkills]); + + // Dropdown options based on input state + const dropdownOptions = useMemo(() => { + if (!interactiveSkillName || interactiveSkillName.trim() === "") { + return recentSkills.map((skill) => ({ + value: skill.name, + label: ( + + {skill.name} + {skill.source} + + ), + })); + } + return searchResults.map((skill) => ({ + value: skill.name, + label: ( + + {skill.name} + {skill.source} + + ), + })); + }, [interactiveSkillName, searchResults, recentSkills]); + + // Determine if dropdown should be open + const shouldShowDropdown = isNameDropdownOpen && !isTagsFocused; + + const handleNameSearch = (value: string) => { + setInteractiveSkillName(value); + if (!value || value.trim() === "") { + setSearchResults([]); + } else { + const results = searchSkillsByNameUtil(value, allSkills); + setSearchResults(results); + } + }; + + const handleNameSelect = (value: string) => { + setSelectedSkillName(value); + setInteractiveSkillName(value); + setIsNameDropdownOpen(false); + const skill = allSkills.find((s) => s.name === value); + if (skill) { + form.setFieldsValue({ + name: skill.name, + description: skill.description || "", + source: skill.source || "Custom", + content: skill.content || "", + }); + } + }; + + const handleNameChange = (value: string) => { + setInteractiveSkillName(value); + if (!value || value.trim() === "") { + setSelectedSkillName(""); + } + }; + + const handleNameFocus = () => { + setIsNameDropdownOpen(true); + }; + + const handleNameBlur = () => { + setTimeout(() => { + setIsNameDropdownOpen(false); + }, 200); + }; + + // Cleanup temp file when modal is closed + const handleModalClose = async () => { + if (activeTab === 
"interactive" && chatMessages.length > 0) { + await deleteSkillCreatorTempFile(); + } + onCancel(); + }; + + const handleManualSubmit = async () => { + try { + const values = await form.validateFields(); + setIsSubmitting(true); + await submitSkillForm( + values, + allSkills, + onSuccess, + onCancel, + t + ); + } catch (error) { + log.error("Skill create/update error:", error); + } finally { + setIsSubmitting(false); + } + }; + + const handleUploadSubmit = async () => { + if (!uploadFile) { + message.warning(t("skillManagement.message.pleaseSelectFile")); + return; + } + + if (!uploadExtractedSkillName.trim()) { + message.warning(t("skillManagement.form.nameRequired")); + return; + } + + setIsSubmitting(true); + try { + await submitSkillFromFile( + uploadExtractedSkillName, + uploadFile, + allSkills, + onSuccess, + onCancel, + t + ); + } finally { + setIsSubmitting(false); + } + }; + + // Resolve skill_creator agent + const resolveSkillCreatorAgent = async (): Promise => { + if (skillCreatorAgentIdRef.current !== null) { + const cached = skillCreatorAgentIdRef.current; + return cached < 0 ? 
null : cached; + } + const result = await getAgentByName("skill_creator"); + if (!result) return null; + skillCreatorAgentIdRef.current = -result.agent_id; + setSkillCreatorAgentId(result.agent_id); + return result.agent_id; + }; + + // Handle chat send for interactive creation + const handleChatSend = async () => { + if (!chatInput.trim() || isChatLoading) return; + + const currentInput = chatInput.trim(); + setChatInput(""); + + const userMessage: ChatMessage = { + id: Date.now().toString(), + role: "user", + content: currentInput, + timestamp: new Date(), + }; + + setChatMessages((prev) => [...prev, userMessage]); + setIsChatLoading(true); + setThinkingStep(0); + setThinkingDescription(THINKING_STEPS_ZH.find((s) => s.step === 0)?.description || ""); + setIsThinkingVisible(true); + + const assistantId = (Date.now() + 1).toString(); + setChatMessages((prev) => [ + ...prev, + { id: assistantId, role: "assistant", content: "", timestamp: new Date() }, + ]); + + try { + const agentId = await resolveSkillCreatorAgent(); + if (!agentId) { + throw new Error("skill_creator agent not found"); + } + + const history = chatMessages.map((msg) => ({ + role: msg.role === "user" ? "user" : "assistant", + content: msg.content, + })); + + const reader = await conversationService.runAgent( + { + query: currentInput, + conversation_id: 0, + history, + agent_id: agentId, + is_debug: true, + }, + undefined as unknown as AbortSignal + ); + + await processSkillStream( + reader, + (step, description) => { + setThinkingStep(step); + setThinkingDescription(description); + }, + setIsThinkingVisible, + async (finalAnswer) => { + if (!isMountedRef.current) return; + + setChatMessages((prev) => + prev.map((msg) => + msg.id === assistantId ? 
{ ...msg, content: finalAnswer } : msg + ) + ); + + const { parseSkillDraft } = await import("@/lib/skillFileUtils"); + const skillDraft = parseSkillDraft(finalAnswer); + + if (skillDraft) { + form.setFieldValue("name", skillDraft.name); + form.setFieldValue("description", skillDraft.description); + form.setFieldValue("tags", skillDraft.tags); + form.setFieldValue("content", skillDraft.content); + setInteractiveSkillName(skillDraft.name); + const existingSkill = allSkills.find( + (s) => s.name.toLowerCase() === skillDraft.name.toLowerCase() + ); + setIsCreateMode(!existingSkill); + message.success(t("skillManagement.message.skillReadyForSave")); + } else { + // Fallback: read from temp file + try { + const { fetchSkillConfig, fetchSkillFileContent } = await import("@/services/agentConfigService"); + const config = await fetchSkillConfig("simple-skill-creator"); + + if (config && config.temp_filename) { + const tempFilename = config.temp_filename as string; + const tempContent = await fetchSkillFileContent("simple-skill-creator", tempFilename); + + if (tempContent) { + const { extractSkillInfoFromContent } = await import("@/lib/skillFileUtils"); + const skillInfo = extractSkillInfoFromContent(tempContent); + + if (skillInfo && skillInfo.name) { + form.setFieldValue("name", skillInfo.name); + setInteractiveSkillName(skillInfo.name); + const existingSkill = allSkills.find( + (s) => s.name.toLowerCase() === skillInfo.name.toLowerCase() + ); + setIsCreateMode(!existingSkill); + } + if (skillInfo && skillInfo.description) { + form.setFieldValue("description", skillInfo.description); + } + if (skillInfo && skillInfo.tags && skillInfo.tags.length > 0) { + form.setFieldValue("tags", skillInfo.tags); + } + // Use content without frontmatter + if (skillInfo.contentWithoutFrontmatter) { + form.setFieldValue("content", skillInfo.contentWithoutFrontmatter); + } + } + } + } catch (error) { + log.warn("Failed to load temp file content:", error); + } + } + }, + "zh" + ); + } catch 
(error) { + log.error("Interactive skill creation error:", error); + message.error(t("skillManagement.message.chatError")); + setChatMessages((prev) => prev.filter((m) => m.id !== assistantId)); + } finally { + setIsChatLoading(false); + } + }; + + // Handle chat clear + const handleChatClear = async () => { + const { clearChatAndTempFile } = await import("@/services/skillService"); + await clearChatAndTempFile(); + setChatMessages([]); + }; + + // Scroll to bottom of chat when new messages arrive + useEffect(() => { + if (chatContainerRef.current) { + chatContainerRef.current.scrollTop = chatContainerRef.current.scrollHeight; + } + }, [chatMessages]); + + // Import extractSkillGenerationResult + const extractSkillGenerationResult = (content: string): string => { + const skillTagIndex = content.indexOf(""); + if (skillTagIndex !== -1) { + return content.substring(skillTagIndex + 8).trim(); + } + return content; + }; + + const renderInteractiveTab = () => { + return ( +
+ {/* Left side: Chat dialog */} +
+ {/* Chat header */} +
+ + {t("skillManagement.tabs.interactive")} + + {chatMessages.length > 0 && ( + + )} +
+ + {/* Chat messages area */} +
+ {chatMessages.length === 0 && ( +
+ {t("skillManagement.form.chatPlaceholder")} +
+ )} + {chatMessages.map((msg) => ( +
+
+ {msg.role === "assistant" && isThinkingVisible && msg.content === "" ? ( +
+ + {thinkingDescription && ( + + {thinkingDescription} + + )} +
+ ) : msg.role === "assistant" ? ( +
+ + {extractSkillGenerationResult(msg.content)} + +
+ ) : ( +
{msg.content}
+ )} +
+
+ ))} +
+ + {/* Chat input area */} +
+ +