Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
122 changes: 122 additions & 0 deletions backend/agents/skill_creation_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
"""Skill creation agent module for interactive skill generation."""

import logging
import threading
from typing import List

from nexent.core.agents.agent_model import AgentConfig, AgentRunInfo, ModelConfig, ToolConfig
from nexent.core.agents.run_agent import agent_run_thread
from nexent.core.utils.observer import MessageObserver

logger = logging.getLogger("skill_creation_agent")


def create_skill_creation_agent_config(
    system_prompt: str,
    model_config_list: List[ModelConfig],
    local_skills_dir: str = ""
) -> AgentConfig:
    """Build the AgentConfig used by the internal skill-creation agent.

    The caller-supplied system prompt fully replaces the smolagent default
    prompts; planning and final-answer template sections are intentionally
    blanked so that only the system prompt steers the agent.

    Args:
        system_prompt: System prompt that replaces smolagent defaults.
        model_config_list: Model configurations; the first entry's
            ``cite_name`` selects the model the agent runs on.
        local_skills_dir: Local skills directory path. Accepted for
            interface compatibility with callers; not consumed here.

    Returns:
        An ``AgentConfig`` ready for skill creation.

    Raises:
        ValueError: If ``model_config_list`` is empty.
    """
    if not model_config_list:
        raise ValueError("model_config_list cannot be empty")

    primary_model = model_config_list[0]

    # Template overrides: keep the managed-agent task/report passthrough,
    # blank every other smolagent template section.
    templates = {
        "system_prompt": system_prompt,
        "managed_agent": {
            "task": "{task}",
            "report": "## {name} Report\n\n{final_answer}",
        },
        "planning": {
            "initial_plan": "",
            "update_plan_pre_messages": "",
            "update_plan_post_messages": "",
        },
        "final_answer": {
            "pre_messages": "",
            "post_messages": "",
        },
    }

    return AgentConfig(
        name="__skill_creator__",
        description="Internal skill creator agent",
        prompt_templates=templates,
        tools=[],
        max_steps=5,
        model_name=primary_model.cite_name,
    )


def run_skill_creation_agent(
    query: str,
    agent_config: AgentConfig,
    model_config_list: List[ModelConfig],
    observer: MessageObserver,
    stop_event: threading.Event,
) -> None:
    """Execute the skill-creation agent and block until it finishes.

    Args:
        query: User query forwarded to the agent.
        agent_config: Pre-built configuration for the agent.
        model_config_list: Model configurations available to the run.
        observer: Observer that captures streamed agent output.
        stop_event: Event callers may set to cancel the run.
    """
    # Bundle everything the runner needs, then hand off synchronously.
    run_info = AgentRunInfo(
        query=query,
        model_config_list=model_config_list,
        observer=observer,
        agent_config=agent_config,
        stop_event=stop_event,
    )
    agent_run_thread(run_info)


def create_simple_skill_from_request(
    system_prompt: str,
    user_prompt: str,
    model_config_list: List[ModelConfig],
    observer: MessageObserver,
    stop_event: threading.Event,
    local_skills_dir: str = ""
) -> None:
    """Run the skill-creation agent synchronously for one user request.

    The agent writes the generated skill content to tmp.md in
    local_skills_dir; the frontend should read tmp.md after the agent
    completes to get the skill content.

    Args:
        system_prompt: System prompt with skill creation instructions.
        user_prompt: User's skill description request.
        model_config_list: List of model configurations.
        observer: Message observer for capturing agent output.
        stop_event: Threading event for cancellation.
        local_skills_dir: Path to local skills directory for file operations.
    """
    agent_config = create_skill_creation_agent_config(
        system_prompt=system_prompt,
        model_config_list=model_config_list,
        local_skills_dir=local_skills_dir
    )

    # Fix: the previous implementation started a thread and immediately
    # joined it, which is equivalent to a direct call except that failures
    # were silently routed to the thread excepthook. Call directly and log
    # failures explicitly; errors are still swallowed (not re-raised) to
    # preserve the prior fire-and-forget contract for callers.
    try:
        run_skill_creation_agent(
            user_prompt, agent_config, model_config_list, observer, stop_event
        )
    except Exception:
        logger.exception("Skill creation agent run failed")
2 changes: 2 additions & 0 deletions backend/apps/runtime_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from apps.conversation_management_app import router as conversation_management_router
from apps.memory_config_app import router as memory_config_router
from apps.file_management_app import file_management_runtime_router as file_management_router
from apps.skill_app import skill_creator_router
from middleware.exception_handler import ExceptionHandlerMiddleware

# Create logger instance
Expand All @@ -22,3 +23,4 @@
app.include_router(memory_config_router)
app.include_router(file_management_router)
app.include_router(voice_router)
app.include_router(skill_creator_router)
225 changes: 143 additions & 82 deletions backend/apps/skill_app.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,28 @@
"""Skill management HTTP endpoints."""

import asyncio
import logging
import os
import re
import threading
from typing import Any, Dict, List, Optional

from fastapi import APIRouter, HTTPException, Query, UploadFile, File, Form, Header
from starlette.responses import JSONResponse
from starlette.responses import JSONResponse, StreamingResponse
from pydantic import BaseModel

from consts.exceptions import SkillException, UnauthorizedError
from services.skill_service import SkillService
from consts.model import SkillInstanceInfoRequest
from utils.auth_utils import get_current_user_id
from utils.auth_utils import get_current_user_id, get_current_user_info
from utils.prompt_template_utils import get_skill_creation_simple_prompt_template
from nexent.core.agents.agent_model import ModelConfig
from agents.skill_creation_agent import create_simple_skill_from_request
from nexent.core.utils.observer import MessageObserver

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/skills", tags=["skills"])
skill_creator_router = APIRouter(prefix="/skills", tags=["simple-skills"])


class SkillCreateRequest(BaseModel):
Expand Down Expand Up @@ -453,88 +459,143 @@
raise HTTPException(status_code=500, detail="Internal server error")


@router.delete("/{skill_name}/files/{file_path:path}")
async def delete_skill_file(
skill_name: str,
file_path: str,
authorization: Optional[str] = Header(None)
) -> JSONResponse:
"""Delete a specific file within a skill directory.
class SkillCreateSimpleRequest(BaseModel):
    """Request model for interactive skill creation."""
    # The user's skill description request, forwarded verbatim to the
    # skill-creation agent as its user prompt.
    user_request: str

Args:
skill_name: Name of the skill
file_path: Relative path to the file within the skill directory
"""
try:
_, _ = get_current_user_id(authorization)
service = SkillService()

# Validate skill_name so it cannot be used for path traversal
if not skill_name:
raise HTTPException(status_code=400, detail="Invalid skill name")
if os.sep in skill_name or "/" in skill_name or ".." in skill_name:
raise HTTPException(status_code=400, detail="Invalid skill name")

# Read config to get temp_filename for validation
config_content = service.get_skill_file_content(skill_name, "config.yaml")
if config_content is None:
raise HTTPException(status_code=404, detail="Config file not found")

# Parse config to get temp_filename
import yaml
config = yaml.safe_load(config_content)
temp_filename = config.get("temp_filename", "")

# Get the base directory for the skill
local_dir = os.path.join(service.skill_manager.local_skills_dir, skill_name)

# Check for path traversal patterns in the raw file_path BEFORE any normalization
# This catches attempts like ../../etc/passwd or /etc/passwd
normalized_for_check = os.path.normpath(file_path)
if ".." in file_path or file_path.startswith("/") or (os.sep in file_path and file_path.startswith(os.sep)):
# Additional check: ensure the normalized path doesn't escape local_dir
abs_local_dir = os.path.abspath(local_dir)
abs_full_path = os.path.abspath(os.path.join(local_dir, normalized_for_check))
try:
common = os.path.commonpath([abs_local_dir, abs_full_path])
if common != abs_local_dir:
raise HTTPException(status_code=400, detail="Invalid file path: path traversal detected")
except ValueError:
raise HTTPException(status_code=400, detail="Invalid file path: path traversal detected")

# Normalize the requested file path - use basename to strip directory components
safe_file_path = os.path.basename(os.path.normpath(file_path))

# Build full path and validate it stays within local_dir
full_path = os.path.normpath(os.path.join(local_dir, safe_file_path))
abs_local_dir = os.path.abspath(local_dir)
abs_full_path = os.path.abspath(full_path)

# Check for path traversal: abs_full_path should be within abs_local_dir
try:
common = os.path.commonpath([abs_local_dir, abs_full_path])
if common != abs_local_dir:
raise HTTPException(status_code=400, detail="Invalid file path: path traversal detected")
except ValueError:
# Different drives on Windows
raise HTTPException(status_code=400, detail="Invalid file path: path traversal detected")
def _build_model_config_from_tenant(tenant_id: str) -> ModelConfig:
    """Build ModelConfig from tenant's quick-config LLM model.

    Args:
        tenant_id: Tenant whose configured quick-config LLM should be used.

    Returns:
        A ModelConfig populated from the tenant's quick-config entry, with
        fixed sampling settings (temperature=0.1, top_p=0.95).

    Raises:
        ValueError: If the tenant has no LLM model configured.
    """
    # Local imports — presumably to avoid import cycles at module load
    # time; confirm before hoisting to the top of the file.
    from utils.config_utils import tenant_config_manager, get_model_name_from_config
    from consts.const import MODEL_CONFIG_MAPPING

    quick_config = tenant_config_manager.get_model_config(
        key=MODEL_CONFIG_MAPPING["llm"],
        tenant_id=tenant_id
    )
    if not quick_config:
        raise ValueError("No LLM model configured for tenant")

    return ModelConfig(
        cite_name=quick_config.get("display_name", "default"),
        api_key=quick_config.get("api_key", ""),
        model_name=get_model_name_from_config(quick_config),
        url=quick_config.get("base_url", ""),
        temperature=0.1,
        top_p=0.95,
        ssl_verify=True,
        model_factory=quick_config.get("model_factory")
    )
@skill_creator_router.post("/create-simple")
async def create_simple_skill(
    request: SkillCreateSimpleRequest,
    authorization: Optional[str] = Header(None)
):
    """Create a simple skill interactively via LLM agent.

    Loads the skill_creation_simple prompt template, runs an internal agent,
    and streams step progress and token content via SSE.

    Yields SSE events:
    - step_count: Current agent step number
    - skill_content: Token-level content (thinking, code, deep_thinking, tool output)
    - final_answer: Complete skill content
    - done: Stream completion signal
    - error: Terminal error with a message field
    """
    import json

    # Message types to stream as skill_content (token-level output)
    STREAMABLE_CONTENT_TYPES = frozenset([
        "model_output_thinking",
        "model_output_code",
        "model_output_deep_thinking",
        "tool",
        "execution_logs",
    ])

    def _sse(event_type: str, **payload) -> str:
        """Format a single SSE data frame."""
        return f"data: {json.dumps({'type': event_type, **payload}, ensure_ascii=False)}\n\n"

    def _frames_from_messages(messages):
        """Translate raw observer messages into SSE frames (generator).

        Fix: previously this logic was duplicated twice inline and used
        ``except (json.JSONDecodeError, Exception)`` — the broad Exception
        made JSONDecodeError redundant and hid unrelated bugs. Only malformed
        payloads are skipped now.
        """
        for msg in messages:
            if not isinstance(msg, str):
                continue
            try:
                data = json.loads(msg)
            except json.JSONDecodeError:
                continue  # skip non-JSON observer payloads
            if not isinstance(data, dict):
                continue  # skip JSON payloads that are not objects
            msg_type = data.get("type", "")
            content = data.get("content", "")

            # Stream step progress
            if msg_type == "step_count":
                yield _sse("step_count", content=content)
            # Stream token content (thinking, code, deep_thinking, tool output)
            elif msg_type in STREAMABLE_CONTENT_TYPES:
                yield _sse("skill_content", content=content)
            # Stream final_answer content separately
            elif msg_type == "final_answer":
                yield _sse("final_answer", content=content)

    async def generate():
        try:
            _, tenant_id, language = get_current_user_info(authorization)

            template = get_skill_creation_simple_prompt_template(language)

            model_config = _build_model_config_from_tenant(tenant_id)
            observer = MessageObserver(lang=language)
            stop_event = threading.Event()

            # Get local_skills_dir from SkillManager
            skill_service = SkillService()
            local_skills_dir = skill_service.skill_manager.local_skills_dir or ""

            # Start skill creation in background thread
            def run_task():
                create_simple_skill_from_request(
                    system_prompt=template.get("system_prompt", ""),
                    user_prompt=request.user_request,
                    model_config_list=[model_config],
                    observer=observer,
                    stop_event=stop_event,
                    local_skills_dir=local_skills_dir
                )

            thread = threading.Thread(target=run_task)
            thread.start()

            # Poll observer while the agent runs, relaying each message
            while thread.is_alive():
                for frame in _frames_from_messages(observer.get_cached_message()):
                    yield frame
                await asyncio.sleep(0.1)

            thread.join()

            # Flush messages produced between the last poll and completion
            for frame in _frames_from_messages(observer.get_cached_message()):
                yield frame

            # Stream final answer content from observer
            final_result = observer.get_final_answer()
            if final_result:
                yield _sse("final_answer", content=final_result)

            # Send done signal
            yield _sse("done")

        except Exception as e:
            logger.error(f"Error in create_simple_skill stream: {e}")
            yield _sse("error", message=str(e))

    return StreamingResponse(generate(), media_type="text/event-stream")
Loading
Loading