Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 25 additions & 1 deletion server/.env.example
Original file line number Diff line number Diff line change
@@ -1,7 +1,31 @@
debug=false
url_prefix=/api
secret_key=postgres
database_url=postgresql://postgres:123456@localhost:5432/postgres
# Chat Share Secret Key
CHAT_SHARE_SECRET_KEY=put-your-secret-key-here
CHAT_SHARE_SALT=put-your-encode-salt-here

# Configuration to use when not running in Docker
# database_url=postgresql://postgres:123456@localhost:5432/eigent
# redis_url=redis://localhost:6379/0
# celery_broker_url=redis://localhost:6379/0
# celery_result_url=redis://localhost:6379/0
# SESSION_REDIS_URL=redis://localhost:6379/1

# Trigger Schedule Poller Configuration
# ENABLE_TRIGGER_SCHEDULE_POLLER_TASK: Enable/disable scheduled trigger polling
ENABLE_TRIGGER_SCHEDULE_POLLER_TASK=true
# TRIGGER_SCHEDULE_POLLER_INTERVAL: Polling interval in minutes
TRIGGER_SCHEDULE_POLLER_INTERVAL=1
# TRIGGER_SCHEDULE_POLLER_BATCH_SIZE: Number of triggers to fetch per poll
TRIGGER_SCHEDULE_POLLER_BATCH_SIZE=100
# TRIGGER_SCHEDULE_MAX_DISPATCH_PER_TICK: Max triggers to dispatch per tick (0 = unlimited)
TRIGGER_SCHEDULE_MAX_DISPATCH_PER_TICK=0
# ENABLE_EXECUTION_TIMEOUT_CHECKER: Enable/disable execution timeout checking
ENABLE_EXECUTION_TIMEOUT_CHECKER=true
# EXECUTION_TIMEOUT_CHECKER_INTERVAL: Interval for the execution timeout check task, in minutes
EXECUTION_TIMEOUT_CHECKER_INTERVAL=1
# EXECUTION_PENDING_TIMEOUT_SECONDS: Timeout for pending executions (default 60 seconds)
EXECUTION_PENDING_TIMEOUT_SECONDS=60
# EXECUTION_RUNNING_TIMEOUT_SECONDS: Timeout for running executions (default 600 seconds / 10 minutes)
EXECUTION_RUNNING_TIMEOUT_SECONDS=600
22 changes: 17 additions & 5 deletions server/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,19 @@ FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim
# Install the project into `/app`
WORKDIR /app

# Install Git and build dependencies (required for git-based dependencies and Rust packages like tiktoken)
RUN apt-get update -o Acquire::Retries=3 && apt-get install -y --no-install-recommends \
git \
curl \
build-essential \
gcc \
python3-dev \
&& curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
&& rm -rf /var/lib/apt/lists/*

# Add Rust to PATH
ENV PATH="/root/.cargo/bin:$PATH"

# Disable bytecode transfer during compilation to avoid EMFILE during build on low nofile limits
ENV UV_COMPILE_BYTECODE=0

Expand All @@ -15,11 +28,6 @@ ENV UV_PYTHON_INSTALL_MIRROR=https://registry.npmmirror.com/-/binary/python-buil
ARG database_url
ENV database_url=$database_url

RUN apt-get update -o Acquire::Retries=3 && apt-get install -y --no-install-recommends \
gcc \
python3-dev \
&& rm -rf /var/lib/apt/lists/*

# Copy dependency files first
COPY server/pyproject.toml server/uv.lock ./

Expand Down Expand Up @@ -49,6 +57,10 @@ ENV PATH="/app/.venv/bin:$PATH"
COPY server/start.sh /app/start.sh
RUN sed -i 's/\r$//' /app/start.sh && chmod +x /app/start.sh

# Make Celery scripts executable
RUN sed -i 's/\r$//' /app/celery/worker/start && chmod +x /app/celery/worker/start
RUN sed -i 's/\r$//' /app/celery/beat/start && chmod +x /app/celery/beat/start

# Reset the entrypoint, don't invoke `uv`
ENTRYPOINT []

Expand Down
3 changes: 2 additions & 1 deletion server/alembic/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
auto_import("app.model.config")
auto_import("app.model.chat")
auto_import("app.model.provider")
auto_import("app.model.trigger")

# target_metadata = mymodel.Base.metadata
target_metadata = SQLModel.metadata
Expand Down Expand Up @@ -97,7 +98,7 @@ def run_migrations_offline() -> None:
script output.

"""
url = config.get_main_option("sqlalchemy.url")
url = env_not_empty("database_url")
context.configure(
url=url,
target_metadata=target_metadata,
Expand Down
113 changes: 113 additions & 0 deletions server/alembic/versions/2026_02_06_0440-9464b9d89de7_feat_trigger.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

"""feat-trigger

Revision ID: 9464b9d89de7
Revises: add_timestamp_to_chat_step
Create Date: 2026-02-06 04:40:17.623286

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import sqlmodel.sql.sqltypes
from app.type.trigger_types import ExecutionStatus
from app.type.trigger_types import ExecutionType
from app.type.trigger_types import ListenerType
from app.type.trigger_types import RequestType
from app.type.trigger_types import TriggerStatus
from app.type.trigger_types import TriggerType
from sqlalchemy_utils.types import ChoiceType

# revision identifiers, used by Alembic.
revision: str = '9464b9d89de7'
down_revision: Union[str, None] = 'add_timestamp_to_chat_step'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('trigger',
sa.Column('deleted_at', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(length=100), nullable=False),
sa.Column('description', sqlmodel.sql.sqltypes.AutoString(length=1000), nullable=False),
sa.Column('trigger_type', ChoiceType(choices=TriggerType, impl=sa.String()), nullable=True),
sa.Column('status', ChoiceType(choices=TriggerStatus, impl=sa.String()), nullable=True),
sa.Column('webhook_url', sa.String(length=1024), nullable=True),
sa.Column('webhook_method', ChoiceType(choices=RequestType, impl=sa.String()), nullable=True),
sa.Column('custom_cron_expression', sa.String(length=100), nullable=True),
sa.Column('listener_type', ChoiceType(choices=ListenerType, impl=sa.String()), nullable=True),
sa.Column('agent_model', sa.String(length=100), nullable=True),
sa.Column('task_prompt', sqlmodel.sql.sqltypes.AutoString(length=1500), nullable=True),
sa.Column('config', sa.JSON(), nullable=True),
sa.Column('max_executions_per_hour', sa.Integer(), nullable=True),
sa.Column('max_executions_per_day', sa.Integer(), nullable=True),
sa.Column('is_single_execution', sa.Boolean(), nullable=False),
sa.Column('last_executed_at', sa.DateTime(), nullable=True),
sa.Column('next_run_at', sa.DateTime(), nullable=True),
sa.Column('last_execution_status', sa.String(length=50), nullable=True),
sa.Column('consecutive_failures', sa.Integer(), nullable=False),
sa.Column('auto_disabled_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_trigger_next_run_at'), 'trigger', ['next_run_at'], unique=False)
op.create_index(op.f('ix_trigger_project_id'), 'trigger', ['project_id'], unique=False)
op.create_index(op.f('ix_trigger_user_id'), 'trigger', ['user_id'], unique=False)
op.create_table('trigger_execution',
sa.Column('deleted_at', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('trigger_id', sa.Integer(), nullable=False),
sa.Column('execution_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('execution_type', ChoiceType(choices=ExecutionType, impl=sa.String()), nullable=True),
sa.Column('status', ChoiceType(choices=ExecutionStatus, impl=sa.String()), nullable=True),
sa.Column('started_at', sa.DateTime(), nullable=True),
sa.Column('completed_at', sa.DateTime(), nullable=True),
sa.Column('duration_seconds', sa.Float(), nullable=True),
sa.Column('input_data', sa.JSON(), nullable=True),
sa.Column('output_data', sa.JSON(), nullable=True),
sa.Column('error_message', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('attempts', sa.Integer(), nullable=False),
sa.Column('max_retries', sa.Integer(), nullable=False),
sa.Column('tokens_used', sa.Integer(), nullable=True),
sa.Column('tools_executed', sa.JSON(), nullable=True),
sa.ForeignKeyConstraint(['trigger_id'], ['trigger.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_trigger_execution_execution_id'), 'trigger_execution', ['execution_id'], unique=True)
op.create_index(op.f('ix_trigger_execution_trigger_id'), 'trigger_execution', ['trigger_id'], unique=False)
# ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema.

    Drops objects in reverse dependency order: ``trigger_execution`` and
    its indexes first, then ``trigger``, because ``trigger_execution``
    holds a foreign key to ``trigger``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_trigger_execution_trigger_id'), table_name='trigger_execution')
    op.drop_index(op.f('ix_trigger_execution_execution_id'), table_name='trigger_execution')
    op.drop_table('trigger_execution')
    op.drop_index(op.f('ix_trigger_user_id'), table_name='trigger')
    op.drop_index(op.f('ix_trigger_project_id'), table_name='trigger')
    op.drop_index(op.f('ix_trigger_next_run_at'), table_name='trigger')
    op.drop_table('trigger')
    # ### end Alembic commands ###
40 changes: 33 additions & 7 deletions server/app/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,36 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

from fastapi import FastAPI
from fastapi_pagination import add_pagination

api = FastAPI(swagger_ui_parameters={"persistAuthorization": True})
add_pagination(api)
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi_pagination import add_pagination
from fastapi_limiter import FastAPILimiter
from app.component.environment import env_or_fail
from redis import asyncio as aioredis
import logging

logger = logging.getLogger("server_app")

@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: initialize the rate limiter on startup and
    release its Redis connection on shutdown."""
    # Startup phase: wire FastAPILimiter to Redis before serving requests.
    connection = aioredis.from_url(
        env_or_fail("redis_url"), encoding="utf-8", decode_responses=True
    )
    await FastAPILimiter.init(connection)
    logger.info("FastAPI Limiter initialized with Redis")

    yield  # the application handles requests while suspended here

    # Shutdown phase: tear the limiter down and close its connection.
    await FastAPILimiter.close()
    logger.info("FastAPI Limiter closed")

# Construct the application; the lifespan hook above handles rate-limiter setup.
api = FastAPI(swagger_ui_parameters={"persistAuthorization": True}, lifespan=lifespan)
add_pagination(api)
52 changes: 52 additions & 0 deletions server/app/component/celery.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

from celery import Celery
from app.component.environment import env_or_fail, env


def _env_flag(name: str, default: str = "true") -> bool:
    """Read a boolean feature flag from the environment ("true"/"false")."""
    return env(name, default).lower() == "true"


# Broker and result backend are mandatory configuration.
celery = Celery(
    __name__,
    broker=env_or_fail("celery_broker_url"),
    backend=env_or_fail("celery_result_url")
)

# Task modules Celery imports at startup (instead of autodiscovery).
celery.conf.imports = [
    "app.schedule.trigger_schedule_task",
]

# Periodic-task configuration for Celery Beat, driven by environment variables.
ENABLE_TRIGGER_SCHEDULE_POLLER = _env_flag("ENABLE_TRIGGER_SCHEDULE_POLLER_TASK")
TRIGGER_SCHEDULE_POLLER_INTERVAL = int(env("TRIGGER_SCHEDULE_POLLER_INTERVAL", "1"))  # minutes

ENABLE_EXECUTION_TIMEOUT_CHECKER = _env_flag("ENABLE_EXECUTION_TIMEOUT_CHECKER")
EXECUTION_TIMEOUT_CHECKER_INTERVAL = int(env("EXECUTION_TIMEOUT_CHECKER_INTERVAL", "1"))  # minutes

# Build the beat schedule, adding only the entries that are enabled.
_beat_schedule = {}

if ENABLE_TRIGGER_SCHEDULE_POLLER:
    _beat_schedule["poll-trigger-schedules"] = {
        "task": "app.schedule.trigger_schedule_task.poll_trigger_schedules",
        "schedule": TRIGGER_SCHEDULE_POLLER_INTERVAL * 60.0,  # minutes -> seconds
        "options": {"queue": "poll_trigger_schedules"},
    }

if ENABLE_EXECUTION_TIMEOUT_CHECKER:
    _beat_schedule["check-execution-timeouts"] = {
        "task": "app.schedule.trigger_schedule_task.check_execution_timeouts",
        "schedule": EXECUTION_TIMEOUT_CHECKER_INTERVAL * 60.0,  # minutes -> seconds
        "options": {"queue": "check_execution_timeouts"},
    }

celery.conf.beat_schedule = _beat_schedule
celery.conf.timezone = "UTC"
Loading