diff --git a/.env.example b/.env.example
index 78a3b72c0..8300e4254 100644
--- a/.env.example
+++ b/.env.example
@@ -1,16 +1,45 @@
-# LLM API配置(支持 OpenAI SDK 格式的任意 LLM API)
-# 推荐使用阿里百炼平台qwen-plus模型:https://bailian.console.aliyun.com/
-# 注意消耗较大,可先进行小于40轮的模拟尝试
+# LLM API configuration (supports any LLM API compatible with the OpenAI SDK format)
+# Recommended: use the qwen-plus model on Alibaba Bailian: https://bailian.console.aliyun.com/
+# Note: usage can be expensive, so try simulations with fewer than 40 rounds first
LLM_API_KEY=your_api_key_here
-LLM_BASE_URL=https://dashscope.aliyuncs.com/compatible-mode/v1
-LLM_MODEL_NAME=qwen-plus
+LLM_BASE_URL=https://api.openai.com/v1
+LLM_MODEL_NAME=gpt-4o
-# ===== ZEP记忆图谱配置 =====
-# 每月免费额度即可支撑简单使用:https://app.getzep.com/
+# ===== Graph backend selection =====
+# Use zep_cloud for hosted Zep, or graphiti_local for local Neo4j + Graphiti
+GRAPH_BACKEND=zep_cloud
+
+# ===== Zep Cloud configuration =====
+# Required only when GRAPH_BACKEND=zep_cloud
ZEP_API_KEY=your_zep_api_key_here
-# ===== 加速 LLM 配置(可选)=====
-# 注意如果不使用加速配置,env文件中就不要出现下面的配置项
+# ===== Local Graphiti + Neo4j configuration =====
+# Required only when GRAPH_BACKEND=graphiti_local
+# Note: the local Graphiti backend stores all graphs in one Neo4j database
+# and isolates each MiroFish graph by Graphiti `group_id`.
+NEO4J_URI=bolt://localhost:7687
+NEO4J_USER=neo4j
+NEO4J_PASSWORD=your_neo4j_password_here
+NEO4J_DATABASE=neo4j
+GRAPHITI_AUTO_INIT=true
+GRAPHITI_TELEMETRY_ENABLED=false
+GRAPHITI_MAX_COROUTINES=10
+GRAPHITI_SEARCH_RERANKER=rrf
+
+# Optional: override Graphiti model settings
+# If omitted, Graphiti falls back to the main LLM settings above
+GRAPHITI_LLM_API_KEY=
+GRAPHITI_LLM_BASE_URL=
+GRAPHITI_LLM_MODEL=
+GRAPHITI_EMBEDDER_API_KEY=
+GRAPHITI_EMBEDDER_BASE_URL=
+GRAPHITI_EMBEDDER_MODEL=text-embedding-3-small
+GRAPHITI_RERANKER_API_KEY=
+GRAPHITI_RERANKER_BASE_URL=
+GRAPHITI_RERANKER_MODEL=
+
+# ===== Accelerated LLM configuration (optional) =====
+# If you are not using the accelerated configuration, do not include the fields below in your env file
LLM_BOOST_API_KEY=your_api_key_here
LLM_BOOST_BASE_URL=your_base_url_here
-LLM_BOOST_MODEL_NAME=your_model_name_here
\ No newline at end of file
+LLM_BOOST_MODEL_NAME=your_model_name_here
diff --git a/Dockerfile b/Dockerfile
index e65646860..b635d4795 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,29 +1,30 @@
FROM python:3.11
-# 安装 Node.js (满足 >=18)及必要工具
+# Install Node.js (version 18 or later) and required tools
RUN apt-get update \
&& apt-get install -y --no-install-recommends nodejs npm \
&& rm -rf /var/lib/apt/lists/*
-# 从 uv 官方镜像复制 uv
+# Copy `uv` from the official uv image
COPY --from=ghcr.io/astral-sh/uv:0.9.26 /uv /uvx /bin/
WORKDIR /app
-# 先复制依赖描述文件以利用缓存
+# Copy dependency manifests first to take advantage of layer caching
COPY package.json package-lock.json ./
COPY frontend/package.json frontend/package-lock.json ./frontend/
COPY backend/pyproject.toml backend/uv.lock ./backend/
-# 安装依赖(Node + Python)
+# Install dependencies (Node + Python)
RUN npm ci \
&& npm ci --prefix frontend \
- && cd backend && uv sync --frozen
+ && cd backend && uv sync --frozen \
+ && uv pip install --python .venv/bin/python --no-deps graphiti-core==0.28.2
-# 复制项目源码
+# Copy the project source
COPY . .
EXPOSE 3000 5001
-# 同时启动前后端(开发模式)
-CMD ["npm", "run", "dev"]
\ No newline at end of file
+# Start both frontend and backend services (development mode)
+CMD ["npm", "run", "dev"]
diff --git a/README-EN.md b/README-EN.md
index 4b003a63f..fc58b26ef 100644
--- a/README-EN.md
+++ b/README-EN.md
@@ -4,7 +4,7 @@
-简洁通用的群体智能引擎,预测万物
+A simple, universal swarm intelligence engine for predicting anything
A Simple and Universal Swarm Intelligence Engine, Predicting Anything
@@ -20,7 +20,7 @@
[](https://x.com/mirofish_ai)
[](https://www.instagram.com/mirofish_ai/)
-[English](./README-EN.md) | [中文文档](./README.md)
+[Main README](./README.md) | [English Copy](./README-EN.md)
@@ -49,16 +49,16 @@ Welcome to visit our online demo environment and experience a prediction simulat
-
-
+
+
-
-
+
+
-
-
+
+
@@ -68,7 +68,7 @@ Welcome to visit our online demo environment and experience a prediction simulat
### 1. Wuhan University Public Opinion Simulation + MiroFish Project Introduction
-
+
Click the image to watch the complete demo video for prediction using BettaFish-generated "Wuhan University Public Opinion Report"
@@ -76,7 +76,7 @@ Click the image to watch the complete demo video for prediction using BettaFish-
### 2. Dream of the Red Chamber Lost Ending Simulation
-
+
Click the image to watch MiroFish's deep prediction of the lost ending based on hundreds of thousands of words from the first 80 chapters of "Dream of the Red Chamber"
@@ -122,9 +122,21 @@ LLM_API_KEY=your_api_key
LLM_BASE_URL=https://dashscope.aliyuncs.com/compatible-mode/v1
LLM_MODEL_NAME=qwen-plus
-# Zep Cloud Configuration
-# Free monthly quota is sufficient for simple usage: https://app.getzep.com/
+# Graph backend selection
+# Use zep_cloud for hosted Zep, or graphiti_local for local Neo4j + Graphiti
+GRAPH_BACKEND=zep_cloud
+
+# Zep Cloud configuration
+# Required only when GRAPH_BACKEND=zep_cloud
ZEP_API_KEY=your_zep_api_key
+
+# Local Graphiti + Neo4j configuration
+# Required only when GRAPH_BACKEND=graphiti_local
+# Note: the local Graphiti backend stores all graphs in one Neo4j database
+# and isolates each MiroFish graph by Graphiti `group_id`.
+NEO4J_URI=bolt://localhost:7687
+NEO4J_USER=neo4j
+NEO4J_PASSWORD=your_neo4j_password
```
#### 2. Install Dependencies
@@ -151,6 +163,17 @@ npm run setup:backend
npm run dev
```
+If you use `GRAPH_BACKEND=graphiti_local`, start Neo4j too:
+
+```bash
+docker compose up -d neo4j
+```
+
+The bundled `docker-compose.yml` uses `neo4j:5.26.22-enterprise` with
+`NEO4J_ACCEPT_LICENSE_AGREEMENT=yes` as the safe local default.
+The current local backend still keeps all graphs in the default Neo4j database
+and maps each MiroFish `graph_id` directly to a Graphiti `group_id`.
+
**Service URLs:**
- Frontend: `http://localhost:3000`
- Backend API: `http://localhost:5001`
@@ -175,11 +198,12 @@ docker compose up -d
Reads `.env` from root directory by default, maps ports `3000 (frontend) / 5001 (backend)`
> Mirror address for faster pulling is provided as comments in `docker-compose.yml`, replace if needed.
+> When `GRAPH_BACKEND=graphiti_local`, the bundled compose stack starts a local Neo4j instance for Graphiti storage. The repo keeps the enterprise image as the default compose target because existing local stores may use the block format.
## 📬 Join the Conversation
-
+
@@ -200,4 +224,4 @@ MiroFish's simulation engine is powered by **[OASIS (Open Agent Social Interacti
-
\ No newline at end of file
+
diff --git a/README-SETUP.md b/README-SETUP.md
new file mode 100644
index 000000000..fc7d55cfd
--- /dev/null
+++ b/README-SETUP.md
@@ -0,0 +1,266 @@
+# MiroFish Setup Guide
+
+This file is a practical setup guide for the current state of this fork.
+It is based on the main README, but focuses on the startup paths that are
+working in this repository today.
+
+## What Changed
+
+MiroFish now supports two graph backends:
+
+- `zep_cloud`: hosted Zep Cloud
+- `graphiti_local`: local Graphiti + Neo4j
+
+The local backend keeps all project graphs inside one Neo4j database and
+isolates them with Graphiti `group_id`.
+
+## Recommended Paths
+
+Choose one of these:
+
+- Docker: run frontend, backend, and Neo4j with `docker compose`
+- Local development: run frontend/backend locally and Neo4j in Docker
+
+## Prerequisites
+
+For Docker:
+
+- Docker Desktop or Docker Engine with Compose support
+
+For local development:
+
+- Node.js 18+
+- Python 3.11 or 3.12
+- `uv`
+- Docker, if you want the local Neo4j service
+
+## Environment File
+
+Create the env file from the example:
+
+```bash
+cp .env.example .env
+```
+
+## Option 1: Docker Startup
+
+This is the easiest way to run the full stack.
+
+### 1. Configure `.env`
+
+For the local Graphiti backend, a minimal working config looks like this:
+
+```env
+GRAPH_BACKEND=graphiti_local
+
+LLM_API_KEY=your_llm_api_key
+LLM_BASE_URL=https://api.openai.com/v1
+LLM_MODEL_NAME=gpt-4o-mini
+
+NEO4J_URI=bolt://neo4j:7687
+NEO4J_USER=neo4j
+NEO4J_PASSWORD=mirofish-local-password
+NEO4J_DATABASE=neo4j
+```
+
+Notes:
+
+- `GRAPHITI_LLM_*`, `GRAPHITI_EMBEDDER_*`, and `GRAPHITI_RERANKER_*` are optional
+- if they are omitted, the backend falls back to the main `LLM_*` settings
+
+If you want to keep using hosted Zep Cloud instead, use:
+
+```env
+GRAPH_BACKEND=zep_cloud
+
+LLM_API_KEY=your_llm_api_key
+LLM_BASE_URL=https://api.openai.com/v1
+LLM_MODEL_NAME=gpt-4o-mini
+
+ZEP_API_KEY=your_zep_api_key
+```
+
+### 2. Build and start
+
+```bash
+docker compose up -d --build
+```
+
+### 3. Check status
+
+```bash
+docker compose ps
+docker compose logs -f
+curl http://localhost:5001/health
+```
+
+When healthy, the backend should answer with a payload that includes:
+
+```json
+{
+ "status": "ok",
+ "service": "MiroFish Backend",
+ "graph_backend": "graphiti_local"
+}
+```
+
+### 4. Open the app
+
+- Frontend: `http://localhost:3000`
+- Backend: `http://localhost:5001`
+- Neo4j Browser: `http://localhost:7474`
+
+### Useful Docker commands
+
+Stop the stack:
+
+```bash
+docker compose down
+```
+
+Stop and remove volumes too:
+
+```bash
+docker compose down -v
+```
+
+Rebuild after dependency or Dockerfile changes:
+
+```bash
+docker compose up -d --build
+```
+
+Restart only Neo4j:
+
+```bash
+docker compose up -d neo4j
+```
+
+## Option 2: Local Development Startup
+
+Use this when you want hot reload or easier debugging.
+
+### 1. Configure `.env`
+
+For local Graphiti, use:
+
+```env
+GRAPH_BACKEND=graphiti_local
+
+LLM_API_KEY=your_llm_api_key
+LLM_BASE_URL=https://api.openai.com/v1
+LLM_MODEL_NAME=gpt-4o-mini
+
+NEO4J_URI=bolt://localhost:7687
+NEO4J_USER=neo4j
+NEO4J_PASSWORD=mirofish-local-password
+NEO4J_DATABASE=neo4j
+```
+
+### 2. Install dependencies
+
+```bash
+npm run setup:all
+```
+
+This does all of the following:
+
+- installs root Node dependencies
+- installs frontend dependencies
+- creates and syncs the backend `uv` environment
+- installs `graphiti-core==0.28.2` separately into the backend venv
+
+### 3. Start Neo4j
+
+```bash
+docker compose up -d neo4j
+```
+
+### 4. Start frontend and backend
+
+```bash
+npm run dev
+```
+
+Or individually:
+
+```bash
+npm run backend
+npm run frontend
+```
+
+## Current Neo4j Note
+
+The local compose stack uses:
+
+- `neo4j:5.26.22-enterprise`
+
+This repo keeps the enterprise image as the default compose target because
+existing local volumes may already use Neo4j block format. The application
+logic itself uses a single Neo4j database plus Graphiti `group_id`
+isolation, not one database per project.
+
+## Troubleshooting
+
+### Backend health is failing
+
+Check:
+
+- `LLM_API_KEY` is set
+- `GRAPH_BACKEND` is correct
+- if `GRAPH_BACKEND=graphiti_local`, `NEO4J_PASSWORD` is set
+- Neo4j is running
+
+### Docker app builds but does not start correctly
+
+Watch logs:
+
+```bash
+docker compose logs -f mirofish neo4j
+```
+
+### Neo4j starts but the backend cannot connect
+
+For Docker:
+
+- use `NEO4J_URI=bolt://neo4j:7687`
+
+For local development:
+
+- use `NEO4J_URI=bolt://localhost:7687`
+
+### You are on x86_64 and Docker build fails
+
+The app service currently pins:
+
+- `platform: linux/arm64`
+
+in `docker-compose.yml`.
+
+If your machine is not ARM64, remove or change that line before building.
+
+## Fast Start
+
+If you just want the shortest path for local Graphiti in Docker:
+
+```bash
+cp .env.example .env
+```
+
+Put this in `.env`:
+
+```env
+GRAPH_BACKEND=graphiti_local
+LLM_API_KEY=your_llm_api_key
+NEO4J_PASSWORD=mirofish-local-password
+NEO4J_URI=bolt://neo4j:7687
+NEO4J_USER=neo4j
+NEO4J_DATABASE=neo4j
+```
+
+Then run:
+
+```bash
+docker compose up -d --build
+curl http://localhost:5001/health
+```
diff --git a/README.md b/README.md
index 4f5cffe74..7013265eb 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
-简洁通用的群体智能引擎,预测万物
+A simple, universal swarm intelligence engine for predicting anything
A Simple and Universal Swarm Intelligence Engine, Predicting Anything
@@ -20,179 +20,203 @@
[](https://x.com/mirofish_ai)
[](https://www.instagram.com/mirofish_ai/)
-[English](./README-EN.md) | [中文文档](./README.md)
+[Main README](./README.md) | [English Copy](./README-EN.md)
-## ⚡ 项目概述
+## ⚡ Overview
-**MiroFish** 是一款基于多智能体技术的新一代 AI 预测引擎。通过提取现实世界的种子信息(如突发新闻、政策草案、金融信号),自动构建出高保真的平行数字世界。在此空间内,成千上万个具备独立人格、长期记忆与行为逻辑的智能体进行自由交互与社会演化。你可透过「上帝视角」动态注入变量,精准推演未来走向——**让未来在数字沙盘中预演,助决策在百战模拟后胜出**。
+**MiroFish** is a next-generation AI prediction engine powered by multi-agent technology. By extracting seed information from the real world (such as breaking news, policy drafts, or financial signals), it automatically constructs a high-fidelity parallel digital world. Within this space, thousands of intelligent agents with independent personalities, long-term memory, and behavioral logic freely interact and undergo social evolution. You can inject variables dynamically from a "God's-eye view" to precisely deduce future trajectories — **rehearse the future in a digital sandbox, and win decisions after countless simulations**.
-> 你只需:上传种子材料(数据分析报告或者有趣的小说故事),并用自然语言描述预测需求
-> MiroFish 将返回:一份详尽的预测报告,以及一个可深度交互的高保真数字世界
+> You only need to: upload seed materials (data analysis reports or engaging fictional stories) and describe your prediction requirements in natural language
+> MiroFish will return: a detailed prediction report and a deeply interactive high-fidelity digital world
-### 我们的愿景
+### Our Vision
-MiroFish 致力于打造映射现实的群体智能镜像,通过捕捉个体互动引发的群体涌现,突破传统预测的局限:
+MiroFish is dedicated to creating a swarm intelligence mirror that maps reality. By capturing the collective emergence triggered by individual interactions, we break through the limitations of traditional prediction:
-- **于宏观**:我们是决策者的预演实验室,让政策与公关在零风险中试错
-- **于微观**:我们是个人用户的创意沙盘,无论是推演小说结局还是探索脑洞,皆可有趣、好玩、触手可及
+- **At the Macro Level**: We are a rehearsal laboratory for decision-makers, allowing policies and public relations to be tested at zero risk
+- **At the Micro Level**: We are a creative sandbox for individual users, whether deducing novel endings or exploring imaginative scenarios, everything can be fun, playful, and accessible
-从严肃预测到趣味仿真,我们让每一个如果都能看见结果,让预测万物成为可能。
+From serious predictions to playful simulations, we let every "what if" see its outcome, making it possible to predict anything.
-## 🌐 在线体验
+## 🌐 Live Demo
-欢迎访问在线 Demo 演示环境,体验我们为你准备的一次关于热点舆情事件的推演预测:[mirofish-live-demo](https://666ghj.github.io/mirofish-demo/)
+Visit our online demo environment and experience a prediction simulation around a trending public-opinion event: [mirofish-live-demo](https://666ghj.github.io/mirofish-demo/)
-## 📸 系统截图
+## 📸 Screenshots
-
+
-点击图片查看使用微舆BettaFish生成的《武大舆情报告》进行预测的完整演示视频
+Click the image to watch the complete demo video of a prediction run using the BettaFish-generated "Wuhan University Public Opinion Report."
-### 2. 《红楼梦》失传结局推演预测
+### 2. Dream of the Red Chamber Lost Ending Simulation
-
+
-点击图片查看基于《红楼梦》前80回数十万字,MiroFish深度预测失传结局
+Click the image to watch MiroFish predict the lost ending based on the first 80 chapters of *Dream of the Red Chamber*.
-> **金融方向推演预测**、**时政要闻推演预测**等示例陆续更新中...
+> **Financial prediction**, **current-events forecasting**, and more examples are coming soon.
-## 🔄 工作流程
+## 🔄 Workflow
-1. **图谱构建**:现实种子提取 & 个体与群体记忆注入 & GraphRAG构建
-2. **环境搭建**:实体关系抽取 & 人设生成 & 环境配置Agent注入仿真参数
-3. **开始模拟**:双平台并行模拟 & 自动解析预测需求 & 动态更新时序记忆
-4. **报告生成**:ReportAgent拥有丰富的工具集与模拟后环境进行深度交互
-5. **深度互动**:与模拟世界中的任意一位进行对话 & 与ReportAgent进行对话
+1. **Graph Building**: Seed extraction, individual and collective memory injection, and GraphRAG construction
+2. **Environment Setup**: Entity relationship extraction, persona generation, and agent configuration injection
+3. **Simulation**: Dual-platform parallel simulation, automatic prediction-requirement parsing, and dynamic temporal memory updates
+4. **Report Generation**: ReportAgent uses a rich toolset to interact deeply with the post-simulation environment
+5. **Deep Interaction**: Chat with any agent in the simulated world and continue the conversation with ReportAgent
-## 🚀 快速开始
+## 🚀 Quick Start
-### 一、源码部署(推荐)
+### Option 1: Source Deployment (Recommended)
-#### 前置要求
+#### Prerequisites
-| 工具 | 版本要求 | 说明 | 安装检查 |
-|------|---------|------|---------|
-| **Node.js** | 18+ | 前端运行环境,包含 npm | `node -v` |
-| **Python** | ≥3.11, ≤3.12 | 后端运行环境 | `python --version` |
-| **uv** | 最新版 | Python 包管理器 | `uv --version` |
+| Tool | Version | Description | Check Installation |
+|------|---------|-------------|-------------------|
+| **Node.js** | 18+ | Frontend runtime, includes npm | `node -v` |
+| **Python** | ≥3.11, ≤3.12 | Backend runtime | `python --version` |
+| **uv** | Latest | Python package manager | `uv --version` |
-#### 1. 配置环境变量
+#### 1. Configure Environment Variables
```bash
-# 复制示例配置文件
+# Copy the example configuration file
cp .env.example .env
-# 编辑 .env 文件,填入必要的 API 密钥
+# Edit the .env file and fill in the required API keys
```
-**必需的环境变量:**
+**Required Environment Variables:**
```env
-# LLM API配置(支持 OpenAI SDK 格式的任意 LLM API)
-# 推荐使用阿里百炼平台qwen-plus模型:https://bailian.console.aliyun.com/
-# 注意消耗较大,可先进行小于40轮的模拟尝试
+# LLM API configuration (supports any LLM API compatible with the OpenAI SDK format)
+# Recommended: use the qwen-plus model on Alibaba Bailian: https://bailian.console.aliyun.com/
+# Note: usage can be expensive, so try simulations with fewer than 40 rounds first
LLM_API_KEY=your_api_key
LLM_BASE_URL=https://dashscope.aliyuncs.com/compatible-mode/v1
LLM_MODEL_NAME=qwen-plus
-# Zep Cloud 配置
-# 每月免费额度即可支撑简单使用:https://app.getzep.com/
+# Graph backend selection
+# Use zep_cloud for hosted Zep, or graphiti_local for local Neo4j + Graphiti
+GRAPH_BACKEND=zep_cloud
+
+# Zep Cloud configuration
+# Required only when GRAPH_BACKEND=zep_cloud
ZEP_API_KEY=your_zep_api_key
+
+# Local Graphiti + Neo4j configuration
+# Required only when GRAPH_BACKEND=graphiti_local
+# Note: the local Graphiti backend stores all graphs in one Neo4j database
+# and isolates each MiroFish graph by Graphiti `group_id`.
+NEO4J_URI=bolt://localhost:7687
+NEO4J_USER=neo4j
+NEO4J_PASSWORD=your_neo4j_password
```
-#### 2. 安装依赖
+#### 2. Install Dependencies
```bash
-# 一键安装所有依赖(根目录 + 前端 + 后端)
+# One-click installation of all dependencies (root + frontend + backend)
npm run setup:all
```
-或者分步安装:
+Or install them step by step:
```bash
-# 安装 Node 依赖(根目录 + 前端)
+# Install Node dependencies (root + frontend)
npm run setup
-# 安装 Python 依赖(后端,自动创建虚拟环境)
+# Install Python dependencies (backend, auto-creates virtual environment)
npm run setup:backend
```
-#### 3. 启动服务
+#### 3. Start Services
```bash
-# 同时启动前后端(在项目根目录执行)
+# Start both frontend and backend (run from the project root)
npm run dev
```
-**服务地址:**
-- 前端:`http://localhost:3000`
-- 后端 API:`http://localhost:5001`
+If you use `GRAPH_BACKEND=graphiti_local`, start Neo4j too:
+
+```bash
+docker compose up -d neo4j
+```
+
+The bundled `docker-compose.yml` uses `neo4j:5.26.22-enterprise` with
+`NEO4J_ACCEPT_LICENSE_AGREEMENT=yes` as the safe local default.
+The current local backend still keeps all graphs in the default Neo4j database
+and maps each MiroFish `graph_id` directly to a Graphiti `group_id`.
+
+**Service URLs:**
+- Frontend: `http://localhost:3000`
+- Backend API: `http://localhost:5001`
-**单独启动:**
+**Start Individually:**
```bash
-npm run backend # 仅启动后端
-npm run frontend # 仅启动前端
+npm run backend # Start the backend only
+npm run frontend # Start the frontend only
```
-### 二、Docker 部署
+### Option 2: Docker Deployment
```bash
-# 1. 配置环境变量(同源码部署)
+# 1. Configure environment variables (same as source deployment)
cp .env.example .env
-# 2. 拉取镜像并启动
+# 2. Pull the image and start
docker compose up -d
```
-默认会读取根目录下的 `.env`,并映射端口 `3000(前端)/5001(后端)`
+Docker reads `.env` from the project root by default and maps ports `3000 (frontend) / 5001 (backend)`.
-> 在 `docker-compose.yml` 中已通过注释提供加速镜像地址,可按需替换
+> A mirror image URL is provided as a comment in `docker-compose.yml` if you need a faster pull source.
+> When `GRAPH_BACKEND=graphiti_local`, the bundled compose stack starts a local Neo4j instance for Graphiti storage. The repo keeps the enterprise image as the default compose target because existing local stores may use the block format.
-## 📬 更多交流
+## 📬 Join the Conversation