From 41cf4a5dccd93ae795d57ba4d0577f5ebc1b7670 Mon Sep 17 00:00:00 2001 From: jamesrossdev Date: Wed, 11 Mar 2026 20:13:24 +0300 Subject: [PATCH 1/4] fix: prevent infinite optimization loop when all messages are tool outputs --- pkg/agent/loop.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkg/agent/loop.go b/pkg/agent/loop.go index 28e549ce03..646a0046f3 100644 --- a/pkg/agent/loop.go +++ b/pkg/agent/loop.go @@ -1508,6 +1508,8 @@ func (al *AgentLoop) summarizeSession(agent *AgentInstance, sessionKey string) { } if len(validMessages) == 0 { + agent.Sessions.TruncateHistory(sessionKey, 4) + agent.Sessions.Save(sessionKey) return } From acb730d8541d24233cb010046a89b96e53261f5a Mon Sep 17 00:00:00 2001 From: RafiulPaceProjects Date: Wed, 11 Mar 2026 18:28:38 -0400 Subject: [PATCH 2/4] feat(skills): implement per-agent SkillsFilter MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Wire the existing-but-unused `AgentConfig.Skills` field through `ContextBuilder` into `SkillsLoader` so each agent only sees the skills it is configured to access. Changes: - `pkg/skills/loader.go`: Add `BuildSkillsSummaryFiltered(allowedNames)`; `BuildSkillsSummary()` now delegates to it with nil (all skills). - `pkg/agent/context.go`: Add `skillsFilter []string` field and `WithSkillsFilter()` builder method; `BuildSystemPrompt()` uses the filter; `GetSkillsInfo()` reflects filtered count via `available` key. - `pkg/agent/instance.go`: Call `contextBuilder.WithSkillsFilter(skillsFilter)` after `skillsFilter` is populated from `agentCfg.Skills`. Behaviour: empty/nil filter → all skills (no change from before); non-empty filter → only listed skills appear in the system prompt and `/skills` output. The system prompt cache is unaffected (filter is constant for a ContextBuilder's lifetime). 
Co-Authored-By: Claude Sonnet 4.6 --- pkg/agent/context.go | 27 +++++++++++++++++++++++---- pkg/agent/instance.go | 2 ++ pkg/skills/loader.go | 23 ++++++++++++++++++++++- 3 files changed, 47 insertions(+), 5 deletions(-) diff --git a/pkg/agent/context.go b/pkg/agent/context.go index 5a84c45e22..3adede94e1 100644 --- a/pkg/agent/context.go +++ b/pkg/agent/context.go @@ -25,6 +25,7 @@ type ContextBuilder struct { memory *MemoryStore toolDiscoveryBM25 bool toolDiscoveryRegex bool + skillsFilter []string // Cache for system prompt to avoid rebuilding on every call. // This fixes issue #607: repeated reprocessing of the entire context. @@ -51,6 +52,13 @@ func (cb *ContextBuilder) WithToolDiscovery(useBM25, useRegex bool) *ContextBuil return cb } +// WithSkillsFilter restricts which skills appear in the system prompt to the +// named list. An empty or nil filter means all available skills are included. +func (cb *ContextBuilder) WithSkillsFilter(filter []string) *ContextBuilder { + cb.skillsFilter = filter + return cb +} + func getGlobalConfigDir() string { if home := os.Getenv("PICOCLAW_HOME"); home != "" { return home @@ -141,7 +149,7 @@ func (cb *ContextBuilder) BuildSystemPrompt() string { } // Skills - show summary, AI can read full content with read_file tool - skillsSummary := cb.skillsLoader.BuildSkillsSummary() + skillsSummary := cb.skillsLoader.BuildSkillsSummaryFiltered(cb.skillsFilter) if skillsSummary != "" { parts = append(parts, fmt.Sprintf(`# Skills @@ -718,16 +726,27 @@ func (cb *ContextBuilder) AddAssistantMessage( return messages } -// GetSkillsInfo returns information about loaded skills. +// GetSkillsInfo returns information about loaded skills, respecting any active filter. 
func (cb *ContextBuilder) GetSkillsInfo() map[string]any { allSkills := cb.skillsLoader.ListSkills() + + var allowed map[string]bool + if len(cb.skillsFilter) > 0 { + allowed = make(map[string]bool, len(cb.skillsFilter)) + for _, n := range cb.skillsFilter { + allowed[n] = true + } + } + skillNames := make([]string, 0, len(allSkills)) for _, s := range allSkills { - skillNames = append(skillNames, s.Name) + if allowed == nil || allowed[s.Name] { + skillNames = append(skillNames, s.Name) + } } return map[string]any{ "total": len(allSkills), - "available": len(allSkills), + "available": len(skillNames), "names": skillNames, } } diff --git a/pkg/agent/instance.go b/pkg/agent/instance.go index 0c7baa1eec..ca1a3ee31d 100644 --- a/pkg/agent/instance.go +++ b/pkg/agent/instance.go @@ -117,6 +117,8 @@ func NewAgentInstance( skillsFilter = agentCfg.Skills } + contextBuilder.WithSkillsFilter(skillsFilter) + maxIter := defaults.MaxToolIterations if maxIter == 0 { maxIter = 20 diff --git a/pkg/skills/loader.go b/pkg/skills/loader.go index f5985a662a..c8765e10c4 100644 --- a/pkg/skills/loader.go +++ b/pkg/skills/loader.go @@ -192,19 +192,36 @@ func (sl *SkillsLoader) LoadSkillsForContext(skillNames []string) string { } func (sl *SkillsLoader) BuildSkillsSummary() string { + return sl.BuildSkillsSummaryFiltered(nil) +} + +// BuildSkillsSummaryFiltered builds the XML skills summary, restricted to +// allowedNames if non-empty. An empty/nil allowedNames includes all skills. 
+func (sl *SkillsLoader) BuildSkillsSummaryFiltered(allowedNames []string) string { allSkills := sl.ListSkills() if len(allSkills) == 0 { return "" } + var allowed map[string]bool + if len(allowedNames) > 0 { + allowed = make(map[string]bool, len(allowedNames)) + for _, n := range allowedNames { + allowed[n] = true + } + } + var lines []string lines = append(lines, "") for _, s := range allSkills { + if allowed != nil && !allowed[s.Name] { + continue + } escapedName := escapeXML(s.Name) escapedDesc := escapeXML(s.Description) escapedPath := escapeXML(s.Path) - lines = append(lines, fmt.Sprintf(" ")) + lines = append(lines, " ") lines = append(lines, fmt.Sprintf(" %s", escapedName)) lines = append(lines, fmt.Sprintf(" %s", escapedDesc)) lines = append(lines, fmt.Sprintf(" %s", escapedPath)) @@ -213,6 +230,10 @@ func (sl *SkillsLoader) BuildSkillsSummary() string { } lines = append(lines, "") + if len(lines) == 2 { + // Only open/close tags — filter excluded everything + return "" + } return strings.Join(lines, "\n") } From dc037f0d79389d52f81c54a2c5177e0b24700251 Mon Sep 17 00:00:00 2001 From: kiannidev Date: Thu, 12 Mar 2026 02:22:04 +0200 Subject: [PATCH 3/4] fix(telegram): stop typing indicator when LLM fails or hangs --- pkg/agent/loop.go | 5 +++++ pkg/channels/manager.go | 13 +++++++++++ pkg/channels/manager_test.go | 37 +++++++++++++++++++++++++++++++ pkg/channels/telegram/telegram.go | 12 +++++++++- 4 files changed, 66 insertions(+), 1 deletion(-) diff --git a/pkg/agent/loop.go b/pkg/agent/loop.go index 28e549ce03..4860b9e2ab 100644 --- a/pkg/agent/loop.go +++ b/pkg/agent/loop.go @@ -255,6 +255,11 @@ func (al *AgentLoop) Run(ctx context.Context) error { // Process message func() { + defer func() { + if al.channelManager != nil { + al.channelManager.InvokeTypingStop(msg.Channel, msg.ChatID) + } + }() // TODO: Re-enable media cleanup after inbound media is properly consumed by the agent. 
// Currently disabled because files are deleted before the LLM can access their content. // defer func() { diff --git a/pkg/channels/manager.go b/pkg/channels/manager.go index 472895a7a1..2c06feb38b 100644 --- a/pkg/channels/manager.go +++ b/pkg/channels/manager.go @@ -130,6 +130,19 @@ func (m *Manager) RecordTypingStop(channel, chatID string, stop func()) { m.typingStops.Store(key, typingEntry{stop: stop, createdAt: time.Now()}) } +// InvokeTypingStop invokes the registered typing stop function for the given channel and chatID. +// It is safe to call even when no typing indicator is active (no-op). +// Used by the agent loop to stop typing when processing completes (success, error, or panic), +// regardless of whether an outbound message is published. +func (m *Manager) InvokeTypingStop(channel, chatID string) { + key := channel + ":" + chatID + if v, loaded := m.typingStops.LoadAndDelete(key); loaded { + if entry, ok := v.(typingEntry); ok { + entry.stop() + } + } +} + // RecordReactionUndo registers a reaction undo function for later invocation. // Implements PlaceholderRecorder. 
func (m *Manager) RecordReactionUndo(channel, chatID string, undo func()) { diff --git a/pkg/channels/manager_test.go b/pkg/channels/manager_test.go index 1f3a628c29..f92e4abb31 100644 --- a/pkg/channels/manager_test.go +++ b/pkg/channels/manager_test.go @@ -511,6 +511,43 @@ func TestPreSend_PlaceholderEditFails_FallsThrough(t *testing.T) { } } +func TestInvokeTypingStop_CallsRegisteredStop(t *testing.T) { + m := newTestManager() + var stopCalled bool + + m.RecordTypingStop("telegram", "chat123", func() { + stopCalled = true + }) + + m.InvokeTypingStop("telegram", "chat123") + + if !stopCalled { + t.Fatal("expected typing stop func to be called") + } +} + +func TestInvokeTypingStop_NoOpWhenNoEntry(t *testing.T) { + m := newTestManager() + // Should not panic + m.InvokeTypingStop("telegram", "nonexistent") +} + +func TestInvokeTypingStop_Idempotent(t *testing.T) { + m := newTestManager() + var callCount int + + m.RecordTypingStop("telegram", "chat123", func() { + callCount++ + }) + + m.InvokeTypingStop("telegram", "chat123") + m.InvokeTypingStop("telegram", "chat123") // Second call: entry already removed, no-op + + if callCount != 1 { + t.Fatalf("expected stop to be called once, got %d", callCount) + } +} + func TestPreSend_TypingStopCalled(t *testing.T) { m := newTestManager() var stopCalled bool diff --git a/pkg/channels/telegram/telegram.go b/pkg/channels/telegram/telegram.go index 34ee46b7bc..5f86d24c9e 100644 --- a/pkg/channels/telegram/telegram.go +++ b/pkg/channels/telegram/telegram.go @@ -242,10 +242,17 @@ func (c *TelegramChannel) sendHTMLChunk( return nil } +// maxTypingDuration limits how long the typing indicator can run. +// Prevents endless typing when the LLM fails/hangs and preSend never invokes cancel. +// Matches channels.Manager's typingStopTTL (5 min) so behavior is consistent. +const maxTypingDuration = 5 * time.Minute + // StartTyping implements channels.TypingCapable. 
// It sends ChatAction(typing) immediately and then repeats every 4 seconds // (Telegram's typing indicator expires after ~5s) in a background goroutine. // The returned stop function is idempotent and cancels the goroutine. +// The goroutine also exits automatically after maxTypingDuration if cancel is +// never called (e.g. when the LLM fails or times out without publishing). func (c *TelegramChannel) StartTyping(ctx context.Context, chatID string) (func(), error) { cid, threadID, err := parseTelegramChatID(chatID) if err != nil { @@ -259,12 +266,15 @@ func (c *TelegramChannel) StartTyping(ctx context.Context, chatID string) (func( _ = c.bot.SendChatAction(ctx, action) typingCtx, cancel := context.WithCancel(ctx) + // Cap lifetime so the goroutine cannot run indefinitely if cancel is never called + maxCtx, maxCancel := context.WithTimeout(typingCtx, maxTypingDuration) go func() { + defer maxCancel() ticker := time.NewTicker(4 * time.Second) defer ticker.Stop() for { select { - case <-typingCtx.Done(): + case <-maxCtx.Done(): return case <-ticker.C: a := tu.ChatAction(tu.ID(cid), telego.ChatActionTyping) From 699bdb686c5db150e5f4d1dd3125b6e1e5a9716a Mon Sep 17 00:00:00 2001 From: linhaolin1 Date: Thu, 12 Mar 2026 02:05:23 +0800 Subject: [PATCH 4/4] feat: add request logging and statistics features --- cmd/picoclaw/internal/gateway/helpers.go | 9 + docs/design/request-logging-plan.md | 268 ++++++++ go.mod | 4 +- go.sum | 2 - pkg/bus/bus.go | 88 +++ pkg/config/config.go | 49 +- pkg/logger/logger.go | 6 +- pkg/requestlog/archiver.go | 274 ++++++++ pkg/requestlog/archiver_test.go | 233 +++++++ pkg/requestlog/logger.go | 601 ++++++++++++++++++ pkg/requestlog/logger_test.go | 288 +++++++++ web/backend/api/requestlog.go | 330 ++++++++++ web/backend/api/requestlog_test.go | 242 +++++++ web/backend/api/router.go | 30 +- web/backend/dist/.gitkeep | 1 - web/backend/main.go | 27 + web/frontend/src/api/stats.ts | 161 +++++ web/frontend/src/components/app-sidebar.tsx | 7 + 
.../src/components/config/config-page.tsx | 5 + .../src/components/config/config-sections.tsx | 22 +- .../src/components/stats/stats-page.tsx | 174 +++++ web/frontend/src/i18n/locales/en.json | 48 +- web/frontend/src/i18n/locales/zh.json | 48 +- web/frontend/src/routeTree.gen.ts | 81 ++- web/frontend/src/routes/config.logs.tsx | 7 + web/frontend/src/routes/logs.requests.tsx | 7 + web/frontend/src/routes/logs.tsx | 15 +- web/frontend/src/routes/stats.tsx | 7 + 28 files changed, 2998 insertions(+), 36 deletions(-) create mode 100644 docs/design/request-logging-plan.md create mode 100644 pkg/requestlog/archiver.go create mode 100644 pkg/requestlog/archiver_test.go create mode 100644 pkg/requestlog/logger.go create mode 100644 pkg/requestlog/logger_test.go create mode 100644 web/backend/api/requestlog.go create mode 100644 web/backend/api/requestlog_test.go delete mode 100644 web/backend/dist/.gitkeep create mode 100644 web/frontend/src/api/stats.ts create mode 100644 web/frontend/src/components/stats/stats-page.tsx create mode 100644 web/frontend/src/routes/config.logs.tsx create mode 100644 web/frontend/src/routes/logs.requests.tsx create mode 100644 web/frontend/src/routes/stats.tsx diff --git a/cmd/picoclaw/internal/gateway/helpers.go b/cmd/picoclaw/internal/gateway/helpers.go index fed3d5ffbe..39b97de48f 100644 --- a/cmd/picoclaw/internal/gateway/helpers.go +++ b/cmd/picoclaw/internal/gateway/helpers.go @@ -36,6 +36,7 @@ import ( "github.com/sipeed/picoclaw/pkg/logger" "github.com/sipeed/picoclaw/pkg/media" "github.com/sipeed/picoclaw/pkg/providers" + "github.com/sipeed/picoclaw/pkg/requestlog" "github.com/sipeed/picoclaw/pkg/state" "github.com/sipeed/picoclaw/pkg/tools" "github.com/sipeed/picoclaw/pkg/voice" @@ -63,6 +64,14 @@ func gatewayCmd(debug bool) error { } msgBus := bus.NewMessageBus() + + // Initialize request logger + requestLogger := requestlog.NewLogger(requestlog.DefaultConfig(), msgBus, cfg.WorkspacePath()) + fmt.Printf(" • Request log dir: %s\n", 
requestLogger.LogDir()) + if err := requestLogger.Start(); err != nil { + fmt.Printf(" ⚠️ Failed to start request logger: %v\n", err) + } + agentLoop := agent.NewAgentLoop(cfg, msgBus, provider) // Print agent startup info diff --git a/docs/design/request-logging-plan.md b/docs/design/request-logging-plan.md new file mode 100644 index 0000000000..367d3a70a1 --- /dev/null +++ b/docs/design/request-logging-plan.md @@ -0,0 +1,268 @@ +# 规划:请求日志收集与统计系统 + +## 一、需求分析 + +| 需求 | 说明 | +|------|------| +| 1. 请求记录 | 记录来自不同channel的请求,以文件方式存储 | +| 2. Web查看 | 从web界面查看某时间段的请求量、来源、channel等信息 | +| 3. 日志规则 | 配置日志规则,定期归档压缩/删除 | + +--- + +## 二、系统架构设计 + +### 2.1 整体架构 + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ Gateway Process │ +│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ +│ │ Channel │ │ Bus │ │ Request │ │ +│ │ (Telegram) │───▶│ (Message) │───▶│ Logger │ │ +│ │ (Discord) │ │ │ │ │ │ +│ │ (Slack) │ │ │ │ │ │ +│ │ ... │ │ │ │ │ │ +│ └──────────────┘ └──────────────┘ └──────┬───────┘ │ +│ │ │ +│ ▼ │ +│ ┌───────────────────────┐ │ +│ │ Request Log Files │ │ +│ │ (JSON Lines) │ │ +│ └───────────────────────┘ │ +└─────────────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ Web Backend │ +│ ┌──────────────────┐ ┌─────────────────────────────────┐ │ +│ │ Request Stats │◀───│ Log File Reader/Archiver │ │ +│ │ API │ │ (Query, Filter, Archive) │ │ +│ └──────────────────┘ └─────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ Web Frontend │ +│ ┌──────────────────┐ ┌─────────────────────────────────┐ │ +│ │ Statistics UI │ │ Request Log Viewer │ │ +│ │ (Charts/Tables) │ │ (Filter by time/channel/user) │ │ +│ └──────────────────┘ └─────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────────┘ +``` 
+ +--- + +## 三、详细设计方案 + +### 3.1 请求日志记录模块 + +**位置**: `pkg/requestlog/` + +| 文件 | 职责 | +|------|------| +| `logger.go` | 核心日志写入器,实现 `bus.Subscriber` 接口 | +| `record.go` | 定义 RequestRecord 结构体 | +| `storage.go` | 文件存储管理,按日期/大小分割文件 | +| `config.go` | 日志配置结构体 | + +**核心数据结构**: +```go +// RequestRecord represents a single incoming request +type RequestRecord struct { + Timestamp time.Time `json:"timestamp"` // 请求时间 + RequestID string `json:"request_id"` // 唯一请求ID + Channel string `json:"channel"` // 来源channel: telegram, discord, etc. + SenderID string `json:"sender_id"` // 发送者ID + SenderInfo SenderInfo `json:"sender_info"` // 发送者详细信息 + ChatID string `json:"chat_id"` // 会话ID + Content string `json:"content"` // 请求内容 (可截断) + ContentLength int `json:"content_len"` // 内容长度 + Peer Peer `json:"peer"` // 对等方信息 + MessageID string `json:"message_id"` // 平台消息ID + MediaCount int `json:"media_count"` // 附件数量 + SessionKey string `json:"session_key"` // 会话Key + ProcessingTime int64 `json:"proc_time_ms"` // 处理耗时(ms) +} +``` + +**日志文件格式**: +- 文件格式: JSON Lines (每行一个JSON对象) +- 文件命名: `requests-2024-01-15.jsonl`, `requests-2024-01-15.jsonl.1.gz` +- 存储路径: `{data_dir}/logs/requests/` + +### 3.2 日志归档模块 + +**位置**: `pkg/requestlog/archiver.go` + +| 功能 | 说明 | +|------|------| +| 定时归档 | 按配置的时间间隔归档日志文件 | +| 压缩归档 | 使用gzip压缩归档文件 | +| 自动清理 | 按保留天数自动删除过期日志 | +| 配置项 | `retention_days`, `archive_interval`, `max_file_size_mb` | + +**配置结构**: +```go +type LogConfig struct { + Enabled bool `json:"enabled"` + LogDir string `json:"log_dir"` // 日志目录 + MaxFileSizeMB int `json:"max_file_size_mb"` // 单文件大小上限 + MaxFiles int `json:"max_files"` // 保留文件数 + RetentionDays int `json:"retention_days"` // 保留天数 + ArchiveInterval string `json:"archive_interval"` // 归档间隔: "1h", "24h" + CompressArchive bool `json:"compress_archive"` // 是否压缩归档 +} +``` + +### 3.3 后端API设计 + +**新增API端点**: + +| 方法 | 路径 | 说明 | +|------|------|------| +| GET | `/api/stats/requests` | 获取请求统计数据 | +| GET | `/api/stats/requests/channels` | 按channel统计请求量 
| +| GET | `/api/stats/requests/timeline` | 时间线统计(按小时/天) | +| GET | `/api/logs/requests` | 查询请求日志(分页) | +| GET | `/api/logs/requests/export` | 导出日志(支持过滤) | +| GET | `/api/config/requestlog` | 获取日志配置 | +| PUT | `/api/config/requestlog` | 更新日志配置 | +| POST | `/api/logs/requests/archive-now` | 手动触发归档 | + +**统计API响应示例**: +```json +// GET /api/stats/requests?start=2024-01-01&end=2024-01-31 +{ + "total": 1250, + "by_channel": { + "telegram": 450, + "discord": 380, + "slack": 280, + "feishu": 140 + }, + "by_day": [ + {"date": "2024-01-01", "count": 45}, + {"date": "2024-01-02", "count": 52} + ], + "top_senders": [ + {"sender": "user:123", "channel": "telegram", "count": 28} + ] +} +``` + +### 3.4 前端界面设计 + +**新增页面/组件**: + +| 路径 | 组件 | 功能 | +|------|------|------| +| `/stats` | `StatsPage` | 请求统计概览 | +| `/stats/channels` | `ChannelStats` | 按channel统计 | +| `/stats/timeline` | `TimelineChart` | 时间线图表 | +| `/logs/requests` | `RequestLogViewer` | 请求日志查看器 | +| `/settings/logs` | `LogSettingsPanel` | 日志配置面板 | + +**Stats页面设计**: +- 顶部: 关键指标卡片 (总请求量, 今日请求, 最活跃channel, 平均响应时间) +- 中部: 时间线图表 (支持按小时/天/周切换) +- 底部: Channel分布饼图 + Top用户列表 + +**Request Log Viewer设计**: +- 顶部: 时间范围选择器 + Channel过滤器 + 用户搜索 +- 中部: 日志表格 (可排序, 可分页) +- 功能: 点击行展开详情, 支持导出CSV/JSON + +--- + +## 四、实现步骤 + +### 阶段一: 基础功能 + +| 序号 | 任务 | 涉及文件 | +|------|------|----------| +| 1.1 | 创建 `pkg/requestlog/` 模块 | 新建目录 | +| 1.2 | 实现 RequestRecord 数据结构 | `pkg/requestlog/record.go` | +| 1.3 | 实现日志写入器 (实现 bus.Subscriber) | `pkg/requestlog/logger.go` | +| 1.4 | 集成到Gateway: 订阅 bus.InboundMessage | 修改gateway启动逻辑 | +| 1.5 | 单元测试 | `pkg/requestlog/*_test.go` | + +### 阶段二: 归档功能 + +| 序号 | 任务 | 涉及文件 | +|------|------|----------| +| 2.1 | 实现日志文件管理 (按日期/大小分割) | `pkg/requestlog/storage.go` | +| 2.2 | 实现归档功能 (压缩/移动) | `pkg/requestlog/archiver.go` | +| 2.3 | 实现定时任务调度 | `pkg/requestlog/scheduler.go` | +| 2.4 | 配置管理与持久化 | `pkg/requestlog/config.go` | + +### 阶段三: Web API + +| 序号 | 任务 | 涉及文件 | +|------|------|----------| +| 3.1 | 添加统计API (按channel/时间) | 
`web/backend/api/stats.go` | +| 3.2 | 添加日志查询API (分页/过滤) | `web/backend/api/requestlog.go` | +| 3.3 | 添加配置API | `web/backend/api/requestlog.go` | +| 3.4 | 注册新路由 | `web/backend/api/router.go` | + +### 阶段四: 前端界面 + +| 序号 | 任务 | 涉及文件 | +|------|------|----------| +| 4.1 | 创建统计API客户端 | `web/frontend/src/api/stats.ts` | +| 4.2 | 创建日志查看API客户端 | `web/frontend/src/api/requestlog.ts` | +| 4.3 | 实现 Stats 页面 | `web/frontend/src/routes/stats.tsx` | +| 4.4 | 实现 Request Log Viewer | `web/frontend/src/routes/logs/requests.tsx` | +| 4.5 | 实现日志配置面板 | `web/frontend/src/routes/settings/logs.tsx` | +| 4.6 | 添加侧边栏导航 | `web/frontend/src/components/app-sidebar.tsx` | + +### 阶段五: 完善与优化 + +| 序号 | 任务 | 涉及文件 | +|------|------|----------| +| 5.1 | 前端图表集成 (Recharts/Chart.js) | package.json | +| 5.2 | 国际化支持 (en.json, zh.json) | `web/frontend/src/i18n/` | +| 5.3 | 性能优化 (大数据量分页/虚拟滚动) | 前端组件 | +| 5.4 | 日志导出功能 (CSV/JSON) | 后端API + 前端 | + +--- + +## 五、配置文件示例 + +```json +{ + "requestlog": { + "enabled": true, + "log_dir": "~/.picoclaw/logs/requests", + "max_file_size_mb": 100, + "max_files": 100, + "retention_days": 30, + "archive_interval": "24h", + "compress_archive": true, + "log_content_max_length": 1000, + "record_media": false + } +} +``` + +--- + +## 六、注意事项 + +1. **性能考虑**: 日志写入应异步进行,避免阻塞消息处理 +2. **存储空间**: 定期检查磁盘使用情况,设置合理保留策略 +3. **敏感信息**: 对日志内容进行脱敏处理,避免记录敏感信息 +4. **向后兼容**: 归档文件格式应考虑未来兼容性 +5. 
**配置热更新**: 支持在不重启服务的情况下更新配置 + +--- + +## 七、依赖项 + +| 组件 | 用途 | 建议库 | +|------|------|--------| +| 时间处理 | 时间解析/格式化 | 标准库 `time` | +| 文件压缩 | 归档压缩 | 标准库 `compress/gzip` | +| 定时任务 | 归档调度 | 标准库 `time.Ticker` 或 `github.com/robfig/cron` | +| 前端图表 | 统计可视化 | `recharts` (已在项目中) | +| 前端表格 | 日志展示 | `tanstack/react-table` 或自定义 | diff --git a/go.mod b/go.mod index 3762015e9b..0c7bcab3b6 100644 --- a/go.mod +++ b/go.mod @@ -10,8 +10,8 @@ require ( github.com/chzyer/readline v1.5.1 github.com/ergochat/irc-go v0.5.0 github.com/gdamore/tcell/v2 v2.13.8 - github.com/google/uuid v1.6.0 github.com/gomarkdown/markdown v0.0.0-20260217112301-37c66b85d6ab + github.com/google/uuid v1.6.0 github.com/gorilla/websocket v1.5.3 github.com/h2non/filetype v1.1.3 github.com/larksuite/oapi-sdk-go/v3 v3.5.3 @@ -30,6 +30,7 @@ require ( golang.org/x/oauth2 v0.35.0 golang.org/x/time v0.14.0 google.golang.org/protobuf v1.36.11 + gopkg.in/yaml.v3 v3.0.1 maunium.net/go/mautrix v0.26.3 modernc.org/sqlite v1.46.1 ) @@ -60,7 +61,6 @@ require ( golang.org/x/exp v0.0.0-20260212183809-81e46e3db34a // indirect golang.org/x/term v0.40.0 // indirect golang.org/x/text v0.34.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect modernc.org/libc v1.67.6 // indirect modernc.org/mathutil v1.7.1 // indirect modernc.org/memory v1.11.0 // indirect diff --git a/go.sum b/go.sum index 2e2b1a1ec4..cdca4fc122 100644 --- a/go.sum +++ b/go.sum @@ -271,8 +271,6 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= -golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= -golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo= golang.org/x/net v0.51.0/go.mod 
h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y= golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= diff --git a/pkg/bus/bus.go b/pkg/bus/bus.go index f5ff9587d4..fe082f4659 100644 --- a/pkg/bus/bus.go +++ b/pkg/bus/bus.go @@ -3,6 +3,7 @@ package bus import ( "context" "errors" + "sync" "sync/atomic" "github.com/sipeed/picoclaw/pkg/logger" @@ -19,6 +20,13 @@ type MessageBus struct { outboundMedia chan OutboundMediaMessage done chan struct{} closed atomic.Bool + + inboundMu sync.RWMutex + inboundSubs []chan InboundMessage + outboundMu sync.RWMutex + outboundSubs []chan OutboundMessage + outboundMediaMu sync.RWMutex + outboundMediaSubs []chan OutboundMediaMessage } func NewMessageBus() *MessageBus { @@ -37,6 +45,18 @@ func (mb *MessageBus) PublishInbound(ctx context.Context, msg InboundMessage) er if err := ctx.Err(); err != nil { return err } + + mb.inboundMu.RLock() + subs := mb.inboundSubs + mb.inboundMu.RUnlock() + + for _, sub := range subs { + select { + case sub <- msg: + default: + } + } + select { case mb.inbound <- msg: return nil @@ -47,6 +67,28 @@ func (mb *MessageBus) PublishInbound(ctx context.Context, msg InboundMessage) er } } +func (mb *MessageBus) SubscribeInbound(ctx context.Context) (<-chan InboundMessage, func()) { + ch := make(chan InboundMessage, defaultBusBufferSize) + + mb.inboundMu.Lock() + mb.inboundSubs = append(mb.inboundSubs, ch) + mb.inboundMu.Unlock() + + unsub := func() { + mb.inboundMu.Lock() + for i, sub := range mb.inboundSubs { + if sub == ch { + mb.inboundSubs = append(mb.inboundSubs[:i], mb.inboundSubs[i+1:]...) 
+ break + } + } + mb.inboundMu.Unlock() + close(ch) + } + + return ch, unsub +} + func (mb *MessageBus) ConsumeInbound(ctx context.Context) (InboundMessage, bool) { select { case msg, ok := <-mb.inbound: @@ -65,6 +107,18 @@ func (mb *MessageBus) PublishOutbound(ctx context.Context, msg OutboundMessage) if err := ctx.Err(); err != nil { return err } + + mb.outboundMu.RLock() + subs := mb.outboundSubs + mb.outboundMu.RUnlock() + + for _, sub := range subs { + select { + case sub <- msg: + default: + } + } + select { case mb.outbound <- msg: return nil @@ -86,6 +140,28 @@ func (mb *MessageBus) SubscribeOutbound(ctx context.Context) (OutboundMessage, b } } +func (mb *MessageBus) SubscribeOutboundChan(ctx context.Context) (<-chan OutboundMessage, func()) { + ch := make(chan OutboundMessage, defaultBusBufferSize) + + mb.outboundMu.Lock() + mb.outboundSubs = append(mb.outboundSubs, ch) + mb.outboundMu.Unlock() + + unsub := func() { + mb.outboundMu.Lock() + for i, sub := range mb.outboundSubs { + if sub == ch { + mb.outboundSubs = append(mb.outboundSubs[:i], mb.outboundSubs[i+1:]...) 
+ break + } + } + mb.outboundMu.Unlock() + close(ch) + } + + return ch, unsub +} + func (mb *MessageBus) PublishOutboundMedia(ctx context.Context, msg OutboundMediaMessage) error { if mb.closed.Load() { return ErrBusClosed @@ -93,6 +169,18 @@ func (mb *MessageBus) PublishOutboundMedia(ctx context.Context, msg OutboundMedi if err := ctx.Err(); err != nil { return err } + + mb.outboundMediaMu.RLock() + subs := mb.outboundMediaSubs + mb.outboundMediaMu.RUnlock() + + for _, sub := range subs { + select { + case sub <- msg: + default: + } + } + select { case mb.outboundMedia <- msg: return nil diff --git a/pkg/config/config.go b/pkg/config/config.go index 7a7edb4894..ceea2ff40b 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -75,21 +75,48 @@ func (f *FlexibleStringSlice) UnmarshalText(text []byte) error { } type Config struct { - Agents AgentsConfig `json:"agents"` - Bindings []AgentBinding `json:"bindings,omitempty"` - Session SessionConfig `json:"session,omitempty"` - Channels ChannelsConfig `json:"channels"` - Providers ProvidersConfig `json:"providers,omitempty"` - ModelList []ModelConfig `json:"model_list"` // New model-centric provider configuration - Gateway GatewayConfig `json:"gateway"` - Tools ToolsConfig `json:"tools"` - Heartbeat HeartbeatConfig `json:"heartbeat"` - Devices DevicesConfig `json:"devices"` - Voice VoiceConfig `json:"voice"` + Agents AgentsConfig `json:"agents"` + Bindings []AgentBinding `json:"bindings,omitempty"` + Session SessionConfig `json:"session,omitempty"` + Channels ChannelsConfig `json:"channels"` + Providers ProvidersConfig `json:"providers,omitempty"` + ModelList []ModelConfig `json:"model_list"` // New model-centric provider configuration + Gateway GatewayConfig `json:"gateway"` + Tools ToolsConfig `json:"tools"` + Heartbeat HeartbeatConfig `json:"heartbeat"` + Devices DevicesConfig `json:"devices"` + Voice VoiceConfig `json:"voice"` + RequestLog RequestLogConfig `json:"request_log,omitempty"` // BuildInfo contains 
build-time version information BuildInfo BuildInfo `json:"build_info,omitempty"` } +type RequestLogConfig struct { + Enabled bool `json:"enabled"` + LogDir string `json:"log_dir,omitempty"` + MaxFileSizeMB int `json:"max_file_size_mb,omitempty"` + MaxFiles int `json:"max_files,omitempty"` + RetentionDays int `json:"retention_days,omitempty"` + ArchiveInterval string `json:"archive_interval,omitempty"` + CompressArchive bool `json:"compress_archive,omitempty"` + LogContentMaxLength int `json:"log_content_max_length,omitempty"` + RecordMedia bool `json:"record_media,omitempty"` +} + +func DefaultRequestLogConfig() RequestLogConfig { + return RequestLogConfig{ + Enabled: true, + LogDir: "logs/requests", + MaxFileSizeMB: 100, + MaxFiles: 100, + RetentionDays: 30, + ArchiveInterval: "24h", + CompressArchive: true, + LogContentMaxLength: 1000, + RecordMedia: false, + } +} + // BuildInfo contains build-time version information type BuildInfo struct { Version string `json:"version"` diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go index 80adcf86cf..6eddfc2288 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -44,7 +44,7 @@ func init() { consoleWriter := zerolog.ConsoleWriter{ Out: os.Stdout, - TimeFormat: "15:04:05", // TODO: make it configurable??? 
+ TimeFormat: "15:04:05", } logger = zerolog.New(consoleWriter).With().Timestamp().Logger() @@ -78,7 +78,6 @@ func EnableFileLogging(filePath string) error { return fmt.Errorf("failed to open log file: %w", err) } - // Close old file if exists if logFile != nil { logFile.Close() } @@ -111,7 +110,6 @@ func getCallerInfo() (string, int, string) { continue } - // bypass common loggers if strings.HasSuffix(file, "/logger.go") || strings.HasSuffix(file, "/log.go") { continue @@ -155,7 +153,6 @@ func logMessage(level LogLevel, component string, message string, fields map[str event := getEvent(logger, level) - // Build combined field with component and caller if component != "" { event.Str("caller", fmt.Sprintf("%-6s %s:%d (%s)", component, callerFile, callerLine, callerFunc)) } else { @@ -168,7 +165,6 @@ func logMessage(level LogLevel, component string, message string, fields map[str event.Msg(message) - // Also log to file if enabled if fileLogger.GetLevel() != zerolog.NoLevel { fileEvent := getEvent(fileLogger, level) diff --git a/pkg/requestlog/archiver.go b/pkg/requestlog/archiver.go new file mode 100644 index 0000000000..ea598bd0de --- /dev/null +++ b/pkg/requestlog/archiver.go @@ -0,0 +1,274 @@ +package requestlog + +import ( + "archive/tar" + "compress/gzip" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/sipeed/picoclaw/pkg/logger" +) + +type Archiver struct { + config Config + logDir string + stopChan chan struct{} +} + +func NewArchiver(cfg Config, logDir string) *Archiver { + return &Archiver{ + config: cfg, + logDir: logDir, + stopChan: make(chan struct{}), + } +} + +func (a *Archiver) Start() error { + if !a.config.Enabled { + return nil + } + + interval, err := parseInterval(a.config.ArchiveInterval) + if err != nil { + logger.ErrorCF("requestlog.archiver", "Invalid archive interval", map[string]any{ + "interval": a.config.ArchiveInterval, + "error": err.Error(), + }) + return err + } + + go a.runScheduler(interval) + 
return nil +} + +func (a *Archiver) Stop() { + close(a.stopChan) +} + +func (a *Archiver) runScheduler(interval time.Duration) { + ticker := time.NewTicker(interval) + defer ticker.Stop() + + for { + select { + case <-a.stopChan: + return + case <-ticker.C: + if err := a.Archive(); err != nil { + logger.ErrorCF("requestlog.archiver", "Archive failed", map[string]any{ + "error": err.Error(), + }) + } + } + } +} + +func (a *Archiver) Archive() error { + return a.archiveWithCutoff(time.Now().AddDate(0, 0, -a.config.RetentionDays), false) +} + +func (a *Archiver) ArchiveAll() error { + return a.archiveWithCutoff(time.Now(), true) +} + +func (a *Archiver) archiveWithCutoff(cutoff time.Time, excludeToday bool) error { + files, err := os.ReadDir(a.logDir) + if err != nil { + return err + } + + today := time.Now().Format("2006-01-02") + archivedCount := 0 + for _, file := range files { + if file.IsDir() { + continue + } + + name := file.Name() + if !strings.HasSuffix(name, ".jsonl") { + continue + } + + if excludeToday && strings.Contains(name, today) { + continue + } + + info, err := file.Info() + if err != nil { + continue + } + + if info.ModTime().After(cutoff) { + continue + } + + if a.config.CompressArchive { + if err := a.compressFile(filepath.Join(a.logDir, name)); err != nil { + logger.ErrorCF("requestlog.archiver", "Failed to compress file", map[string]any{ + "file": name, + "error": err.Error(), + }) + } else { + archivedCount++ + } + } else { + archivedCount++ + } + } + + logger.InfoCF("requestlog.archiver", "Archive completed", map[string]any{ + "files_archived": archivedCount, + }) + + return a.cleanupOldFiles() +} + +func (a *Archiver) compressFile(srcPath string) error { + dstPath := srcPath + ".tar.gz" + + if _, err := os.Stat(dstPath); err == nil { + return nil + } + + srcFile, err := os.Open(srcPath) + if err != nil { + return err + } + defer srcFile.Close() + + dstFile, err := os.Create(dstPath) + if err != nil { + return err + } + defer dstFile.Close() 
+ + gzWriter := gzip.NewWriter(dstFile) + defer gzWriter.Close() + + tarWriter := tar.NewWriter(gzWriter) + defer tarWriter.Close() + + info, err := srcFile.Stat() + if err != nil { + return err + } + + header, err := tar.FileInfoHeader(info, "") + if err != nil { + return err + } + + if err := tarWriter.WriteHeader(header); err != nil { + return err + } + + if _, err := io.Copy(tarWriter, srcFile); err != nil { + return err + } + + if err := os.Remove(srcPath); err != nil { + logger.WarnCF("requestlog.archiver", "Failed to remove original file after compression", map[string]any{ + "file": srcPath, + "error": err.Error(), + }) + } + + logger.InfoCF("requestlog.archiver", "File compressed", map[string]any{ + "source": srcPath, + "compressed": dstPath, + }) + return nil +} + +func (a *Archiver) cleanupOldFiles() error { + files, err := os.ReadDir(a.logDir) + if err != nil { + return err + } + + type fileInfo struct { + name string + modTime time.Time + size int64 + } + + var fileInfos []fileInfo + for _, file := range files { + info, err := file.Info() + if err != nil { + continue + } + fileInfos = append(fileInfos, fileInfo{ + name: file.Name(), + modTime: info.ModTime(), + size: info.Size(), + }) + } + + sort.Slice(fileInfos, func(i, j int) bool { + return fileInfos[i].modTime.Before(fileInfos[j].modTime) + }) + + totalSize := int64(0) + for _, fi := range fileInfos { + totalSize += fi.size + } + + maxSize := int64(a.config.MaxFiles) * int64(a.config.MaxFileSizeMB) * 1024 * 1024 + + for totalSize > maxSize && len(fileInfos) > 0 { + oldest := fileInfos[0] + fileInfos = fileInfos[1:] + + path := filepath.Join(a.logDir, oldest.name) + if err := os.Remove(path); err != nil { + logger.WarnCF("requestlog.archiver", "Failed to remove old file", map[string]any{ + "file": oldest.name, + "error": err.Error(), + }) + continue + } + + totalSize -= oldest.size + logger.InfoCF("requestlog.archiver", "Old file removed", map[string]any{ + "file": oldest.name, + }) + } + + return 
nil +} + +func parseInterval(intervalStr string) (time.Duration, error) { + intervalStr = strings.TrimSpace(intervalStr) + if intervalStr == "" { + return 24 * time.Hour, nil + } + + d, err := time.ParseDuration(intervalStr) + if err == nil { + return d, nil + } + + var num int + var unit string + _, err = fmt.Sscanf(intervalStr, "%d%s", &num, &unit) + if err != nil { + return 0, err + } + + switch unit { + case "h": + return time.Duration(num) * time.Hour, nil + case "d", "day", "days": + return time.Duration(num) * 24 * time.Hour, nil + case "m", "min", "mins": + return time.Duration(num) * time.Minute, nil + default: + return 0, fmt.Errorf("unknown time unit: %s", unit) + } +} diff --git a/pkg/requestlog/archiver_test.go b/pkg/requestlog/archiver_test.go new file mode 100644 index 0000000000..17d1edae41 --- /dev/null +++ b/pkg/requestlog/archiver_test.go @@ -0,0 +1,233 @@ +package requestlog + +import ( + "archive/tar" + "compress/gzip" + "os" + "path/filepath" + "testing" + "time" +) + +func TestParseInterval(t *testing.T) { + tests := []struct { + name string + input string + expected time.Duration + hasError bool + }{ + {"hours format", "24h", 24 * time.Hour, false}, + {"single hour", "1h", 1 * time.Hour, false}, + {"minutes format", "30m", 30 * time.Minute, false}, + {"day format", "1day", 24 * time.Hour, false}, + {"days format", "2days", 48 * time.Hour, false}, + {"empty string", "", 24 * time.Hour, false}, + {"invalid", "invalid", 0, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := parseInterval(tt.input) + if tt.hasError { + if err == nil { + t.Error("expected error, got nil") + } + return + } + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if got != tt.expected { + t.Errorf("expected %v, got %v", tt.expected, got) + } + }) + } +} + +func TestArchiver_Archive(t *testing.T) { + tmpDir := t.TempDir() + + oldFile := filepath.Join(tmpDir, "requests-2024-01-01.jsonl") + content := 
[]byte(`{"timestamp":"2024-01-01T00:00:00Z","request_id":"1","channel":"test"}` + "\n") + if err := os.WriteFile(oldFile, content, 0644); err != nil { + t.Fatalf("WriteFile failed: %v", err) + } + + oldTime := time.Now().Add(-40 * 24 * time.Hour) + if err := os.Chtimes(oldFile, oldTime, oldTime); err != nil { + t.Fatalf("Chtimes failed: %v", err) + } + + cfg := Config{ + Enabled: true, + RetentionDays: 30, + CompressArchive: true, + MaxFiles: 100, + MaxFileSizeMB: 100, + } + + archiver := NewArchiver(cfg, tmpDir) + if err := archiver.Archive(); err != nil { + t.Fatalf("Archive failed: %v", err) + } + + compressedFile := oldFile + ".tar.gz" + if _, err := os.Stat(compressedFile); os.IsNotExist(err) { + t.Error("expected compressed file to exist") + } + + if _, err := os.Stat(oldFile); !os.IsNotExist(err) { + t.Error("expected original file to be removed after compression") + } +} + +func TestArchiver_ArchiveNoCompress(t *testing.T) { + tmpDir := t.TempDir() + + oldFile := filepath.Join(tmpDir, "requests-2024-01-01.jsonl") + content := []byte(`{"timestamp":"2024-01-01T00:00:00Z","request_id":"1","channel":"test"}` + "\n") + if err := os.WriteFile(oldFile, content, 0644); err != nil { + t.Fatalf("WriteFile failed: %v", err) + } + + oldTime := time.Now().Add(-40 * 24 * time.Hour) + if err := os.Chtimes(oldFile, oldTime, oldTime); err != nil { + t.Fatalf("Chtimes failed: %v", err) + } + + cfg := Config{ + Enabled: true, + RetentionDays: 30, + CompressArchive: false, + MaxFiles: 100, + MaxFileSizeMB: 100, + } + + archiver := NewArchiver(cfg, tmpDir) + if err := archiver.Archive(); err != nil { + t.Fatalf("Archive failed: %v", err) + } + + compressedFile := oldFile + ".tar.gz" + if _, err := os.Stat(compressedFile); !os.IsNotExist(err) { + t.Error("expected compressed file NOT to exist when CompressArchive is false") + } +} + +func TestArchiver_CleanupOldFiles(t *testing.T) { + tmpDir := t.TempDir() + + for i := range 5 { + filename := filepath.Join(tmpDir, 
"requests-2024-01-"+padInt(i)+".jsonl") + content := make([]byte, 1024*1024) + if err := os.WriteFile(filename, content, 0644); err != nil { + t.Fatalf("WriteFile failed: %v", err) + } + + oldTime := time.Now().Add(-time.Duration(i+1) * 24 * time.Hour) + if err := os.Chtimes(filename, oldTime, oldTime); err != nil { + t.Fatalf("Chtimes failed: %v", err) + } + } + + cfg := Config{ + Enabled: true, + RetentionDays: 30, + CompressArchive: false, + MaxFiles: 2, + MaxFileSizeMB: 1, + } + + archiver := NewArchiver(cfg, tmpDir) + if err := archiver.cleanupOldFiles(); err != nil { + t.Fatalf("cleanupOldFiles failed: %v", err) + } + + files, err := os.ReadDir(tmpDir) + if err != nil { + t.Fatalf("ReadDir failed: %v", err) + } + + if len(files) > 2 { + t.Errorf("expected at most 2 files after cleanup, got %d", len(files)) + } +} + +func TestArchiver_CompressFile(t *testing.T) { + tmpDir := t.TempDir() + + srcFile := filepath.Join(tmpDir, "test.jsonl") + content := []byte(`{"test":"data"}` + "\n") + if err := os.WriteFile(srcFile, content, 0644); err != nil { + t.Fatalf("WriteFile failed: %v", err) + } + + cfg := Config{CompressArchive: true} + archiver := NewArchiver(cfg, tmpDir) + + if err := archiver.compressFile(srcFile); err != nil { + t.Fatalf("compressFile failed: %v", err) + } + + dstFile := srcFile + ".tar.gz" + f, err := os.Open(dstFile) + if err != nil { + t.Fatalf("Open compressed file failed: %v", err) + } + defer f.Close() + + gzReader, err := gzip.NewReader(f) + if err != nil { + t.Fatalf("gzip.NewReader failed: %v", err) + } + defer gzReader.Close() + + tarReader := tar.NewReader(gzReader) + header, err := tarReader.Next() + if err != nil { + t.Fatalf("tarReader.Next failed: %v", err) + } + + if header.Name != "test.jsonl" { + t.Errorf("expected header name 'test.jsonl', got %q", header.Name) + } +} + +func TestArchiver_ArchiveRecentFiles(t *testing.T) { + tmpDir := t.TempDir() + + recentFile := filepath.Join(tmpDir, 
"requests-"+time.Now().Format("2006-01-02")+".jsonl") + content := []byte(`{"timestamp":"2024-01-01T00:00:00Z","request_id":"1","channel":"test"}` + "\n") + if err := os.WriteFile(recentFile, content, 0644); err != nil { + t.Fatalf("WriteFile failed: %v", err) + } + + cfg := Config{ + Enabled: true, + RetentionDays: 30, + CompressArchive: true, + MaxFiles: 100, + MaxFileSizeMB: 100, + } + + archiver := NewArchiver(cfg, tmpDir) + if err := archiver.Archive(); err != nil { + t.Fatalf("Archive failed: %v", err) + } + + compressedFile := recentFile + ".tar.gz" + if _, err := os.Stat(compressedFile); !os.IsNotExist(err) { + t.Error("expected recent file NOT to be compressed") + } + + if _, err := os.Stat(recentFile); os.IsNotExist(err) { + t.Error("expected recent file to still exist") + } +} + +func padInt(i int) string { + if i < 10 { + return "0" + string(rune('0'+i)) + } + return string(rune('0'+i/10)) + string(rune('0'+i%10)) +} diff --git a/pkg/requestlog/logger.go b/pkg/requestlog/logger.go new file mode 100644 index 0000000000..7657b3f28e --- /dev/null +++ b/pkg/requestlog/logger.go @@ -0,0 +1,601 @@ +package requestlog + +import ( + "archive/tar" + "compress/gzip" + "context" + "encoding/json" + "io" + "os" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + "github.com/google/uuid" + "github.com/sipeed/picoclaw/pkg/bus" + "github.com/sipeed/picoclaw/pkg/logger" +) + +type Config struct { + Enabled bool `json:"enabled"` + LogDir string `json:"log_dir"` + MaxFileSizeMB int `json:"max_file_size_mb"` + MaxFiles int `json:"max_files"` + RetentionDays int `json:"retention_days"` + ArchiveInterval string `json:"archive_interval"` + CompressArchive bool `json:"compress_archive"` + LogContentMaxLength int `json:"log_content_max_length"` + RecordMedia bool `json:"record_media"` +} + +func DefaultConfig() Config { + return Config{ + Enabled: true, + LogDir: "logs/requests", + MaxFileSizeMB: 100, + MaxFiles: 100, + RetentionDays: 30, + ArchiveInterval: "24h", + 
CompressArchive: true, + LogContentMaxLength: 1000, + RecordMedia: false, + } +} + +type RequestRecord struct { + Timestamp time.Time `json:"timestamp"` + RequestID string `json:"request_id"` + Channel string `json:"channel"` + SenderID string `json:"sender_id"` + SenderInfo bus.SenderInfo `json:"sender_info"` + ChatID string `json:"chat_id"` + Content string `json:"content"` + ContentLength int `json:"content_length"` + Peer bus.Peer `json:"peer"` + MessageID string `json:"message_id"` + MediaCount int `json:"media_count"` + SessionKey string `json:"session_key"` + ProcessingTime int64 `json:"processing_time_ms"` + Response string `json:"response,omitempty"` + ResponseLength int `json:"response_length,omitempty"` +} + +type Logger struct { + config Config + msgBus *bus.MessageBus + storage *Storage + mu sync.Mutex + running bool + ctx context.Context + cancel context.CancelFunc + closed chan struct{} +} + +func (l *Logger) LogDir() string { + return l.storage.logDir +} + +func NewLogger(cfg Config, msgBus *bus.MessageBus, workspacePath string) *Logger { + if cfg.LogDir == "" { + cfg.LogDir = "logs/requests" + } + logDir := filepath.Join(workspacePath, cfg.LogDir) + + ctx, cancel := context.WithCancel(context.Background()) + + return &Logger{ + config: cfg, + msgBus: msgBus, + storage: NewStorage(logDir, cfg.MaxFileSizeMB), + running: false, + ctx: ctx, + cancel: cancel, + closed: make(chan struct{}), + } +} + +type Reader struct { + config Config + storage *Storage +} + +func NewReader(logDir string, maxFileSizeMB int) *Reader { + return &Reader{ + config: DefaultConfig(), + storage: NewStorage(logDir, maxFileSizeMB), + } +} + +func (r *Reader) Query(opts QueryOptions) ([]RequestRecord, error) { + return r.storage.Query(opts) +} + +func (r *Reader) GetStats(startTime, endTime time.Time) (map[string]any, error) { + opts := QueryOptions{ + StartTime: startTime, + EndTime: endTime, + Limit: 10000, + } + + records, err := r.storage.Query(opts) + if err != nil { + 
return nil, err + } + + byChannel := make(map[string]int) + byDay := make(map[string]int) + topSenders := make(map[string]int) + + for _, rec := range records { + byChannel[rec.Channel]++ + + day := rec.Timestamp.Format("2006-01-02") + byDay[day]++ + + senderKey := rec.SenderID + ":" + rec.Channel + topSenders[senderKey]++ + } + + result := map[string]any{ + "total": len(records), + "by_channel": byChannel, + "by_day": byDay, + } + + type senderStat struct { + Sender string `json:"sender"` + Channel string `json:"channel"` + Count int `json:"count"` + } + + var topList []senderStat + for k, v := range topSenders { + parts := strings.SplitN(k, ":", 2) + if len(parts) == 2 { + topList = append(topList, senderStat{ + Sender: parts[0], + Channel: parts[1], + Count: v, + }) + } + } + + sort.Slice(topList, func(i, j int) bool { + return topList[i].Count > topList[j].Count + }) + + if len(topList) > 10 { + topList = topList[:10] + } + result["top_senders"] = topList + + return result, nil +} + +func (l *Logger) GetConfig() Config { + return l.config +} + +func (l *Logger) UpdateConfig(cfg Config) error { + l.mu.Lock() + defer l.mu.Unlock() + l.config = cfg + return nil +} + +func (l *Logger) Start() error { + if l.config.Enabled { + l.mu.Lock() + l.running = true + l.mu.Unlock() + + if err := l.storage.Init(); err != nil { + logger.ErrorCF("requestlog", "Failed to initialize storage", map[string]any{ + "error": err.Error(), + }) + return err + } + + go l.consumeLoop() + + logger.InfoCF("requestlog", "Request logger started", map[string]any{ + "log_dir": l.storage.logDir, + }) + } + return nil +} + +func (l *Logger) Stop() error { + l.mu.Lock() + if !l.running { + l.mu.Unlock() + return nil + } + l.running = false + l.mu.Unlock() + + l.cancel() + + <-l.closed + + if err := l.storage.Close(); err != nil { + logger.ErrorCF("requestlog", "Failed to close storage", map[string]any{ + "error": err.Error(), + }) + return err + } + + logger.InfoCF("requestlog", "Request logger 
stopped", nil) + return nil +} + +func (l *Logger) consumeLoop() { + defer close(l.closed) + + inboundCh, unsubInbound := l.msgBus.SubscribeInbound(l.ctx) + defer unsubInbound() + + for { + select { + case <-l.ctx.Done(): + return + case msg, ok := <-inboundCh: + if !ok { + return + } + l.recordInbound(msg) + } + } +} + +func (l *Logger) recordInbound(msg bus.InboundMessage) { + content := msg.Content + if l.config.LogContentMaxLength > 0 && len(content) > l.config.LogContentMaxLength { + content = content[:l.config.LogContentMaxLength] + } + + record := RequestRecord{ + Timestamp: time.Now(), + RequestID: uuid.New().String(), + Channel: msg.Channel, + SenderID: msg.SenderID, + SenderInfo: msg.Sender, + ChatID: msg.ChatID, + Content: content, + ContentLength: len(msg.Content), + Peer: msg.Peer, + MessageID: msg.MessageID, + MediaCount: len(msg.Media), + SessionKey: msg.SessionKey, + } + + l.writeRecord(record) +} + +func (l *Logger) writeRecord(record RequestRecord) { + data, err := json.Marshal(record) + if err != nil { + logger.ErrorCF("requestlog", "Failed to marshal record", map[string]any{ + "error": err.Error(), + }) + return + } + + if err := l.storage.Write(data); err != nil { + logger.ErrorCF("requestlog", "Failed to write record", map[string]any{ + "error": err.Error(), + }) + } +} + +type Storage struct { + logDir string + maxFileSize int64 + currentFile *os.File + currentSize int64 + mu sync.Mutex +} + +func NewStorage(logDir string, maxFileSizeMB int) *Storage { + return &Storage{ + logDir: logDir, + maxFileSize: int64(maxFileSizeMB) * 1024 * 1024, + } +} + +func (s *Storage) Init() error { + if err := os.MkdirAll(s.logDir, 0755); err != nil { + return err + } + return s.rotateFile() +} + +func (s *Storage) Write(data []byte) error { + s.mu.Lock() + defer s.mu.Unlock() + + if s.currentFile == nil { + if err := s.rotateFile(); err != nil { + return err + } + } + + if s.currentSize >= s.maxFileSize { + if err := s.rotateFile(); err != nil { + return err 
+ } + } + + n, err := s.currentFile.Write(append(data, '\n')) + if err != nil { + return err + } + + s.currentSize += int64(n) + return nil +} + +func (s *Storage) rotateFile() error { + if s.currentFile != nil { + s.currentFile.Close() + s.currentFile = nil + } + + dateStr := time.Now().Format("2006-01-02") + filename := filepath.Join(s.logDir, "requests-"+dateStr+".jsonl") + + f, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return err + } + + stat, err := f.Stat() + if err != nil { + f.Close() + return err + } + + s.currentFile = f + s.currentSize = stat.Size() + return nil +} + +func (s *Storage) Close() error { + s.mu.Lock() + defer s.mu.Unlock() + if s.currentFile != nil { + return s.currentFile.Close() + } + return nil +} + +func fileExists(path string) (bool, error) { + _, err := os.Stat(path) + if err == nil { + return true, nil + } + if os.IsNotExist(err) { + return false, nil + } + return false, err +} + +type QueryOptions struct { + StartTime time.Time + EndTime time.Time + Channel string + SenderID string + Limit int + Offset int +} + +func (l *Logger) Query(opts QueryOptions) ([]RequestRecord, error) { + return l.storage.Query(opts) +} + +func (s *Storage) Query(opts QueryOptions) ([]RequestRecord, error) { + files, err := os.ReadDir(s.logDir) + if err != nil { + return nil, err + } + + var results []RequestRecord + offset := opts.Offset + limit := opts.Limit + if limit <= 0 { + limit = 100 + } + + for _, file := range files { + if file.IsDir() { + continue + } + + name := file.Name() + if !strings.HasSuffix(name, ".jsonl") && !strings.HasSuffix(name, ".tar.gz") { + continue + } + + records, err := s.readFile(filepath.Join(s.logDir, name), opts) + if err != nil { + continue + } + + for _, record := range records { + if offset > 0 { + offset-- + continue + } + if len(results) >= limit { + break + } + results = append(results, record) + } + + if len(results) >= limit { + break + } + } + + return results, nil 
+} + +func (s *Storage) readFile(path string, opts QueryOptions) ([]RequestRecord, error) { + if strings.HasSuffix(path, ".tar.gz") { + return s.readTarGz(path, opts) + } + + f, err := os.Open(path) + if err != nil { + return nil, err + } + defer f.Close() + + var results []RequestRecord + + dec := json.NewDecoder(f) + for dec.More() { + var record RequestRecord + if err := dec.Decode(&record); err != nil { + continue + } + + if !opts.StartTime.IsZero() && record.Timestamp.Before(opts.StartTime) { + continue + } + if !opts.EndTime.IsZero() && record.Timestamp.After(opts.EndTime) { + continue + } + if opts.Channel != "" && record.Channel != opts.Channel { + continue + } + if opts.SenderID != "" && record.SenderID != opts.SenderID { + continue + } + + results = append(results, record) + } + + return results, nil +} + +func (s *Storage) readTarGz(path string, opts QueryOptions) ([]RequestRecord, error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + defer f.Close() + + gzReader, err := gzip.NewReader(f) + if err != nil { + return nil, err + } + defer gzReader.Close() + + tarReader := tar.NewReader(gzReader) + + var results []RequestRecord + + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } + if err != nil { + continue + } + + if header.Typeflag != tar.TypeReg { + continue + } + + dec := json.NewDecoder(tarReader) + for dec.More() { + var record RequestRecord + if err := dec.Decode(&record); err != nil { + continue + } + + if !opts.StartTime.IsZero() && record.Timestamp.Before(opts.StartTime) { + continue + } + if !opts.EndTime.IsZero() && record.Timestamp.After(opts.EndTime) { + continue + } + if opts.Channel != "" && record.Channel != opts.Channel { + continue + } + if opts.SenderID != "" && record.SenderID != opts.SenderID { + continue + } + + results = append(results, record) + } + } + + return results, nil +} + +func (l *Logger) GetStats(startTime, endTime time.Time) (map[string]any, error) { + opts := QueryOptions{ + 
StartTime: startTime, + EndTime: endTime, + Limit: 10000, + } + + records, err := l.storage.Query(opts) + if err != nil { + return nil, err + } + + byChannel := make(map[string]int) + byDay := make(map[string]int) + topSenders := make(map[string]int) + + for _, r := range records { + byChannel[r.Channel]++ + + day := r.Timestamp.Format("2006-01-02") + byDay[day]++ + + senderKey := r.SenderID + ":" + r.Channel + topSenders[senderKey]++ + } + + result := map[string]any{ + "total": len(records), + "by_channel": byChannel, + "by_day": byDay, + } + + type senderStat struct { + Sender string `json:"sender"` + Channel string `json:"channel"` + Count int `json:"count"` + } + + var topList []senderStat + for k, v := range topSenders { + parts := strings.SplitN(k, ":", 2) + if len(parts) == 2 { + topList = append(topList, senderStat{ + Sender: parts[0], + Channel: parts[1], + Count: v, + }) + } + } + + sort.Slice(topList, func(i, j int) bool { + return topList[i].Count > topList[j].Count + }) + + if len(topList) > 10 { + topList = topList[:10] + } + result["top_senders"] = topList + + return result, nil +} diff --git a/pkg/requestlog/logger_test.go b/pkg/requestlog/logger_test.go new file mode 100644 index 0000000000..d514ca44f0 --- /dev/null +++ b/pkg/requestlog/logger_test.go @@ -0,0 +1,288 @@ +package requestlog + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" + "time" +) + +func TestDefaultConfig(t *testing.T) { + cfg := DefaultConfig() + + if !cfg.Enabled { + t.Error("expected Enabled to be true by default") + } + if cfg.LogDir != "logs/requests" { + t.Errorf("expected LogDir 'logs/requests', got %q", cfg.LogDir) + } + if cfg.MaxFileSizeMB != 100 { + t.Errorf("expected MaxFileSizeMB 100, got %d", cfg.MaxFileSizeMB) + } + if cfg.RetentionDays != 30 { + t.Errorf("expected RetentionDays 30, got %d", cfg.RetentionDays) + } +} + +func TestStorage_WriteAndQuery(t *testing.T) { + tmpDir := t.TempDir() + storage := NewStorage(tmpDir, 1) + + if err := 
storage.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + defer storage.Close() + + record := RequestRecord{ + Timestamp: time.Now().UTC(), + RequestID: "test-123", + Channel: "telegram", + SenderID: "user1", + ChatID: "chat1", + Content: "hello world", + } + + data, err := json.Marshal(record) + if err != nil { + t.Fatalf("json.Marshal failed: %v", err) + } + + if err := storage.Write(data); err != nil { + t.Fatalf("Write failed: %v", err) + } + + records, err := storage.Query(QueryOptions{Limit: 10}) + if err != nil { + t.Fatalf("Query failed: %v", err) + } + + if len(records) != 1 { + t.Fatalf("expected 1 record, got %d", len(records)) + } + + if records[0].RequestID != "test-123" { + t.Errorf("expected RequestID 'test-123', got %q", records[0].RequestID) + } + if records[0].Channel != "telegram" { + t.Errorf("expected Channel 'telegram', got %q", records[0].Channel) + } +} + +func TestStorage_QueryWithFilter(t *testing.T) { + tmpDir := t.TempDir() + storage := NewStorage(tmpDir, 1) + + if err := storage.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + defer storage.Close() + + now := time.Now().UTC() + records := []RequestRecord{ + {Timestamp: now.Add(-2 * time.Hour), RequestID: "1", Channel: "telegram", SenderID: "user1"}, + {Timestamp: now.Add(-1 * time.Hour), RequestID: "2", Channel: "discord", SenderID: "user2"}, + {Timestamp: now, RequestID: "3", Channel: "telegram", SenderID: "user1"}, + } + + for _, r := range records { + data, _ := json.Marshal(r) + if err := storage.Write(data); err != nil { + t.Fatalf("Write failed: %v", err) + } + } + + tests := []struct { + name string + opts QueryOptions + expected int + }{ + { + name: "filter by channel", + opts: QueryOptions{Channel: "telegram", Limit: 10}, + expected: 2, + }, + { + name: "filter by sender", + opts: QueryOptions{SenderID: "user2", Limit: 10}, + expected: 1, + }, + { + name: "filter by time range", + opts: QueryOptions{StartTime: now.Add(-90 * time.Minute), EndTime: 
now.Add(10 * time.Minute), Limit: 10}, + expected: 2, + }, + { + name: "no filter", + opts: QueryOptions{Limit: 10}, + expected: 3, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + results, err := storage.Query(tt.opts) + if err != nil { + t.Fatalf("Query failed: %v", err) + } + if len(results) != tt.expected { + t.Errorf("expected %d records, got %d", tt.expected, len(results)) + } + }) + } +} + +func TestStorage_QueryWithOffset(t *testing.T) { + tmpDir := t.TempDir() + storage := NewStorage(tmpDir, 1) + + if err := storage.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + defer storage.Close() + + for i := range 5 { + record := RequestRecord{ + Timestamp: time.Now().UTC(), + RequestID: string(rune('a' + i)), + Channel: "test", + } + data, _ := json.Marshal(record) + if err := storage.Write(data); err != nil { + t.Fatalf("Write failed: %v", err) + } + } + + results, err := storage.Query(QueryOptions{Offset: 2, Limit: 2}) + if err != nil { + t.Fatalf("Query failed: %v", err) + } + + if len(results) != 2 { + t.Fatalf("expected 2 records, got %d", len(results)) + } +} + +func TestStorage_RotateFile(t *testing.T) { + tmpDir := t.TempDir() + storage := NewStorage(tmpDir, 1) + + if err := storage.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + + storage.mu.Lock() + storage.currentSize = storage.maxFileSize + 1 + storage.mu.Unlock() + + record := RequestRecord{Timestamp: time.Now().UTC(), RequestID: "after-rotate"} + data, _ := json.Marshal(record) + + if err := storage.Write(data); err != nil { + t.Fatalf("Write after rotate failed: %v", err) + } + + storage.Close() + + files, err := os.ReadDir(tmpDir) + if err != nil { + t.Fatalf("ReadDir failed: %v", err) + } + + jsonlCount := 0 + for _, f := range files { + if filepath.Ext(f.Name()) == ".jsonl" { + jsonlCount++ + } + } + + if jsonlCount < 1 { + t.Errorf("expected at least 1 jsonl file after rotation") + } +} + +func TestReader_GetStats(t *testing.T) { + tmpDir 
:= t.TempDir() + storage := NewStorage(tmpDir, 1) + + if err := storage.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + defer storage.Close() + + now := time.Now().UTC() + day1 := now.Format("2006-01-02") + day2 := now.Add(-24 * time.Hour).Format("2006-01-02") + + records := []RequestRecord{ + {Timestamp: now, Channel: "telegram", SenderID: "user1"}, + {Timestamp: now, Channel: "telegram", SenderID: "user2"}, + {Timestamp: now.Add(-24 * time.Hour), Channel: "discord", SenderID: "user1"}, + {Timestamp: now.Add(-24 * time.Hour), Channel: "discord", SenderID: "user3"}, + {Timestamp: now.Add(-24 * time.Hour), Channel: "telegram", SenderID: "user1"}, + } + + for _, r := range records { + data, _ := json.Marshal(r) + if err := storage.Write(data); err != nil { + t.Fatalf("Write failed: %v", err) + } + } + + reader := NewReader(tmpDir, 1) + stats, err := reader.GetStats(time.Now().Add(-48*time.Hour), time.Now().Add(time.Hour)) + if err != nil { + t.Fatalf("GetStats failed: %v", err) + } + + if stats["total"] != 5 { + t.Errorf("expected total 5, got %v", stats["total"]) + } + + byChannel, ok := stats["by_channel"].(map[string]int) + if !ok { + t.Fatal("by_channel is not map[string]int") + } + if byChannel["telegram"] != 3 { + t.Errorf("expected telegram count 3, got %d", byChannel["telegram"]) + } + if byChannel["discord"] != 2 { + t.Errorf("expected discord count 2, got %d", byChannel["discord"]) + } + + byDay, ok := stats["by_day"].(map[string]int) + if !ok { + t.Fatal("by_day is not map[string]int") + } + if byDay[day1] != 2 { + t.Errorf("expected day1 count 2, got %d", byDay[day1]) + } + if byDay[day2] != 3 { + t.Errorf("expected day2 count 3, got %d", byDay[day2]) + } + + topSendersRaw := stats["top_senders"] + if topSendersRaw == nil { + t.Fatal("top_senders is nil") + } + + topSendersData, err := json.Marshal(topSendersRaw) + if err != nil { + t.Fatalf("Marshal top_senders failed: %v", err) + } + + var topSenders []senderStat + if err := 
json.Unmarshal(topSendersData, &topSenders); err != nil { + t.Fatalf("Unmarshal top_senders failed: %v", err) + } + + if len(topSenders) == 0 { + t.Error("expected top_senders to have entries") + } +} + +type senderStat struct { + Sender string `json:"sender"` + Channel string `json:"channel"` + Count int `json:"count"` +} diff --git a/web/backend/api/requestlog.go b/web/backend/api/requestlog.go new file mode 100644 index 0000000000..9a0d37f106 --- /dev/null +++ b/web/backend/api/requestlog.go @@ -0,0 +1,330 @@ +package api + +import ( + "encoding/csv" + "encoding/json" + "fmt" + "net/http" + "path/filepath" + "strconv" + "time" + + "github.com/sipeed/picoclaw/pkg/config" + "github.com/sipeed/picoclaw/pkg/requestlog" +) + +func (h *Handler) registerRequestLogRoutes(mux *http.ServeMux) { + mux.HandleFunc("GET /api/logs/requests", h.handleGetRequestLogs) + mux.HandleFunc("GET /api/stats/requests", h.handleGetRequestStats) + mux.HandleFunc("GET /api/logs/requests/export", h.handleExportRequestLogs) + mux.HandleFunc("POST /api/logs/requests/archive-now", h.handleArchiveNow) + mux.HandleFunc("GET /api/config/requestlog", h.handleGetRequestLogConfig) + mux.HandleFunc("PUT /api/config/requestlog", h.handlePutRequestLogConfig) +} + +func (h *Handler) loadRequestLogConfig() requestlog.Config { + cfg, err := config.LoadConfig(h.configPath) + if err != nil { + return requestlog.DefaultConfig() + } + return h.toRequestLogConfig(cfg.RequestLog) +} + +func (h *Handler) saveRequestLogConfig(rlCfg requestlog.Config) error { + cfg, err := config.LoadConfig(h.configPath) + if err != nil { + return err + } + cfg.RequestLog = h.fromRequestLogConfig(rlCfg) + return config.SaveConfig(h.configPath, cfg) +} + +func (h *Handler) toRequestLogConfig(cfg config.RequestLogConfig) requestlog.Config { + if !cfg.Enabled && cfg.MaxFileSizeMB == 0 { + return requestlog.DefaultConfig() + } + return requestlog.Config{ + Enabled: cfg.Enabled, + LogDir: cfg.LogDir, + MaxFileSizeMB: cfg.MaxFileSizeMB, 
+ MaxFiles: cfg.MaxFiles, + RetentionDays: cfg.RetentionDays, + ArchiveInterval: cfg.ArchiveInterval, + CompressArchive: cfg.CompressArchive, + LogContentMaxLength: cfg.LogContentMaxLength, + RecordMedia: cfg.RecordMedia, + } +} + +func (h *Handler) fromRequestLogConfig(cfg requestlog.Config) config.RequestLogConfig { + return config.RequestLogConfig{ + Enabled: cfg.Enabled, + LogDir: cfg.LogDir, + MaxFileSizeMB: cfg.MaxFileSizeMB, + MaxFiles: cfg.MaxFiles, + RetentionDays: cfg.RetentionDays, + ArchiveInterval: cfg.ArchiveInterval, + CompressArchive: cfg.CompressArchive, + LogContentMaxLength: cfg.LogContentMaxLength, + RecordMedia: cfg.RecordMedia, + } +} + +func (h *Handler) handleGetRequestLogs(w http.ResponseWriter, r *http.Request) { + if h.requestLogReader == nil { + http.Error(w, "request log not available", http.StatusServiceUnavailable) + return + } + + startStr := r.URL.Query().Get("start") + endStr := r.URL.Query().Get("end") + channel := r.URL.Query().Get("channel") + senderID := r.URL.Query().Get("sender_id") + limitStr := r.URL.Query().Get("limit") + offsetStr := r.URL.Query().Get("offset") + + var startTime, endTime time.Time + var err error + + if startStr != "" { + startTime, err = time.Parse(time.RFC3339, startStr) + if err != nil { + http.Error(w, "invalid start time format", http.StatusBadRequest) + return + } + } + + if endStr != "" { + endTime, err = time.Parse(time.RFC3339, endStr) + if err != nil { + http.Error(w, "invalid end time format", http.StatusBadRequest) + return + } + } + + limit := 100 + if limitStr != "" { + limit, _ = strconv.Atoi(limitStr) + } + + offset := 0 + if offsetStr != "" { + offset, _ = strconv.Atoi(offsetStr) + } + + opts := requestlog.QueryOptions{ + StartTime: startTime, + EndTime: endTime, + Channel: channel, + SenderID: senderID, + Limit: limit, + Offset: offset, + } + + records, err := h.requestLogReader.Query(opts) + if err != nil { + http.Error(w, "failed to query logs: "+err.Error(), 
http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]any{ + "records": records, + "limit": limit, + "offset": offset, + }) +} + +func (h *Handler) handleGetRequestStats(w http.ResponseWriter, r *http.Request) { + if h.requestLogReader == nil { + http.Error(w, "request log not available", http.StatusServiceUnavailable) + return + } + + startStr := r.URL.Query().Get("start") + endStr := r.URL.Query().Get("end") + + var startTime, endTime time.Time + var err error + + now := time.Now() + if startStr != "" { + startTime, err = time.Parse(time.RFC3339, startStr) + if err != nil { + http.Error(w, "invalid start time format", http.StatusBadRequest) + return + } + } else { + startTime = now.AddDate(0, 0, -30) + } + + if endStr != "" { + endTime, err = time.Parse(time.RFC3339, endStr) + if err != nil { + http.Error(w, "invalid end time format", http.StatusBadRequest) + return + } + } else { + endTime = now + } + + stats, err := h.requestLogReader.GetStats(startTime, endTime) + if err != nil { + http.Error(w, "failed to get stats: "+err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(stats) +} + +func (h *Handler) handleExportRequestLogs(w http.ResponseWriter, r *http.Request) { + if h.requestLogReader == nil { + http.Error(w, "request log not available", http.StatusServiceUnavailable) + return + } + + startStr := r.URL.Query().Get("start") + endStr := r.URL.Query().Get("end") + channel := r.URL.Query().Get("channel") + senderID := r.URL.Query().Get("sender_id") + format := r.URL.Query().Get("format") + if format == "" { + format = "json" + } + + var startTime, endTime time.Time + var err error + + if startStr != "" { + startTime, err = time.Parse(time.RFC3339, startStr) + if err != nil { + http.Error(w, "invalid start time format", http.StatusBadRequest) + return + } + } + + if endStr != "" { + 
endTime, err = time.Parse(time.RFC3339, endStr) + if err != nil { + http.Error(w, "invalid end time format", http.StatusBadRequest) + return + } + } + + opts := requestlog.QueryOptions{ + StartTime: startTime, + EndTime: endTime, + Channel: channel, + SenderID: senderID, + Limit: 10000, + } + + records, err := h.requestLogReader.Query(opts) + if err != nil { + http.Error(w, "failed to query logs: "+err.Error(), http.StatusInternalServerError) + return + } + + timestamp := time.Now().Format("2006-01-02-150405") + filename := fmt.Sprintf("request-logs-%s", timestamp) + + switch format { + case "csv": + w.Header().Set("Content-Type", "text/csv") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.csv", filename)) + h.exportCSV(w, records) + default: + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.json", filename)) + json.NewEncoder(w).Encode(records) + } +} + +func (h *Handler) exportCSV(w http.ResponseWriter, records []requestlog.RequestRecord) { + writer := csv.NewWriter(w) + defer writer.Flush() + + header := []string{"timestamp", "request_id", "channel", "sender_id", "chat_id", "content", "content_length", "message_id", "media_count", "session_key", "processing_time_ms"} + writer.Write(header) + + for _, r := range records { + row := []string{ + r.Timestamp.Format(time.RFC3339), + r.RequestID, + r.Channel, + r.SenderID, + r.ChatID, + r.Content, + strconv.Itoa(r.ContentLength), + r.MessageID, + strconv.Itoa(r.MediaCount), + r.SessionKey, + strconv.FormatInt(r.ProcessingTime, 10), + } + writer.Write(row) + } +} + +func (h *Handler) handleArchiveNow(w http.ResponseWriter, r *http.Request) { + var cfg requestlog.Config + var logDir string + + if h.requestLogger != nil { + cfg = h.requestLogger.GetConfig() + logDir = h.requestLogger.LogDir() + } else { + cfg = h.loadRequestLogConfig() + if h.workspacePath != "" { + logDir = filepath.Join(h.workspacePath, "logs", 
"requests") + } else { + http.Error(w, "workspace path not configured", http.StatusServiceUnavailable) + return + } + } + + archiver := requestlog.NewArchiver(cfg, logDir) + if err := archiver.ArchiveAll(); err != nil { + http.Error(w, "archive failed: "+err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(map[string]string{ + "status": "ok", + }) +} + +func (h *Handler) handleGetRequestLogConfig(w http.ResponseWriter, r *http.Request) { + var cfg requestlog.Config + if h.requestLogger != nil { + cfg = h.requestLogger.GetConfig() + } else { + cfg = h.loadRequestLogConfig() + } + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(cfg) +} + +func (h *Handler) handlePutRequestLogConfig(w http.ResponseWriter, r *http.Request) { + var cfg requestlog.Config + if err := json.NewDecoder(r.Body).Decode(&cfg); err != nil { + http.Error(w, "invalid JSON: "+err.Error(), http.StatusBadRequest) + return + } + + if h.requestLogger != nil { + if err := h.requestLogger.UpdateConfig(cfg); err != nil { + http.Error(w, "failed to update config: "+err.Error(), http.StatusInternalServerError) + return + } + } else { + if err := h.saveRequestLogConfig(cfg); err != nil { + http.Error(w, "failed to save config: "+err.Error(), http.StatusInternalServerError) + return + } + } + + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(cfg) +} diff --git a/web/backend/api/requestlog_test.go b/web/backend/api/requestlog_test.go new file mode 100644 index 0000000000..ba3a05e51a --- /dev/null +++ b/web/backend/api/requestlog_test.go @@ -0,0 +1,242 @@ +package api + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/sipeed/picoclaw/pkg/requestlog" +) + +func TestHandleGetRequestLogs(t *testing.T) { + tmpDir := t.TempDir() + storage := requestlog.NewStorage(tmpDir, 1) + if err := 
storage.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + storage.Close() + + reader := requestlog.NewReader(tmpDir, 1) + handler := &Handler{ + requestLogReader: reader, + } + + mux := http.NewServeMux() + handler.registerRequestLogRoutes(mux) + + req := httptest.NewRequest("GET", "/api/logs/requests?limit=10", nil) + rec := httptest.NewRecorder() + mux.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Errorf("expected status 200, got %d", rec.Code) + } + + var resp map[string]any + if err := json.Unmarshal(rec.Body.Bytes(), &resp); err != nil { + t.Fatalf("json.Unmarshal failed: %v", err) + } + + if _, ok := resp["records"]; !ok { + t.Error("expected 'records' field in response") + } +} + +func TestHandleGetRequestStats(t *testing.T) { + tmpDir := t.TempDir() + reader := requestlog.NewReader(tmpDir, 1) + handler := &Handler{ + requestLogReader: reader, + } + + mux := http.NewServeMux() + handler.registerRequestLogRoutes(mux) + + req := httptest.NewRequest("GET", "/api/stats/requests", nil) + rec := httptest.NewRecorder() + mux.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Errorf("expected status 200, got %d", rec.Code) + } + + var resp map[string]any + if err := json.Unmarshal(rec.Body.Bytes(), &resp); err != nil { + t.Fatalf("json.Unmarshal failed: %v", err) + } + + if _, ok := resp["total"]; !ok { + t.Error("expected 'total' field in response") + } +} + +func TestHandleExportRequestLogs_JSON(t *testing.T) { + tmpDir := t.TempDir() + storage := requestlog.NewStorage(tmpDir, 1) + if err := storage.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + storage.Close() + + reader := requestlog.NewReader(tmpDir, 1) + handler := &Handler{ + requestLogReader: reader, + } + + mux := http.NewServeMux() + handler.registerRequestLogRoutes(mux) + + req := httptest.NewRequest("GET", "/api/logs/requests/export?format=json", nil) + rec := httptest.NewRecorder() + mux.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + 
t.Errorf("expected status 200, got %d", rec.Code) + } + + contentType := rec.Header().Get("Content-Type") + if contentType != "application/json" { + t.Errorf("expected Content-Type 'application/json', got %q", contentType) + } + + contentDisposition := rec.Header().Get("Content-Disposition") + if contentDisposition == "" { + t.Error("expected Content-Disposition header") + } +} + +func TestHandleExportRequestLogs_CSV(t *testing.T) { + tmpDir := t.TempDir() + storage := requestlog.NewStorage(tmpDir, 1) + if err := storage.Init(); err != nil { + t.Fatalf("Init failed: %v", err) + } + storage.Close() + + reader := requestlog.NewReader(tmpDir, 1) + handler := &Handler{ + requestLogReader: reader, + } + + mux := http.NewServeMux() + handler.registerRequestLogRoutes(mux) + + req := httptest.NewRequest("GET", "/api/logs/requests/export?format=csv", nil) + rec := httptest.NewRecorder() + mux.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Errorf("expected status 200, got %d", rec.Code) + } + + contentType := rec.Header().Get("Content-Type") + if contentType != "text/csv" { + t.Errorf("expected Content-Type 'text/csv', got %q", contentType) + } +} + +func TestHandleGetRequestLogConfig(t *testing.T) { + tmpDir := t.TempDir() + logger := requestlog.NewLogger(requestlog.DefaultConfig(), nil, tmpDir) + + handler := &Handler{ + requestLogger: logger, + } + + mux := http.NewServeMux() + handler.registerRequestLogRoutes(mux) + + req := httptest.NewRequest("GET", "/api/config/requestlog", nil) + rec := httptest.NewRecorder() + mux.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Errorf("expected status 200, got %d", rec.Code) + } + + var cfg requestlog.Config + if err := json.Unmarshal(rec.Body.Bytes(), &cfg); err != nil { + t.Fatalf("json.Unmarshal failed: %v", err) + } + + if !cfg.Enabled { + t.Error("expected Enabled to be true") + } +} + +func TestHandlePutRequestLogConfig(t *testing.T) { + tmpDir := t.TempDir() + logger := 
requestlog.NewLogger(requestlog.DefaultConfig(), nil, tmpDir) + + handler := &Handler{ + requestLogger: logger, + } + + mux := http.NewServeMux() + handler.registerRequestLogRoutes(mux) + + newConfig := requestlog.Config{ + Enabled: true, + LogDir: "logs/requests", + MaxFileSizeMB: 50, + MaxFiles: 50, + RetentionDays: 7, + ArchiveInterval: "12h", + CompressArchive: false, + } + + body, _ := json.Marshal(newConfig) + req := httptest.NewRequest("PUT", "/api/config/requestlog", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + + rec := httptest.NewRecorder() + mux.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Errorf("expected status 200, got %d", rec.Code) + } +} + +func TestHandleArchiveNow(t *testing.T) { + tmpDir := t.TempDir() + logDir := filepath.Join(tmpDir, "logs", "requests") + os.MkdirAll(logDir, 0755) + + logger := requestlog.NewLogger(requestlog.DefaultConfig(), nil, tmpDir) + + handler := &Handler{ + requestLogger: logger, + } + + mux := http.NewServeMux() + handler.registerRequestLogRoutes(mux) + + req := httptest.NewRequest("POST", "/api/logs/requests/archive-now", nil) + rec := httptest.NewRecorder() + mux.ServeHTTP(rec, req) + + if rec.Code != http.StatusOK { + t.Errorf("expected status 200, got %d", rec.Code) + } +} + +func TestHandleGetRequestLogs_NotAvailable(t *testing.T) { + handler := &Handler{ + requestLogReader: nil, + } + + mux := http.NewServeMux() + handler.registerRequestLogRoutes(mux) + + req := httptest.NewRequest("GET", "/api/logs/requests", nil) + rec := httptest.NewRecorder() + mux.ServeHTTP(rec, req) + + if rec.Code != http.StatusServiceUnavailable { + t.Errorf("expected status 503, got %d", rec.Code) + } +} diff --git a/web/backend/api/router.go b/web/backend/api/router.go index 5f081dee9d..c9627a9fa2 100644 --- a/web/backend/api/router.go +++ b/web/backend/api/router.go @@ -4,16 +4,20 @@ import ( "net/http" "sync" + "github.com/sipeed/picoclaw/pkg/requestlog" 
"github.com/sipeed/picoclaw/web/backend/launcherconfig" ) // Handler serves HTTP API requests. type Handler struct { configPath string + workspacePath string serverPort int serverPublic bool serverPublicExplicit bool serverCIDRs []string + requestLogReader *requestlog.Reader + requestLogger *requestlog.Logger oauthMu sync.Mutex oauthFlows map[string]*oauthFlow oauthState map[string]string @@ -22,13 +26,27 @@ type Handler struct { // NewHandler creates an instance of the API handler. func NewHandler(configPath string) *Handler { return &Handler{ - configPath: configPath, - serverPort: launcherconfig.DefaultPort, - oauthFlows: make(map[string]*oauthFlow), - oauthState: make(map[string]string), + configPath: configPath, + serverPort: launcherconfig.DefaultPort, + requestLogReader: nil, + requestLogger: nil, + oauthFlows: make(map[string]*oauthFlow), + oauthState: make(map[string]string), } } +func (h *Handler) SetRequestLogReader(reader *requestlog.Reader) { + h.requestLogReader = reader +} + +func (h *Handler) SetRequestLogger(logger *requestlog.Logger) { + h.requestLogger = logger +} + +func (h *Handler) SetWorkspacePath(path string) { + h.workspacePath = path +} + // SetServerOptions stores current backend listen options for fallback behavior. func (h *Handler) SetServerOptions(port int, public bool, publicExplicit bool, allowedCIDRs []string) { h.serverPort = port @@ -69,4 +87,8 @@ func (h *Handler) RegisterRoutes(mux *http.ServeMux) { // Launcher service parameters (port/public) h.registerLauncherConfigRoutes(mux) + + if h.requestLogReader != nil { + h.registerRequestLogRoutes(mux) + } } diff --git a/web/backend/dist/.gitkeep b/web/backend/dist/.gitkeep deleted file mode 100644 index 4b533f03aa..0000000000 --- a/web/backend/dist/.gitkeep +++ /dev/null @@ -1 +0,0 @@ -# Keep the embedded web backend dist directory in version control. 
diff --git a/web/backend/main.go b/web/backend/main.go index 650540ea88..d061912882 100644 --- a/web/backend/main.go +++ b/web/backend/main.go @@ -22,6 +22,9 @@ import ( "strconv" "time" + "github.com/sipeed/picoclaw/pkg/config" + "github.com/sipeed/picoclaw/pkg/logger" + "github.com/sipeed/picoclaw/pkg/requestlog" "github.com/sipeed/picoclaw/web/backend/api" "github.com/sipeed/picoclaw/web/backend/launcherconfig" "github.com/sipeed/picoclaw/web/backend/middleware" @@ -114,7 +117,31 @@ func main() { // API Routes (e.g. /api/status) apiHandler := api.NewHandler(absPath) + apiHandler.SetServerOptions(portNum, effectivePublic, explicitPublic, launcherCfg.AllowedCIDRs) + + // Initialize requestlog Reader for stats API + if cfg, err := config.LoadConfig(absPath); err == nil { + workspacePath := cfg.WorkspacePath() + logDir := filepath.Join(workspacePath, "logs", "requests") + // Ensure log directory exists + if err := os.MkdirAll(logDir, 0755); err != nil { + log.Printf("Warning: failed to create request log directory: %v", err) + } + reader := requestlog.NewReader(logDir, 100) + apiHandler.SetRequestLogReader(reader) + apiHandler.SetWorkspacePath(workspacePath) + + // Enable file logging + appLogDir := filepath.Join(workspacePath, "logs") + if err := os.MkdirAll(appLogDir, 0755); err == nil { + logFile := filepath.Join(appLogDir, "launcher.log") + if err := logger.EnableFileLogging(logFile); err != nil { + log.Printf("Warning: failed to enable file logging: %v", err) + } + } + } + apiHandler.RegisterRoutes(mux) // Frontend Embedded Assets diff --git a/web/frontend/src/api/stats.ts b/web/frontend/src/api/stats.ts new file mode 100644 index 0000000000..be102dfb08 --- /dev/null +++ b/web/frontend/src/api/stats.ts @@ -0,0 +1,161 @@ +export interface RequestRecord { + timestamp: string + request_id: string + channel: string + sender_id: string + sender_info: { + platform?: string + platform_id?: string + canonical_id?: string + username?: string + display_name?: string + } 
+ chat_id: string + content: string + content_length: number + peer: { + kind: string + id: string + } + message_id: string + media_count: number + session_key: string + processing_time_ms: number +} + +export interface RequestStats { + total: number + by_channel: Record + by_day: Record + top_senders: Array<{ + sender: string + channel: string + count: number + }> +} + +export interface RequestLogsResponse { + records: RequestRecord[] + limit: number + offset: number +} + +export interface RequestLogConfig { + enabled: boolean + log_dir: string + max_file_size_mb: number + max_files: number + retention_days: number + archive_interval: string + compress_archive: boolean + log_content_max_length: number + record_media: boolean +} + +const BASE_URL = "" + +async function request(path: string, options?: RequestInit): Promise { + const res = await fetch(`${BASE_URL}${path}`, options) + if (!res.ok) { + throw new Error(`API error: ${res.status} ${res.statusText}`) + } + return res.json() as Promise +} + +export async function getRequestStats(options?: { + start?: string + end?: string +}): Promise { + const params = new URLSearchParams() + if (options?.start) { + params.set("start", options.start) + } + if (options?.end) { + params.set("end", options.end) + } + const queryString = params.toString() ? 
`?${params.toString()}` : "" + return request(`/api/stats/requests${queryString}`) +} + +export async function getRequestLogs(options?: { + start?: string + end?: string + channel?: string + sender_id?: string + limit?: number + offset?: number +}): Promise { + const params = new URLSearchParams() + if (options?.start) { + params.set("start", options.start) + } + if (options?.end) { + params.set("end", options.end) + } + if (options?.channel) { + params.set("channel", options.channel) + } + if (options?.sender_id) { + params.set("sender_id", options.sender_id) + } + if (options?.limit !== undefined) { + params.set("limit", options.limit.toString()) + } + if (options?.offset !== undefined) { + params.set("offset", options.offset.toString()) + } + const queryString = params.toString() ? `?${params.toString()}` : "" + return request(`/api/logs/requests${queryString}`) +} + +export function getExportLogsUrl(options?: { + start?: string + end?: string + channel?: string + sender_id?: string + format?: "json" | "csv" +}): string { + const params = new URLSearchParams() + if (options?.start) { + params.set("start", options.start) + } + if (options?.end) { + params.set("end", options.end) + } + if (options?.channel) { + params.set("channel", options.channel) + } + if (options?.sender_id) { + params.set("sender_id", options.sender_id) + } + if (options?.format) { + params.set("format", options.format) + } + const queryString = params.toString() ? 
`?${params.toString()}` : "" + return `/api/logs/requests/export${queryString}` +} + +export async function getRequestLogConfig(): Promise { + return request("/api/config/requestlog") +} + +export async function updateRequestLogConfig(config: Partial): Promise { + const res = await fetch(`${BASE_URL}/api/config/requestlog`, { + method: "PUT", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(config), + }) + if (!res.ok) { + throw new Error(`API error: ${res.status} ${res.statusText}`) + } + return res.json() as Promise +} + +export async function archiveNow(): Promise { + const res = await fetch(`${BASE_URL}/api/logs/requests/archive-now`, { + method: "POST", + }) + if (!res.ok) { + throw new Error(`API error: ${res.status} ${res.statusText}`) + } +} diff --git a/web/frontend/src/components/app-sidebar.tsx b/web/frontend/src/components/app-sidebar.tsx index 7022128577..41bf92b054 100644 --- a/web/frontend/src/components/app-sidebar.tsx +++ b/web/frontend/src/components/app-sidebar.tsx @@ -1,6 +1,7 @@ import { IconChevronRight } from "@tabler/icons-react" import { IconAtom, + IconChartBar, IconChevronsDown, IconChevronsUp, IconKey, @@ -137,6 +138,12 @@ export function AppSidebar({ ...props }: React.ComponentProps) { { ...baseNavGroups[3], items: [ + { + title: "navigation.stats", + url: "/stats", + icon: IconChartBar, + translateTitle: true, + }, { title: "navigation.config", url: "/config", diff --git a/web/frontend/src/components/config/config-page.tsx b/web/frontend/src/components/config/config-page.tsx index d7e1aa1b55..6f294d495a 100644 --- a/web/frontend/src/components/config/config-page.tsx +++ b/web/frontend/src/components/config/config-page.tsx @@ -17,6 +17,7 @@ import { AgentDefaultsSection, DevicesSection, LauncherSection, + LogSettingsSection, RuntimeSection, } from "@/components/config/config-sections" import { @@ -318,6 +319,10 @@ export function ConfigPage() { + + + +
diff --git a/web/frontend/src/components/config/config-sections.tsx b/web/frontend/src/components/config/config-sections.tsx index 90813be2af..5d95a1257f 100644 --- a/web/frontend/src/components/config/config-sections.tsx +++ b/web/frontend/src/components/config/config-sections.tsx @@ -1,4 +1,4 @@ -import { IconCode } from "@tabler/icons-react" +import { IconCode, IconFileText } from "@tabler/icons-react" import { Link } from "@tanstack/react-router" import { useTranslation } from "react-i18next" @@ -331,3 +331,23 @@ export function AdvancedSection() { ) } + +export function LogSettingsSection() { + const { t } = useTranslation() + + return ( +
+

+ {t("pages.logs.description")} +

+
+ +
+
+ ) +} diff --git a/web/frontend/src/components/stats/stats-page.tsx b/web/frontend/src/components/stats/stats-page.tsx new file mode 100644 index 0000000000..2cd052bbd2 --- /dev/null +++ b/web/frontend/src/components/stats/stats-page.tsx @@ -0,0 +1,174 @@ +import { useEffect, useState } from "react" +import { useTranslation } from "react-i18next" + +import { + Card, + CardContent, + CardHeader, + CardTitle, +} from "@/components/ui/card" +import { PageHeader } from "@/components/page-header" +import { getRequestStats, type RequestStats } from "@/api/stats" + +export function StatsPage() { + const { t } = useTranslation() + const [stats, setStats] = useState(null) + const [loading, setLoading] = useState(true) + const [error, setError] = useState(null) + + useEffect(() => { + async function loadStats() { + try { + const data = await getRequestStats() + setStats(data) + } catch (err) { + setError(err instanceof Error ? err.message : "Failed to load stats") + } finally { + setLoading(false) + } + } + loadStats() + }, []) + + if (loading) { + return ( +
+ +
{t("pages.stats.loading")}
+
+ ) + } + + if (error) { + return ( +
+ +
{t("pages.stats.error", { error })}
+
+ ) + } + + const channelEntries = stats?.by_channel + ? Object.entries(stats.by_channel).sort((a, b) => b[1] - a[1]) + : [] + + const dayEntries = stats?.by_day + ? Object.entries(stats.by_day).sort((a, b) => a[0].localeCompare(b[0])) + : [] + + return ( +
+ + +
+ + + {t("pages.stats.total_requests")} + + +
{stats?.total ?? 0}
+
+
+ + + + {t("pages.stats.active_channels")} + + +
{channelEntries.length}
+
+
+ + + + {t("pages.stats.days_tracked")} + + +
{dayEntries.length}
+
+
+ + + + {t("pages.stats.top_senders")} + + +
{stats?.top_senders?.length ?? 0}
+
+
+
+ +
+ + + {t("pages.stats.requests_by_channel")} + + +
+ {channelEntries.map(([channel, count]) => ( +
+ {channel} + {count} +
+ ))} + {channelEntries.length === 0 && ( +
{t("pages.stats.no_data")}
+ )} +
+
+
+ + + + {t("pages.stats.top_senders")} + + +
+ {stats?.top_senders?.map((sender, idx) => ( +
+
+ {sender.sender} + + ({sender.channel}) + +
+ {sender.count} +
+ ))} + {(!stats?.top_senders || stats.top_senders.length === 0) && ( +
{t("pages.stats.no_data")}
+ )} +
+
+
+
+ + + + {t("pages.stats.requests_by_day")} + + +
+ {dayEntries.slice(-14).map(([date, count]) => ( +
+ {date} +
+
+ {count} +
+
+ ))} + {dayEntries.length === 0 && ( +
{t("pages.stats.no_data")}
+ )} +
+ + +
+ ) +} diff --git a/web/frontend/src/i18n/locales/en.json b/web/frontend/src/i18n/locales/en.json index b88b5c924e..cb965f0209 100644 --- a/web/frontend/src/i18n/locales/en.json +++ b/web/frontend/src/i18n/locales/en.json @@ -11,8 +11,10 @@ "channels_group": "Channels", "show_more_channels": "More", "show_less_channels": "Less", + "stats": "Statistics", "config": "Config", - "logs": "Logs" + "logs": "Logs", + "request_logs": "Request Logs" }, "chat": { "welcome": "How can I help you today?", @@ -70,7 +72,8 @@ "save": "Save", "saving": "Saving...", "reset": "Reset", - "confirm": "Confirm" + "confirm": "Confirm", + "refresh": "Refresh" }, "labels": { "loading": "Loading..." @@ -329,6 +332,18 @@ } }, "pages": { + "stats": { + "title": "Request Statistics", + "loading": "Loading...", + "error": "Error: {{error}}", + "total_requests": "Total Requests", + "active_channels": "Active Channels", + "days_tracked": "Days Tracked", + "top_senders": "Top Senders", + "requests_by_channel": "Requests by Channel", + "requests_by_day": "Requests by Day", + "no_data": "No data available" + }, "agent": { "load_error": "Failed to load agent support information.", "stats": { @@ -489,6 +504,35 @@ }, "logs": { "description": "System logs and monitoring.", + "requests_title": "Request Logs", + "settings_title": "Log Settings", + "all_channels": "All Channels", + "no_logs": "No logs found", + "timestamp": "Timestamp", + "channel": "Channel", + "sender": "Sender", + "content": "Content", + "proc_time": "Proc Time", + "showing": "Showing", + "previous": "Previous", + "next": "Next", + "enabled": "Enable Logging", + "enabled_hint": "Record incoming requests to log files.", + "max_file_size": "Max File Size (MB)", + "max_file_size_hint": "Maximum size of each log file before rotation.", + "max_files": "Max Files", + "max_files_hint": "Maximum number of log files to keep.", + "retention_days": "Retention Days", + "retention_days_hint": "Number of days to keep log files before archiving.", + 
"archive_interval": "Archive Interval", + "archive_interval_hint": "How often to archive old logs (e.g., 24h, 1d).", + "compress_archive": "Compress Archives", + "compress_archive_hint": "Compress archived log files to save space.", + "content_max_length": "Content Max Length", + "content_max_length_hint": "Maximum length of logged content (0 for unlimited).", + "archive_now": "Archive Now", + "archive_success": "Archive completed successfully.", + "config_unavailable": "Log configuration not available.", "clear": "Clear logs", "empty": "Waiting for logs..." } diff --git a/web/frontend/src/i18n/locales/zh.json b/web/frontend/src/i18n/locales/zh.json index 12833cbf5b..94deed4ac2 100644 --- a/web/frontend/src/i18n/locales/zh.json +++ b/web/frontend/src/i18n/locales/zh.json @@ -11,8 +11,10 @@ "channels_group": "频道", "show_more_channels": "更多", "show_less_channels": "收起", + "stats": "统计", "config": "配置", - "logs": "日志" + "logs": "日志", + "request_logs": "请求日志" }, "chat": { "welcome": "今天我能为您做些什么?", @@ -70,7 +72,8 @@ "save": "保存", "saving": "保存中...", "reset": "重置", - "confirm": "确认" + "confirm": "确认", + "refresh": "刷新" }, "labels": { "loading": "加载中..." 
@@ -329,6 +332,18 @@ } }, "pages": { + "stats": { + "title": "请求统计", + "loading": "加载中...", + "error": "错误:{{error}}", + "total_requests": "总请求数", + "active_channels": "活跃频道", + "days_tracked": "统计天数", + "top_senders": "活跃用户", + "requests_by_channel": "按频道统计", + "requests_by_day": "按日期统计", + "no_data": "暂无数据" + }, "agent": { "load_error": "加载 Agent 支持信息失败。", "stats": { @@ -489,6 +504,35 @@ }, "logs": { "description": "系统日志和监控。", + "requests_title": "请求日志", + "settings_title": "日志设置", + "all_channels": "所有频道", + "no_logs": "暂无日志", + "timestamp": "时间", + "channel": "频道", + "sender": "发送者", + "content": "内容", + "proc_time": "处理时间", + "showing": "显示", + "previous": "上一页", + "next": "下一页", + "enabled": "启用日志", + "enabled_hint": "将请求记录到日志文件中。", + "max_file_size": "最大文件大小 (MB)", + "max_file_size_hint": "日志文件轮转前的最大大小。", + "max_files": "最大文件数", + "max_files_hint": "保留的最大日志文件数量。", + "retention_days": "保留天数", + "retention_days_hint": "归档前保留日志文件的天数。", + "archive_interval": "归档间隔", + "archive_interval_hint": "归档旧日志的频率(如 24h、1d)。", + "compress_archive": "压缩归档", + "compress_archive_hint": "压缩归档日志文件以节省空间。", + "content_max_length": "内容最大长度", + "content_max_length_hint": "记录内容的最大长度(0 表示不限制)。", + "archive_now": "立即归档", + "archive_success": "归档完成。", + "config_unavailable": "日志配置不可用。", "clear": "清空日志", "empty": "等待日志中..." } diff --git a/web/frontend/src/routeTree.gen.ts b/web/frontend/src/routeTree.gen.ts index 60f19ab53b..1a73d77e23 100644 --- a/web/frontend/src/routeTree.gen.ts +++ b/web/frontend/src/routeTree.gen.ts @@ -9,6 +9,7 @@ // Additionally, you should also exclude this file from your linter and/or formatter to prevent it from being checked or modified. 
import { Route as rootRouteImport } from './routes/__root' +import { Route as StatsRouteImport } from './routes/stats' import { Route as ModelsRouteImport } from './routes/models' import { Route as LogsRouteImport } from './routes/logs' import { Route as CredentialsRouteImport } from './routes/credentials' @@ -16,11 +17,18 @@ import { Route as ConfigRouteImport } from './routes/config' import { Route as AgentRouteImport } from './routes/agent' import { Route as ChannelsRouteRouteImport } from './routes/channels/route' import { Route as IndexRouteImport } from './routes/index' +import { Route as LogsRequestsRouteImport } from './routes/logs.requests' import { Route as ConfigRawRouteImport } from './routes/config.raw' +import { Route as ConfigLogsRouteImport } from './routes/config.logs' import { Route as ChannelsNameRouteImport } from './routes/channels/$name' import { Route as AgentToolsRouteImport } from './routes/agent/tools' import { Route as AgentSkillsRouteImport } from './routes/agent/skills' +const StatsRoute = StatsRouteImport.update({ + id: '/stats', + path: '/stats', + getParentRoute: () => rootRouteImport, +} as any) const ModelsRoute = ModelsRouteImport.update({ id: '/models', path: '/models', @@ -56,11 +64,21 @@ const IndexRoute = IndexRouteImport.update({ path: '/', getParentRoute: () => rootRouteImport, } as any) +const LogsRequestsRoute = LogsRequestsRouteImport.update({ + id: '/requests', + path: '/requests', + getParentRoute: () => LogsRoute, +} as any) const ConfigRawRoute = ConfigRawRouteImport.update({ id: '/raw', path: '/raw', getParentRoute: () => ConfigRoute, } as any) +const ConfigLogsRoute = ConfigLogsRouteImport.update({ + id: '/logs', + path: '/logs', + getParentRoute: () => ConfigRoute, +} as any) const ChannelsNameRoute = ChannelsNameRouteImport.update({ id: '/$name', path: '/$name', @@ -83,12 +101,15 @@ export interface FileRoutesByFullPath { '/agent': typeof AgentRouteWithChildren '/config': typeof ConfigRouteWithChildren 
'/credentials': typeof CredentialsRoute - '/logs': typeof LogsRoute + '/logs': typeof LogsRouteWithChildren '/models': typeof ModelsRoute + '/stats': typeof StatsRoute '/agent/skills': typeof AgentSkillsRoute '/agent/tools': typeof AgentToolsRoute '/channels/$name': typeof ChannelsNameRoute + '/config/logs': typeof ConfigLogsRoute '/config/raw': typeof ConfigRawRoute + '/logs/requests': typeof LogsRequestsRoute } export interface FileRoutesByTo { '/': typeof IndexRoute @@ -96,12 +117,15 @@ export interface FileRoutesByTo { '/agent': typeof AgentRouteWithChildren '/config': typeof ConfigRouteWithChildren '/credentials': typeof CredentialsRoute - '/logs': typeof LogsRoute + '/logs': typeof LogsRouteWithChildren '/models': typeof ModelsRoute + '/stats': typeof StatsRoute '/agent/skills': typeof AgentSkillsRoute '/agent/tools': typeof AgentToolsRoute '/channels/$name': typeof ChannelsNameRoute + '/config/logs': typeof ConfigLogsRoute '/config/raw': typeof ConfigRawRoute + '/logs/requests': typeof LogsRequestsRoute } export interface FileRoutesById { __root__: typeof rootRouteImport @@ -110,12 +134,15 @@ export interface FileRoutesById { '/agent': typeof AgentRouteWithChildren '/config': typeof ConfigRouteWithChildren '/credentials': typeof CredentialsRoute - '/logs': typeof LogsRoute + '/logs': typeof LogsRouteWithChildren '/models': typeof ModelsRoute + '/stats': typeof StatsRoute '/agent/skills': typeof AgentSkillsRoute '/agent/tools': typeof AgentToolsRoute '/channels/$name': typeof ChannelsNameRoute + '/config/logs': typeof ConfigLogsRoute '/config/raw': typeof ConfigRawRoute + '/logs/requests': typeof LogsRequestsRoute } export interface FileRouteTypes { fileRoutesByFullPath: FileRoutesByFullPath @@ -127,10 +154,13 @@ export interface FileRouteTypes { | '/credentials' | '/logs' | '/models' + | '/stats' | '/agent/skills' | '/agent/tools' | '/channels/$name' + | '/config/logs' | '/config/raw' + | '/logs/requests' fileRoutesByTo: FileRoutesByTo to: | '/' @@ -140,10 
+170,13 @@ export interface FileRouteTypes { | '/credentials' | '/logs' | '/models' + | '/stats' | '/agent/skills' | '/agent/tools' | '/channels/$name' + | '/config/logs' | '/config/raw' + | '/logs/requests' id: | '__root__' | '/' @@ -153,10 +186,13 @@ export interface FileRouteTypes { | '/credentials' | '/logs' | '/models' + | '/stats' | '/agent/skills' | '/agent/tools' | '/channels/$name' + | '/config/logs' | '/config/raw' + | '/logs/requests' fileRoutesById: FileRoutesById } export interface RootRouteChildren { @@ -165,12 +201,20 @@ export interface RootRouteChildren { AgentRoute: typeof AgentRouteWithChildren ConfigRoute: typeof ConfigRouteWithChildren CredentialsRoute: typeof CredentialsRoute - LogsRoute: typeof LogsRoute + LogsRoute: typeof LogsRouteWithChildren ModelsRoute: typeof ModelsRoute + StatsRoute: typeof StatsRoute } declare module '@tanstack/react-router' { interface FileRoutesByPath { + '/stats': { + id: '/stats' + path: '/stats' + fullPath: '/stats' + preLoaderRoute: typeof StatsRouteImport + parentRoute: typeof rootRouteImport + } '/models': { id: '/models' path: '/models' @@ -220,6 +264,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof IndexRouteImport parentRoute: typeof rootRouteImport } + '/logs/requests': { + id: '/logs/requests' + path: '/requests' + fullPath: '/logs/requests' + preLoaderRoute: typeof LogsRequestsRouteImport + parentRoute: typeof LogsRoute + } '/config/raw': { id: '/config/raw' path: '/raw' @@ -227,6 +278,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof ConfigRawRouteImport parentRoute: typeof ConfigRoute } + '/config/logs': { + id: '/config/logs' + path: '/logs' + fullPath: '/config/logs' + preLoaderRoute: typeof ConfigLogsRouteImport + parentRoute: typeof ConfigRoute + } '/channels/$name': { id: '/channels/$name' path: '/$name' @@ -276,24 +334,37 @@ const AgentRouteChildren: AgentRouteChildren = { const AgentRouteWithChildren = AgentRoute._addFileChildren(AgentRouteChildren) 
interface ConfigRouteChildren { + ConfigLogsRoute: typeof ConfigLogsRoute ConfigRawRoute: typeof ConfigRawRoute } const ConfigRouteChildren: ConfigRouteChildren = { + ConfigLogsRoute: ConfigLogsRoute, ConfigRawRoute: ConfigRawRoute, } const ConfigRouteWithChildren = ConfigRoute._addFileChildren(ConfigRouteChildren) +interface LogsRouteChildren { + LogsRequestsRoute: typeof LogsRequestsRoute +} + +const LogsRouteChildren: LogsRouteChildren = { + LogsRequestsRoute: LogsRequestsRoute, +} + +const LogsRouteWithChildren = LogsRoute._addFileChildren(LogsRouteChildren) + const rootRouteChildren: RootRouteChildren = { IndexRoute: IndexRoute, ChannelsRouteRoute: ChannelsRouteRouteWithChildren, AgentRoute: AgentRouteWithChildren, ConfigRoute: ConfigRouteWithChildren, CredentialsRoute: CredentialsRoute, - LogsRoute: LogsRoute, + LogsRoute: LogsRouteWithChildren, ModelsRoute: ModelsRoute, + StatsRoute: StatsRoute, } export const routeTree = rootRouteImport ._addFileChildren(rootRouteChildren) diff --git a/web/frontend/src/routes/config.logs.tsx b/web/frontend/src/routes/config.logs.tsx new file mode 100644 index 0000000000..535e4d4abd --- /dev/null +++ b/web/frontend/src/routes/config.logs.tsx @@ -0,0 +1,7 @@ +import { createFileRoute } from "@tanstack/react-router" + +import { LogSettingsPanel } from "@/components/logs/log-settings-panel" + +export const Route = createFileRoute("/config/logs")({ + component: LogSettingsPanel, +}) diff --git a/web/frontend/src/routes/logs.requests.tsx b/web/frontend/src/routes/logs.requests.tsx new file mode 100644 index 0000000000..d567064793 --- /dev/null +++ b/web/frontend/src/routes/logs.requests.tsx @@ -0,0 +1,7 @@ +import { createFileRoute } from "@tanstack/react-router" + +import { RequestLogViewer } from "@/components/logs/request-log-viewer" + +export const Route = createFileRoute("/logs/requests")({ + component: RequestLogViewer, +}) diff --git a/web/frontend/src/routes/logs.tsx b/web/frontend/src/routes/logs.tsx index 
ef39e0bdf1..978d4c1591 100644 --- a/web/frontend/src/routes/logs.tsx +++ b/web/frontend/src/routes/logs.tsx @@ -1,5 +1,8 @@ + +import { IconFileText } from "@tabler/icons-react" import { IconTrash } from "@tabler/icons-react" import { createFileRoute } from "@tanstack/react-router" +import { Link } from "@tanstack/react-router" import { useAtomValue } from "jotai" import { useEffect, useRef, useState } from "react" import { useTranslation } from "react-i18next" @@ -108,7 +111,17 @@ function LogsPage() { return (
- + + + + {t("navigation.request_logs")} + + + } + />
diff --git a/web/frontend/src/routes/stats.tsx b/web/frontend/src/routes/stats.tsx new file mode 100644 index 0000000000..788a3a1c05 --- /dev/null +++ b/web/frontend/src/routes/stats.tsx @@ -0,0 +1,7 @@ +import { createFileRoute } from "@tanstack/react-router" + +import { StatsPage } from "@/components/stats/stats-page" + +export const Route = createFileRoute("/stats")({ + component: StatsPage, +})