forked from aiming-lab/AutoResearchClaw
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconfig.researchclaw.example.yaml
More file actions
109 lines (95 loc) · 2.98 KB
/
config.researchclaw.example.yaml
File metadata and controls
109 lines (95 loc) · 2.98 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# Project identity and overall operating mode.
project:
  name: "my-research"
  # NOTE(review): "full-auto" appears to select the automation level — confirm
  # the set of allowed values against the consuming code.
  mode: "full-auto"
# What to research and how much literature to pull per day.
research:
  topic: "Your research topic here"
  # One tag per research domain.
  domains:
    - "machine-learning"
  daily_paper_count: 10
  # NOTE(review): presumably papers scoring below this are filtered out —
  # confirm the scale (e.g. 0-5) against the consumer.
  quality_threshold: 4.0
# Scheduling and execution limits.
runtime:
  # IANA timezone name used by the runtime.
  timezone: "America/New_York"
  max_parallel_tasks: 3
  approval_timeout_hours: 12
  retry_limit: 2
# Where and when to send progress notifications.
notifications:
  channel: "console"
  # NOTE(review): target is presumably a destination address (webhook/chat id)
  # and unused for the "console" channel — confirm.
  target: ""
  on_stage_start: true
  on_stage_fail: true
  on_gate_required: true
# Knowledge-base storage backend and location.
knowledge_base:
  backend: "markdown"
  # Repository-relative root directory for the knowledge base.
  root: "docs/kb"
# Feature toggles for the OpenClaw bridge integration (all disabled by default).
openclaw_bridge:
  use_cron: false
  use_message: false
  use_memory: false
  use_sessions_spawn: false
  use_web_fetch: false
  use_browser: false
# ============================================================================
# LLM Provider Configuration
# ============================================================================
# Supported providers:
# - "openai" → OpenAI API (default base_url)
# - "openrouter" → OpenRouter (access to 200+ models via single API)
# - "deepseek" → DeepSeek API
# - "openai-compatible" → Any OpenAI-compatible API (requires base_url)
# - "acp" → ACP agent protocol (for local agents)
#
# OpenRouter Example (https://openrouter.ai):
# Get your API key at https://openrouter.ai/keys
# Models: anthropic/claude-3.5-sonnet, google/gemini-pro-1.5, meta-llama/llama-3.1-70b-instruct
# See all models: https://openrouter.ai/models
# ============================================================================
llm:
  # Provider options: "openai", "openrouter", "deepseek", "openai-compatible", "acp"
  provider: "openai"
  # base_url is auto-set for known providers (openai, openrouter, deepseek)
  # Only needed for "openai-compatible" provider
  base_url: "https://api.openai.com/v1"
  # API key from environment variable (recommended) or direct value
  api_key_env: "OPENAI_API_KEY"
  api_key: ""
  primary_model: "gpt-4o"
  # Tried in order when the primary model fails.
  fallback_models:
    - "gpt-4.1"
    - "gpt-4o-mini"
  # Semantic Scholar API key (optional, for literature search)
  s2_api_key: ""
  notes: ""
# ============================================================================
# Example: OpenRouter Configuration
# ============================================================================
# llm:
#   provider: "openrouter"
#   api_key_env: "OPENROUTER_API_KEY"
#   primary_model: "anthropic/claude-3.5-sonnet"
#   fallback_models:
#     - "google/gemini-pro-1.5"
#     - "meta-llama/llama-3.1-70b-instruct"
# Human-in-the-loop gates and log hygiene.
security:
  # Pipeline stage numbers that require human approval before proceeding.
  hitl_required_stages: [5, 9, 20]
  allow_publish_without_approval: false
  redact_sensitive_logs: true
# Experiment execution budget and optimization target.
experiment:
  # Execution mode; "sandbox" selects the local sandbox runner.
  mode: "sandbox"
  time_budget_sec: 300
  max_iterations: 10
  # Name of the metric to optimize and its direction.
  metric_key: "primary_metric"
  metric_direction: "minimize"
# Local (non-Docker) sandbox runner settings.
# NOTE(review): indentation was lost in the source copy; this section may
# belong nested one level under `experiment:` — confirm against the consumer.
sandbox:
  python_path: ".venv/bin/python3"
  gpu_required: false
  max_memory_mb: 4096
# Docker-based experiment runner settings.
# NOTE(review): indentation was lost in the source copy; this section may
# belong nested one level under `experiment:` — confirm against the consumer.
docker:
  image: "researchclaw/experiment:latest"
  gpu_enabled: true
  memory_limit_mb: 8192
  # "none" disables container networking.
  network_policy: "none"
  auto_install_deps: true
  shm_size_mb: 2048
  keep_containers: false
# Prompt overrides; empty means use the built-in prompts.
prompts:
  custom_file: ""