forked from modelscope/AgentEvolver
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: example.env
More file actions
29 lines (26 loc) · 1.15 KB
/
example.env
File metadata and controls
29 lines (26 loc) · 1.15 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
# Example environment configuration.
# Copy to `.env` and replace every <...> placeholder before running.

# OpenAI-compatible endpoint (DashScope compatible mode) and credentials.
OPENAI_BASE_URL=https://dashscope.aliyuncs.com/compatible-mode/v1
OPENAI_API_KEY="<YOUR_API_KEY>"
DASHSCOPE_API_KEY="<YOUR_API_KEY>"

# Hugging Face mirror endpoint used for model downloads.
HF_ENDPOINT=https://hf-mirror.com

# AppWorld environment service: launch-script directory and startup command.
APPWORLD_PATH="./env_service/launch_script"
APPWORLD_SCRIPT="source <YOUR_CONDA_PATH>/etc/profile.d/conda.sh; conda activate appworld; bash appworld.sh"

# Optional BFCL environment service — uncomment both lines to enable.
# BFCL_PATH="./env_service/launch_script"
# BFCL_SCRIPT="source <YOUR_CONDA_PATH>/etc/profile.d/conda.sh; conda activate bfcl; bash bfcl.sh"

# Flow LLM / embedding settings reuse the OpenAI-compatible credentials above.
# NOTE(review): ${VAR} interpolation works when sourced by a shell or by a
# dotenv loader that supports expansion — confirm your loader does.
FLOW_EMBEDDING_API_KEY="${OPENAI_API_KEY}"
FLOW_EMBEDDING_BASE_URL="${OPENAI_BASE_URL}"
FLOW_LLM_API_KEY="${OPENAI_API_KEY}"
FLOW_LLM_BASE_URL="${OPENAI_BASE_URL}"

# ReMe memory service: install path and the (multi-line, single-quoted)
# startup command with its CLI overrides.
REME_PATH="./external/reme"
REME_SCRIPT='source <YOUR_CONDA_PATH>/etc/profile.d/conda.sh; conda activate reme; reme \
config=default \
backend=http \
thread_pool_max_workers=256 \
http.host="127.0.0.1" \
http.port=8001 \
http.limit_concurrency=256 \
llm.default.model_name=qwen-max-2025-01-25 \
embedding_model.default.model_name=text-embedding-v4 \
vector_store.default.backend=local \
op.rerank_memory_op.params.enable_llm_rerank=false
'

# SECURITY: a live-looking NVIDIA NIM API key ("nvapi-...") was committed on
# this line; it has been redacted to a placeholder. Revoke/rotate the exposed
# key in the NVIDIA dashboard — redacting it here does not un-leak it.
NVIDIA_NIM_API_KEY="<YOUR_NVIDIA_NIM_API_KEY>"