# config-auth.env.example
# Example environment configuration for Gopie (auth-enabled deployment).
# ==================================
# Gopie Service Configuration
# ==================================
GOPIE_API_SERVER_HOST=localhost
GOPIE_API_SERVER_PORT=8002
GOPIE_LOGGER_LEVEL=info
GOPIE_LOGGER_FILE=gopie.log
GOPIE_LOGGER_MODE=dev
GOPIE_OPENAI_APIKEY=
GOPIE_OPENAI_BASEURL=
GOPIE_OPENAI_MODEL=
GOPIE_OPENAI_OPTIONS=
# EXAMPLE USAGE FOR PORTKEY GATEWAY
# GOPIE_OPENAI_OPTIONS='x-portkey-virtual-key=YOUR_VIRTUAL_KEY,x-portkey-api-key=YOUR_API_KEY'
GOPIE_DUCKDB_PATH=./dataful/gopie.duckdb
GOPIE_OLAPDB_DBTYPE=duckdb
GOPIE_OLAPDB_ACCESS_MODE=read_write
GOPIE_POSTGRES_HOST=postgres
GOPIE_POSTGRES_PORT=5432
GOPIE_POSTGRES_DB=gopie
GOPIE_POSTGRES_USER=postgres
GOPIE_POSTGRES_PASSWORD=postgres
GOPIE_POSTGRES_SSLMODE=disable
GOPIE_AIAGENT_URL=http://chat-server:8000
# NOTE(review): example key only — replace with your own randomly generated 32-character key before deploying
GOPIE_ENCRYPTION_KEY=E5B8A0F3C1D9E7B2A5F0C3D8E6B1A4F2
GOPIE_ENABLED_SERVERS=api
GOPIE_S3_ACCESS_KEY=minioadmin
GOPIE_S3_SECRET_KEY=minioadmin
GOPIE_S3_ENDPOINT=http://minio:9000
GOPIE_S3_SSL=false
GOPIE_S3_REGION=us-east-1
GOPIE_DOWNLOADS_S3_BUCKET=downloads
# Flag to determine if CORS is handled by ingress (true) or by the application (false)
GOPIE_CORS_HANDLED_BY_INGRESS=false
GOPIE_ENABLE_AUTH=true
GOPIE_USE_SECURE_AUTH_COOKIE=false
# ==================================
# Gopie Web UI Configuration
# ==================================
NEXT_PUBLIC_APP_URL=http://localhost:3000
NEXT_PUBLIC_COMPANION_URL="http://localhost:3020"
NEXT_PUBLIC_GOPIE_API_URL="http://localhost:8000"
NEXT_PUBLIC_ENABLE_AUTH="true"
NEXT_PUBLIC_LIVEKIT_URL=http://localhost:7880
GOPIE_API_URL="http://gopie-server:8000"
DATABASE_URL="postgresql://postgres:postgres@postgres:5432/gopie"
BETTER_AUTH_SECRET="change-me-to-a-random-secret"
BETTER_AUTH_URL="http://localhost:3000"
NEXT_PUBLIC_BETTER_AUTH_URL="http://localhost:3000"
NEXT_PUBLIC_STORAGE_URL="http://localhost:9000"
PASSWORD_ENCRYPTION_PRIVATE_KEY=
# GOOGLE_CLIENT_ID=
# GOOGLE_CLIENT_SECRET=
# ==================================
# Companion Service Configuration
# ==================================
COMPANION_AWS_ENDPOINT=http://localhost:9000
COMPANION_AWS_REGION=us-east-1
COMPANION_AWS_BUCKET=gopie
COMPANION_AWS_KEY=minioadmin
COMPANION_AWS_SECRET=minioadmin
COMPANION_DOMAIN=localhost:3020
COMPANION_PROTOCOL=http
COMPANION_DATADIR=/
COMPANION_SELF_ENDPOINT=localhost:3020
NODE_ENV=dev
# ==================================
# Chat Server General Configuration
# ==================================
CHAT_PROJECT_NAME="Gopie Chat Server"
CHAT_API_V1_STR="/api/v1"
CHAT_MODE="development"
CHAT_GOPIE_API_ENDPOINT="http://gopie-server:8001"
# Tool Call & Retry Limits
CHAT_MAX_TOOL_CALL_LIMIT=10
CHAT_MAX_VALIDATION_RETRY_COUNT=2
CHAT_MAX_SEMANTIC_SEARCH_RETRY=1
CHAT_MAX_VIZ_TOOL_CALLS=12
# CORS Configuration
CHAT_CORS_ORIGINS='["*"]'
CHAT_CORS_METHODS='["*"]'
CHAT_CORS_HEADERS='["*"]'
# Chat History
CHAT_CHAT_HISTORY_MAX_MESSAGES=20
CHAT_CHAT_HISTORY_MAX_TOKENS=10000
# ==================================
# AI Gateways & Providers (Enable one)
# ==================================
# LLM Gateway Provider Options (choose one):
# - "portkey" : Portkey AI gateway with routing, caching, and fallbacks
# - "litellm" : LiteLLM unified interface for 100+ LLM providers
# - "cloudflare" : Cloudflare Workers AI gateway
# - "openrouter" : OpenRouter for accessing multiple AI models
# - "custom" : Custom LLM endpoints for self-hosted models or direct provider access
#
# Note: For direct access to any OpenAI-compatible provider, use "custom" with:
# CHAT_LLM_GATEWAY_PROVIDER="custom"
# CHAT_CUSTOM_PROVIDER_NAME="<PROVIDER_NAME>"
# CHAT_CUSTOM_LLM_BASE_URL="<PROVIDER_BASE_URL>"
# CHAT_CUSTOM_LLM_API_KEY="<YOUR_API_KEY>"
CHAT_LLM_GATEWAY_PROVIDER="custom"
# If the custom LLM provider is chosen above, the following need to be updated based on the provider.
# The following providers are supported directly:
# - OpenAI
# - Gemini
# - Groq
# - Together
# - Perplexity
# - Fireworks
# - Anthropic
# - DeepInfra
# - HuggingFace
# Example Usage for OpenAI:
# CHAT_LLM_GATEWAY_PROVIDER="custom"
# CHAT_CUSTOM_PROVIDER_NAME="openai"
# OPENAI_API_KEY="<YOUR_API_KEY>"
CHAT_CUSTOM_PROVIDER_NAME="openai"
CHAT_CUSTOM_LLM_BASE_URL=""
CHAT_CUSTOM_LLM_API_KEY="<API KEY>"
# Embedding Gateway Provider Options (choose one):
# - "portkey" : Portkey for embedding models
# - "litellm" : LiteLLM for embedding providers
# - "openai" : Direct OpenAI embeddings API
# - "custom" : Custom embedding endpoints
CHAT_EMBEDDING_GATEWAY_PROVIDER="openai"
# Portkey
PORTKEY_API_KEY=""
PORTKEY_URL=""
PORTKEY_PROVIDER_API_KEY=""
PORTKEY_PROVIDER_NAME=""
PORTKEY_CONFIG_ID=""
PORTKEY_EMBEDDING_PROVIDER_API_KEY=""
PORTKEY_EMBEDDING_PROVIDER_NAME=""
# LiteLLM
# NOTE(review): "host.docker.local" is not a standard Docker DNS name — the special
# host-gateway alias is "host.docker.internal"; confirm this matches your Docker setup.
LITELLM_BASE_URL="http://host.docker.internal:4000/v1"
LITELLM_MASTER_KEY=""
LITELLM_KEY_HEADER_NAME=""
LITELLM_VIRTUAL_KEY=""
# Cloudflare AI Gateway
CLOUDFLARE_GATEWAY_URL="https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}"
CLOUDFLARE_ACCOUNT_ID=""
CLOUDFLARE_API_TOKEN=""
CLOUDFLARE_GATEWAY_ID="gopie-chat-server"
CLOUDFLARE_PROVIDER=""
CLOUDFLARE_PROVIDER_API_KEY=""
# OpenRouter
OPENROUTER_API_KEY=""
OPENROUTER_BASE_URL="https://openrouter.ai/api/v1"
# E2B (Code Interpreter) - Chat server specific
CHAT_E2B_API_KEY="<YOUR_E2B_API_KEY>"
CHAT_E2B_TIMEOUT=120
# If a custom embedding provider is chosen, update the following accordingly.
CHAT_CUSTOM_EMBEDDING_BASE_URL=""
CHAT_CUSTOM_EMBEDDING_API_KEY=""
# Direct Provider Keys
OPENAI_API_KEY="<YOUR_API_KEY>"
GROQ_API_KEY="<YOUR_API_KEY>"
TOGETHER_API_KEY="<YOUR_API_KEY>"
PERPLEXITY_API_KEY="<YOUR_API_KEY>"
FIREWORKS_API_KEY="<YOUR_API_KEY>"
ANTHROPIC_API_KEY="<YOUR_API_KEY>"
DEEPINFRA_API_TOKEN="<YOUR_API_KEY>"
HF_TOKEN="<YOUR_API_KEY>"
# ==================================
# Default Model Configuration
# ==================================
CHAT_FAST_MODEL="gpt-4.1-mini"
CHAT_BALANCED_MODEL="gpt-4.1"
CHAT_ADVANCED_MODEL="gpt-5"
CHAT_FALLBACK_MODEL=""
CHAT_DEFAULT_LLM_MODEL="gpt-4.1"
CHAT_DEFAULT_EMBEDDING_MODEL="text-embedding-3-large"
CHAT_DEFAULT_EMBEDDING_SIZE=3072
CHAT_EMBEDDINGS_MAX_TOKEN=""
CHAT_DEFAULT_SPARSE_MODEL="prithivida/Splade_PP_en_v1"
# Temperature Configuration
CHAT_DETERMINISTIC_TEMPERATURE=0.0
CHAT_LOW_VARIATION_TEMPERATURE=0.3
CHAT_BALANCED_TEMPERATURE=0.5
CHAT_CREATIVE_TEMPERATURE=0.7
# ==================================
# S3 Storage (Chat Server)
# ==================================
CHAT_INTERNAL_S3_HOST="http://minio:9000"
CHAT_EXTERNAL_S3_HOST="http://localhost:9000"
CHAT_S3_ACCESS_KEY="minioadmin"
CHAT_S3_SECRET_KEY="minioadmin"
CHAT_S3_BUCKET="gopie"
CHAT_S3_REGION="us-east-1"
# ==================================
# Vector Database (Qdrant)
# ==================================
CHAT_QDRANT_HOST="qdrant"
CHAT_QDRANT_COLLECTION="dataset_collection"
CHAT_QDRANT_DUCKDB_COLLECTION="duckdb_docs_collection"
CHAT_QDRANT_PORT=6333
CHAT_QDRANT_TOP_K=5
CHAT_QDRANT_DUCKDB_TOP_K=5
# ==================================
# Dataset & Truncation Limits
# ==================================
CHAT_ROW_TRUNCATION_LIMIT=250
CHAT_DATASET_TOKEN_TRUNCATION_LIMIT=100000
CHAT_COLUMN_TRUNCATION_LIMIT=200
CHAT_DISPLAY_ROWS_AFTER_TRUNCATION_LIMIT=20
# Dataset Sampling
CHAT_TARGET_ROWS=150000
CHAT_SAMPLING_THRESHOLD=150000
# OLAP Backend Configuration
CHAT_OLAP_DB_TYPE="duckdb"
# ==================================
# LangSmith (LLM Tracing)
# ==================================
LANGSMITH_TRACING=false
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_PROJECT="gopie-chat-server-local"
LANGSMITH_API_KEY=""
CHAT_LANGSMITH_PROMPT=false