Skip to content

Commit f35ef30

Browse files
committed
change host and port names for external llm env variables
Signed-off-by: devpramod <[email protected]>
1 parent 5d4576a commit f35ef30

File tree

3 files changed

+7
-5
lines changed

3 files changed

+7
-5
lines changed

helm-charts/docsum/templates/deployment.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,13 +36,13 @@ spec:
3636
env:
3737
- name: LLM_SERVICE_HOST_IP
3838
{{- if .Values.externalLLM.enabled }}
39-
value: {{ .Values.externalLLM.LLM_SERVICE_HOST_IP }}
39+
value: {{ .Values.externalLLM.LLM_SERVER_HOST }}
4040
{{- else }}
4141
value: {{ include "llm-uservice.fullname" (index .Subcharts "llm-uservice") }}
4242
{{- end }}
4343
- name: LLM_SERVICE_PORT
4444
{{- if .Values.externalLLM.enabled }}
45-
value: {{ .Values.externalLLM.LLM_SERVICE_PORT | default "80" | quote }}
45+
value: {{ .Values.externalLLM.LLM_SERVER_PORT | default "80" | quote }}
4646
{{- else }}
4747
value: {{ index .Values "llm-uservice" "service" "port" | quote }}
4848
{{- end }}

helm-charts/docsum/values.yaml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,7 @@ vllm:
8080
nginx:
8181
enabled: false
8282
docsum-ui:
83+
# if false, set also nginx.enabled=false
8384
enabled: true
8485
image:
8586
repository: opea/docsum-gradio-ui
@@ -112,7 +113,8 @@ whisper:
112113
# External LLM configuration
113114
externalLLM:
114115
enabled: false
115-
LLM_SERVICE_HOST_IP: "http://your-llm-server"
116+
LLM_SERVER_HOST: "http://your-llm-server"
117+
LLM_SERVER_PORT: "80"
116118
LLM_MODEL: "your-model"
117119
OPENAI_API_KEY: "your-api-key"
118120

helm-charts/docsum/variant_external-llm-values.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,10 @@
44
# External LLM configuration override
55
externalLLM:
66
enabled: true # Enable external LLM service
7-
LLM_SERVICE_HOST_IP: "http://your-llm-server" # External LLM service host
7+
LLM_SERVER_HOST: "http://your-llm-server" # External LLM service host
8+
LLM_SERVER_PORT: "80" # Port for the external LLM service
89
LLM_MODEL: "your-model" # LLM model to use
910
OPENAI_API_KEY: "your-api-key" # OpenAI API key for authentication
10-
LLM_SERVER_PORT: "80" # Port for the external LLM service
1111

1212
# Disable internal LLM service when using external LLM
1313
llm-uservice:

0 commit comments

Comments (0)