Skip to content

Commit 8b1b207

Browse files
committed
feat: add LLM_SERVER_PORT to external LLM configuration across multiple charts
Signed-off-by: devpramod <[email protected]>
1 parent 057fac1 commit 8b1b207

File tree

6 files changed

+25
-4
lines changed

6 files changed

+25
-4
lines changed

helm-charts/chatqna/external-llm-values.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ externalLLM:
77
LLM_SERVER_HOST_IP: "http://your-llm-server" # External LLM service host
88
LLM_MODEL: "your-model" # LLM model to use
99
OPENAI_API_KEY: "your-api-key" # OpenAI API key for authentication
10+
LLM_SERVER_PORT: "80" # Port for the external LLM service
1011

1112
# Disable internal LLM services when using external LLM
1213
llm-uservice:

helm-charts/chatqna/templates/deployment.yaml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,11 @@ spec:
5050
{{- fail "ChatQnA needs a LLM inference backend!" }}
5151
{{- end }}
5252
- name: LLM_SERVER_PORT
53+
{{- if .Values.externalLLM.enabled }}
54+
value: {{ .Values.externalLLM.LLM_SERVER_PORT | default "80" | quote }}
55+
{{- else }}
5356
value: "80"
57+
{{- end }}
5458
- name: LLM_MODEL
5559
{{- if .Values.ollama.enabled }}
5660
value: {{ .Values.ollama.LLM_MODEL_ID | quote }}

helm-charts/codegen/external-llm-values.yaml

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,10 +3,11 @@
33

44
# External LLM configuration
55
externalLLM:
6-
enabled: true
7-
LLM_SERVICE_HOST_IP: "http://your-llm-server"
8-
LLM_MODEL: "your-model"
9-
OPENAI_API_KEY: "your-api-key"
6+
enabled: true # Enable external LLM service
7+
LLM_SERVICE_HOST_IP: "http://your-llm-server" # External LLM service host
8+
LLM_MODEL: "your-model" # LLM model to use
9+
OPENAI_API_KEY: "your-api-key" # OpenAI API key for authentication
10+
LLM_SERVICE_PORT: "80" # Port for the external LLM service (read by templates/deployment.yaml as .Values.externalLLM.LLM_SERVICE_PORT)
1011

1112
# Disable internal LLM services when using external LLM
1213
tgi:

helm-charts/codegen/templates/deployment.yaml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,21 +41,29 @@ spec:
4141
value: {{ include "llm-uservice.fullname" (index .Subcharts "llm-uservice") | quote }}
4242
{{- end }}
4343
- name: LLM_SERVICE_PORT
44+
{{- if .Values.externalLLM.enabled }}
45+
value: {{ .Values.externalLLM.LLM_SERVICE_PORT | default "80" | quote }}
46+
{{- else }}
4447
value: {{ index .Values "llm-uservice" "service" "port" | quote }}
48+
{{- end }}
4549
{{- if .Values.externalLLM.enabled }}
4650
- name: LLM_MODEL
4751
value: {{ .Values.externalLLM.LLM_MODEL }}
4852
- name: OPENAI_API_KEY
4953
value: {{ .Values.externalLLM.OPENAI_API_KEY }}
5054
{{- end }}
55+
{{- if index .Values "retriever-usvc" "enabled" }}
5156
- name: RETRIEVAL_SERVICE_HOST_IP
5257
value: {{ include "retriever-usvc.fullname" (index .Subcharts "retriever-usvc") | quote }}
5358
- name: REDIS_RETRIEVER_PORT
5459
value: {{ index .Values "retriever-usvc" "service" "port" | quote }}
60+
{{- end }}
61+
{{- if index .Values "embedding-usvc" "enabled" }}
5562
- name: TEI_EMBEDDING_HOST_IP
5663
value: {{ include "embedding-usvc.fullname" (index .Subcharts "embedding-usvc") | quote }}
5764
- name: EMBEDDER_PORT
5865
value: {{ index .Values "embedding-usvc" "service" "port" | quote }}
66+
{{- end }}
5967
securityContext:
6068
{{- toYaml .Values.securityContext | nindent 12 }}
6169
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"

helm-charts/docsum/external-llm-values.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ externalLLM:
77
LLM_SERVICE_HOST_IP: "http://your-llm-server" # External LLM service host
88
LLM_MODEL: "your-model" # LLM model to use
99
OPENAI_API_KEY: "your-api-key" # OpenAI API key for authentication
10+
LLM_SERVICE_PORT: "80" # Port for the external LLM service (read by templates/deployment.yaml as .Values.externalLLM.LLM_SERVICE_PORT)
1011

1112
# Disable internal LLM service when using external LLM
1213
llm-uservice:

helm-charts/docsum/templates/deployment.yaml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,11 +41,17 @@ spec:
4141
value: {{ include "llm-uservice.fullname" (index .Subcharts "llm-uservice") }}
4242
{{- end }}
4343
- name: LLM_SERVICE_PORT
44+
{{- if .Values.externalLLM.enabled }}
45+
value: {{ .Values.externalLLM.LLM_SERVICE_PORT | default "80" | quote }}
46+
{{- else }}
4447
value: {{ index .Values "llm-uservice" "service" "port" | quote }}
48+
{{- end }}
49+
{{- if .Values.whisper.enabled }}
4550
- name: ASR_SERVICE_HOST_IP
4651
value: {{ include "whisper.fullname" (index .Subcharts "whisper") }}
4752
- name: ASR_SERVICE_PORT
4853
value: {{ index .Values "whisper" "service" "port" | quote }}
54+
{{- end }}
4955
{{- if .Values.externalLLM.enabled }}
5056
- name: LLM_MODEL
5157
value: {{ .Values.externalLLM.LLM_MODEL }}

0 commit comments

Comments (0)