6 files changed (+25 / -4 lines)
File 1 of 6 (chart values):

@@ -7,6 +7,7 @@ externalLLM:
   LLM_SERVER_HOST_IP: "http://your-llm-server"  # External LLM service host
   LLM_MODEL: "your-model"  # LLM model to use
   OPENAI_API_KEY: "your-api-key"  # OpenAI API key for authentication
+  LLM_SERVER_PORT: "80"  # Port for the external LLM service

 # Disable internal LLM services when using external LLM
 llm-uservice:
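For reference, an override file along these lines could be used to exercise the new setting, passed with something like `helm upgrade --install <release> <chart-dir> -f external-llm-values.yaml`. This is a sketch only: the endpoint, model name, key, port, release name, and file name are all placeholders, not values from this change.

# external-llm-values.yaml (hypothetical override file)
externalLLM:
  enabled: true
  LLM_SERVER_HOST_IP: "http://llm.example.internal"  # placeholder endpoint
  LLM_MODEL: "your-model-id"  # placeholder model identifier
  OPENAI_API_KEY: "sk-xxxx"  # placeholder key
  LLM_SERVER_PORT: "8080"  # overrides the chart default of "80"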
File 2 of 6 (deployment template):

@@ -50,7 +50,11 @@
               {{- fail "ChatQnA needs a LLM inference backend!" }}
               {{- end }}
             - name: LLM_SERVER_PORT
+              {{- if .Values.externalLLM.enabled }}
+              value: {{ .Values.externalLLM.LLM_SERVER_PORT | default "80" | quote }}
+              {{- else }}
               value: "80"
+              {{- end }}
             - name: LLM_MODEL
               {{- if .Values.ollama.enabled }}
               value: {{ .Values.ollama.LLM_MODEL_ID | quote }}
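As a rough illustration of what this conditional renders to (a sketch; 8080 is just an example override, not a chart default):

# externalLLM.enabled=true, externalLLM.LLM_SERVER_PORT="8080"
- name: LLM_SERVER_PORT
  value: "8080"

# externalLLM.enabled=true, LLM_SERVER_PORT unset: the `default "80"` filter applies
- name: LLM_SERVER_PORT
  value: "80"

# externalLLM.enabled=false: the original hard-coded port
- name: LLM_SERVER_PORT
  value: "80"

Note that sprig's `default` also falls back to "80" when the value is set to an empty string, since it treats empty values as unset.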
File 3 of 6 (chart values):

@@ -3,10 +3,11 @@

 # External LLM configuration
 externalLLM:
-  enabled: true
-  LLM_SERVICE_HOST_IP: "http://your-llm-server"
-  LLM_MODEL: "your-model"
-  OPENAI_API_KEY: "your-api-key"
+  enabled: true  # Enable external LLM service
+  LLM_SERVICE_HOST_IP: "http://your-llm-server"  # External LLM service host
+  LLM_MODEL: "your-model"  # LLM model to use
+  OPENAI_API_KEY: "your-api-key"  # OpenAI API key for authentication
+  LLM_SERVICE_PORT: "80"  # Port for the external LLM service

 # Disable internal LLM services when using external LLM
 tgi:
File 4 of 6 (deployment template):

@@ -41,21 +41,29 @@ spec:
               value: {{ include "llm-uservice.fullname" (index .Subcharts "llm-uservice") | quote }}
             {{- end }}
             - name: LLM_SERVICE_PORT
+              {{- if .Values.externalLLM.enabled }}
+              value: {{ .Values.externalLLM.LLM_SERVICE_PORT | default "80" | quote }}
+              {{- else }}
               value: {{ index .Values "llm-uservice" "service" "port" | quote }}
+              {{- end }}
             {{- if .Values.externalLLM.enabled }}
             - name: LLM_MODEL
               value: {{ .Values.externalLLM.LLM_MODEL }}
             - name: OPENAI_API_KEY
               value: {{ .Values.externalLLM.OPENAI_API_KEY }}
             {{- end }}
+            {{- if index .Values "retriever-usvc" "enabled" }}
             - name: RETRIEVAL_SERVICE_HOST_IP
               value: {{ include "retriever-usvc.fullname" (index .Subcharts "retriever-usvc") | quote }}
             - name: REDIS_RETRIEVER_PORT
               value: {{ index .Values "retriever-usvc" "service" "port" | quote }}
+            {{- end }}
+            {{- if index .Values "embedding-usvc" "enabled" }}
             - name: TEI_EMBEDDING_HOST_IP
               value: {{ include "embedding-usvc.fullname" (index .Subcharts "embedding-usvc") | quote }}
             - name: EMBEDDER_PORT
               value: {{ index .Values "embedding-usvc" "service" "port" | quote }}
+            {{- end }}
           securityContext:
             {{- toYaml .Values.securityContext | nindent 12 }}
           image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
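Two notes on the new guards: `index .Values "retriever-usvc" "enabled"` is used because the subchart keys contain hyphens and cannot be reached with plain `.Values.retriever-usvc.enabled` dot notation, and the guards presumably keep the `include "...fullname"` lookups from producing broken or empty env values when a subchart is disabled. A minimal values sketch of the toggles they read (the key names come from the template; the boolean settings are illustrative):

retriever-usvc:
  enabled: true  # RETRIEVAL_SERVICE_HOST_IP / REDIS_RETRIEVER_PORT stay in the rendered Deployment
embedding-usvc:
  enabled: false  # TEI_EMBEDDING_HOST_IP / EMBEDDER_PORT are omitted entirely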
File 5 of 6 (chart values):

@@ -7,6 +7,7 @@ externalLLM:
   LLM_SERVICE_HOST_IP: "http://your-llm-server"  # External LLM service host
   LLM_MODEL: "your-model"  # LLM model to use
   OPENAI_API_KEY: "your-api-key"  # OpenAI API key for authentication
+  LLM_SERVICE_PORT: "80"  # Port for the external LLM service

 # Disable internal LLM service when using external LLM
 llm-uservice:
File 6 of 6 (deployment template):

@@ -41,11 +41,17 @@ spec:
               value: {{ include "llm-uservice.fullname" (index .Subcharts "llm-uservice") }}
             {{- end }}
             - name: LLM_SERVICE_PORT
+              {{- if .Values.externalLLM.enabled }}
+              value: {{ .Values.externalLLM.LLM_SERVICE_PORT | default "80" | quote }}
+              {{- else }}
               value: {{ index .Values "llm-uservice" "service" "port" | quote }}
+              {{- end }}
+            {{- if .Values.whisper.enabled }}
             - name: ASR_SERVICE_HOST_IP
               value: {{ include "whisper.fullname" (index .Subcharts "whisper") }}
             - name: ASR_SERVICE_PORT
               value: {{ index .Values "whisper" "service" "port" | quote }}
+            {{- end }}
             {{- if .Values.externalLLM.enabled }}
             - name: LLM_MODEL
               value: {{ .Values.externalLLM.LLM_MODEL }}
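The `whisper.enabled` guard works the same way: when the subchart is switched off, the ASR wiring simply drops out of the rendered manifest. A minimal values sketch (the flag name comes from the template above; the value is illustrative):

whisper:
  enabled: false  # ASR_SERVICE_HOST_IP / ASR_SERVICE_PORT are no longer rendered into the Deployment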