From c4f1b89d6880e7d199388a6e9fa11bd58252a66e Mon Sep 17 00:00:00 2001
From: Lianhao Lu
Date: Tue, 27 May 2025 04:14:43 +0000
Subject: [PATCH] llm-faqgen: support air-gapped environment

The container previously installed langserve at start-up
(entrypoint.sh ran `pip install -r requirements-runtime.txt`), which
needs network access and therefore fails in an air-gapped environment.
Drop the runtime install, launch the microservice directly, and
replace langserve.serialization with langchain's own serialization
functions, which common.py already pulls in via langchain_core.

Signed-off-by: Lianhao Lu
---
 comps/llms/src/faq-generation/Dockerfile               | 2 +-
 comps/llms/src/faq-generation/entrypoint.sh            | 8 --------
 comps/llms/src/faq-generation/integrations/common.py   | 6 ++----
 comps/llms/src/faq-generation/requirements-runtime.txt | 1 -
 4 files changed, 3 insertions(+), 14 deletions(-)
 delete mode 100644 comps/llms/src/faq-generation/entrypoint.sh
 delete mode 100644 comps/llms/src/faq-generation/requirements-runtime.txt

diff --git a/comps/llms/src/faq-generation/Dockerfile b/comps/llms/src/faq-generation/Dockerfile
index a07ea2bd75..af5d763604 100644
--- a/comps/llms/src/faq-generation/Dockerfile
+++ b/comps/llms/src/faq-generation/Dockerfile
@@ -30,4 +30,4 @@ USER user
 
 WORKDIR /home/user/comps/llms/src/faq-generation
 
-ENTRYPOINT ["bash", "entrypoint.sh"]
+ENTRYPOINT ["python", "opea_faqgen_microservice.py"]
diff --git a/comps/llms/src/faq-generation/entrypoint.sh b/comps/llms/src/faq-generation/entrypoint.sh
deleted file mode 100644
index d3ad707a59..0000000000
--- a/comps/llms/src/faq-generation/entrypoint.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-# Copyright (C) 2024 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-pip --no-cache-dir install -r requirements-runtime.txt
-
-python opea_faqgen_microservice.py
diff --git a/comps/llms/src/faq-generation/integrations/common.py b/comps/llms/src/faq-generation/integrations/common.py
index fcd9e4c709..659fca47f5 100644
--- a/comps/llms/src/faq-generation/integrations/common.py
+++ b/comps/llms/src/faq-generation/integrations/common.py
@@ -8,6 +8,7 @@
 from langchain.chains.summarize import load_summarize_chain
 from langchain.docstore.document import Document
 from langchain.text_splitter import CharacterTextSplitter
+from langchain_core.load import dumps as langchain_dumps
 from langchain_core.prompts import PromptTemplate
 
 from comps import CustomLogger, GeneratedDoc, OpeaComponent, ServiceType
@@ -105,11 +106,8 @@ async def generate(self, input: ChatCompletionRequest, client):
         if input.stream:
 
             async def stream_generator():
-                from langserve.serialization import WellKnownLCSerializer
-
-                _serializer = WellKnownLCSerializer()
                 async for chunk in llm_chain.astream_log(docs):
-                    data = _serializer.dumps({"ops": chunk.ops}).decode("utf-8")
+                    data = langchain_dumps({"ops": chunk.ops})
                     if logflag:
                         logger.info(data)
                     yield f"data: {data}\n\n"
diff --git a/comps/llms/src/faq-generation/requirements-runtime.txt b/comps/llms/src/faq-generation/requirements-runtime.txt
deleted file mode 100644
index 225adde271..0000000000
--- a/comps/llms/src/faq-generation/requirements-runtime.txt
+++ /dev/null
@@ -1 +0,0 @@
-langserve
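
Note on the serialization swap: langserve's WellKnownLCSerializer.dumps()
returns bytes (hence the old .decode("utf-8")), while langchain_core's
dumps() returns a str directly. The snippet below is a minimal
stand-alone sketch of the new path, assuming only that langchain-core is
installed; the `ops` payload is a made-up example of the JSONPatch-style
operations that astream_log() emits.

    # Minimal sketch (assumes langchain-core is installed; the `ops`
    # payload below is hypothetical, not taken from a real run).
    from langchain_core.load import dumps as langchain_dumps

    # astream_log() yields patch objects whose .ops attribute is a list
    # of JSONPatch-style dicts such as this one.
    ops = [{"op": "add", "path": "/streamed_output/-", "value": "Hello"}]

    # langchain_core's dumps() returns a str, so the .decode("utf-8")
    # needed with the langserve serializer's bytes output goes away.
    data = langchain_dumps({"ops": ops})
    print(f"data: {data}\n\n")  # the SSE frame stream_generator() yields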