2 changes: 1 addition & 1 deletion comps/llms/src/faq-generation/Dockerfile
@@ -30,4 +30,4 @@ USER user
 
 WORKDIR /home/user/comps/llms/src/faq-generation
 
-ENTRYPOINT ["bash", "entrypoint.sh"]
+ENTRYPOINT ["python", "opea_faqgen_microservice.py"]
8 changes: 0 additions & 8 deletions comps/llms/src/faq-generation/entrypoint.sh

This file was deleted.

6 changes: 2 additions & 4 deletions comps/llms/src/faq-generation/integrations/common.py
@@ -8,6 +8,7 @@
 from langchain.chains.summarize import load_summarize_chain
 from langchain.docstore.document import Document
 from langchain.text_splitter import CharacterTextSplitter
+from langchain_core.load import dumps as langchain_dumps
 from langchain_core.prompts import PromptTemplate
 
 from comps import CustomLogger, GeneratedDoc, OpeaComponent, ServiceType
@@ -105,11 +106,8 @@ async def generate(self, input: ChatCompletionRequest, client):
         if input.stream:
 
             async def stream_generator():
-                from langserve.serialization import WellKnownLCSerializer
-
-                _serializer = WellKnownLCSerializer()
                 async for chunk in llm_chain.astream_log(docs):
-                    data = _serializer.dumps({"ops": chunk.ops}).decode("utf-8")
+                    data = langchain_dumps({"ops": chunk.ops})
                     if logflag:
                         logger.info(data)
                     yield f"data: {data}\n\n"
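
A minimal sketch of the serialization swap above, for illustration only: langchain_core.load.dumps returns a JSON str directly, so the langserve WellKnownLCSerializer (which returns bytes and required .decode("utf-8")) and its extra dependency are no longer needed. The ops payload below is a made-up stand-in for what llm_chain.astream_log() actually yields.

from langchain_core.load import dumps as langchain_dumps

# Hypothetical JSONPatch-style op, shaped like the chunks astream_log emits.
ops = [{"op": "add", "path": "/streamed_output/-", "value": "Q: What does this service do?"}]

# New path: dumps() already gives a JSON string, so no .decode("utf-8") is needed.
data = langchain_dumps({"ops": ops})
print(f"data: {data}\n\n")  # same SSE-style line that stream_generator() yields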
1 change: 0 additions & 1 deletion comps/llms/src/faq-generation/requirements-runtime.txt

This file was deleted.