
Commit 2af1ea0

remove examples gateway. (opea-project#1243)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent c760cac commit 2af1ea0

4 files changed: 120 additions & 31 deletions


ChatQnA/chatqna.py

Lines changed: 13 additions & 7 deletions
@@ -6,7 +6,8 @@
 import os
 import re
 
-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
+from comps.cores.mega.utils import handle_message
 from comps.cores.proto.api_protocol import (
     ChatCompletionRequest,
     ChatCompletionResponse,
@@ -187,15 +188,15 @@ def align_generator(self, gen, **kwargs):
     yield "data: [DONE]\n\n"
 
 
-class ChatQnAService(Gateway):
+class ChatQnAService:
     def __init__(self, host="0.0.0.0", port=8000):
         self.host = host
         self.port = port
         ServiceOrchestrator.align_inputs = align_inputs
         ServiceOrchestrator.align_outputs = align_outputs
         ServiceOrchestrator.align_generator = align_generator
-
         self.megaservice = ServiceOrchestrator()
+        self.endpoint = str(MegaServiceEndpoint.CHAT_QNA)
 
     def add_remote_service(self):
 
@@ -332,7 +333,7 @@ async def handle_request(self, request: Request):
         data = await request.json()
         stream_opt = data.get("stream", True)
         chat_request = ChatCompletionRequest.parse_obj(data)
-        prompt = self._handle_message(chat_request.messages)
+        prompt = handle_message(chat_request.messages)
         parameters = LLMParams(
             max_tokens=chat_request.max_tokens if chat_request.max_tokens else 1024,
             top_k=chat_request.top_k if chat_request.top_k else 10,
@@ -379,15 +380,20 @@ async def handle_request(self, request: Request):
 
     def start(self):
 
-        super().__init__(
-            megaservice=self.megaservice,
+        self.service = MicroService(
+            self.__class__.__name__,
+            service_role=ServiceRoleType.MEGASERVICE,
             host=self.host,
             port=self.port,
-            endpoint=str(MegaServiceEndpoint.CHAT_QNA),
+            endpoint=self.endpoint,
             input_datatype=ChatCompletionRequest,
             output_datatype=ChatCompletionResponse,
         )
 
+        self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+
+        self.service.start()
+
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
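
Taken together, these hunks replace inheritance from the removed Gateway base class with self-hosting: the example builds its own MicroService in the megaservice role, registers its handler as a POST route, and starts the server. A minimal sketch of the resulting pattern, using only constructor arguments and calls that appear in the diff above (the class name and handler body are hypothetical placeholders):

from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType
from comps.cores.proto.api_protocol import ChatCompletionRequest, ChatCompletionResponse


class ExampleService:  # hypothetical stand-in for ChatQnAService
    def __init__(self, host="0.0.0.0", port=8000):
        self.host = host
        self.port = port
        self.megaservice = ServiceOrchestrator()
        # The endpoint is now stored once and reused for both the server
        # binding and the route registration.
        self.endpoint = str(MegaServiceEndpoint.CHAT_QNA)

    async def handle_request(self, request):
        ...  # run self.megaservice and assemble a ChatCompletionResponse

    def start(self):
        # Previously: super().__init__(megaservice=..., ...) on the Gateway
        # base class. Now the example owns a MicroService in the MEGASERVICE
        # role and registers its own POST route before starting the server.
        self.service = MicroService(
            self.__class__.__name__,
            service_role=ServiceRoleType.MEGASERVICE,
            host=self.host,
            port=self.port,
            endpoint=self.endpoint,
            input_datatype=ChatCompletionRequest,
            output_datatype=ChatCompletionResponse,
        )
        self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
        self.service.start()

Storing the endpoint once in __init__ keeps the server binding and the route registration from drifting apart.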

ChatQnA/chatqna_wrapper.py

Lines changed: 11 additions & 7 deletions
@@ -3,7 +3,8 @@
 
 import os
 
-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
+from comps.cores.mega.utils import handle_message
 from comps.cores.proto.api_protocol import (
     ChatCompletionRequest,
     ChatCompletionResponse,
@@ -27,11 +28,12 @@
 LLM_SERVICE_PORT = int(os.getenv("LLM_SERVICE_PORT", 9000))
 
 
-class ChatQnAService(Gateway):
+class ChatQnAService:
     def __init__(self, host="0.0.0.0", port=8000):
         self.host = host
         self.port = port
         self.megaservice = ServiceOrchestrator()
+        self.endpoint = str(MegaServiceEndpoint.CHAT_QNA)
 
     def add_remote_service(self):
         embedding = MicroService(
@@ -75,7 +77,7 @@ async def handle_request(self, request: Request):
         data = await request.json()
         stream_opt = data.get("stream", True)
         chat_request = ChatCompletionRequest.parse_obj(data)
-        prompt = self._handle_message(chat_request.messages)
+        prompt = handle_message(chat_request.messages)
         parameters = LLMParams(
             max_tokens=chat_request.max_tokens if chat_request.max_tokens else 1024,
             top_k=chat_request.top_k if chat_request.top_k else 10,
@@ -121,15 +123,17 @@ async def handle_request(self, request: Request):
         return ChatCompletionResponse(model="chatqna", choices=choices, usage=usage)
 
     def start(self):
-
-        super().__init__(
-            megaservice=self.megaservice,
+        self.service = MicroService(
+            self.__class__.__name__,
+            service_role=ServiceRoleType.MEGASERVICE,
             host=self.host,
             port=self.port,
-            endpoint=str(MegaServiceEndpoint.CHAT_QNA),
+            endpoint=self.endpoint,
             input_datatype=ChatCompletionRequest,
             output_datatype=ChatCompletionResponse,
         )
+        self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+        self.service.start()
 
 
 if __name__ == "__main__":
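
chatqna_wrapper.py receives the same refactor. The one remaining behavioral dependency on the old base class, self._handle_message, becomes the free function handle_message from comps.cores.mega.utils. Judging only from its call sites in these diffs, it flattens the messages field of a ChatCompletionRequest into a single prompt string; the sample input below is an assumption, not taken from this commit:

from comps.cores.mega.utils import handle_message

# Assumed OpenAI-style message list; every handler above calls
# handle_message on chat_request.messages.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is OPEA?"},
]
prompt = handle_message(messages)  # one flattened prompt string for the LLM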

DocSum/docsum.py

Lines changed: 49 additions & 9 deletions
@@ -5,8 +5,8 @@
 import os
 from typing import List
 
-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
-from comps.cores.mega.gateway import read_text_from_file
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
+from comps.cores.mega.utils import handle_message
 from comps.cores.proto.api_protocol import (
     ChatCompletionRequest,
     ChatCompletionResponse,
@@ -27,11 +27,47 @@
 LLM_SERVICE_PORT = int(os.getenv("LLM_SERVICE_PORT", 9000))
 
 
-class DocSumService(Gateway):
+def read_pdf(file):
+    from langchain.document_loaders import PyPDFLoader
+
+    loader = PyPDFLoader(file)
+    docs = loader.load_and_split()
+    return docs
+
+
+def read_text_from_file(file, save_file_name):
+    import docx2txt
+    from langchain.text_splitter import CharacterTextSplitter
+
+    # read text file
+    if file.headers["content-type"] == "text/plain":
+        file.file.seek(0)
+        content = file.file.read().decode("utf-8")
+        # Split text
+        text_splitter = CharacterTextSplitter()
+        texts = text_splitter.split_text(content)
+        # Create multiple documents
+        file_content = texts
+    # read pdf file
+    elif file.headers["content-type"] == "application/pdf":
+        documents = read_pdf(save_file_name)
+        file_content = [doc.page_content for doc in documents]
+    # read docx file
+    elif (
+        file.headers["content-type"] == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
+        or file.headers["content-type"] == "application/octet-stream"
+    ):
+        file_content = docx2txt.process(save_file_name)
+
+    return file_content
+
+
+class DocSumService:
     def __init__(self, host="0.0.0.0", port=8000):
         self.host = host
         self.port = port
         self.megaservice = ServiceOrchestrator()
+        self.endpoint = str(MegaServiceEndpoint.DOC_SUMMARY)
 
     def add_remote_service(self):
 
@@ -62,7 +98,7 @@ async def handle_request(self, request: Request, files: List[UploadFile] = File(
         data = await request.json()
         stream_opt = data.get("stream", True)
         chat_request = ChatCompletionRequest.model_validate(data)
-        prompt = self._handle_message(chat_request.messages)
+        prompt = handle_message(chat_request.messages)
 
         initial_inputs_data = {data["type"]: prompt}
 
@@ -98,9 +134,9 @@ async def handle_request(self, request: Request, files: List[UploadFile] = File(
             file_summaries.append(docs)
 
         if file_summaries:
-            prompt = self._handle_message(chat_request.messages) + "\n".join(file_summaries)
+            prompt = handle_message(chat_request.messages) + "\n".join(file_summaries)
         else:
-            prompt = self._handle_message(chat_request.messages)
+            prompt = handle_message(chat_request.messages)
 
         data_type = data.get("type")
         if data_type is not None:
@@ -151,14 +187,18 @@ async def handle_request(self, request: Request, files: List[UploadFile] = File(
         return ChatCompletionResponse(model="docsum", choices=choices, usage=usage)
 
     def start(self):
-        super().__init__(
-            megaservice=self.megaservice,
+
+        self.service = MicroService(
+            self.__class__.__name__,
+            service_role=ServiceRoleType.MEGASERVICE,
             host=self.host,
             port=self.port,
-            endpoint=str(MegaServiceEndpoint.DOC_SUMMARY),
+            endpoint=self.endpoint,
             input_datatype=ChatCompletionRequest,
             output_datatype=ChatCompletionResponse,
         )
+        self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+        self.service.start()
 
 
 if __name__ == "__main__":
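
Since read_text_from_file (and its read_pdf helper) used to be imported from comps.cores.mega.gateway, deleting the gateway module forces DocSum, and FaqGen below, to carry local copies. A hedged sketch of how the inlined helper is consumed, modeled on the upload handling in handle_request; the temp-file naming and wrapper function are hypothetical details, and read_text_from_file as defined above is assumed to be in scope:

import uuid

from fastapi import UploadFile


async def file_to_text(file: UploadFile) -> str:
    # Persist the upload so the PDF/DOCX branches, which re-open the file
    # from disk, can find it.
    save_path = f"/tmp/{uuid.uuid4()}_{file.filename}"
    with open(save_path, "wb") as fout:
        fout.write(await file.read())
    # read_text_from_file dispatches on the Content-Type header: text/plain
    # is chunked with CharacterTextSplitter, application/pdf goes through
    # PyPDFLoader, and .docx (or octet-stream) through docx2txt.
    docs = read_text_from_file(file, save_path)
    if isinstance(docs, list):
        docs = "\n".join(docs)
    return docs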

FaqGen/faqgen.py

Lines changed: 47 additions & 8 deletions
@@ -5,8 +5,8 @@
 import os
 from typing import List
 
-from comps import Gateway, MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceType
-from comps.cores.mega.gateway import read_text_from_file
+from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType
+from comps.cores.mega.utils import handle_message
 from comps.cores.proto.api_protocol import (
     ChatCompletionRequest,
     ChatCompletionResponse,
@@ -23,11 +23,47 @@
 LLM_SERVICE_PORT = int(os.getenv("LLM_SERVICE_PORT", 9000))
 
 
-class FaqGenService(Gateway):
+def read_pdf(file):
+    from langchain.document_loaders import PyPDFLoader
+
+    loader = PyPDFLoader(file)
+    docs = loader.load_and_split()
+    return docs
+
+
+def read_text_from_file(file, save_file_name):
+    import docx2txt
+    from langchain.text_splitter import CharacterTextSplitter
+
+    # read text file
+    if file.headers["content-type"] == "text/plain":
+        file.file.seek(0)
+        content = file.file.read().decode("utf-8")
+        # Split text
+        text_splitter = CharacterTextSplitter()
+        texts = text_splitter.split_text(content)
+        # Create multiple documents
+        file_content = texts
+    # read pdf file
+    elif file.headers["content-type"] == "application/pdf":
+        documents = read_pdf(save_file_name)
+        file_content = [doc.page_content for doc in documents]
+    # read docx file
+    elif (
+        file.headers["content-type"] == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
+        or file.headers["content-type"] == "application/octet-stream"
+    ):
+        file_content = docx2txt.process(save_file_name)
+
+    return file_content
+
+
+class FaqGenService:
     def __init__(self, host="0.0.0.0", port=8000):
         self.host = host
         self.port = port
         self.megaservice = ServiceOrchestrator()
+        self.endpoint = str(MegaServiceEndpoint.FAQ_GEN)
 
     def add_remote_service(self):
         llm = MicroService(
@@ -61,9 +97,9 @@ async def handle_request(self, request: Request, files: List[UploadFile] = File(
             file_summaries.append(docs)
 
         if file_summaries:
-            prompt = self._handle_message(chat_request.messages) + "\n".join(file_summaries)
+            prompt = handle_message(chat_request.messages) + "\n".join(file_summaries)
         else:
-            prompt = self._handle_message(chat_request.messages)
+            prompt = handle_message(chat_request.messages)
 
         parameters = LLMParams(
             max_tokens=chat_request.max_tokens if chat_request.max_tokens else 1024,
@@ -101,14 +137,17 @@ async def handle_request(self, request: Request, files: List[UploadFile] = File(
         return ChatCompletionResponse(model="faqgen", choices=choices, usage=usage)
 
     def start(self):
-        super().__init__(
-            megaservice=self.megaservice,
+        self.service = MicroService(
+            self.__class__.__name__,
+            service_role=ServiceRoleType.MEGASERVICE,
             host=self.host,
             port=self.port,
-            endpoint=str(MegaServiceEndpoint.FAQ_GEN),
+            endpoint=self.endpoint,
             input_datatype=ChatCompletionRequest,
             output_datatype=ChatCompletionResponse,
         )
+        self.service.add_route(self.endpoint, self.handle_request, methods=["POST"])
+        self.service.start()
 
 
 if __name__ == "__main__":
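
With the Gateway layer gone, the externally visible contract should be unchanged: each megaservice still accepts ChatCompletionRequest payloads on its original endpoint. A smoke-test sketch against FaqGen's constructor defaults (port 8000); the /v1/faqgen path is an assumption about what str(MegaServiceEndpoint.FAQ_GEN) resolves to:

import requests

resp = requests.post(
    # /v1/faqgen is assumed, not shown in this diff; port 8000 is the
    # default from FaqGenService.__init__ above.
    "http://localhost:8000/v1/faqgen",
    json={"messages": "Summarize the OPEA project into a short FAQ.", "stream": False},
    timeout=120,
)
print(resp.json())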
