From 6e8be6afc5c87b4635fd7d31dc9948f5e7a2b688 Mon Sep 17 00:00:00 2001 From: AquibPy Date: Fri, 29 Mar 2024 03:02:22 +0530 Subject: [PATCH] MODIFIED: changes in RAG_PDF_Groq route --- README.md | 3 +++ api.py | 8 +++++--- helper_functions.py | 4 ++-- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index d651fe4..16366c5 100644 --- a/README.md +++ b/README.md @@ -73,7 +73,10 @@ Generative AI, powered by advanced machine learning models, enables the creation - **Route:** `/text_summarizer_groq` - **Description:** Dive into a realm of creativity with our text summarization endpoint, where the model mixtral-8x7b-32768 crafts concise summaries from your input text, delivering insights at the speed of thought. +### 12. RAG Using Groq +- **Route:** `/RAG_PDF_Groq` +- **Description:** This endpoint uses the PDF and gives the answer based on the prompt provided, using Groq, with a default model input of llama2-70b-4096, but offering alternatives like mixtral-8x7b-32768 and gemma-7b-it.
## Usage diff --git a/api.py b/api.py index 1e924b9..6cb4584 100644 --- a/api.py +++ b/api.py @@ -289,10 +289,12 @@ async def groq_text_summary(input_text: str = Form(...)): except Exception as e: return ResponseText(response=f"Error: {str(e)}") -@app.post("/RAG_PDF_Groq",description="The endpoint uses the pdf and give the answer based on the prompt provided using groq") -async def talk_pd_groq(pdf: UploadFile = File(...),prompt: str = Form(...)): +@app.post("/RAG_PDF_Groq",description="The endpoint uses the pdf and give the answer based on the prompt provided using groq\ + In model input default is llama2-70b-4096 but you can choose mixtral-8x7b-32768 and gemma-7b-it.") +async def talk_pd_groq(pdf: UploadFile = File(...),prompt: str = Form(...), + model: Optional[str] = Form('llama2-70b-4096')): try: - rag_chain = groq_pdf(pdf.file) + rag_chain = groq_pdf(pdf.file,model) out = rag_chain.invoke(prompt) db = MongoDB() payload = { diff --git a/helper_functions.py b/helper_functions.py index 43b960c..a579118 100644 --- a/helper_functions.py +++ b/helper_functions.py @@ -188,10 +188,10 @@ def questions_generator(doc): ques = ques_gen_chain.run(document_ques_gen) return ques -def groq_pdf(pdf): +def groq_pdf(pdf,model): llm = ChatGroq( api_key=os.environ['GROQ_API_KEY'], - model_name='mixtral-8x7b-32768' + model_name=model ) text = "".join(page.extract_text() for page in PdfReader(pdf).pages) text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)