Skip to content

Commit

Permalink
ADDED: ChatBot using groq
Browse files Browse the repository at this point in the history
  • Loading branch information
AquibPy committed Mar 20, 2024
1 parent 384888e commit c92fa5b
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 3 deletions.
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,13 @@ Generative AI, powered by advanced machine learning models, enables the creation
- **Route:** `/questions_generator`
- **Description:** The endpoint uses the uploaded PDF to generate questions. It will be helpful for students or teachers preparing for their exams or tests.

### 10. ChatBot Using Groq

- **Route:** `/chat_groq`
- **Description:** This route utilizes Groq for enhanced language-processing speed. The default model is mixtral-8x7b-32768, with llama2-70b-4096 and gemma-7b-it available as alternatives. The conversational memory length option (1 to 10) maintains a list of recent interactions in the conversation, considering only the latest K interactions.



## Usage

Each endpoint accepts specific parameters as described in the respective endpoint documentation. Users can make POST requests to these endpoints with the required parameters to perform the desired tasks.
Expand Down
31 changes: 29 additions & 2 deletions api.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,18 @@
import os
from fastapi import FastAPI,Form,File,UploadFile
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse,RedirectResponse
from typing import List
from typing import List,Optional
from pydantic import BaseModel
import google.generativeai as genai
from fastapi.middleware.cors import CORSMiddleware
from settings import invoice_prompt,youtube_transcribe_prompt,text2sql_prompt,EMPLOYEE_DB
from mongo import MongoDB
from helper_functions import get_qa_chain,get_gemini_response,get_url_doc_qa,extract_transcript_details,\
get_gemini_response_health,get_gemini_pdf,read_sql_query,remove_substrings,questions_generator
from langchain_groq import ChatGroq
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationBufferWindowMemory

app = FastAPI(title="Genify By Mohd Aquib",
summary="This API contains routes of different Gen AI usecases")
Expand Down Expand Up @@ -227,4 +231,27 @@ async def pdf_questions_generator(pdf: UploadFile = File(...)):
print(result)
return ResponseText(response=remove_substrings(out))
except Exception as e:
return ResponseText(response=f"Error: {str(e)}")
return ResponseText(response=f"Error: {str(e)}")

@app.post("/chat_groq", description= """This route uses groq for faster response using Language Processing Unit(LPU).
          \n In model input default is mixtral-8x7b-32768 but you can choose llama2-70b-4096 and gemma-7b-it.
          \n conversational_memory_length ranges from 1 to 10. It keeps a list of the interactions of the conversation over time.
          It only uses the last K interactions """)
async def groq_chatbot(question: str = Form(...), model: Optional[str] = Form('mixtral-8x7b-32768'),
                       conversational_memory_length: Optional[int] = Form(5)):
    """Answer *question* with a Groq-hosted chat model and log the exchange to MongoDB.

    Parameters (multipart/form-data):
        question: the user's message.
        model: Groq model name (default ``mixtral-8x7b-32768``).
        conversational_memory_length: window size K for the conversation memory.

    Returns:
        ``{"Chatbot": <model reply>}`` on success, or a ``ResponseText`` with an
        ``Error: ...`` message on failure — consistent with the other endpoints
        in this file (e.g. ``/questions_generator``), instead of an unhandled 500.
    """
    try:
        # Window memory keeps only the last K interactions in the prompt context.
        memory = ConversationBufferWindowMemory(k=conversational_memory_length)
        # KeyError here (missing GROQ_API_KEY) is now reported to the client
        # via the except branch rather than crashing the request.
        groq_chat = ChatGroq(groq_api_key=os.environ['GROQ_API_KEY'], model_name=model)
        conversation = ConversationChain(llm=groq_chat, memory=memory)

        response = conversation.invoke(question)

        # Persist the interaction for auditing/analytics.
        db = MongoDB()
        payload = {
            "endpoint": "/chat_groq",
            "model": model,
            "conversational_memory_length": conversational_memory_length,
            "output": response['response']
        }
        db.insert_data({"Document": payload})
        return {"Chatbot": response['response']}
    except Exception as e:
        return ResponseText(response=f"Error: {str(e)}")
4 changes: 3 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,6 @@ youtube_transcript_api
llama-index
pypdf
pymongo
pytest
pytest
groq
langchain-groq

0 comments on commit c92fa5b

Please sign in to comment.