Commit 73ff0c9

ADDED : gemini flash in some route

AquibPy committed May 31, 2024 (1 parent: c3f6378)
Showing 3 changed files with 5 additions and 4 deletions.

.gitignore: 1 change (1 addition, 0 deletions)

@@ -1,5 +1,6 @@
 __pycache__/
 venv/
+test_venv/
 .env
 .pytest_cache/
 .vscode

api.py: 2 changes (1 addition, 1 deletion)

@@ -230,7 +230,7 @@ async def youtube_video_transcribe_summarizer_gemini(url: str = Form(...)):
         print("Retrieving response from Redis cache")
         return ResponseText(response=cached_response.decode("utf-8"))

-    model = genai.GenerativeModel(settings.GEMINI_PRO)
+    model = genai.GenerativeModel(settings.GEMINI_FLASH)
     transcript_text = extract_transcript_details(url)
     response = model.generate_content(settings.youtube_transcribe_prompt + transcript_text)
     redis.set(cache_key, response.text, ex=60)
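
For context, api.py calls the google.generativeai client directly. Below is a minimal, self-contained sketch of that pattern with the Flash model this commit switches to; the model-name string and the example prompt are assumptions for illustration, since the repository reads the real value from settings.GEMINI_FLASH, which is not shown in this diff.

import os
import google.generativeai as genai

# Assumed stand-in for settings.GEMINI_FLASH; the repo's actual value is not in this diff.
GEMINI_FLASH = "gemini-1.5-flash"

genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

# Before this commit the route built its model from settings.GEMINI_PRO;
# after it, the faster Flash model is used instead.
model = genai.GenerativeModel(GEMINI_FLASH)
response = model.generate_content("Summarize the following transcript: ...")  # placeholder prompt
print(response.text)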

helper_functions.py: 6 changes (3 additions, 3 deletions)

@@ -93,7 +93,7 @@ def get_qa_chain():
     return chain

 def get_url_doc_qa(url,doc):
-    llm = GoogleGenerativeAI(model= settings.GEMINI_PRO, google_api_key=os.getenv("GOOGLE_API_KEY"),temperature=0.3)
+    llm = GoogleGenerativeAI(model= settings.GEMINI_FLASH, google_api_key=os.getenv("GOOGLE_API_KEY"),temperature=0.3)
     if url:
         loader = WebBaseLoader(url)
         data = loader.load()
@@ -135,7 +135,7 @@ def get_gemini_pdf(pdf):
     text_splitter = RecursiveCharacterTextSplitter(chunk_size=10000, chunk_overlap=1000)
     chunks = text_splitter.split_text(text)
     vector_store = FAISS.from_texts(chunks, embedding=google_embedding)
-    llm = GoogleGenerativeAI(model= settings.GEMINI_PRO, google_api_key=os.getenv("GOOGLE_API_KEY"),temperature=0.7)
+    llm = GoogleGenerativeAI(model= settings.GEMINI_FLASH, google_api_key=os.getenv("GOOGLE_API_KEY"),temperature=0.7)
     retriever = vector_store.as_retriever(score_threshold=0.7)
     PROMPT = PromptTemplate(
         template=settings.prompt_pdf, input_variables=["context", "question"]
@@ -188,7 +188,7 @@ def questions_generator(doc):
     # splitter_ans_gen = TokenTextSplitter(chunk_size = 1000,chunk_overlap = 100)
     # document_answer_gen = splitter_ans_gen.split_documents(document_ques_gen)

-    llm_ques_gen_pipeline = ChatGoogleGenerativeAI(model= settings.GEMINI_PRO,google_api_key=os.getenv("GOOGLE_API_KEY"),temperature=0.3)
+    llm_ques_gen_pipeline = ChatGoogleGenerativeAI(model= settings.GEMINI_FLASH,google_api_key=os.getenv("GOOGLE_API_KEY"),temperature=0.3)
     PROMPT_QUESTIONS = PromptTemplate(template=settings.question_prompt_template, input_variables=["text"])
     REFINE_PROMPT_QUESTIONS = PromptTemplate(input_variables=["existing_answer", "text"],template=settings.question_refine_template)
     ques_gen_chain = load_summarize_chain(llm = llm_ques_gen_pipeline,
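
helper_functions.py reaches Gemini through LangChain's wrappers rather than the raw client. Here is a minimal sketch of that path, with an assumed model string standing in for settings.GEMINI_FLASH; the keyword arguments and temperatures mirror the ones visible in the hunks above.

import os
from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAI

# Assumed stand-in for settings.GEMINI_FLASH.
GEMINI_FLASH = "gemini-1.5-flash"

# Plain LLM wrapper, as used in get_url_doc_qa and get_gemini_pdf.
llm = GoogleGenerativeAI(model=GEMINI_FLASH,
                         google_api_key=os.getenv("GOOGLE_API_KEY"),
                         temperature=0.3)

# Chat wrapper, as used in questions_generator.
chat_llm = ChatGoogleGenerativeAI(model=GEMINI_FLASH,
                                  google_api_key=os.getenv("GOOGLE_API_KEY"),
                                  temperature=0.3)

print(llm.invoke("Reply with a single word."))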
