# Streamlit RAG app: answers user queries against a local FAISS index
# (sentence-transformer embeddings) using Google PaLM through LangChain.
from langchain.embeddings import SentenceTransformerEmbeddings  # HuggingFaceInstructEmbeddings
from langchain.vectorstores import FAISS
import os
import copy
import pprint
# import google.generativeai as palm
from langchain.llms import GooglePalm
from langchain import PromptTemplate
from langchain.chains import RetrievalQA
import streamlit as st
import warnings

# Silence third-party deprecation/warning chatter so it doesn't pollute the UI.
warnings.filterwarnings("ignore")


@st.cache_resource
def getapi():
    """Read the Google PaLM API key from ``API.txt`` in the working directory.

    Cached by Streamlit, so the file is read only once per server session.

    Returns:
        The file's full contents as a string (``read()`` already returns
        ``str``; the original's extra ``str()`` wrap was redundant).

    Raises:
        OSError: if ``API.txt`` is missing or unreadable.
    """
    # ``with`` guarantees the handle is closed — the original leaked it.
    with open("API.txt", "r", encoding="utf-8") as keyfile:
        return keyfile.read()


# Loaded once at import time (getapi is cached by Streamlit anyway).
PALM_API = getapi()
# palm.configure(api_key=PALM_API)


@st.cache_resource
def getmodel():
    """Build and cache the RetrievalQA chain.

    Loads the local ``faiss`` index with MiniLM sentence-transformer
    embeddings, wraps it as a top-10 retriever, and combines it with a
    Google PaLM LLM in a 'refine' chain that also returns its sources.
    """
    embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
    # NOTE(review): load_local deserializes pickled data — only load an index
    # you built yourself; newer LangChain versions require
    # allow_dangerous_deserialization=True here. Confirm library version.
    db = FAISS.load_local("faiss", embeddings)
    retriever = db.as_retriever(search_kwargs={'k': 10})
    # prompt=getprompt()
    # temperature=0 for deterministic answers; 512-token response cap.
    llm = GooglePalm(google_api_key=PALM_API, temperature=0, max_output_tokens=512)
    qa_llm = RetrievalQA.from_chain_type(llm=llm,
                                         chain_type='refine',
                                         retriever=retriever,
                                         return_source_documents=True,
                                         # chain_type_kwargs={'prompt': prompt},
                                         verbose=True)
    return qa_llm


@st.cache_resource
def getprompt():
    """Assemble the retrieval-QA prompt with 'context'/'question' slots.

    Cached by Streamlit; currently not wired into the chain (see the
    commented-out chain_type_kwargs in getmodel).
    """
    qa_template = """Use the information to elaborate in points about the user's query.
If user mentions something not in the 'Context', just answer that you don't know.
If you don't know the answer, just say that you don't know, don't try to make up an answer.

Context: {context}

Query: {question}

Only return the helpful answer below and nothing else.

Helpful answer:
"""
    return PromptTemplate(input_variables=['context', 'question'],
                          template=qa_template)


def parseresult(result):
    """Return a deep copy of *result* augmented with a 'source_pages' list.

    Parameters:
        result: RetrievalQA output dict containing a 'source_documents'
            sequence whose items each expose ``.metadata['page']``.

    Returns:
        A new dict (the caller's input is left untouched) with an added
        'source_pages' key listing each source document's page number in
        retrieval order.
    """
    parsed = copy.deepcopy(result)
    # Pages are read from the copied docs, so nothing aliases the caller's
    # objects. (Dropped the original's redundant second deepcopy of the
    # freshly built list and its no-op `del` of locals.)
    parsed['source_pages'] = [doc.metadata['page'] for doc in parsed['source_documents']]
    return parsed


def getsources(result):
    """Format each retrieved source document's metadata dict as a string.

    Parameters:
        result: dict with a 'source_documents' sequence of documents that
            expose a ``.metadata`` attribute.

    Returns:
        One string per source document, in retrieval order.
    """
    return [f"{doc.metadata}" for doc in result['source_documents']]


# --- Streamlit page -------------------------------------------------------
st.title('Query Docs')

prompt = st.sidebar.text_input("Enter query")
try:
    llm = getmodel()
except Exception:
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
    # Keep the app alive and surface the failure in the UI instead.
    st.write("CANNOT LOAD MODEL OR DATABASE")
    # print("ERROR LOADING MODEL OR DATABASE")

if prompt:
    # A query starting with "exit" shuts down the server process.
    if prompt.startswith("exit"):
        import sys
        sys.exit()
    try:
        result = parseresult(llm(prompt))
        sources = getsources(result)
        result = result["result"]
    except Exception:
        # Narrowed from bare `except:`. This also covers the NameError for
        # `llm` when model loading failed above.
        result = "Error in retrieving! \n You can try reframing your query, if it doesnt work there may be something broken. \n :/ "
        sources = []

    # Server-side console separator between queries (debug aid).
    print(">>>>>>>>>>>>><<<<<<<<<<<<<<<<<")
    st.header("Result")
    st.write(result)
    st.header("Sources")
    st.write(sources)