-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmain.py
44 lines (32 loc) · 1.2 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
"""Minimal RAG script: answer a user question using a PDF as context.

Loads a PDF, stuffs its pages into the system prompt as context, and asks
Claude (via LangChain) to answer a question typed on stdin.
"""
import os

# Anthropic's LLM is the "brain" of the app; the other imports wire up
# the document loader, prompt template, and output parsing.
from langchain_anthropic import ChatAnthropic
from langchain_community.document_loaders import PyPDFLoader
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

# Read the API key from the environment rather than hard-coding a secret
# in source control; the placeholder keeps the original fallback behavior.
chat_model_anthropic = ChatAnthropic(
    model="claude-3-sonnet-20240229",
    temperature=0,  # deterministic output for reproducible answers
    api_key=os.environ.get("ANTHROPIC_API_KEY", "place_your_anthropic_api_key_here"),
)

# External data: load every page of the PDF as Document objects.
# NOTE(review): update pdf_path to a real file before running.
pdf_path = 'path_of_your_pdf'
loader = PyPDFLoader(file_path=pdf_path)
documents = loader.load()

# Instruction for the LLM: answer using the supplied context.
rag_prompt = ChatPromptTemplate.from_messages([
    ("system", 'You are a helpful assistant. Use the following context when responding:\n\n{context}.'),
    ("human", "{question}")
])

# Parse the model's message into a plain string.
output_parser = StrOutputParser()

# LCEL pipeline: prompt -> model -> string. Reuse the parser instance
# instead of constructing a second StrOutputParser inline.
rag_chain = rag_prompt | chat_model_anthropic | output_parser

# input() already returns str; no wrapper needed.
user_input = input("enter question: ")

# Initiate the app: run the chain and print the answer.
response = rag_chain.invoke({
    "question": user_input,
    "context": documents,
})
print(response)