ADDED: query db endpoint
AquibPy committed Jun 6, 2024
1 parent 74729af commit a69b134
Showing 6 changed files with 119 additions and 8 deletions.
16 changes: 13 additions & 3 deletions README.md
@@ -189,6 +189,16 @@ percentage, missing keywords, and profile summary.
- Returns articles and research findings related to the specified tech topic.
- Handles exceptions gracefully and returns error messages in JSON format.

### 22. Query Database

- **Route:** `/query_db`
- **Description:** This API endpoint lets you query SQL databases with a Cohere ReAct Agent, built on Langchain's SQLDatabaseToolkit.
- **Features:**
  - Upload a `.db` file containing the database to be queried.
  - Provide a natural language prompt to retrieve the relevant information from the database.
  - Uses a Cohere ReAct Agent to process the query and generate a response. A minimal request sketch follows this list.
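
A minimal request sketch, assuming the API is running locally on the default uvicorn port; the database file (the `data/Chinook.db` added in this commit) and the prompt are only illustrative:

```python
import requests

# Hypothetical local server URL; adjust host/port to your deployment.
url = "http://127.0.0.1:8000/query_db"

# Form field names match the handler in api.py:
# database: UploadFile = File(...), prompt: str = Form(...)
with open("data/Chinook.db", "rb") as f:
    files = {"database": ("Chinook.db", f, "application/octet-stream")}
    data = {"prompt": "How many artists are in the database?"}
    response = requests.post(url, files=files, data=data)

print(response.json())
```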

## Usage

Each endpoint accepts specific parameters as described in the respective endpoint documentation. Users can make POST requests to these endpoints with the required parameters to perform the desired tasks.
@@ -204,7 +214,7 @@ Each endpoint accepts specific parameters as described in the respective endpoin
1. Clone the repository:

```bash
git clone https://github.com/AquibPy/LLM-use-cases-API.git
git clone https://github.com/AquibPy/Genify.git
```

2. Install dependencies:
@@ -213,15 +223,15 @@ Each endpoint accepts specific parameters as described in the respective endpoin
pip install -r requirements.txt
```

3. Create ```.evn``` file
3. Create ```.env``` file

Save your Google and Hugging Face API keys in this file. The new `/query_db` endpoint also reads `COHERE_API_KEY` from the environment (see `api.py`).
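
A sketch of a possible `.env` — the Google and Hugging Face variable names below are assumptions (check what `helper_functions.py` expects); only `COHERE_API_KEY` is confirmed by `api.py` in this commit:

```
# The two names below are assumed; adjust to what helper_functions.py reads.
GOOGLE_API_KEY=your-google-api-key
HUGGINGFACE_API_KEY=your-huggingface-api-key
# api.py reads this exact name for the /query_db endpoint.
COHERE_API_KEY=your-cohere-api-key
```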

## Running the Server

Start the FastAPI server by running the following command:
```
uvicorn main:app --reload
fastapi run api.py
```

## Contributing
67 changes: 65 additions & 2 deletions api.py
@@ -15,9 +15,9 @@
from mongo import MongoDB
from helper_functions import get_qa_chain,get_gemini_response,get_url_doc_qa,extract_transcript_details,\
get_gemini_response_health,get_gemini_pdf,read_sql_query,remove_substrings,questions_generator,groq_pdf,\
summarize_audio,chatbot_send_message,extraxt_pdf_text,advance_rag_llama_index
summarize_audio,chatbot_send_message,extraxt_pdf_text,advance_rag_llama_index,parse_sql_response
from langchain_groq import ChatGroq
from langchain.chains import ConversationChain
from langchain.chains.conversation.base import ConversationChain
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain_core.prompts import ChatPromptTemplate
from auth import create_access_token
@@ -30,6 +30,14 @@
from sendgrid.helpers.mail import Mail
from uuid import uuid4
# from tech_news_agent.crew import run_crew
from langchain.agents import AgentExecutor
from langchain_core.prompts import ChatPromptTemplate
from langchain_cohere.react_multi_hop.agent import create_cohere_react_agent
from langchain_cohere.chat_models import ChatCohere
from langchain_community.utilities.sql_database import SQLDatabase
from langchain_community.agent_toolkits import SQLDatabaseToolkit
import tempfile
import shutil


os.environ["LANGCHAIN_TRACING_V2"]="true"
@@ -654,6 +662,61 @@ async def get_data(endpoint_name: str, token: str = Depends(oauth2_scheme)):
# return ResponseText(response=output)
# except Exception as e:
# return {"error": str(e)}

@app.post("/query_db",description="""
The Query Database endpoint provides a service for interacting with SQL databases using a Cohere ReAct Agent.
It leverages Langchain's existing SQLDBToolkit to answer questions and perform queries over SQL database.
""")
async def query_db(database: UploadFile = File(...), prompt: str = Form(...)):
try:
with tempfile.NamedTemporaryFile(delete=False, suffix='.' + database.filename.split('.')[-1]) as temp_file:
shutil.copyfileobj(database.file, temp_file)
db_path = temp_file.name

llm = ChatCohere(model="command-r-plus", temperature=0.1, verbose=True,cohere_api_key=os.getenv("COHERE_API_KEY"))
db = SQLDatabase.from_uri(f"sqlite:///{db_path}")
toolkit = SQLDatabaseToolkit(db=db, llm=llm)
context = toolkit.get_context()
tools = toolkit.get_tools()
chat_prompt = ChatPromptTemplate.from_template("{input}")

agent = create_cohere_react_agent(
llm=llm,
tools=tools,
prompt=chat_prompt
)
agent_executor = AgentExecutor(
agent=agent,
tools=tools,
verbose=True,
return_intermediate_steps=False,
)

preamble = settings.QUERY_DB_PROMPT.format(schema_info=context)

out = agent_executor.invoke({
"input": prompt,
"preamble": preamble
})

output = parse_sql_response(out["output"])
db = MongoDB()
payload = {
"endpoint": "/query_db",
"input": prompt,
"output": output
}
mongo_data = {"Document": payload}
result = db.insert_data(mongo_data)
print(result)

return ResponseText(response=output)

except Exception as e:
raise Exception(f"Error handling uploaded file: {e}")

finally:
database.file.close()

if __name__ == '__main__':
    import uvicorn
Binary file added data/Chinook.db
Binary file not shown.
21 changes: 20 additions & 1 deletion helper_functions.py
@@ -317,7 +317,26 @@ def advance_rag_llama_index(pdf,model,question):
    os.remove(tmp_path)

    return str(response)

import re
def parse_sql_response(response):
    # Split the response into individual SQL statements
    sql_statements = re.split(r"(?<=\*\/)\n\n+", response)

    # Format each SQL statement
    formatted_sql_statements = []
    for sql_statement in sql_statements:
        if sql_statement.strip():
            # Remove comments
            sql_statement = re.sub(r'/\*.*?\*/', '', sql_statement, flags=re.DOTALL)
            # Replace newlines and tabs with spaces
            sql_statement = sql_statement.replace('\n', ' ').replace('\t', ' ')
            # Add a newline after each semicolon unless a new statement keyword follows;
            # the keywords are grouped so the optional whitespace applies to all of them.
            sql_statement = re.sub(r';(?!\s*(?:CREATE|INSERT|SELECT|UPDATE|DELETE))', ';\n', sql_statement)
            formatted_sql_statements.append(sql_statement.strip())

    # Join the formatted SQL statements into a single string
    formatted_response = '\n'.join(formatted_sql_statements)
    return formatted_response

if __name__ == "__main__":
create_vector_db()
3 changes: 2 additions & 1 deletion requirements.txt
@@ -27,4 +27,5 @@ llama-index-embeddings-langchain
grpcio
# crewai
# crewai-tools
proto-plus
proto-plus
langchain-cohere
20 changes: 19 additions & 1 deletion settings.py
@@ -101,4 +101,22 @@
navigating issues of climate change, political polarization, and the ethical implications of artificial intelligence.
As we reflect on the journey of humanity, from ancient civilizations to the digital age, we are reminded of our shared past and
the collective responsibility to shape a more equitable and sustainable future.
"""
"""

QUERY_DB_PROMPT = """## Task And Context
You use your advanced complex reasoning capabilities to help people by answering their questions and other requests interactively.
You will be asked a very wide array of requests on all kinds of topics. You will be equipped with a wide range of search engines or similar tools to help you,
which you use to research your answer. You may need to use multiple tools in parallel or sequentially to complete your task.
You should focus on serving the user's needs as best you can, which will be wide-ranging.
## Style Guide
Unless the user asks for a different style of answer, you should answer in full sentences, using proper grammar and spelling.
## Additional Information
You are an expert who answers the user's question by creating SQL queries and executing them.
You are equipped with a number of relevant SQL tools.
You should also present the SQL query used to provide the data.
Here is information about the database:
{schema_info}
"""
