Skip to content

Commit cd0d705

Browse files
authored
Merge pull request #2 from NicholasGoh/feat/fastapi-boilerplate
Feat/fastapi boilerplate
2 parents d99a439 + 5db609d commit cd0d705

File tree

22 files changed

+1134
-536
lines changed

22 files changed

+1134
-536
lines changed

.devcontainer/devcontainer.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
"dockerComposeFile": [
77
"../compose-dev.yaml"
88
],
9-
"service": "backend",
9+
"service": "api",
1010
"workspaceFolder": "/app",
1111
"customizations": {
1212
"vscode": {
@@ -39,6 +39,7 @@
3939
"editor.formatOnSave": true,
4040
"files.insertFinalNewline": true,
4141
"files.trimFinalNewlines": true,
42+
"python.venvPath": "/app/.venv/bin/python",
4243
"[python]": {
4344
"editor.formatOnSave": true,
4445
"editor.codeActionsOnSave": {

backend/Dockerfile

Lines changed: 0 additions & 6 deletions
This file was deleted.

backend/api/.vscode/launch.json

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
{
2+
// Use IntelliSense to learn about possible attributes.
3+
// Hover to view descriptions of existing attributes.
4+
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5+
"version": "0.2.0",
6+
"configurations": [
7+
{
8+
"name": "Python Debugger: FastAPI",
9+
"type": "debugpy",
10+
"request": "launch",
11+
"module": "fastapi",
12+
"args": [
13+
"run",
14+
"api/main.py",
15+
"--reload"
16+
]
17+
}
18+
]
19+
}

backend/api/Dockerfile

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
# API service image: uv-managed Python 3.13 on Debian bookworm.
FROM ghcr.io/astral-sh/uv:python3.13-bookworm

WORKDIR /app

# Install OS packages before copying anything so this layer is cached
# independently; clean the apt lists in the same layer to keep the image
# small. curl is presumably used by a compose healthcheck — confirm.
RUN apt-get update \
    && apt-get install -y --no-install-recommends curl \
    && rm -rf /var/lib/apt/lists/*

# Copy only the lockfile and manifest first so the dependency layer is
# invalidated only when dependencies change, not on every code edit.
COPY ./backend/api/uv.lock ./backend/api/pyproject.toml ./
RUN uv sync --frozen

COPY ./envs/backend.env /opt/.env
COPY ./backend/api /app/api
COPY ./backend/shared_mcp /app/shared_mcp

# key=value form: the legacy space-separated ENV syntax is deprecated.
ENV PYTHONPATH=/app:$PYTHONPATH

ENTRYPOINT ["uv", "run", "fastapi", "run", "api/main.py"]

backend/api/config.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    """Application configuration loaded from the environment and /opt/.env.

    Real environment variables override both the env file and the
    defaults declared below.
    """

    model_config = SettingsConfigDict(
        env_file="/opt/.env",   # baked into the image at /opt/.env by the Dockerfile
        env_ignore_empty=True,  # treat empty env vars as unset so defaults apply
        extra="ignore",         # tolerate unrelated keys in the shared env file
    )

    # LLM configuration
    model: str = "gpt-4o-mini-2024-07-18"
    openai_api_key: str = ""  # NOTE(review): empty default — must be supplied via env
    mcp_server_port: int = 8050  # port the MCP server listens on (used by routers/mcps.py)

    # Postgres connection defaults (dev values; override in production)
    pg_url: str = "postgres://postgres"
    pg_user: str = "postgres"
    pg_pass: str = "postgres"


# Module-level singleton: importing this module parses the env file once.
settings = Settings()

backend/api/dependencies.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
from typing import Annotated, Iterable

from config import settings
from fastapi import Depends
from langchain_openai import ChatOpenAI


def llm_factory() -> ChatOpenAI:
    """Build a streaming ChatOpenAI client configured from app settings."""
    return ChatOpenAI(
        streaming=True,
        model=settings.model,
        temperature=0,
        api_key=settings.openai_api_key,
        stream_usage=True,
    )


def get_llm_session() -> Iterable[ChatOpenAI]:
    """FastAPI dependency that yields a fresh LLM client per request."""
    client = llm_factory()
    yield client


# Annotated alias so route handlers can simply declare `llm: LLMDep`.
LLMDep = Annotated[ChatOpenAI, Depends(get_llm_session)]

backend/api/main.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
from fastapi import FastAPI

from api.routers import llms, mcps

# Enable "Try it out" by default in the Swagger UI.
app = FastAPI(swagger_ui_parameters={"tryItOutEnabled": True})

# All routes are versioned under /v1.
for module in (llms, mcps):
    app.include_router(module.router, prefix="/v1")

backend/pyproject.toml renamed to backend/api/pyproject.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
[project]
2-
name = "app"
2+
name = "api"
33
version = "0.1.0"
44
description = "Add your description here"
55
readme = "README.md"
@@ -14,6 +14,7 @@ dependencies = [
1414
"langchain-postgres==0.0.12",
1515
"langfuse==2.60.2",
1616
"langgraph==0.2.39",
17+
"mcp[cli]>=1.6.0",
1718
"prometheus-client==0.21.1",
1819
"psycopg[binary]==3.2.3",
1920
"pydantic-settings==2.6.0",

backend/api/routers/llms.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
from typing import Any, AsyncGenerator

from fastapi import APIRouter
from sse_starlette.sse import EventSourceResponse
from starlette.responses import Response

from api.dependencies import LLMDep

router = APIRouter(tags=["chat"])


async def stream(
    query: str, llm: LLMDep
) -> AsyncGenerator[dict[str, Any], None]:
    """Yield one SSE payload per event streamed from the LLM.

    Each yielded dict carries a single "data" key whose value is the raw
    event object produced by ``astream_events`` — not a plain string, so
    the value type is ``Any`` rather than ``str``.
    """
    async for chunk in llm.astream_events(query):
        yield dict(data=chunk)


@router.get("/chat/completions")
async def completions(query: str, llm: LLMDep) -> Response:
    """Stream completions via Server Sent Events"""
    return EventSourceResponse(stream(query, llm))

backend/api/routers/mcps.py

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
from contextlib import asynccontextmanager
from typing import Iterable

from config import settings
from fastapi import APIRouter
from mcp import ClientSession, types
from mcp.client.sse import sse_client

from shared_mcp.models import ToolRequest

router = APIRouter(prefix="/mcps", tags=["mcps"])


@asynccontextmanager
async def mcp_sse_client():
    """Yield an initialized MCP ``ClientSession`` connected over SSE.

    Connects to the ``mcp`` compose service on the port configured in
    settings. The session and both underlying streams are closed when
    the context exits.
    """
    async with sse_client(f"http://mcp:{settings.mcp_server_port}/sse") as (
        read_stream,
        write_stream,
    ):
        async with ClientSession(read_stream, write_stream) as session:
            await session.initialize()
            yield session


@router.get("/list-tools")
async def list_tools() -> Iterable[types.Tool]:
    """
    Lists tools available from MCP server

    Opens an SSE-backed MCP client session to the Model Context Protocol
    server and returns the tools it advertises.
    """
    async with mcp_sse_client() as session:
        response = await session.list_tools()
        return response.tools


@router.post("/call-tool")
async def call_tool(request: ToolRequest) -> str:
    """
    Calls tool available from MCP server

    Opens an SSE-backed MCP client session to the Model Context Protocol
    server and forwards the tool invocation to it.
    """
    async with mcp_sse_client() as session:
        # `exclude` must be a set per pydantic v2's model_dump signature;
        # every remaining field of the request becomes a tool argument.
        response = await session.call_tool(
            request.tool_name,
            arguments=request.model_dump(exclude={"tool_name"}),
        )
        # NOTE(review): assumes the first content item is text content —
        # confirm the server never returns image/embedded-resource first.
        return response.content[0].text

0 commit comments

Comments
 (0)