diff --git a/backend/app/api/v1/__init__.py b/backend/app/api/v1/__init__.py
index f37adeb..c94d15e 100644
--- a/backend/app/api/v1/__init__.py
+++ b/backend/app/api/v1/__init__.py
@@ -1,7 +1,7 @@
"""Versioned API router."""
from fastapi import APIRouter
from .arxiv import router as arxiv_router
-from app.api.v1.endpoints import health, papers, users, auth, academic, conversations
+from app.api.v1.endpoints import health, papers, users, auth, academic, conversations, library
api_router = APIRouter()
api_router.include_router(health.router, prefix="/health", tags=["health"])
@@ -11,3 +11,4 @@
api_router.include_router(academic.router, prefix="/academic", tags=["academic"])
api_router.include_router(arxiv_router,prefix="/arxiv",tags=["arxiv"])
api_router.include_router(conversations.router, prefix="/conversations", tags=["conversations"])
+api_router.include_router(library.router, prefix="/library", tags=["library"])
diff --git a/backend/app/api/v1/api.py b/backend/app/api/v1/api.py
index 4e4cb5e..1bd2f41 100644
--- a/backend/app/api/v1/api.py
+++ b/backend/app/api/v1/api.py
@@ -1,7 +1,7 @@
"""API v1 router aggregation."""
from fastapi import APIRouter
-from app.api.v1.endpoints import academic, auth, conversations, papers, users
+from app.api.v1.endpoints import academic, auth, conversations, library, papers, users
# 创建主路由器
api_router = APIRouter()
@@ -25,6 +25,12 @@
tags=["论文检索"]
)
+api_router.include_router(
+ library.router,
+ prefix="/library",
+ tags=["我的文库"]
+)
+
api_router.include_router(
academic.router,
prefix="/academic",
@@ -35,4 +41,4 @@
conversations.router,
prefix="/conversations",
tags=["对话历史"]
-)
\ No newline at end of file
+)
diff --git a/backend/app/api/v1/endpoints/auth.py b/backend/app/api/v1/endpoints/auth.py
index 029b84b..1e0b886 100644
--- a/backend/app/api/v1/endpoints/auth.py
+++ b/backend/app/api/v1/endpoints/auth.py
@@ -1,16 +1,33 @@
"""认证授权相关API接口"""
-from fastapi import APIRouter, Depends, HTTPException, status
+from __future__ import annotations
+
+import json
+import secrets
+from typing import cast
+from urllib.parse import urlencode
+
+import httpx
+from fastapi import APIRouter, Depends, HTTPException, Request, status
+from fastapi.responses import RedirectResponse
from fastapi.security import OAuth2PasswordRequestForm
from sqlalchemy.ext.asyncio import AsyncSession
-from typing import cast
from app.core.auth import create_access_token
-from app.core.security import verify_password
+from app.core.config import get_settings
+from app.core.security import hash_password, verify_password
from app.db import UserRepository
from app.db.session import get_db
from app.schemas.auth import Token # type: ignore[import-not-found]
router = APIRouter()
+settings = get_settings()
+
+GITHUB_AUTHORIZE_URL = "https://github.com/login/oauth/authorize"
+GITHUB_TOKEN_URL = "https://github.com/login/oauth/access_token"
+GITHUB_USER_API = "https://api.github.com/user"
+GITHUB_EMAILS_API = "https://api.github.com/user/emails"
+GITHUB_STATE_COOKIE = "github_oauth_state"
+GITHUB_STATE_TTL = 600
@router.post(
@@ -99,4 +116,189 @@ async def login_for_access_token(
)
token = create_access_token(subject=cast(str, getattr(user, "email", "")))
- return Token(access_token=token)
\ No newline at end of file
+ return Token(access_token=token)
+
+
+@router.get("/github/login")
+async def github_login(request: Request, next: str | None = None):
+ client_id = settings.github_client_id
+ client_secret = settings.github_client_secret
+ if not client_id or not client_secret:
+ raise HTTPException(status_code=status.HTTP_503_SERVICE_UNAVAILABLE, detail="GitHub OAuth 未配置")
+
+ nonce = secrets.token_urlsafe(32)
+ cookie_payload = json.dumps(
+ {
+ "nonce": nonce,
+ "next": _sanitize_next_path(next),
+ }
+ )
+
+ callback_url = str(request.url_for("github_callback"))
+ params = {
+ "client_id": client_id,
+ "redirect_uri": callback_url,
+ "scope": "read:user user:email",
+ "state": nonce,
+ "allow_signup": "true",
+ }
+ authorize_url = f"{GITHUB_AUTHORIZE_URL}?{urlencode(params)}"
+ response = RedirectResponse(authorize_url, status_code=status.HTTP_302_FOUND)
+ response.set_cookie(
+ GITHUB_STATE_COOKIE,
+ cookie_payload,
+ max_age=GITHUB_STATE_TTL,
+ httponly=True,
+ secure=_is_cookie_secure(),
+ samesite="lax",
+ )
+ return response
+
+
+@router.get("/github/callback", name="github_callback")
+async def github_callback(
+ request: Request,
+ code: str | None = None,
+ state: str | None = None,
+ db: AsyncSession = Depends(get_db),
+):
+ cookie_payload = request.cookies.get(GITHUB_STATE_COOKIE)
+ if not cookie_payload:
+ return _oauth_error_redirect("Missing OAuth state cookie.")
+
+ try:
+ payload = json.loads(cookie_payload)
+ except json.JSONDecodeError:
+ return _oauth_error_redirect("Invalid OAuth state payload.")
+
+ expected_state = payload.get("nonce")
+ next_path = _sanitize_next_path(payload.get("next"))
+ if not code or not state or expected_state != state:
+ return _oauth_error_redirect("OAuth state mismatch.", next_path=next_path)
+
+ try:
+ callback_url = str(request.url_for("github_callback"))
+ token_data = await _exchange_github_code_for_token(code, callback_url)
+ access_token = token_data.get("access_token")
+ if not access_token:
+ raise RuntimeError("GitHub did not return an access token.")
+
+ github_user, primary_email = await _fetch_github_profile(access_token)
+ if not primary_email:
+ raise RuntimeError("未能获取 GitHub 邮箱,请在 GitHub 账户开启公开邮箱或授权 email scope。")
+
+ github_user_id = str(github_user.get("id"))
+ repo = UserRepository(db)
+ user = await repo.get_by_oauth_account("github", github_user_id)
+
+ if user is None:
+ user = await repo.get_by_email(primary_email)
+ if user:
+ user = await repo.update(
+ user,
+ {
+ "oauth_provider": "github",
+ "oauth_account_id": github_user_id,
+ "avatar_url": user.avatar_url or github_user.get("avatar_url"),
+ },
+ )
+ else:
+ hashed_password = hash_password(secrets.token_urlsafe(32))
+ user = await repo.create(
+ email=primary_email,
+ hashed_password=hashed_password,
+ full_name=github_user.get("name") or github_user.get("login"),
+ avatar_url=github_user.get("avatar_url"),
+ oauth_provider="github",
+ oauth_account_id=github_user_id,
+ )
+ else:
+ updates: dict[str, str | None] = {}
+ if not user.avatar_url and github_user.get("avatar_url"):
+ updates["avatar_url"] = github_user.get("avatar_url")
+ updates["oauth_provider"] = "github"
+ updates["oauth_account_id"] = github_user_id
+ user = await repo.update(user, updates)
+
+ await db.commit()
+ await db.refresh(user)
+
+ token = create_access_token(subject=cast(str, getattr(user, "email", "")))
+ redirect_target = _build_frontend_redirect(token=token, next_path=next_path)
+ response = RedirectResponse(redirect_target, status_code=status.HTTP_302_FOUND)
+ response.delete_cookie(GITHUB_STATE_COOKIE)
+ return response
+ except Exception as exc: # pragma: no cover - defensive
+ return _oauth_error_redirect(str(exc), next_path=next_path)
+
+
+def _sanitize_next_path(value: str | None) -> str:
+ if not value or not value.startswith("/"):
+ return "/"
+ return value
+
+
+def _is_cookie_secure() -> bool:
+ return settings.environment not in {"local", "development"}
+
+
+async def _exchange_github_code_for_token(code: str, redirect_uri: str) -> dict[str, str]:
+ client_id = settings.github_client_id
+ client_secret = settings.github_client_secret
+ if not client_id or not client_secret:
+ raise RuntimeError("GitHub OAuth 未配置。")
+
+ data = {
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "code": code,
+ "redirect_uri": redirect_uri,
+ }
+
+ async with httpx.AsyncClient(timeout=15.0, headers={"Accept": "application/json"}) as client:
+ response = await client.post(GITHUB_TOKEN_URL, data=data)
+ response.raise_for_status()
+ return response.json()
+
+
+async def _fetch_github_profile(access_token: str) -> tuple[dict[str, str], str | None]:
+ headers = {
+ "Accept": "application/json",
+ "Authorization": f"Bearer {access_token}",
+ }
+ async with httpx.AsyncClient(timeout=15.0) as client:
+ user_resp = await client.get(GITHUB_USER_API, headers=headers)
+ user_resp.raise_for_status()
+ user_data = user_resp.json()
+ email = user_data.get("email")
+ if not email:
+ emails_resp = await client.get(GITHUB_EMAILS_API, headers=headers)
+ emails_resp.raise_for_status()
+ emails = emails_resp.json()
+ email = next(
+ (
+ item.get("email")
+ for item in emails
+ if item.get("primary") and item.get("verified")
+ ),
+ None,
+ ) or next((item.get("email") for item in emails if item.get("verified")), None)
+ return user_data, email
+
+
+def _build_frontend_redirect(*, token: str | None, next_path: str) -> str:
+ params = {"next": _sanitize_next_path(next_path)}
+ if token:
+ params["token"] = token
+ params["provider"] = "github"
+ return f"{settings.frontend_oauth_redirect_url}?{urlencode(params)}"
+
+
+def _oauth_error_redirect(message: str, *, next_path: str = "/") -> RedirectResponse:
+ params = {
+ "error": message,
+ "next": _sanitize_next_path(next_path),
+ }
+ response = RedirectResponse(f"{settings.frontend_oauth_redirect_url}?{urlencode(params)}", status_code=status.HTTP_302_FOUND)
+ response.delete_cookie(GITHUB_STATE_COOKIE)
+ return response
diff --git a/backend/app/api/v1/endpoints/library.py b/backend/app/api/v1/endpoints/library.py
new file mode 100644
index 0000000..abcdf27
--- /dev/null
+++ b/backend/app/api/v1/endpoints/library.py
@@ -0,0 +1,369 @@
+"""Endpoints for managing personal library folders."""
+from __future__ import annotations
+
+import asyncio
+import hashlib
+from pathlib import Path
+from typing import Any
+from uuid import uuid4
+
+import httpx
+from fastapi import APIRouter, Depends, HTTPException, Response, status
+from fastapi.responses import FileResponse, StreamingResponse
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.core.config import get_settings
+from app.core.auth import decode_access_token
+from app.db.repository import LibraryFolderRepository, UploadedPaperRepository, UserRepository
+from app.db.session import get_db
+from app.dependencies.auth import get_current_user, get_current_user_optional
+from app.schemas import library as library_schema
+from app.schemas import upload as upload_schema
+from app.schemas.auth import TokenPayload
+
+settings = get_settings()
+UPLOAD_DIR = settings.media_path / "uploads"
+UPLOAD_DIR.mkdir(parents=True, exist_ok=True)
+
+router = APIRouter()
+
+
+@router.get(
+ "/folders",
+ response_model=library_schema.LibraryFolderListResponse,
+ summary="列出我的文库文件夹",
+)
+async def list_library_folders(
+ current_user=Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> library_schema.LibraryFolderListResponse:
+ folder_repo = LibraryFolderRepository(db)
+ uploaded_repo = UploadedPaperRepository(db)
+ folders = await folder_repo.list_for_user(current_user.id)
+ counts = await uploaded_repo.count_by_folder(current_user.id)
+ folder_items: list[library_schema.LibraryFolderRead] = []
+ for folder in folders:
+ base = library_schema.LibraryFolderRead.model_validate(folder, from_attributes=True)
+ folder_items.append(base.model_copy(update={"paper_count": counts.get(folder.id, 0)}))
+ return library_schema.LibraryFolderListResponse(
+ folders=folder_items,
+ unfiled_count=counts.get(None, 0),
+ )
+
+
+@router.post(
+ "/folders",
+ response_model=library_schema.LibraryFolderRead,
+ status_code=status.HTTP_201_CREATED,
+ summary="新增文库文件夹",
+)
+async def create_library_folder(
+ payload: library_schema.LibraryFolderCreate,
+ current_user=Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> library_schema.LibraryFolderRead:
+ repo = LibraryFolderRepository(db)
+ try:
+ folder = await repo.create(
+ user_id=current_user.id,
+ name=payload.name.strip(),
+ color=payload.color,
+ )
+ await db.commit()
+ except IntegrityError:
+ await db.rollback()
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="文件夹名称已存在")
+
+ await db.refresh(folder)
+ return library_schema.LibraryFolderRead.model_validate(folder, from_attributes=True)
+
+
+@router.patch(
+ "/folders/{folder_id}",
+ response_model=library_schema.LibraryFolderRead,
+ summary="更新文件夹信息",
+)
+async def rename_library_folder(
+ folder_id: int,
+ payload: library_schema.LibraryFolderUpdate,
+ current_user=Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> library_schema.LibraryFolderRead:
+ repo = LibraryFolderRepository(db)
+ folder = await repo.get_for_user(folder_id, current_user.id)
+ if folder is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到该文件夹")
+
+ try:
+ updated = await repo.rename(folder, name=payload.name.strip(), color=payload.color)
+ await db.commit()
+ except IntegrityError:
+ await db.rollback()
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="文件夹名称已存在")
+
+ await db.refresh(updated)
+ uploaded_repo = UploadedPaperRepository(db)
+ counts = await uploaded_repo.count_by_folder(current_user.id)
+ base = library_schema.LibraryFolderRead.model_validate(updated, from_attributes=True)
+ return base.model_copy(update={"paper_count": counts.get(folder_id, 0)})
+
+
+@router.delete(
+ "/folders/{folder_id}",
+ summary="删除文件夹",
+)
+async def delete_library_folder(
+ folder_id: int,
+ current_user=Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> Response:
+ repo = LibraryFolderRepository(db)
+ folder = await repo.get_for_user(folder_id, current_user.id)
+ if folder is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到该文件夹")
+
+ uploaded_repo = UploadedPaperRepository(db)
+ total = await uploaded_repo.count_unique_for_user(current_user.id, folder_id=folder_id)
+ if total:
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="请先移动或删除文件夹中的文献")
+
+ await repo.delete(folder)
+ await db.commit()
+ return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+
+@router.patch(
+ "/uploads/{paper_id}/folder",
+ response_model=upload_schema.UploadedPaperRead,
+ summary="更新文献所在文件夹",
+)
+async def assign_uploaded_paper_folder(
+ paper_id: int,
+ payload: library_schema.LibraryFolderAssignment,
+ current_user=Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> upload_schema.UploadedPaperRead:
+ folder_repo = LibraryFolderRepository(db)
+ uploaded_repo = UploadedPaperRepository(db)
+ record = await uploaded_repo.get_for_user(paper_id, current_user.id)
+ if record is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到对应的上传文件")
+
+ target_folder_id: int | None = None
+ if payload.folder_id is not None:
+ folder = await folder_repo.get_for_user(payload.folder_id, current_user.id)
+ if folder is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="目标文件夹不存在")
+ target_folder_id = folder.id
+
+ await uploaded_repo.update_folder(record, target_folder_id)
+ await db.commit()
+ await db.refresh(record)
+ return upload_schema.UploadedPaperRead.model_validate(record, from_attributes=True)
+
+
+@router.get(
+ "/uploads/{paper_id}/download",
+ summary="下载上传的 PDF 原文件",
+)
+async def download_uploaded_paper(
+ paper_id: int,
+ access_token: str | None = None,
+ current_user=Depends(get_current_user_optional),
+ db: AsyncSession = Depends(get_db),
+) -> Any:
+ user = current_user
+ if user is None:
+ if not access_token:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="请先登录")
+ user = await _get_user_from_token(access_token, db)
+
+ uploaded_repo = UploadedPaperRepository(db)
+ record = await uploaded_repo.get_for_user(paper_id, user.id)
+ if record is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到对应的上传文件")
+
+ file_url = record.file_url or ""
+ filename = record.original_filename or f"paper_{paper_id}.pdf"
+
+ if file_url.startswith(("http://", "https://")):
+ try:
+ async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
+ response = await client.get(file_url)
+ response.raise_for_status()
+ content = response.content
+ except httpx.HTTPError as exc:
+ raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail="无法读取远程 PDF") from exc
+
+            headers = {"Content-Disposition": f'inline; filename="{filename}"'}
+ return StreamingResponse(
+ iter([content]),
+ media_type=record.content_type or "application/pdf",
+ headers=headers,
+ )
+
+ local_path = _resolve_local_media_path(file_url)
+ if local_path is None or not local_path.exists():
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="PDF 文件已丢失,请重新上传")
+
+    headers = {"Content-Disposition": f'inline; filename="{filename}"'}
+ return FileResponse(
+ local_path,
+ media_type=record.content_type or "application/pdf",
+ filename=filename,
+ headers=headers,
+ )
+
+
+@router.head(
+ "/uploads/{paper_id}/download",
+ summary="HEAD 检查上传 PDF",
+)
+async def head_uploaded_paper(
+ paper_id: int,
+ access_token: str | None = None,
+ current_user=Depends(get_current_user_optional),
+ db: AsyncSession = Depends(get_db),
+) -> Response:
+ user = current_user
+ if user is None:
+ if not access_token:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="请先登录")
+ user = await _get_user_from_token(access_token, db)
+
+ uploaded_repo = UploadedPaperRepository(db)
+ record = await uploaded_repo.get_for_user(paper_id, user.id)
+ if record is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到对应的上传文件")
+
+ file_url = record.file_url or ""
+ if file_url.startswith(("http://", "https://")):
+ return Response(status_code=status.HTTP_200_OK)
+
+ local_path = _resolve_local_media_path(file_url)
+ if local_path is None or not local_path.exists():
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="PDF 文件已丢失,请重新上传")
+
+ return Response(status_code=status.HTTP_200_OK)
+
+
+@router.post(
+ "/uploads/{paper_id}/ensure-local",
+ response_model=upload_schema.UploadedPaperRead,
+ summary="下载远程 PDF 到本地",
+)
+async def ensure_uploaded_paper_local(
+ paper_id: int,
+ payload: library_schema.LibraryDownloadRequest,
+ current_user=Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> upload_schema.UploadedPaperRead:
+ uploaded_repo = UploadedPaperRepository(db)
+ record = await uploaded_repo.get_for_user(paper_id, current_user.id)
+ if record is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到对应的上传文件")
+
+ candidate_url = payload.pdf_url or record.file_url
+
+ local_path = _resolve_local_media_path(record.file_url or "")
+ if candidate_url is None or candidate_url.startswith("#"):
+ if local_path and local_path.exists():
+ return upload_schema.UploadedPaperRead.model_validate(record, from_attributes=True)
+ raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="缺少可下载的 PDF 链接")
+
+ if not candidate_url.startswith(("http://", "https://")):
+ if local_path and local_path.exists():
+ return upload_schema.UploadedPaperRead.model_validate(record, from_attributes=True)
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="本地 PDF 文件已丢失,请提供远程链接")
+
+ content = await _download_pdf_from_url(candidate_url)
+ stored_filename, file_url, file_size, file_hash = await _save_pdf_bytes(current_user.id, content)
+
+ await uploaded_repo.update_file_fields(
+ record,
+ stored_filename=stored_filename,
+ file_url=file_url,
+ file_size=file_size,
+ file_hash=file_hash,
+ content_type="application/pdf",
+ )
+ await db.commit()
+ await db.refresh(record)
+
+ return upload_schema.UploadedPaperRead.model_validate(record, from_attributes=True)
+
+
+@router.delete(
+ "/uploads/{paper_id}",
+ summary="删除上传的文档",
+)
+async def delete_uploaded_paper(
+ paper_id: int,
+ current_user=Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> Response:
+ uploaded_repo = UploadedPaperRepository(db)
+ record = await uploaded_repo.get_for_user(paper_id, current_user.id)
+ if record is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到对应的上传文件")
+
+ file_url = record.file_url or ""
+ local_path = _resolve_local_media_path(file_url)
+ if local_path is not None and local_path.exists():
+ try:
+ local_path.unlink()
+ except OSError:
+ pass
+
+ await uploaded_repo.delete(record)
+ await db.commit()
+ return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+
+def _resolve_local_media_path(file_url: str) -> Path | None:
+ if not file_url or file_url.startswith(("http://", "https://")):
+ return None
+
+ relative_path = file_url.lstrip("/")
+ if relative_path.startswith("media/"):
+ relative_path = relative_path[len("media/") :]
+ candidate = (settings.media_path / relative_path).resolve()
+ media_root = settings.media_path.resolve()
+    if not candidate.is_relative_to(media_root):
+ return None
+ return candidate
+
+
+async def _get_user_from_token(token: str, db: AsyncSession):
+ try:
+ payload_dict = decode_access_token(token)
+ payload = TokenPayload.model_validate(payload_dict)
+ except Exception as exc: # pragma: no cover - defensive
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="无效的访问令牌") from exc
+
+ if not payload.sub:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="无效的访问令牌")
+
+ repo = UserRepository(db)
+ user = await repo.get_by_email(payload.sub)
+ if user is None:
+ raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="用户不存在")
+ return user
+
+
+async def _download_pdf_from_url(url: str) -> bytes:
+ async with httpx.AsyncClient(timeout=30.0, follow_redirects=True) as client:
+ response = await client.get(url)
+ response.raise_for_status()
+ return response.content
+
+
+async def _save_pdf_bytes(user_id: int, content: bytes) -> tuple[str, str, int, str]:
+ stored_filename = f"paper_{user_id}_{uuid4().hex}.pdf"
+ destination = UPLOAD_DIR / stored_filename
+ await asyncio.to_thread(destination.write_bytes, content)
+ file_url = f"/media/uploads/{stored_filename}"
+ file_size = len(content)
+ file_hash = hashlib.sha256(content).hexdigest()
+ return stored_filename, file_url, file_size, file_hash
diff --git a/backend/app/api/v1/endpoints/papers.py b/backend/app/api/v1/endpoints/papers.py
index e092b60..d32c409 100644
--- a/backend/app/api/v1/endpoints/papers.py
+++ b/backend/app/api/v1/endpoints/papers.py
@@ -18,18 +18,20 @@
import logging
import math
from contextlib import suppress
+from datetime import datetime
from pathlib import Path
from typing import TYPE_CHECKING, Any, Final, cast
from uuid import uuid4
import httpx
-from fastapi import APIRouter, Depends, File, HTTPException, Query, UploadFile, status
+from fastapi import APIRouter, Depends, File, Form, HTTPException, Query, UploadFile, status
from sqlalchemy.ext.asyncio import AsyncSession
from pydantic import BaseModel, Field
from app.core.config import get_settings
from app.db.paper_repository import PaperRepository
from app.db.repository import (
+ LibraryFolderRepository,
MineruParseJobRepository,
ParsedPaperCacheRepository,
UploadedPaperRepository,
@@ -44,6 +46,7 @@
from app.schemas.annotation import AnnotationApplyResponse, AnnotationCreate
from app.schemas.conversation import ConversationCreate, ConversationMessageCreate
from app.services.annotations import apply_annotation
+from app.services.pdf_metadata import extract_pdf_metadata_async
from app.services.mineru_cli import parse_pdf
from app.services.ai.llm_client import DeepSeekClient
from app.workers.tasks import run_mineru_parse
@@ -73,6 +76,26 @@ async def search_papers(
raise HTTPException(status_code=status.HTTP_501_NOT_IMPLEMENTED, detail="Search service not yet available")
+def _resolve_folder_filter(folder_id: int | None) -> tuple[int | None, bool]:
+ if folder_id == 0:
+ return None, True
+ return folder_id, False
+
+
+async def _ensure_folder_access(
+ folder_repo: LibraryFolderRepository,
+ *,
+ folder_id: int | None,
+ user_id: int,
+) -> int | None:
+ if folder_id is None:
+ return None
+ folder = await folder_repo.get_for_user(folder_id, user_id)
+ if folder is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到指定的文库文件夹")
+ return folder.id
+
+
@router.get(
"/uploads",
response_model=upload_schema.UploadedPaperListResponse,
@@ -84,9 +107,15 @@ async def list_uploaded_papers(
db: AsyncSession = Depends(get_db),
page: int = Query(1, ge=1, description="页码,从 1 开始"),
page_size: int = Query(12, ge=1, le=60, description="每页数量,最多 60 条"),
+ folder_id: int | None = Query(None, ge=0, description="文件夹 ID(0 表示未分类,仅显示未分配文件夹的文献)"),
) -> upload_schema.UploadedPaperListResponse:
repo = UploadedPaperRepository(db)
- total = await repo.count_for_user(current_user.id)
+ normalized_folder_id, unfiled_only = _resolve_folder_filter(folder_id)
+ total = await repo.count_unique_for_user(
+ current_user.id,
+ folder_id=normalized_folder_id,
+ unfiled_only=unfiled_only,
+ )
total_pages = math.ceil(total / page_size) if total else 0
if total_pages and page > total_pages:
@@ -95,10 +124,12 @@ async def list_uploaded_papers(
page = 1
offset = max(0, (page - 1) * page_size)
- records = await repo.list_for_user_paginated(
+ records = await repo.list_unique_for_user_paginated(
current_user.id,
offset=offset,
limit=page_size,
+ folder_id=normalized_folder_id,
+ unfiled_only=unfiled_only,
)
items = [
upload_schema.UploadedPaperRead.model_validate(record, from_attributes=True)
@@ -122,6 +153,7 @@ async def list_uploaded_papers(
)
async def upload_paper(
file: UploadFile = File(..., description="需要上传的 PDF 文件"),
+ folder_id: int | None = Form(None, description="文件夹 ID,不填则保存在未分类"),
current_user=Depends(get_current_user),
db: AsyncSession = Depends(get_db),
) -> upload_schema.UploadedPaperRead:
@@ -151,7 +183,19 @@ async def upload_paper(
relative_url = f"/media/uploads/{stored_filename}"
+ folder_repo = LibraryFolderRepository(db)
repo = UploadedPaperRepository(db)
+ resolved_folder_id = await _ensure_folder_access(
+ folder_repo,
+ folder_id=folder_id if folder_id and folder_id > 0 else None,
+ user_id=current_user.id,
+ )
+ metadata_json: dict | None = None
+ try:
+ metadata_json = await extract_pdf_metadata_async(destination, sanitized_name)
+ except Exception as exc: # pragma: no cover - best effort only
+ logger.warning("Failed to extract metadata for uploaded PDF: %s", exc)
+ metadata_json = None
try:
record = await repo.create(
@@ -162,6 +206,8 @@ async def upload_paper(
file_size=len(cleaned_bytes),
file_url=relative_url,
file_hash=file_hash,
+ folder_id=resolved_folder_id,
+ metadata_json=metadata_json,
)
await db.commit()
except Exception:
@@ -175,6 +221,23 @@ async def upload_paper(
return upload_schema.UploadedPaperRead.model_validate(record, from_attributes=True)
+@router.get(
+ "/uploads/{paper_id}",
+ response_model=upload_schema.UploadedPaperRead,
+ summary="获取单篇上传文档详情",
+)
+async def get_uploaded_paper_detail(
+ paper_id: int,
+ current_user=Depends(get_current_user),
+ db: AsyncSession = Depends(get_db),
+) -> upload_schema.UploadedPaperRead:
+ repo = UploadedPaperRepository(db)
+ record = await repo.get_for_user(paper_id, current_user.id)
+ if record is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="未找到对应的上传文件")
+ return upload_schema.UploadedPaperRead.model_validate(record, from_attributes=True)
+
+
@router.get("/{paper_id}", response_model=paper_schema.PaperRead)
async def get_paper(paper_id: int, db: AsyncSession = Depends(get_db)) -> paper_schema.PaperRead:
"""Fetch a single paper by identifier."""
@@ -182,12 +245,12 @@ async def get_paper(paper_id: int, db: AsyncSession = Depends(get_db)) -> paper_
raise HTTPException(status_code=status.HTTP_501_NOT_IMPLEMENTED, detail="Paper retrieval not yet available")
-@router.post("/import", response_model=paper_schema.PaperRead, status_code=status.HTTP_201_CREATED)
+@router.post("/import", response_model=paper_schema.PaperImportResponse, status_code=status.HTTP_201_CREATED)
async def import_paper(
request: paper_schema.PaperImportRequest,
current_user=Depends(get_current_user),
db: AsyncSession = Depends(get_db)
-) -> paper_schema.PaperRead:
+) -> paper_schema.PaperImportResponse:
"""Import a recommended paper into the local library and optionally download its PDF.
Behavior:
@@ -198,7 +261,14 @@ async def import_paper(
logger.info(f"Importing paper for user {current_user.id}: {request.title}")
+ folder_repo = LibraryFolderRepository(db)
+
try:
+ resolved_folder_id = await _ensure_folder_access(
+ folder_repo,
+ folder_id=request.folder_id if request.folder_id and request.folder_id > 0 else None,
+ user_id=current_user.id,
+ )
repo = PaperRepository(db)
paper = await repo.create(
title=request.title,
@@ -227,6 +297,10 @@ async def import_paper(
downloaded_url = None
download_error = None
+ metadata_json: dict | None = None
+
+ safe_title = "".join(c for c in (request.title or "paper")[:50] if c.isalnum() or c in (' ', '-', '_')).strip()
+
if pdf_url:
# Normalize common arXiv abstract links to direct PDF links
if "arxiv.org/abs/" in pdf_url and "arxiv.org/pdf/" not in pdf_url:
@@ -262,6 +336,11 @@ async def import_paper(
downloaded_filename = filename
downloaded_content = content
                downloaded_url = f"/media/papers/{filename}"
+ try:
+ metadata_json = await extract_pdf_metadata_async(dest, safe_title)
+ except Exception as exc: # pragma: no cover
+ logger.warning("Failed to parse metadata for imported paper %s: %s", paper.id, exc)
+ metadata_json = None
# attach file path to metadata
paper.metadata_json = (paper.metadata_json or {})
@@ -279,14 +358,23 @@ async def import_paper(
paper.metadata_json = (paper.metadata_json or {})
paper.metadata_json["download_error"] = download_error
+ if metadata_json is None:
+ metadata_json = {
+ "title": request.title,
+ "summary": request.abstract,
+ "topics": [],
+ "extracted_at": datetime.utcnow().isoformat(),
+ }
+
# Always save to UploadedPaper table, regardless of download success
uploaded_repo = UploadedPaperRepository(db)
- safe_title = "".join(c for c in (request.title or "paper")[:50] if c.isalnum() or c in (' ', '-', '_')).strip()
+
+ uploaded_record: UploadedPaper | None = None
if downloaded_filename and downloaded_content:
# Downloaded successfully - save with local file reference
logger.info(f"Saving to UploadedPaper with downloaded file: {downloaded_filename}")
- await uploaded_repo.create(
+ uploaded_record = await uploaded_repo.create(
user_id=current_user.id,
stored_filename=downloaded_filename,
original_filename=f"{safe_title}.pdf",
@@ -294,13 +382,15 @@ async def import_paper(
file_size=len(downloaded_content),
file_url=downloaded_url,
file_hash=_calculate_file_hash(downloaded_content),
+ folder_id=resolved_folder_id,
+ metadata_json=metadata_json,
)
else:
# No download or download failed - save metadata only with external URL
logger.info(f"Saving to UploadedPaper without local file (pdf_url={pdf_url})")
# Create a placeholder entry - use original pdf_url as file_url if available
fallback_url = pdf_url if pdf_url else f"#{paper.id}"
- await uploaded_repo.create(
+ uploaded_record = await uploaded_repo.create(
user_id=current_user.id,
stored_filename=f"pending_{paper.id}.pdf",
original_filename=f"{safe_title}.pdf",
@@ -308,15 +398,24 @@ async def import_paper(
file_size=0, # 0 indicates no local file
file_url=fallback_url,
file_hash=None,
+ folder_id=resolved_folder_id,
+ metadata_json=metadata_json,
)
# Commit all changes
await db.commit()
await db.refresh(paper)
+ if uploaded_record is not None:
+ await db.refresh(uploaded_record)
logger.info(f"Successfully imported paper id={paper.id} for user {current_user.id}")
- return paper
+ return paper_schema.PaperImportResponse(
+ paper=paper_schema.PaperRead.model_validate(paper, from_attributes=True),
+ uploaded=upload_schema.UploadedPaperRead.model_validate(uploaded_record, from_attributes=True)
+ if uploaded_record
+ else None,
+ )
except Exception as exc:
await db.rollback()
diff --git a/backend/app/core/config.py b/backend/app/core/config.py
index 5290773..223e8ff 100644
--- a/backend/app/core/config.py
+++ b/backend/app/core/config.py
@@ -51,7 +51,11 @@ class Settings(BaseSettings):
OPENALEX_TRUST_ENV: bool = True
OPENALEX_MAILTO: str = "insightreading@example.com"
-
+ github_client_id: str | None = None
+ github_client_secret: str | None = None
+ frontend_app_url: str = "http://localhost:3000"
+ frontend_oauth_redirect_path: str = "/auth/sso"
+
DEEPSEEK_API_KEY: str = "sk-7b3e938c92fb4a5ca601a35becdee4e2"
DEEPSEEK_MODEL: str = "deepseek-chat"
DEEPSEEK_BASE_URL: str = "https://api.deepseek.com/v1"
@@ -111,6 +115,14 @@ def media_path(self) -> Path:
return Path(override)
return Path(self.media_root)
+ @property
+ def frontend_oauth_redirect_url(self) -> str:
+ base = (self.frontend_app_url or "http://localhost:3000").rstrip("/")
+ path = self.frontend_oauth_redirect_path or "/auth/sso"
+ if not path.startswith("/"):
+ path = f"/{path}"
+ return f"{base}{path}"
+
@lru_cache
def get_settings() -> Settings:
diff --git a/backend/app/db/repository.py b/backend/app/db/repository.py
index 7cdcf24..7d6ed41 100644
--- a/backend/app/db/repository.py
+++ b/backend/app/db/repository.py
@@ -5,8 +5,10 @@
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import selectinload
from app.models.mineru_parse_job import MineruParseJob
+from app.models.library_folder import LibraryFolder
from app.models.parsed_paper_cache import ParsedPaperCache
from app.models.uploaded_paper import UploadedPaper
from app.models.user import User
@@ -23,8 +25,24 @@ async def get_by_email(self, email: str) -> User | None:
result = await self._session.execute(stmt)
return result.scalar_one_or_none()
- async def create(self, *, email: str, hashed_password: str, full_name: str | None) -> User:
- user = User(email=email, hashed_password=hashed_password, full_name=full_name)
+ async def create(
+ self,
+ *,
+ email: str,
+ hashed_password: str,
+ full_name: str | None,
+ avatar_url: str | None = None,
+ oauth_provider: str | None = None,
+ oauth_account_id: str | None = None,
+ ) -> User:
+ user = User(
+ email=email,
+ hashed_password=hashed_password,
+ full_name=full_name,
+ avatar_url=avatar_url,
+ oauth_provider=oauth_provider,
+ oauth_account_id=oauth_account_id,
+ )
self._session.add(user)
await self._session.flush()
return user
@@ -36,6 +54,55 @@ async def update(self, user: User, updates: Mapping[str, Any]) -> User:
await self._session.flush()
return user
+ async def get_by_oauth_account(self, provider: str, account_id: str) -> User | None:
+ stmt = select(User).where(
+ User.oauth_provider == provider,
+ User.oauth_account_id == account_id,
+ )
+ result = await self._session.execute(stmt)
+ return result.scalar_one_or_none()
+
+
+class LibraryFolderRepository:
+ """Persistence helpers for organizing library folders."""
+
+ def __init__(self, session: AsyncSession) -> None:
+ self._session = session
+
+ async def list_for_user(self, user_id: int) -> list[LibraryFolder]:
+ stmt = (
+ select(LibraryFolder)
+ .where(LibraryFolder.user_id == user_id)
+ .order_by(LibraryFolder.created_at.asc(), LibraryFolder.id.asc())
+ )
+ result = await self._session.execute(stmt)
+ return list(result.scalars().all())
+
+ async def get_for_user(self, folder_id: int, user_id: int) -> LibraryFolder | None:
+ stmt = (
+ select(LibraryFolder)
+ .where(LibraryFolder.id == folder_id, LibraryFolder.user_id == user_id)
+ )
+ result = await self._session.execute(stmt)
+ return result.scalar_one_or_none()
+
+ async def create(self, *, user_id: int, name: str, color: str | None = None) -> LibraryFolder:
+ folder = LibraryFolder(user_id=user_id, name=name, color=color)
+ self._session.add(folder)
+ await self._session.flush()
+ return folder
+
+ async def delete(self, folder: LibraryFolder) -> None:
+ await self._session.delete(folder)
+ await self._session.flush()
+
+ async def rename(self, folder: LibraryFolder, *, name: str, color: str | None = None) -> LibraryFolder:
+ folder.name = name
+ if color is not None:
+ folder.color = color
+ await self._session.flush()
+ return folder
+
class UploadedPaperRepository:
"""Database operations for UploadedPaper model."""
@@ -43,6 +110,36 @@ class UploadedPaperRepository:
def __init__(self, session: AsyncSession) -> None:
self._session = session
+ def _deduped_uploads_subquery(
+ self,
+ user_id: int,
+ *,
+ folder_id: int | None = None,
+ unfiled_only: bool = False,
+ per_folder_partition: bool = False,
+ ):
+ dedupe_key = func.coalesce(UploadedPaper.file_hash, UploadedPaper.original_filename, UploadedPaper.stored_filename)
+ partition_columns: list[Any] = [dedupe_key]
+ if per_folder_partition:
+ partition_columns.append(UploadedPaper.folder_id)
+ row_number = func.row_number().over(
+ partition_by=partition_columns,
+ order_by=(UploadedPaper.uploaded_at.desc(), UploadedPaper.id.desc()),
+ )
+ stmt = select(
+ UploadedPaper.id.label("id"),
+ UploadedPaper.uploaded_at.label("uploaded_at"),
+ UploadedPaper.folder_id.label("folder_id"),
+ row_number.label("rn"),
+ ).where(UploadedPaper.user_id == user_id)
+
+ if unfiled_only:
+ stmt = stmt.where(UploadedPaper.folder_id.is_(None))
+ elif folder_id is not None:
+ stmt = stmt.where(UploadedPaper.folder_id == folder_id)
+
+ return stmt.subquery()
+
async def count_for_user(self, user_id: int) -> int:
stmt = select(func.count()).select_from(UploadedPaper).where(UploadedPaper.user_id == user_id)
result = await self._session.execute(stmt)
@@ -62,11 +159,59 @@ async def list_for_user_paginated(self, user_id: int, *, offset: int, limit: int
async def get_for_user(self, paper_id: int, user_id: int) -> UploadedPaper | None:
stmt = (
select(UploadedPaper)
+ .options(selectinload(UploadedPaper.folder))
.where(UploadedPaper.id == paper_id, UploadedPaper.user_id == user_id)
)
result = await self._session.execute(stmt)
return result.scalar_one_or_none()
+ async def count_unique_for_user(
+ self,
+ user_id: int,
+ *,
+ folder_id: int | None = None,
+ unfiled_only: bool = False,
+ ) -> int:
+ deduped = self._deduped_uploads_subquery(user_id, folder_id=folder_id, unfiled_only=unfiled_only)
+ stmt = select(func.count()).select_from(deduped).where(deduped.c.rn == 1)
+ result = await self._session.execute(stmt)
+ return int(result.scalar_one())
+
+ async def list_unique_for_user_paginated(
+ self,
+ user_id: int,
+ *,
+ offset: int,
+ limit: int,
+ folder_id: int | None = None,
+ unfiled_only: bool = False,
+ ) -> list[UploadedPaper]:
+ deduped = self._deduped_uploads_subquery(user_id, folder_id=folder_id, unfiled_only=unfiled_only)
+ stmt = (
+ select(UploadedPaper)
+ .options(selectinload(UploadedPaper.folder))
+ .join(deduped, UploadedPaper.id == deduped.c.id)
+ .where(deduped.c.rn == 1)
+ .order_by(UploadedPaper.uploaded_at.desc(), UploadedPaper.id.desc())
+ .offset(offset)
+ .limit(limit)
+ )
+ result = await self._session.execute(stmt)
+ return list(result.scalars().unique().all())
+
+ async def count_by_folder(self, user_id: int) -> dict[int | None, int]:
+ deduped = self._deduped_uploads_subquery(user_id, per_folder_partition=True)
+ stmt = (
+ select(deduped.c.folder_id, func.count().label("total"))
+ .where(deduped.c.rn == 1)
+ .group_by(deduped.c.folder_id)
+ )
+ result = await self._session.execute(stmt)
+ counts: dict[int | None, int] = {}
+ for folder_id, total in result.all():
+ counts[folder_id] = int(total)
+ return counts
+
async def create(
self,
*,
@@ -77,6 +222,8 @@ async def create(
file_size: int,
file_url: str,
file_hash: str | None,
+ folder_id: int | None = None,
+ metadata_json: dict | None = None,
) -> UploadedPaper:
record = UploadedPaper(
user_id=user_id,
@@ -86,11 +233,46 @@ async def create(
file_size=file_size,
file_url=file_url,
file_hash=file_hash,
+ folder_id=folder_id,
+ metadata_json=metadata_json,
)
self._session.add(record)
await self._session.flush()
return record
+ async def update_folder(self, record: UploadedPaper, folder_id: int | None) -> UploadedPaper:
+ record.folder_id = folder_id
+ await self._session.flush()
+ return record
+
+ async def update_metadata(self, record: UploadedPaper, metadata_json: dict | None) -> UploadedPaper:
+ record.metadata_json = metadata_json
+ await self._session.flush()
+ return record
+
+ async def update_file_fields(
+ self,
+ record: UploadedPaper,
+ *,
+ stored_filename: str,
+ file_url: str,
+ file_size: int,
+ file_hash: str | None,
+ content_type: str | None = None,
+ ) -> UploadedPaper:
+ record.stored_filename = stored_filename
+ record.file_url = file_url
+ record.file_size = file_size
+ record.file_hash = file_hash
+ if content_type is not None:
+ record.content_type = content_type
+ await self._session.flush()
+ return record
+
+ async def delete(self, record: UploadedPaper) -> None:
+ await self._session.delete(record)
+ await self._session.flush()
+
class ParsedPaperCacheRepository:
"""Cache MinerU parse outputs by file hash."""
diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py
index 36d03da..7ed654a 100644
--- a/backend/app/models/__init__.py
+++ b/backend/app/models/__init__.py
@@ -2,6 +2,7 @@
# Importing these modules registers the SQLAlchemy models.
from app.models import conversation # noqa: F401
+from app.models import library_folder # noqa: F401
from app.models import mineru_parse_job # noqa: F401
from app.models import parsed_paper_cache # noqa: F401
from app.models import paper # noqa: F401
diff --git a/backend/app/models/library_folder.py b/backend/app/models/library_folder.py
new file mode 100644
index 0000000..d9e1a0c
--- /dev/null
+++ b/backend/app/models/library_folder.py
@@ -0,0 +1,23 @@
+"""Model representing custom folders inside a user's personal library."""
+from datetime import datetime
+
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, String
+from sqlalchemy.orm import relationship
+
+from app.models.base import Base
+
+
+class LibraryFolder(Base):
+ """User-scoped folder used to organize uploaded papers."""
+
+ __tablename__ = "library_folders"
+
+ id = Column(Integer, primary_key=True, index=True)
+ user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)
+ name = Column(String(120), nullable=False)
+ color = Column(String(32), nullable=True)
+ created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
+ updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
+
+ papers = relationship("UploadedPaper", back_populates="folder", passive_deletes=True)
+
diff --git a/backend/app/models/uploaded_paper.py b/backend/app/models/uploaded_paper.py
index 2c81acd..0a5e4e6 100644
--- a/backend/app/models/uploaded_paper.py
+++ b/backend/app/models/uploaded_paper.py
@@ -5,6 +5,7 @@
from sqlalchemy.orm import relationship
from app.models.base import Base
+from app.models.types import JSONBCompat
class UploadedPaper(Base):
@@ -20,6 +21,9 @@ class UploadedPaper(Base):
file_size = Column(Integer, nullable=False)
file_url = Column(String(512), nullable=False)
file_hash = Column(String(128), nullable=True, index=True)
+ folder_id = Column(Integer, ForeignKey("library_folders.id", ondelete="SET NULL"), nullable=True, index=True)
+ metadata_json = Column("metadata", JSONBCompat(), nullable=True)
uploaded_at = Column(DateTime, nullable=False, default=datetime.utcnow)
user = relationship("User", backref="uploaded_papers")
+ folder = relationship("LibraryFolder", back_populates="papers")
diff --git a/backend/app/models/user.py b/backend/app/models/user.py
index 3795d7f..a738f4d 100644
--- a/backend/app/models/user.py
+++ b/backend/app/models/user.py
@@ -19,6 +19,8 @@ class User(Base):
avatar_url = Column(String(512), nullable=True)
research_interests = Column(Text, nullable=True)
is_active = Column(Boolean, default=True, nullable=False)
+ oauth_provider = Column(String(50), nullable=True)
+ oauth_account_id = Column(String(255), nullable=True, index=True)
created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
# 关系
diff --git a/backend/app/schemas/library.py b/backend/app/schemas/library.py
new file mode 100644
index 0000000..7724119
--- /dev/null
+++ b/backend/app/schemas/library.py
@@ -0,0 +1,49 @@
+"""Pydantic schemas for personal library folders."""
+from datetime import datetime
+
+from pydantic import BaseModel, Field, field_validator
+
+
+class LibraryFolderBase(BaseModel):
+ name: str = Field(..., min_length=1, max_length=120)
+ color: str | None = Field(None, max_length=32)
+
+
+class LibraryFolderCreate(LibraryFolderBase):
+ pass
+
+
+class LibraryFolderUpdate(LibraryFolderBase):
+ pass
+
+
+class LibraryFolderSummary(BaseModel):
+ id: int
+ name: str
+ color: str | None = None
+
+ class Config:
+ from_attributes = True
+
+
+class LibraryFolderRead(LibraryFolderSummary):
+ paper_count: int = 0
+ created_at: datetime
+ updated_at: datetime
+
+ @field_validator("paper_count", mode="before")
+ def default_count(cls, value: int | None) -> int: # noqa: B902
+ return value or 0
+
+
+class LibraryFolderListResponse(BaseModel):
+ folders: list[LibraryFolderRead]
+ unfiled_count: int = 0
+
+
+class LibraryFolderAssignment(BaseModel):
+ folder_id: int | None = Field(None, description="Target folder ID;null 表示移动到未分类")
+
+
+class LibraryDownloadRequest(BaseModel):
+ pdf_url: str | None = Field(None, description="远程 PDF 下载链接,可覆盖已有 file_url")
diff --git a/backend/app/schemas/paper.py b/backend/app/schemas/paper.py
index a660978..d83dc46 100644
--- a/backend/app/schemas/paper.py
+++ b/backend/app/schemas/paper.py
@@ -3,6 +3,8 @@
from pydantic import BaseModel
+from app.schemas import upload as upload_schema
+
class PaperBase(BaseModel):
title: str
@@ -37,3 +39,10 @@ class PaperImportRequest(PaperBase):
"""
pdf_url: str | None = None
+ folder_id: int | None = None
+
+
+class PaperImportResponse(BaseModel):
+ paper: "PaperRead"
+ uploaded: upload_schema.UploadedPaperRead | None = None
+ folder_id: int | None = None
diff --git a/backend/app/schemas/upload.py b/backend/app/schemas/upload.py
index dbe8f08..215dea8 100644
--- a/backend/app/schemas/upload.py
+++ b/backend/app/schemas/upload.py
@@ -3,6 +3,8 @@
from pydantic import BaseModel
+from app.schemas.library import LibraryFolderSummary
+
class UploadedPaperBase(BaseModel):
original_filename: str
@@ -10,11 +12,14 @@ class UploadedPaperBase(BaseModel):
content_type: str
file_size: int
file_hash: str | None = None
+ folder_id: int | None = None
+ metadata_json: dict | None = None
class UploadedPaperRead(UploadedPaperBase):
id: int
uploaded_at: datetime
+ folder: LibraryFolderSummary | None = None
class Config:
from_attributes = True
diff --git a/backend/app/services/ai/llm_client.py b/backend/app/services/ai/llm_client.py
index 2c01c02..96e591e 100644
--- a/backend/app/services/ai/llm_client.py
+++ b/backend/app/services/ai/llm_client.py
@@ -8,6 +8,9 @@
from aiohttp import ClientTimeout
from app.core.config import settings
+import logging
+
+logger = logging.getLogger(__name__)
class DeepSeekClient:
@@ -120,7 +123,15 @@ def _build_ssl_context(self) -> ssl.SSLContext:
if self.ca_bundle:
try:
context.load_verify_locations(self.ca_bundle)
- except Exception as exc:
+ except FileNotFoundError as exc:
+ # Fallback to default cert store when the configured CA bundle does not exist.
+ # Logging is intentionally simple since DeepSeek usage isn't critical in local dev.
+ logger.warning(
+ "DeepSeek CA bundle not found at %s, falling back to default cert store: %s",
+ self.ca_bundle,
+ exc,
+ )
+ except Exception as exc: # pragma: no cover - defensive
raise RuntimeError(
f"Failed to load DeepSeek CA bundle at {self.ca_bundle}: {exc}"
) from exc
diff --git a/backend/app/services/pdf_metadata.py b/backend/app/services/pdf_metadata.py
new file mode 100644
index 0000000..40c614c
--- /dev/null
+++ b/backend/app/services/pdf_metadata.py
@@ -0,0 +1,130 @@
+"""Lightweight PDF metadata extractor for organizing the personal library."""
+from __future__ import annotations
+
+import asyncio
+from collections import Counter
+from datetime import datetime
+import re
+from pathlib import Path
+from typing import Any
+
+import fitz # PyMuPDF
+
+_TOKEN_PATTERN = re.compile(r"[\u4e00-\u9fa5]{2,}|[A-Za-z]{4,}")
+_STOPWORDS = {
+ "abstract",
+ "introduction",
+ "research",
+ "paper",
+ "study",
+ "analysis",
+ "system",
+ "design",
+ "using",
+ "based",
+ "results",
+ "method",
+ "methods",
+ "conclusion",
+ "data",
+ "model",
+ "models",
+ "framework",
+ "information",
+ "对于",
+ "我们",
+ "提出",
+ "研究",
+ "方案",
+ "模型",
+ "数据",
+ "结果",
+ "本文",
+ "以及",
+ "进行",
+}
+
+
+def _summarize_text(text: str, max_length: int = 420) -> str | None:
+ cleaned = " ".join(part.strip() for part in text.splitlines() if part.strip())
+ if not cleaned:
+ return None
+ if len(cleaned) <= max_length:
+ return cleaned
+ return f"{cleaned[:max_length].rstrip()}…"
+
+
+def _extract_topics(text: str, limit: int = 4) -> list[str]:
+ if not text:
+ return []
+ tokens = [token.lower() for token in _TOKEN_PATTERN.findall(text)]
+ filtered = [token for token in tokens if token not in _STOPWORDS and len(token) > 1]
+ if not filtered:
+ return []
+ counter = Counter(filtered)
+ topics: list[str] = []
+ for token, _ in counter.most_common(limit * 2):
+ if token not in topics:
+ topics.append(token)
+ if len(topics) >= limit:
+ break
+ return topics
+
+
+def extract_pdf_metadata(pdf_path: Path, fallback_title: str | None = None) -> dict[str, Any]:
+ """Parse lightweight metadata such as title/author/summary/topics from a PDF."""
+ result: dict[str, Any] = {
+ "title": fallback_title or pdf_path.stem,
+ "summary": None,
+ "topics": [],
+ "page_count": None,
+ "author": None,
+ "subject": None,
+ "keywords": None,
+ "extracted_at": datetime.utcnow().isoformat(),
+ }
+
+ if not pdf_path.exists():
+ return result
+
+ doc = fitz.open(pdf_path)
+ try:
+ base_meta = doc.metadata or {}
+ title = (base_meta.get("title") or fallback_title or pdf_path.stem).strip()
+ author = (base_meta.get("author") or "").strip() or None
+ subject = (base_meta.get("subject") or "").strip() or None
+ keywords = (base_meta.get("keywords") or "").strip() or None
+
+ text_parts: list[str] = []
+ max_pages = min(doc.page_count, 3)
+ for page_index in range(max_pages):
+ try:
+ page = doc.load_page(page_index)
+ text_parts.append(page.get_text("text"))
+ except Exception:
+ continue
+
+ combined_text = "\n".join(text_parts).strip()
+ summary = _summarize_text(combined_text)
+ topics = _extract_topics(combined_text)
+
+ result.update(
+ {
+ "title": title or fallback_title or pdf_path.stem,
+ "author": author,
+ "subject": subject,
+ "keywords": keywords,
+ "summary": summary,
+ "topics": topics,
+ "page_count": doc.page_count,
+ }
+ )
+ finally:
+ doc.close()
+
+ return result
+
+
+async def extract_pdf_metadata_async(pdf_path: Path, fallback_title: str | None = None) -> dict[str, Any]:
+ """Async wrapper for `extract_pdf_metadata` to keep FastAPI endpoints non-blocking."""
+ return await asyncio.to_thread(extract_pdf_metadata, pdf_path, fallback_title)
diff --git a/backend/migrations/versions/20251110_add_library_folders.py b/backend/migrations/versions/20251110_add_library_folders.py
new file mode 100644
index 0000000..d5412c2
--- /dev/null
+++ b/backend/migrations/versions/20251110_add_library_folders.py
@@ -0,0 +1,55 @@
+"""Add library folders and metadata for uploaded papers."""
+from __future__ import annotations
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql as psql
+
+
+# revision identifiers, used by Alembic.
+revision = "20251110_add_library_folders"
+down_revision = "20251106_add_mineru_parse_jobs"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ op.create_table(
+ "library_folders",
+ sa.Column("id", sa.Integer(), primary_key=True),
+ sa.Column("user_id", sa.Integer(), nullable=False),
+ sa.Column("name", sa.String(length=120), nullable=False),
+ sa.Column("color", sa.String(length=32), nullable=True),
+ sa.Column("created_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
+ sa.Column("updated_at", sa.DateTime(), nullable=False, server_default=sa.func.now()),
+ sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
+ )
+ op.create_index("ix_library_folders_user_id", "library_folders", ["user_id"])
+ op.create_unique_constraint(
+ "uq_library_folders_user_id_name",
+ "library_folders",
+ ["user_id", "name"],
+ )
+
+ op.add_column("uploaded_papers", sa.Column("folder_id", sa.Integer(), nullable=True))
+ op.add_column("uploaded_papers", sa.Column("metadata", sa.JSON().with_variant(psql.JSONB(astext_type=sa.Text()), "postgresql"), nullable=True))
+ op.create_foreign_key(
+ "fk_uploaded_papers_folder_id",
+ "uploaded_papers",
+ "library_folders",
+ ["folder_id"],
+ ["id"],
+ ondelete="SET NULL",
+ )
+ op.create_index("ix_uploaded_papers_folder_id", "uploaded_papers", ["folder_id"])
+
+
+def downgrade() -> None:
+ op.drop_index("ix_uploaded_papers_folder_id", table_name="uploaded_papers")
+ op.drop_constraint("fk_uploaded_papers_folder_id", "uploaded_papers", type_="foreignkey")
+ op.drop_column("uploaded_papers", "metadata")
+ op.drop_column("uploaded_papers", "folder_id")
+
+ op.drop_constraint("uq_library_folders_user_id_name", "library_folders", type_="unique")
+ op.drop_index("ix_library_folders_user_id", table_name="library_folders")
+ op.drop_table("library_folders")
diff --git a/backend/migrations/versions/20251112_add_oauth_fields.py b/backend/migrations/versions/20251112_add_oauth_fields.py
new file mode 100644
index 0000000..6ed1eab
--- /dev/null
+++ b/backend/migrations/versions/20251112_add_oauth_fields.py
@@ -0,0 +1,29 @@
+"""add oauth provider columns to users"""
+from __future__ import annotations
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "20251112_add_oauth_fields"
+down_revision = "20251110_add_library_folders"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ op.add_column("users", sa.Column("oauth_provider", sa.String(length=50), nullable=True))
+ op.add_column("users", sa.Column("oauth_account_id", sa.String(length=255), nullable=True))
+ op.create_index(
+ "ix_users_oauth_provider_account",
+ "users",
+ ["oauth_provider", "oauth_account_id"],
+ unique=True,
+ )
+
+
+def downgrade() -> None:
+ op.drop_index("ix_users_oauth_provider_account", table_name="users")
+ op.drop_column("users", "oauth_account_id")
+ op.drop_column("users", "oauth_provider")
diff --git a/frontend/src/app/auth/sso/page.tsx b/frontend/src/app/auth/sso/page.tsx
new file mode 100644
index 0000000..037e2ad
--- /dev/null
+++ b/frontend/src/app/auth/sso/page.tsx
@@ -0,0 +1,48 @@
+"use client";
+
+import { useEffect, useState } from "react";
+import { useRouter, useSearchParams } from "next/navigation";
+import { Loader2 } from "lucide-react";
+
+import { setAccessToken } from "@/lib/auth";
+
+export default function OAuthBridgePage() {
+ const router = useRouter();
+ const searchParams = useSearchParams();
+ const [status, setStatus] = useState("正在处理登录,请稍候…");
+
+ useEffect(() => {
+ const errorParam = searchParams.get("error");
+ const token = searchParams.get("token");
+ const nextPath = searchParams.get("next") || "/";
+
+ if (errorParam) {
+ setStatus(`登录失败:${errorParam}`);
+ return;
+ }
+
+ if (!token) {
+ setStatus("未收到登录凭据,请重新尝试。");
+ return;
+ }
+
+ setAccessToken(token);
+ setStatus("登录成功,正在跳转…");
+ const timer = window.setTimeout(() => {
+ router.replace(nextPath);
+ }, 600);
+
+ return () => window.clearTimeout(timer);
+ }, [router, searchParams]);
+
+  return (
+    <div className="flex min-h-screen flex-col items-center justify-center gap-3 text-sm text-slate-500">
+      <Loader2 className="h-6 w-6 animate-spin" aria-hidden="true" />
+      <p>{status}</p>
+    </div>
+  );
+}
diff --git a/frontend/src/app/library/page.tsx b/frontend/src/app/library/page.tsx
index e12b206..646fff1 100644
--- a/frontend/src/app/library/page.tsx
+++ b/frontend/src/app/library/page.tsx
@@ -2,11 +2,19 @@
import Link from "next/link";
import { useCallback, useEffect, useMemo, useState } from "react";
+import { useRouter } from "next/navigation";
import DashboardShell from "@/components/layout/dashboard-shell";
-import { fetchUploadedPapers, type UploadedPaperDTO } from "@/lib/api-client";
+import {
+ deleteUploadedPaper,
+ fetchUploadedPapers,
+ updateUploadedPaperFolder,
+ type LibraryFolderDTO,
+ type UploadedPaperDTO,
+} from "@/lib/api-client";
import { getAccessToken } from "@/lib/auth";
import { BACKEND_URL } from "@/lib/config";
+import { useLibraryFolders } from "@/hooks/use-library-folders";
function formatTitle(filename: string): string {
const withoutExt = filename.replace(/\.[^/.]+$/, "");
@@ -103,8 +111,12 @@ function getAdaptiveTitleClass(title: string): string {
return "text-lg sm:text-xl";
}
+type FolderFilter = "all" | "unfiled" | number;
+
+const FOLDER_COLORS = ["#0ea5e9", "#2563eb", "#10b981", "#f97316", "#f43f5e", "#a855f7", "#eab308"];
+
export default function LibraryPage() {
- const token = getAccessToken();
+ const router = useRouter();
const [papers, setPapers] = useState<UploadedPaperDTO[]>([]);
const [isLoading, setIsLoading] = useState(true);
const [isRefreshing, setIsRefreshing] = useState(false);
@@ -112,17 +124,65 @@ export default function LibraryPage() {
const [page, setPage] = useState(1);
const [total, setTotal] = useState(0);
const [totalPages, setTotalPages] = useState(0);
- const PAGE_SIZE = 12;
+ const [activeFilter, setActiveFilter] = useState<FolderFilter>("all");
+ const [selectedPaper, setSelectedPaper] = useState<UploadedPaperDTO | null>(null);
+ const [isFolderModalOpen, setIsFolderModalOpen] = useState(false);
+ const [newFolderName, setNewFolderName] = useState("");
+ const [newFolderColor, setNewFolderColor] = useState(FOLDER_COLORS[0]);
+ const [folderModalError, setFolderModalError] = useState<string | null>(null);
+ const [libraryToken, setLibraryToken] = useState<string | null>(null);
+ const [authChecked, setAuthChecked] = useState(false);
+ const [gridMode, setGridMode] = useState<"lg" | "xl">("xl");
+ const [folderActionMessage, setFolderActionMessage] = useState<string | null>(null);
+ const [isUpdatingFolder, setIsUpdatingFolder] = useState(false);
+ const [isDeletingPaper, setIsDeletingPaper] = useState(false);
+ const [deleteMessage, setDeleteMessage] = useState<string | null>(null);
+
+ const {
+ folders,
+ unfiledCount,
+ isLoading: isFolderLoading,
+ error: folderError,
+ refresh: refreshFolders,
+ create: createFolder,
+ } = useLibraryFolders(libraryToken);
const downloadBase = useMemo(
() => (BACKEND_URL.endsWith("/") ? BACKEND_URL.slice(0, -1) : BACKEND_URL),
[],
);
+ useEffect(() => {
+ if (typeof window !== "undefined") {
+ setLibraryToken(getAccessToken());
+ }
+ setAuthChecked(true);
+ }, []);
+
+ useEffect(() => {
+ if (typeof window === "undefined") {
+ return;
+ }
+ const detectMode = () => {
+ setGridMode(window.innerWidth >= 1280 ? "xl" : "lg");
+ };
+ detectMode();
+ window.addEventListener("resize", detectMode);
+ return () => window.removeEventListener("resize", detectMode);
+ }, []);
+
+ const pageSize = gridMode === "xl" ? 10 : 8;
+
+ const resolveFolderParam = useCallback((filter: FolderFilter) => {
+ if (filter === "all") return undefined;
+ if (filter === "unfiled") return 0;
+ return filter;
+ }, []);
+
const loadPage = useCallback(
- async (targetPage: number, { showLoading = true }: { showLoading?: boolean } = {}) => {
- const authToken = token;
- if (!authToken) {
+ async (targetPage: number, filter: FolderFilter, { showLoading = true }: { showLoading?: boolean } = {}) => {
+ const tokenValue = libraryToken;
+ if (!tokenValue) {
return;
}
@@ -132,14 +192,20 @@ export default function LibraryPage() {
setError(null);
try {
- const data = await fetchUploadedPapers(authToken, {
+ const folderParam = resolveFolderParam(filter);
+ const data = await fetchUploadedPapers(tokenValue, {
page: targetPage,
- pageSize: PAGE_SIZE,
+ pageSize,
+ folderId: typeof folderParam === "number" ? folderParam : undefined,
});
setPapers(data.items);
setPage(data.page);
setTotal(data.total);
setTotalPages(data.total_pages);
+ setSelectedPaper((current) => {
+ if (!current) return null;
+ return data.items.find((item) => item.id === current.id) ?? null;
+ });
} catch (err) {
const message = err instanceof Error ? err.message : "获取上传记录失败";
setError(message);
@@ -149,11 +215,15 @@ export default function LibraryPage() {
}
}
},
- [PAGE_SIZE, token],
+ [libraryToken, pageSize, resolveFolderParam],
);
useEffect(() => {
- if (!token) {
+ if (!authChecked) {
+ return;
+ }
+
+ if (!libraryToken) {
setIsLoading(false);
setPapers([]);
setTotal(0);
@@ -162,11 +232,20 @@ export default function LibraryPage() {
return;
}
- void loadPage(1);
- }, [loadPage, token]);
+ void loadPage(1, activeFilter);
+ }, [activeFilter, authChecked, loadPage, libraryToken]);
+
+ useEffect(() => {
+ setSelectedPaper(null);
+ }, [activeFilter]);
+
+ useEffect(() => {
+ setFolderActionMessage(null);
+ setDeleteMessage(null);
+ }, [selectedPaper]);
async function handleRefresh() {
- if (!token) {
+ if (!libraryToken) {
return;
}
@@ -174,7 +253,8 @@ export default function LibraryPage() {
setError(null);
try {
- await loadPage(page, { showLoading: false });
+ await loadPage(page, activeFilter, { showLoading: false });
+ await refreshFolders();
} finally {
setIsRefreshing(false);
}
@@ -187,16 +267,151 @@ export default function LibraryPage() {
}
if (direction === "prev" && page > 1) {
- void loadPage(page - 1);
+ void loadPage(page - 1, activeFilter);
return;
}
if (direction === "next" && totalPageCount > 0 && page < totalPageCount) {
- void loadPage(page + 1);
+ void loadPage(page + 1, activeFilter);
+ }
+ }
+
+ const totalLibraryCount = useMemo(
+ () => folders.reduce((sum, folder) => sum + (folder.paper_count ?? 0), 0) + unfiledCount,
+ [folders, unfiledCount],
+ );
+
+ const folderEntries = useMemo(() => {
+ const entries: Array<{ id: FolderFilter; name: string; count: number; color?: string | null }> = [
+ { id: "all", name: "全部文献", count: totalLibraryCount },
+ { id: "unfiled", name: "未分类", count: unfiledCount },
+ ...folders.map((folder) => ({
+ id: folder.id as FolderFilter,
+ name: folder.name,
+ color: folder.color ?? undefined,
+ count: folder.paper_count ?? 0,
+ })),
+ ];
+ return entries;
+ }, [folders, totalLibraryCount, unfiledCount]);
+
+ const activeFolderInfo = useMemo(() => {
+ if (activeFilter === "all") {
+ return {
+ name: "全部文献",
+ count: totalLibraryCount,
+ description: "查看你保存的所有论文,自动去重展示。",
+ };
+ }
+ if (activeFilter === "unfiled") {
+ return {
+ name: "未分类",
+ count: unfiledCount,
+ description: "尚未归档的论文会暂存于此,方便快速整理。",
+ };
+ }
+ const folder = folders.find((item) => item.id === activeFilter);
+ return {
+ name: folder?.name ?? "文献分类",
+ count: folder?.paper_count ?? 0,
+ color: folder?.color,
+ description: "该分类内的论文独立分页和管理。",
+ };
+ }, [activeFilter, folders, totalLibraryCount, unfiledCount]);
+
+ const handleFolderSelect = (filter: FolderFilter) => {
+ setActiveFilter(filter);
+ setPage(1);
+ void loadPage(1, filter);
+ };
+
+ async function handleCreateFolder() {
+ if (!libraryToken) {
+ setFolderModalError("请先登录后再创建文件夹");
+ return;
+ }
+ const trimmed = newFolderName.trim();
+ if (!trimmed) {
+ setFolderModalError("请填写文件夹名称");
+ return;
+ }
+
+ setFolderModalError(null);
+ try {
+ await createFolder({ name: trimmed, color: newFolderColor });
+ await refreshFolders();
+ setIsFolderModalOpen(false);
+ setNewFolderName("");
+ } catch (err) {
+ const message = err instanceof Error ? err.message : "创建文件夹失败";
+ setFolderModalError(message);
+ }
+ }
+
+ async function handleAssignFolder(paperId: number, targetFolderId: number | null) {
+ if (!libraryToken) {
+ setFolderActionMessage("请先登录后再管理文献");
+ return;
+ }
+
+ setIsUpdatingFolder(true);
+ setFolderActionMessage(null);
+ try {
+ await updateUploadedPaperFolder(libraryToken, paperId, targetFolderId);
+ await loadPage(page, activeFilter, { showLoading: false });
+ await refreshFolders();
+ setFolderActionMessage("✓ 已更新文献所在文件夹");
+ } catch (err) {
+ const message = err instanceof Error ? err.message : "更新文献文件夹失败";
+ setFolderActionMessage(message);
+ } finally {
+ setIsUpdatingFolder(false);
+ }
+ }
+
+ async function handleDeletePaper(paperId: number) {
+ if (!libraryToken) {
+ setDeleteMessage("请先登录后再删除文献");
+ return;
+ }
+
+ const confirmed = window.confirm("确定要删除这篇文献吗?此操作不可撤销。");
+ if (!confirmed) {
+ return;
+ }
+
+ setIsDeletingPaper(true);
+ setDeleteMessage(null);
+ try {
+ await deleteUploadedPaper(libraryToken, paperId);
+ setDeleteMessage("✓ 文献已删除");
+ setSelectedPaper(null);
+ await refreshFolders();
+ await loadPage(1, activeFilter);
+ } catch (err) {
+ const message = err instanceof Error ? err.message : "删除文献失败";
+ setDeleteMessage(message);
+ } finally {
+ setIsDeletingPaper(false);
}
}
- if (!token) {
+ if (!authChecked) {
+ return (
+
+
+
+ );
+ }
+
+ if (!libraryToken) {
return (
-
-
-
-
上传论文
-
- 最近上传的论文会显示在这里,你可以快速下载或重新整理它们。
-
-
-
+
+
-
- {error && (
-
- {error}
+ {folderError &&
{folderError}
}
+
+ {folderEntries.map((entry) => {
+ const isActive = activeFilter === entry.id;
+ return (
+
+ );
+ })}
+ {isFolderLoading && (
+
加载文件夹中…
+ )}
- )}
-
- {isLoading ? (
-
- {Array.from({ length: 10 }).map((_, index) => (
-
-
-
-
+
+
+
+
+
+
+
+ {activeFolderInfo.name}
+
+
{activeFolderInfo.description}
+
+
+
+
+ 上传新论文
+
- ))}
-
- ) : papers.length === 0 ? (
-
-
📄
-
-
还没有上传的论文
-
- 试着上传第一篇 PDF 文献,创建你的个人文献库。
-
-
- 去上传
-
+
+ 共 {activeFolderInfo.count} 篇
+ 已为你自动去重显示
+
- ) : (
-
- {papers.map((paper) => {
- const title = formatTitle(paper.original_filename);
- const downloadUrl = resolveDownloadUrl(downloadBase, paper.file_url);
- return (
-
+ {error}
+
+ )}
+
+ {papers.length > 0 && (
+
handlePageChange("prev")}
+ onNext={() => handlePageChange("next")}
+ disablePrev={page <= 1 || total === 0}
+ disableNext={total === 0 || (totalPages > 0 && page >= totalPages)}
+ />
+ )}
+
+ {isLoading ? (
+
+ {Array.from({ length: pageSize }).map((_, index) => (
+
- {/* PDF 图标区域 */}
-
+ ) : papers.length === 0 ? (
+
+
📄
+
+
此分类暂无文献
+
+ 上传或保存一篇 PDF,或切换到其它文件夹查看。
+
+
+
+ 去上传
+
+
+ ) : (
+
+ {papers.map((paper) => {
+ const title = formatTitle(paper.original_filename);
+ const downloadUrl = resolveDownloadUrl(downloadBase, paper.file_url);
+ const isActive = selectedPaper?.id === paper.id;
+
+ return (
+
setSelectedPaper(paper)}
+ className={`group flex h-full cursor-pointer flex-col rounded-2xl border bg-white/80 p-4 shadow-sm transition hover:-translate-y-0.5 hover:shadow-lg dark:bg-slate-900/70 ${
+ isActive ? "border-blue-500 dark:border-blue-400" : "border-slate-200 dark:border-slate-700"
+ }`}
+ >
+
+
+
+ {getThumbnailLabel(title, "PDF")}
+
+
+
+
+
+ {title}
+
+
+ {formatFileSize(paper.file_size)} · 上传于 {formatDateTime(paper.uploaded_at)}
+
+
-
-
- {/* 标题 */}
-
- {title}
-
-
- {/* 文件大小 */}
-
- {formatFileSize(paper.file_size)}
-
-
- {/* 底部操作栏 */}
-
-
- );
- })}
-
- )}
+
+ );
+ })}
+
+ )}
+
+ setSelectedPaper(null)}
+ onAssignFolder={handleAssignFolder}
+ isAssigning={isUpdatingFolder}
+ actionMessage={folderActionMessage}
+ onDeletePaper={handleDeletePaper}
+ isDeleting={isDeletingPaper}
+ deleteMessage={deleteMessage}
+ />
+
+
- {papers.length > 0 && (
-
-
- 共 {total} 篇 · 每页 {PAGE_SIZE} 篇
-
-
+ {isFolderModalOpen && (
+
+
+
新建文件夹
+
为你的论文创建一个独立分类,方便管理。
+
+
+
+
颜色标记
+
+ {FOLDER_COLORS.map((color) => (
+
+
+ {folderModalError &&
{folderModalError}
}
+
+
-
- 第 {page} / {Math.max(totalPages || 1, 1)} 页
-
+
+ )}
+
+ );
+}
+
+function MetadataPanel({
+ paper,
+ onClose,
+ folders,
+ unfiledCount,
+ onAssignFolder,
+ isAssigning,
+ actionMessage,
+ onDeletePaper,
+ isDeleting,
+ deleteMessage,
+}: {
+ paper: UploadedPaperDTO | null;
+ onClose: () => void;
+ folders: LibraryFolderDTO[];
+ unfiledCount: number;
+ onAssignFolder: (paperId: number, folderId: number | null) => void;
+ isAssigning: boolean;
+ actionMessage: string | null;
+ onDeletePaper: (paperId: number) => void;
+ isDeleting: boolean;
+ deleteMessage: string | null;
+}) {
+ if (!paper) {
+ return null;
+ }
+
+ const metadata = (paper.metadata_json ?? {}) as Record
;
+ const title = typeof metadata.title === "string" && metadata.title.trim().length > 0 ? metadata.title : formatTitle(paper.original_filename);
+ const author = typeof metadata.author === "string" ? metadata.author : null;
+ const summary = typeof metadata.summary === "string" ? metadata.summary : null;
+ const topics = Array.isArray(metadata.topics)
+ ? metadata.topics.filter((topic): topic is string => typeof topic === "string")
+ : [];
+ const subject = typeof metadata.subject === "string" ? metadata.subject : null;
+ const keywords = typeof metadata.keywords === "string" ? metadata.keywords : null;
+ const pageCount =
+ typeof metadata.page_count === "number"
+ ? metadata.page_count
+ : metadata.page_count && typeof metadata.page_count === "string"
+ ? Number(metadata.page_count)
+ : null;
+
+ return (
+
+
+
文档元信息
+
+
+
{title}
+ {author &&
作者:{author}
}
+
+
文件大小:{formatFileSize(paper.file_size)}
+ {pageCount &&
页数:{pageCount}
}
+
上传时间:{formatDateTime(paper.uploaded_at)}
+ {subject &&
主题:{subject}
}
+ {keywords &&
关键词:{keywords}
}
+
+ {summary && (
+
+ )}
+ {topics.length > 0 && (
+
+ {topics.map((topic) => (
+
+ #{topic}
+
+ ))}
+
+ )}
+
+
+ );
+}
+
+function FolderManagementSection({
+ paper,
+ folders,
+ unfiledCount,
+ onAssignFolder,
+ isAssigning,
+ actionMessage,
+ onDeletePaper,
+ isDeleting,
+ deleteMessage,
+}: {
+ paper: UploadedPaperDTO;
+ folders: LibraryFolderDTO[];
+ unfiledCount: number;
+ onAssignFolder: (paperId: number, folderId: number | null) => void;
+ isAssigning: boolean;
+ actionMessage: string | null;
+ onDeletePaper: (paperId: number) => void;
+ isDeleting: boolean;
+ deleteMessage: string | null;
+}) {
+ const [selectedValue, setSelectedValue] = useState("unfiled");
+
+ useEffect(() => {
+ if (paper.folder_id) {
+ setSelectedValue(paper.folder_id.toString());
+ } else {
+ setSelectedValue("unfiled");
+ }
+ }, [paper]);
+
+ const folderOptions = useMemo(
+ () => [
+ { value: "unfiled", label: `未分类(${unfiledCount})` },
+ ...folders.map((folder) => ({
+ value: folder.id.toString(),
+ label: `${folder.name}(${folder.paper_count ?? 0})`,
+ })),
+ ],
+ [folders, unfiledCount],
+ );
+
+ return (
+
+
文献管理
+
+
+ {actionMessage &&
{actionMessage}
}
+
+ {deleteMessage &&
{deleteMessage}
}
+
+ );
+}
+
+function PageTurner({
+ page,
+ totalPages,
+ pageSize,
+ onPrev,
+ onNext,
+ disablePrev,
+ disableNext,
+}: {
+ page: number;
+ totalPages: number;
+ pageSize?: number;
+ onPrev: () => void;
+ onNext: () => void;
+ disablePrev?: boolean;
+ disableNext?: boolean;
+}) {
+ if (totalPages <= 1) {
+ return null;
+ }
+
+ return (
+
+
+
+
+ 第
+ {page}
+ / {totalPages} 页
+
+ {pageSize && (
+
每页 {pageSize} 篇(2 行排布)
)}
-
+
+
);
}
diff --git a/frontend/src/app/smart-reading/page.tsx b/frontend/src/app/smart-reading/page.tsx
index 7e8ef03..1fbdd60 100644
--- a/frontend/src/app/smart-reading/page.tsx
+++ b/frontend/src/app/smart-reading/page.tsx
@@ -1,6 +1,6 @@
"use client";
-import { useRouter } from "next/navigation";
+import { useRouter, useSearchParams } from "next/navigation";
import { useEffect, useState, useRef, useCallback } from "react";
import { UploadCloud, BookOpenCheck, XCircle, AlertTriangle } from "lucide-react";
@@ -8,7 +8,7 @@ import DashboardShell from "@/components/layout/dashboard-shell";
import AgentChatPanel from "@/components/ai-agent/AgentChatPanel";
import { PDFViewer } from "@/components/pdf-reader";
import { getAccessToken } from "@/lib/auth";
-import { uploadPaper, type UploadedPaperDTO } from "@/lib/api-client";
+import { uploadPaper, fetchUploadedPaper, type UploadedPaperDTO } from "@/lib/api-client";
import { BACKEND_URL } from "@/lib/config";
import type { PDFParseProgress, PDFParseResult, MinerUParseResult } from "@/types/pdf-reader";
@@ -20,9 +20,9 @@ import type { PDFParseProgress, PDFParseResult, MinerUParseResult } from "@/type
*/
export default function SmartReadingPage() {
const router = useRouter();
+ const searchParams = useSearchParams();
const fileInputRef = useRef(null);
- const [, setToken] = useState(null);
const [selectedPdfUrl, setSelectedPdfUrl] = useState(null);
const [uploadedPaper, setUploadedPaper] = useState(null);
const [isUploading, setIsUploading] = useState(false);
@@ -43,11 +43,38 @@ export default function SmartReadingPage() {
const accessToken = getAccessToken();
if (!accessToken) {
router.push("/auth");
- return;
}
- setToken(accessToken);
}, [router]);
+ const preloadLibraryPaper = useCallback(
+ async (paperId: number) => {
+ const authToken = getAccessToken();
+ if (!authToken) {
+ router.push("/auth");
+ return;
+ }
+
+ setUploadError(null);
+ setParseError(null);
+ setParseProgress(null);
+ setParsedDocument(null);
+ setIsParsing(false);
+ setMineruResult(null);
+
+ try {
+ const paper = await fetchUploadedPaper(authToken, paperId);
+ setUploadedPaper(paper);
+ const downloadUrl = new URL(`/api/v1/library/uploads/${paperId}/download`, BACKEND_URL);
+ downloadUrl.searchParams.set("access_token", authToken);
+ setSelectedPdfUrl(downloadUrl.toString());
+ } catch (err) {
+ const message = err instanceof Error ? err.message : "加载文库文档失败";
+ setUploadError(message);
+ }
+ },
+ [router],
+ );
+
// 处理PDF文件上传
/**
* Validate and upload a PDF, then wire resulting metadata into the viewer.
@@ -78,17 +105,17 @@ export default function SmartReadingPage() {
throw new Error("用户未登录");
}
- const uploadedPaper = await uploadPaper(token, file);
+ const uploadedPaper = await uploadPaper(token, file);
- // 构建PDF访问URL,确保使用混合运行时提供的后端地址
- const pdfUrl = new URL(uploadedPaper.file_url, BACKEND_URL).toString();
- setSelectedPdfUrl(pdfUrl);
- setUploadedPaper(uploadedPaper);
- setParseProgress(null);
- setParseError(null);
- setParsedDocument(null);
- setMineruResult(null);
- setIsParsing(false);
+ // 构建PDF访问URL,确保使用混合运行时提供的后端地址
+ const pdfUrl = new URL(uploadedPaper.file_url, BACKEND_URL).toString();
+ setSelectedPdfUrl(pdfUrl);
+ setUploadedPaper(uploadedPaper);
+ setParseProgress(null);
+ setParseError(null);
+ setParsedDocument(null);
+ setMineruResult(null);
+ setIsParsing(false);
console.log("PDF上传成功:", uploadedPaper);
} catch (error) {
@@ -100,6 +127,18 @@ export default function SmartReadingPage() {
}
};
+ useEffect(() => {
+ const paperIdParam = searchParams.get("paperId");
+ if (!paperIdParam) {
+ return;
+ }
+ const numericId = Number(paperIdParam);
+ if (Number.isNaN(numericId)) {
+ return;
+ }
+ void preloadLibraryPaper(numericId);
+ }, [preloadLibraryPaper, searchParams]);
+
const handlePdfError = (error: string) => {
console.error("PDF加载错误:", error);
setParseError(error);
diff --git a/frontend/src/app/watch/page.tsx b/frontend/src/app/watch/page.tsx
new file mode 100644
index 0000000..f933ec1
--- /dev/null
+++ b/frontend/src/app/watch/page.tsx
@@ -0,0 +1,593 @@
+"use client";
+
+import { useMemo, useRef, useState } from "react";
+import { useRouter } from "next/navigation";
+import type { LucideIcon } from "lucide-react";
+import {
+ ArrowRight,
+ BadgeHelp,
+ ChevronLeft,
+ ChevronRight,
+ Clapperboard,
+ Film,
+ Play,
+ Sparkles,
+ Video,
+ Wand2,
+} from "lucide-react";
+
+import DashboardShell from "@/components/layout/dashboard-shell";
+
+type SpotlightPaper = {
+ id: string;
+ title: string;
+ summary: string;
+ tags: string[];
+ vibe: string;
+ duration: string;
+ cover: string;
+};
+
+type CinemaRow = {
+ title: string;
+ description: string;
+ accent: string;
+ icon: LucideIcon;
+ papers: Array<{
+ id: string;
+ title: string;
+ highlight: string;
+ tags: string[];
+ duration: string;
+ }>;
+};
+
+const HERO_BADGES: Array<{ title: string; detail: string; icon: LucideIcon }> = [
+ {
+ title: "章节分镜",
+ detail: "一键拉出 8 个镜头 + 导航字幕",
+ icon: Clapperboard,
+ },
+ {
+ title: "AI 配音",
+ detail: "公式、图表自动补位旁白",
+ icon: Sparkles,
+ },
+ {
+ title: "沉浸注释",
+ detail: "重点段落自动贴边批注",
+ icon: Wand2,
+ },
+];
+
+const HERO_STATS = [
+ { label: "实时镜头生成", value: "2.3s", description: "平均首镜头延迟" },
+ { label: "沉浸场次", value: "1,280+", description: "本周开启" },
+ { label: "长文档通过率", value: "98%", description: "≤200 页 PDF" },
+];
+
+const SPOTLIGHT_PAPERS: SpotlightPaper[] = [
+ {
+ id: "1706.03762",
+ title: "Attention Is All You Need",
+ summary: "用 18 个镜头复盘 Transformer 的诞生,搭配自注意力动效与结构拆解。",
+ tags: ["NLP", "Transformer"],
+ vibe: "分镜式推演",
+ duration: "12 分钟",
+ cover: "https://images.unsplash.com/photo-1520607162513-77705c0f0d4a?auto=format&fit=crop&w=1600&q=80",
+ },
+ {
+ id: "2302.13971",
+ title: "Segment Anything",
+ summary: "逐帧对齐 SAM 掩膜生成过程,辅助动态图层说明 prompt 影响。",
+ tags: ["CV", "图像分割"],
+ vibe: "逐帧多镜头",
+ duration: "10 分钟",
+ cover: "https://images.unsplash.com/photo-1498050108023-c5249f4df085?auto=format&fit=crop&w=1600&q=80",
+ },
+ {
+ id: "2310.02992",
+ title: "Gemini: A Family of LMs",
+ summary: "多模态长镜头,展示音频 / 图像提示链路与跨模态推理策略。",
+ tags: ["多模态", "LLM"],
+ vibe: "全景式讲解",
+ duration: "15 分钟",
+ cover: "https://images.unsplash.com/photo-1518770660439-4636190af475?auto=format&fit=crop&w=1600&q=80",
+ },
+ {
+ id: "2402.03300",
+ title: "Sora Technical Report",
+ summary: "用胶片质感呈现生成视频模型的 3 段训练周期与失真修复策略。",
+ tags: ["生成式视频", "Diffusion"],
+ vibe: "电影化拆解",
+ duration: "9 分钟",
+ cover: "https://images.unsplash.com/photo-1521737604893-d14cc237f11d?auto=format&fit=crop&w=1600&q=80",
+ },
+ {
+ id: "2201.11903",
+ title: "Chain-of-Thought Prompting",
+ summary: "节奏较慢的旁白视角,结合板书式字幕,帮助理解 COT 提示结构。",
+ tags: ["Prompting", "推理"],
+ vibe: "慢速讲堂",
+ duration: "11 分钟",
+ cover: "https://images.unsplash.com/photo-1454496522488-7a8e488e8606?auto=format&fit=crop&w=1600&q=80",
+ },
+];
+
+const CINEMA_ROWS: CinemaRow[] = [
+ {
+ title: "多模态放映厅",
+ description: "图像 + 文本 + 音频联动,逐层拆解感知节点。",
+ accent: "from-sky-500/20 via-blue-500/10 to-transparent",
+ icon: Video,
+ papers: [
+ {
+ id: "2304.08485",
+ title: "LLaVA",
+ highlight: "图像问答与字幕联合导览",
+ tags: ["多模态", "指令调优"],
+ duration: "14 分钟",
+ },
+ {
+ id: "2305.11644",
+ title: "AudioPaLM",
+ highlight: "语音 + 文本统一对齐之旅",
+ tags: ["语音", "统一建模"],
+ duration: "16 分钟",
+ },
+ ],
+ },
+ {
+ title: "LLM 编剧场",
+ description: "聚焦推理、效率与工具编排的长镜头解构。",
+ accent: "from-fuchsia-500/15 via-purple-500/10 to-transparent",
+ icon: Film,
+ papers: [
+ {
+ id: "2306.11644",
+ title: "Toolformer",
+ highlight: "让模型自己挑镜头的工具调度",
+ tags: ["Agent", "工具调用"],
+ duration: "13 分钟",
+ },
+ {
+ id: "2304.03442",
+ title: "QLoRA",
+ highlight: "低比特场景的压感式呈现",
+ tags: ["高效微调", "量化"],
+ duration: "9 分钟",
+ },
+ ],
+ },
+ {
+ title: "视觉工作流",
+ description: "把渲染、生成和编辑的关键帧堆叠成可浏览时间线。",
+ accent: "from-amber-500/15 via-orange-500/10 to-transparent",
+ icon: Clapperboard,
+ papers: [
+ {
+ id: "2312.02144",
+ title: "Gaussian Splatting 2.0",
+ highlight: "实时 3D 场景与光追调度",
+ tags: ["3D", "NeRF"],
+ duration: "8 分钟",
+ },
+ {
+ id: "2403.13035",
+ title: "DreamCraft3D",
+ highlight: "从草图到立体的逐帧生成流程",
+ tags: ["3D 生成", "Diffusion"],
+ duration: "12 分钟",
+ },
+ ],
+ },
+];
+
+const FALLBACK_ARXIV_ID = "1706.03762";
+
+export default function WatchPage() {
+ return (
+
+
+
+ );
+}
+
+function HeroSection() {
+ const router = useRouter();
+ const [input, setInput] = useState("");
+ const [error, setError] = useState(null);
+ const [isLaunching, setIsLaunching] = useState(false);
+
+ function handleSubmit(event: React.FormEvent) {
+ event.preventDefault();
+ const normalized = normalizeArxivInput(input);
+ if (!normalized) {
+ setError("请输入有效的 arXiv 链接或编号");
+ return;
+ }
+ setError(null);
+ setIsLaunching(true);
+ window.open(`https://arxiv.org/abs/${normalized}`, "_blank", "noopener,noreferrer");
+ setTimeout(() => {
+ setIsLaunching(false);
+ }, 600);
+ }
+
+ return (
+
+
+
+
+
+
+
+
+
+ 即时把 PDF 变成电影感播放
+
+
+ 输入任意 arXiv 链接,
3 秒钟开播一场“论文放映”
+
+
+ InsightReading 的“看论文”模式会自动解析章节结构、生成镜头字幕、搭配旁白,把冗长 PDF 拆成一场更容易吸收的沉浸式放映。
+
+
+
+
+
+ {HERO_BADGES.map((badge) => {
+ const Icon = badge.icon;
+ return (
+
+
+
+
+
+
{badge.title}
+
{badge.detail}
+
+
+ );
+ })}
+
+
+
+
+
+ IMMERSIVE LIVEBOARD
+
+
放映中的体验
+
+ {HERO_STATS.map((stat) => (
+
+
{stat.label}
+
{stat.value}
+
{stat.description}
+
+ ))}
+
+
+
支持来源
+
+ arXiv / Semantic Scholar / OpenAlex / 自建文库。自动去重,统一字幕模版。
+
+
+
+
+
+ );
+}
+
+function PaperShowcaseCarousel() {
+ const containerRef = useRef(null);
+ const [activeIndex, setActiveIndex] = useState(0);
+
+ const cards = useMemo(() => SPOTLIGHT_PAPERS, []);
+
+ function scroll(direction: "prev" | "next") {
+ const node = containerRef.current;
+ if (!node) return;
+ const delta = direction === "next" ? node.clientWidth * 0.9 : -node.clientWidth * 0.9;
+ node.scrollBy({ left: delta, behavior: "smooth" });
+ }
+
+ return (
+
+
+
+
Spotlight
+
热门放映场次
+
+ 精选近两周播放率最高的论文场次,已为每篇文档准备好字幕、旁白和章节要点。左右滑动即可预览每个镜头的气质。
+
+
+
+
+
+
+
+
+ {
+ const node = containerRef.current;
+ if (!node) return;
+ const midpoint = node.scrollLeft + node.clientWidth / 2;
+ let closestIndex = 0;
+ let closestDistance = Number.POSITIVE_INFINITY;
+ node.childNodes.forEach((child, index) => {
+ if (!(child instanceof HTMLElement)) {
+ return;
+ }
+ const childCenter = child.offsetLeft + child.clientWidth / 2;
+ const distance = Math.abs(childCenter - midpoint);
+ if (distance < closestDistance) {
+ closestDistance = distance;
+ closestIndex = index;
+ }
+ });
+ setActiveIndex(closestIndex);
+ }}
+ className="flex snap-x snap-mandatory gap-6 overflow-x-auto pb-4"
+ >
+ {cards.map((paper, index) => {
+ const isActive = index === activeIndex;
+ return (
+
+
+
+
+
+
+
片长 {paper.duration}
+
+
+
+
+
ArXiv · {paper.id}
+
{paper.title}
+
{paper.summary}
+
+
+ {paper.tags.map((tag) => (
+
+ {tag}
+
+ ))}
+
+
+
+
+ );
+ })}
+
+
+ );
+}
+
+function CinemaReelGrid() {
+ return (
+
+
+
+
Immersive Tracks
+
主题影厅
+
+ 不同研究方向配不同镜头语言:多模态看特效,LLM 看脚本,视觉模型看调色。挑一个影厅,直接点播对应的论文合集。
+
+
+
+
+
+
+ {CINEMA_ROWS.map((row) => {
+ const Icon = row.icon;
+ return (
+
+
+
+
+
+
+
+ Movie Track
+
+
{row.title}
+
{row.description}
+
+
+
+
+ {row.papers.map((paper) => (
+
+
+ #{paper.id}
+
+
{paper.title}
+
{paper.highlight}
+
+ {paper.tags.map((tag) => (
+
+ {tag}
+
+ ))}
+
+
+
片长 {paper.duration}
+
+
+
+ ))}
+
+
+
+ );
+ })}
+
+
+ );
+}
+
+function normalizeArxivInput(raw: string): string | null {
+ if (!raw) {
+ return null;
+ }
+ const trimmed = raw.trim();
+ if (!trimmed) {
+ return null;
+ }
+
+ const sanitized = trimmed.replace(/\.pdf$/i, "");
+ const match = sanitized.match(
+ /(?:arxiv(?:\.org)?\/(?:abs|pdf)\/|arxiv:)?([A-Za-z-]+\/\d{7}|\d{4}\.\d{4,5})(v\d+)?/i,
+ );
+ if (!match) {
+ return null;
+ }
+
+ const core = match[1]?.toLowerCase();
+ if (!core) {
+ return null;
+ }
+
+ const version = match[2]?.toLowerCase() ?? "";
+ return `${core}${version}`;
+}
diff --git a/frontend/src/components/Recommendations.tsx b/frontend/src/components/Recommendations.tsx
index e3b6afe..a13859a 100644
--- a/frontend/src/components/Recommendations.tsx
+++ b/frontend/src/components/Recommendations.tsx
@@ -2,7 +2,6 @@
import { useCallback, useEffect, useMemo, useState } from "react";
-import { BACKEND_URL } from "@/lib/config";
import { getAccessToken } from "@/lib/auth";
import {
fetchCurrentUser,
@@ -12,6 +11,8 @@ import {
} from "@/lib/api-client";
import { searchPapers } from "@/lib/api/academic";
import type { Paper as AcademicPaper } from "@/lib/types/academic";
+import { useLibraryFolders } from "@/hooks/use-library-folders";
+import { buildBackendUrl } from "@/lib/url";
interface AuthorDetail {
name?: string | null;
@@ -49,6 +50,14 @@ const DEFAULT_TAGS = ["Machine Learning", "Natural Language Processing"];
const MAX_SAVED_LIBRARY_PAGES = 5;
const UPLOADS_PAGE_SIZE = 60;
const TITLE_CHAR_PATTERN = /[\p{L}\p{N}]/gu;
+type FolderSelection = number | "unfiled";
+
+interface PaperImportResponse {
+ paper: {
+ id: number;
+ };
+ uploaded?: UploadedPaperDTO | null;
+}
function sanitizeTitleKey(value?: string | null): string | null {
if (!value) {
@@ -233,27 +242,6 @@ function mapAcademicPaper(paper: AcademicPaper): Paper {
};
}
-/** 构造后端请求的完整 URL(自动处理开发/生产环境) */
-function buildBackendUrl(path: string): string {
- let base = BACKEND_URL?.trim() || "";
-
- // If a public/browser-facing backend URL isn't provided via env (NEXT_PUBLIC_BACKEND_URL),
- // fall back to using the current origin in the browser so fetches target the same host/port
- // the page was served from. On the server (SSR/build) the value from `config.ts` will
- // provide the internal backend address (e.g. `http://backend:8000`) when available.
- if (typeof window !== "undefined") {
- if (!base) {
- const { protocol, hostname, port } = window.location;
- base = `${protocol}//${hostname}${port ? `:${port}` : ""}`;
- }
- }
-
- // 确保 path 拼接时没有多余斜杠
- base = base.endsWith("/") ? base.slice(0, -1) : base;
- path = path.startsWith("/") ? path : `/${path}`;
- return `${base}${path}`;
-}
-
async function fetchRecommendationPapers(query: string, sources: string[]) {
const activeSources = sources.length ? sources : ["arxiv"];
const response = await searchPapers({
@@ -270,6 +258,7 @@ async function fetchRecommendationPapers(query: string, sources: string[]) {
}
export default function Recommendations() {
+ const token = getAccessToken();
const [tagInput, setTagInput] = useState("");
const [tags, setTags] = useState(DEFAULT_TAGS);
const [papers, setPapers] = useState([]);
@@ -288,6 +277,19 @@ export default function Recommendations() {
const [savedPaperKeys, setSavedPaperKeys] = useState>(new Set());
const [savedPapersLoading, setSavedPapersLoading] = useState(false);
const [savedPapersError, setSavedPapersError] = useState(null);
+ const {
+ folders,
+ isLoading: foldersLoading,
+ refresh: refreshFolders,
+ create: createFolder,
+ } = useLibraryFolders(token);
+ const [folderPickerPaper, setFolderPickerPaper] = useState(null);
+ const [selectedFolderId, setSelectedFolderId] = useState("unfiled");
+ const [folderPickerError, setFolderPickerError] = useState(null);
+ const [isSavingToLibrary, setIsSavingToLibrary] = useState(false);
+ const [newFolderInline, setNewFolderInline] = useState("");
+ const [isCreatingFolderInline, setIsCreatingFolderInline] = useState(false);
+ const [inlineFolderError, setInlineFolderError] = useState(null);
const visiblePapers = useMemo(() => {
if (!hideSavedPapers) {
@@ -302,6 +304,12 @@ export default function Recommendations() {
});
}, [hideSavedPapers, papers, savedPaperKeys]);
+ useEffect(() => {
+ if (folderPickerPaper) {
+ setSelectedFolderId(folders[0]?.id ?? "unfiled");
+ }
+ }, [folderPickerPaper, folders]);
+
const hiddenCount = hideSavedPapers ? papers.length - visiblePapers.length : 0;
const activeTagLabel = activeTag ?? tags[0] ?? "未设置";
const visibleCountLabel =
@@ -397,6 +405,117 @@ export default function Recommendations() {
}
}
+ const handleOpenFolderPicker = useCallback(
+ (paper: Paper) => {
+ setFolderPickerPaper(paper);
+ setFolderPickerError(null);
+ setSelectedFolderId(folders[0]?.id ?? "unfiled");
+ setInlineFolderError(null);
+ setNewFolderInline("");
+ },
+ [folders],
+ );
+
+ const handleConfirmSave = useCallback(async () => {
+ if (!folderPickerPaper) {
+ return;
+ }
+ const authToken = getAccessToken();
+ if (!authToken) {
+ setFolderPickerError("请先登录后再保存论文");
+ return;
+ }
+
+ setIsSavingToLibrary(true);
+ setFolderPickerError(null);
+
+ try {
+ let pdfToSend = folderPickerPaper.link ?? folderPickerPaper.url ?? "";
+ if (pdfToSend.includes("arxiv.org/abs/")) {
+ pdfToSend = pdfToSend.replace("/abs/", "/pdf/");
+ if (!pdfToSend.endsWith(".pdf")) {
+ pdfToSend = `${pdfToSend}.pdf`;
+ }
+ }
+
+ const payload = {
+ title: folderPickerPaper.title,
+ abstract: folderPickerPaper.summary ?? folderPickerPaper.abstract,
+ pdf_url: pdfToSend || undefined,
+ folder_id: typeof selectedFolderId === "number" ? selectedFolderId : undefined,
+ };
+
+ const response = await fetch(buildBackendUrl("/api/v1/papers/import"), {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${authToken}`,
+ },
+ body: JSON.stringify(payload),
+ });
+
+ if (!response.ok) {
+ const detail = (await response.json().catch(() => ({}))) as { detail?: string };
+ throw new Error(detail.detail ?? "保存论文失败");
+ }
+
+ const importData = (await response.json().catch(() => null)) as PaperImportResponse | null;
+ const uploadedId = importData?.uploaded?.id;
+
+ if (uploadedId) {
+ try {
+ await fetch(buildBackendUrl(`/api/v1/library/uploads/${uploadedId}/ensure-local`), {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${authToken}`,
+ },
+ body: JSON.stringify({ pdf_url: pdfToSend || undefined }),
+ });
+ } catch (downloadErr) {
+ console.error("Force download failed:", downloadErr);
+ setFolderPickerError("论文已保存,但下载 PDF 失败,请稍后在文库中重试。");
+ }
+ }
+
+ await loadSavedPaperKeys();
+ await refreshFolders();
+ setFolderPickerPaper(null);
+ setSelectedFolderId("unfiled");
+ alert('✓ 已保存到你的文献库!\n可以在"我的文库"页面查看。');
+ } catch (err) {
+ const message = err instanceof Error ? err.message : "保存论文失败,请重试";
+ setFolderPickerError(message);
+ } finally {
+ setIsSavingToLibrary(false);
+ }
+ }, [folderPickerPaper, loadSavedPaperKeys, refreshFolders, selectedFolderId]);
+
+ const handleCreateFolderInline = useCallback(async () => {
+ if (!newFolderInline.trim()) {
+ setInlineFolderError("请输入文件夹名称");
+ return;
+ }
+ if (!token) {
+ setInlineFolderError("请先登录后再创建文件夹");
+ return;
+ }
+
+ setInlineFolderError(null);
+ setIsCreatingFolderInline(true);
+ try {
+ const created = await createFolder({ name: newFolderInline.trim() });
+ await refreshFolders();
+ setNewFolderInline("");
+ setSelectedFolderId(created.id);
+ } catch (err) {
+ const message = err instanceof Error ? err.message : "创建文件夹失败";
+ setInlineFolderError(message);
+ } finally {
+ setIsCreatingFolderInline(false);
+ }
+ }, [createFolder, newFolderInline, refreshFolders, token]);
+
function handleTagUpdate() {
const newTag = tagInput.trim();
if (newTag) {
@@ -730,165 +849,7 @@ export default function Recommendations() {
+ {folderPickerPaper && (
+
+
+
保存到我的文库
+
请选择要保存的文件夹,或创建一个新的分类。
+
+ {folderPickerPaper.title}
+
+
+
+ {folders.map((folder) => (
+
+ ))}
+ {foldersLoading && (
+
文件夹加载中…
+ )}
+
+
+
快速创建文件夹
+
+ setNewFolderInline(event.target.value)}
+ placeholder="输入文件夹名称"
+ className="flex-1 rounded-lg border border-slate-200 px-3 py-2 text-sm text-slate-800 focus:border-blue-500 focus:outline-none focus:ring-2 focus:ring-blue-100 dark:border-slate-600 dark:bg-slate-800 dark:text-slate-100 dark:focus:ring-blue-500/30"
+ />
+
+
+ {inlineFolderError &&
{inlineFolderError}
}
+
+ {folderPickerError && (
+
{folderPickerError}
+ )}
+
+
+
+
+
+
+ )}
);
}
diff --git a/frontend/src/components/auth-form.tsx b/frontend/src/components/auth-form.tsx
index 185e156..2f3f95c 100644
--- a/frontend/src/components/auth-form.tsx
+++ b/frontend/src/components/auth-form.tsx
@@ -2,6 +2,7 @@
import { useEffect, useMemo, useState, type FormEvent } from "react";
import { useRouter } from "next/navigation";
+import { Github } from "lucide-react";
import {
fetchCurrentUser,
@@ -12,6 +13,7 @@ import {
type UserDTO,
} from "@/lib/api-client";
import { clearAccessToken, getAccessToken, setAccessToken } from "@/lib/auth";
+import { buildBackendUrl } from "@/lib/url";
type Mode = "login" | "register";
@@ -126,6 +128,16 @@ export function AuthForm() {
setSuccess("已安全退出。");
}
+ function handleGitHubLogin() {
+ if (typeof window === "undefined") {
+ return;
+ }
+ const nextPath = window.location.pathname || "/";
+ const loginUrl = new URL(buildBackendUrl("/api/v1/auth/github/login"));
+ loginUrl.searchParams.set("next", nextPath);
+ window.location.href = loginUrl.toString();
+ }
+
return (
@@ -223,6 +235,19 @@ export function AuthForm() {
+
+
或使用第三方账号
+
+
+
{currentUser && (
diff --git a/frontend/src/components/layout/dashboard-shell.tsx b/frontend/src/components/layout/dashboard-shell.tsx
index 472889c..1ad6230 100644
--- a/frontend/src/components/layout/dashboard-shell.tsx
+++ b/frontend/src/components/layout/dashboard-shell.tsx
@@ -69,6 +69,7 @@ interface NavItem {
label: string;
icon: React.ReactNode;
children?: string[];
+ path?: string;
}
const NAV_ITEMS: NavItem[] = [
@@ -96,7 +97,8 @@ const NAV_ITEMS: NavItem[] = [
- )
+ ),
+ path: "/watch",
},
{
label: "设置",
@@ -128,6 +130,7 @@ const PATHNAME_TO_CHILD: Record
= {
"/upload": "上传文件",
"/smart-reading": "智能阅读",
"/library": "我的文库",
+ "/watch": "看论文",
};
interface DashboardShellProps {
@@ -572,6 +575,9 @@ export default function DashboardShell({
}
// 设置该项为激活状态
setActiveNavItem(item.label);
+ if (!hasChildren && item.path) {
+ router.push(item.path);
+ }
return;
}
@@ -579,6 +585,9 @@ export default function DashboardShell({
setActiveNavItem(item.label);
if (!hasChildren) {
+ if (item.path) {
+ router.push(item.path);
+ }
return;
}
// 切换子菜单的开关状态
diff --git a/frontend/src/components/pdf-reader/PDFJSViewer.tsx b/frontend/src/components/pdf-reader/PDFJSViewer.tsx
index 88bb07f..e42319e 100644
--- a/frontend/src/components/pdf-reader/PDFJSViewer.tsx
+++ b/frontend/src/components/pdf-reader/PDFJSViewer.tsx
@@ -330,7 +330,7 @@ export default function PDFJSViewer({
}`}
title={showOverlay ? "隐藏智能选择" : "显示智能选择"}
>
- {showOverlay ? "🎯 智能选择" : "⭕ 普通模式"}
+ {showOverlay ? "智能选择" : "普通模式"}
@@ -513,9 +513,7 @@ function PDFPageCanvas({
if (textLayer) {
textLayer.innerHTML = "";
- // Set the required CSS variable for textLayer
- textLayer.style.setProperty('--scale-factor', viewport.scale.toString());
-
+ textLayer.style.setProperty("--scale-factor", `${viewport.scale}`);
try {
const textContent = await page.getTextContent();
if (isStale()) {
diff --git a/frontend/src/hooks/use-library-folders.ts b/frontend/src/hooks/use-library-folders.ts
new file mode 100644
index 0000000..e3efcca
--- /dev/null
+++ b/frontend/src/hooks/use-library-folders.ts
@@ -0,0 +1,114 @@
+import { useCallback, useEffect, useMemo, useState } from "react";
+
+import {
+ createLibraryFolder,
+ deleteLibraryFolder,
+ fetchLibraryFolders,
+ type LibraryFolderDTO,
+ type LibraryFolderListResponse,
+ type LibraryFolderPayload,
+ updateLibraryFolder,
+} from "@/lib/api-client";
+
+interface UseLibraryFoldersResult {
+ folders: LibraryFolderDTO[];
+ unfiledCount: number;
+ isLoading: boolean;
+ error: string | null;
+ refresh: () => Promise<void>;
+ create: (payload: LibraryFolderPayload) => Promise<LibraryFolderDTO>;
+ rename: (folderId: number, payload: LibraryFolderPayload) => Promise<LibraryFolderDTO>;
+ remove: (folderId: number) => Promise<void>;
+}
+
+export function useLibraryFolders(token: string | null): UseLibraryFoldersResult {
+ const [folders, setFolders] = useState<LibraryFolderDTO[]>([]);
+ const [unfiledCount, setUnfiledCount] = useState(0);
+ const [isLoading, setIsLoading] = useState(false);
+ const [error, setError] = useState<string | null>(null);
+
+ const hasToken = useMemo(() => Boolean(token), [token]);
+
+ const handleResponse = useCallback((data: LibraryFolderListResponse) => {
+ setFolders(
+ data.folders.map((folder) => ({
+ ...folder,
+ paper_count: folder.paper_count ?? 0,
+ })),
+ );
+ setUnfiledCount(data.unfiled_count ?? 0);
+ }, []);
+
+ const refresh = useCallback(async () => {
+ if (!hasToken || !token) {
+ setFolders([]);
+ setUnfiledCount(0);
+ setError(null);
+ return;
+ }
+
+ setIsLoading(true);
+ setError(null);
+ try {
+ const data = await fetchLibraryFolders(token);
+ handleResponse(data);
+ } catch (err) {
+ const message = err instanceof Error ? err.message : "加载文件夹失败";
+ setError(message);
+ } finally {
+ setIsLoading(false);
+ }
+ }, [handleResponse, hasToken, token]);
+
+ useEffect(() => {
+ void refresh();
+ }, [refresh]);
+
+ const create = useCallback(
+ async (payload: LibraryFolderPayload) => {
+ if (!token) {
+ throw new Error("当前未登录,无法创建文件夹");
+ }
+ const folder = await createLibraryFolder(token, payload);
+ setFolders((prev) => [...prev, { ...folder, paper_count: folder.paper_count ?? 0 }]);
+ return folder;
+ },
+ [token],
+ );
+
+ const rename = useCallback(
+ async (folderId: number, payload: LibraryFolderPayload) => {
+ if (!token) {
+ throw new Error("当前未登录,无法更新文件夹");
+ }
+ const updated = await updateLibraryFolder(token, folderId, payload);
+ setFolders((prev) =>
+ prev.map((folder) => (folder.id === folderId ? { ...updated, paper_count: updated.paper_count ?? folder.paper_count ?? 0 } : folder)),
+ );
+ return updated;
+ },
+ [token],
+ );
+
+ const remove = useCallback(
+ async (folderId: number) => {
+ if (!token) {
+ throw new Error("当前未登录,无法删除文件夹");
+ }
+ await deleteLibraryFolder(token, folderId);
+ setFolders((prev) => prev.filter((folder) => folder.id !== folderId));
+ },
+ [token],
+ );
+
+ return {
+ folders,
+ unfiledCount,
+ isLoading,
+ error,
+ refresh,
+ create,
+ rename,
+ remove,
+ };
+}
diff --git a/frontend/src/lib/api-client.ts b/frontend/src/lib/api-client.ts
index 6eead2a..ff80d63 100644
--- a/frontend/src/lib/api-client.ts
+++ b/frontend/src/lib/api-client.ts
@@ -43,6 +43,25 @@ export interface PasswordChangePayload {
new_password: string;
}
+export interface LibraryFolderDTO {
+ id: number;
+ name: string;
+ color?: string | null;
+ paper_count?: number;
+ created_at: string;
+ updated_at: string;
+}
+
+export interface LibraryFolderListResponse {
+ folders: LibraryFolderDTO[];
+ unfiled_count: number;
+}
+
+export interface LibraryFolderPayload {
+ name: string;
+ color?: string | null;
+}
+
export interface UploadedPaperDTO {
id: number;
original_filename: string;
@@ -50,6 +69,9 @@ export interface UploadedPaperDTO {
content_type: string;
file_size: number;
file_hash?: string | null;
+ folder_id?: number | null;
+ metadata_json?: Record<string, unknown> | null;
+ folder?: LibraryFolderDTO | null;
uploaded_at: string;
}
@@ -61,6 +83,10 @@ export interface UploadedPaperListResponse {
total_pages: number;
}
+export interface LibraryFolderAssignmentPayload {
+ folder_id?: number | null;
+}
+
export type AnnotationType = "highlight" | "note";
export interface AnnotationRect {
@@ -263,9 +289,16 @@ export async function deleteAccount(token: string): Promise<{ message: string }>
return response.json();
}
-export async function uploadPaper(token: string, file: File): Promise<UploadedPaperDTO> {
+export async function uploadPaper(
+ token: string,
+ file: File,
+ options?: { folderId?: number | null },
+): Promise<UploadedPaperDTO> {
const formData = new FormData();
formData.append("file", file);
+ if (options?.folderId && options.folderId > 0) {
+ formData.append("folder_id", options.folderId.toString());
+ }
const response = await fetch(`${BACKEND_URL}/api/v1/papers/upload`, {
method: "POST",
@@ -285,7 +318,7 @@ export async function uploadPaper(token: string, file: File): Promise<UploadedPaperDTO> {
const searchParams = new URLSearchParams();
if (params?.page && params.page > 0) {
@@ -294,6 +327,9 @@ export async function fetchUploadedPapers(
if (params?.pageSize && params.pageSize > 0) {
searchParams.set("page_size", params.pageSize.toString());
}
+ if (typeof params?.folderId === "number") {
+ searchParams.set("folder_id", params.folderId.toString());
+ }
const queryString = searchParams.toString();
const url =
queryString.length > 0
@@ -315,6 +351,131 @@ export async function fetchUploadedPapers(
return response.json();
}
+export async function fetchUploadedPaper(token: string, paperId: number): Promise<UploadedPaperDTO> {
+ const response = await fetch(`${BACKEND_URL}/api/v1/papers/uploads/${paperId}`, {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ cache: "no-store",
+ });
+
+ if (!response.ok) {
+ const detail = (await response.json().catch(() => ({}))) as { detail?: string };
+ throw new Error(detail.detail ?? "获取文档详情失败");
+ }
+
+ return response.json();
+}
+
+export async function fetchLibraryFolders(token: string): Promise<LibraryFolderListResponse> {
+ const response = await fetch(`${BACKEND_URL}/api/v1/library/folders`, {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ cache: "no-store",
+ });
+
+ if (!response.ok) {
+ const detail = (await response.json().catch(() => ({}))) as { detail?: string };
+ throw new Error(detail.detail ?? "获取文库文件夹失败");
+ }
+
+ return response.json();
+}
+
+export async function createLibraryFolder(
+ token: string,
+ payload: LibraryFolderPayload,
+): Promise<LibraryFolderDTO> {
+ const response = await fetch(`${BACKEND_URL}/api/v1/library/folders`, {
+ method: "POST",
+ headers: {
+ Authorization: `Bearer ${token}`,
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify(payload),
+ });
+
+ if (!response.ok) {
+ const detail = (await response.json().catch(() => ({}))) as { detail?: string };
+ throw new Error(detail.detail ?? "创建文件夹失败");
+ }
+
+ return response.json();
+}
+
+export async function updateLibraryFolder(
+ token: string,
+ folderId: number,
+ payload: LibraryFolderPayload,
+): Promise<LibraryFolderDTO> {
+ const response = await fetch(`${BACKEND_URL}/api/v1/library/folders/${folderId}`, {
+ method: "PATCH",
+ headers: {
+ Authorization: `Bearer ${token}`,
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify(payload),
+ });
+
+ if (!response.ok) {
+ const detail = (await response.json().catch(() => ({}))) as { detail?: string };
+ throw new Error(detail.detail ?? "更新文件夹失败");
+ }
+
+ return response.json();
+}
+
+export async function deleteLibraryFolder(token: string, folderId: number): Promise<void> {
+ const response = await fetch(`${BACKEND_URL}/api/v1/library/folders/${folderId}`, {
+ method: "DELETE",
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ });
+
+ if (!response.ok) {
+ const detail = (await response.json().catch(() => ({}))) as { detail?: string };
+ throw new Error(detail.detail ?? "删除文件夹失败");
+ }
+}
+
+export async function updateUploadedPaperFolder(
+ token: string,
+ paperId: number,
+ folderId: number | null,
+): Promise<UploadedPaperDTO> {
+ const response = await fetch(`${BACKEND_URL}/api/v1/library/uploads/${paperId}/folder`, {
+ method: "PATCH",
+ headers: {
+ Authorization: `Bearer ${token}`,
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify({ folder_id: folderId }),
+ });
+
+ if (!response.ok) {
+ const detail = (await response.json().catch(() => ({}))) as { detail?: string };
+ throw new Error(detail.detail ?? "更新文献所在文件夹失败");
+ }
+
+ return response.json();
+}
+
+export async function deleteUploadedPaper(token: string, paperId: number): Promise<void> {
+ const response = await fetch(`${BACKEND_URL}/api/v1/library/uploads/${paperId}`, {
+ method: "DELETE",
+ headers: {
+ Authorization: `Bearer ${token}`,
+ },
+ });
+
+ if (!response.ok) {
+ const detail = (await response.json().catch(() => ({}))) as { detail?: string };
+ throw new Error(detail.detail ?? "删除文献失败");
+ }
+}
+
export async function createPaperAnnotation(
token: string,
paperId: number,
diff --git a/frontend/src/lib/api/academic.ts b/frontend/src/lib/api/academic.ts
index 32d397e..e48a784 100644
--- a/frontend/src/lib/api/academic.ts
+++ b/frontend/src/lib/api/academic.ts
@@ -1,8 +1,9 @@
-import { BACKEND_URL } from '../config';
import { ChatMessage, IntelligentSearchResponse, Paper } from '../types/academic';
+import { buildBackendUrl } from '@/lib/url';
-const API_BASE = BACKEND_URL ?? 'http://localhost:8000';
-const API_V1_BASE = `${API_BASE}/api/v1`;
+const API_PREFIX = "/api/v1";
+
+const buildApiUrl = (path: string) => buildBackendUrl(`${API_PREFIX}${path}`);
export type SearchMode = 'standard' | 'ai';
@@ -31,10 +32,11 @@ export interface SearchResponse {
export async function searchPapers(params: SearchParams): Promise<SearchResponse> {
try {
- console.log('Fetching from:', `${API_V1_BASE}/academic/search`);
+ const url = buildApiUrl("/academic/search");
+ console.log('Fetching from:', url);
console.log('Request params:', params);
- const response = await fetch(`${API_V1_BASE}/academic/search`, {
+ const response = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
@@ -72,6 +74,7 @@ export async function intelligentSearch(
token?: string | null,
): Promise<IntelligentSearchResponse> {
try {
+ const url = buildApiUrl("/academic/intelligent-search");
+ const headers: Record<string, string> = {
'Content-Type': 'application/json',
};
@@ -80,19 +83,31 @@ export async function intelligentSearch(
headers.Authorization = `Bearer ${token}`;
}
- const response = await fetch(`${API_V1_BASE}/academic/intelligent-search`, {
+ const response = await fetch(url, {
method: 'POST',
headers,
body: JSON.stringify(params),
});
if (!response.ok) {
- const errorText = await response.text();
- console.error('Intelligent search failed:', errorText);
- throw new Error(`智能搜索失败: ${response.status} ${response.statusText}`);
+ let errorPayload: unknown = null;
+ let errorText = "";
+ try {
+ errorPayload = await response.json();
+ } catch {
+ errorText = await response.text();
+ }
+ console.error("Intelligent search failed:", errorPayload || errorText);
+
+ const detail: string | undefined =
+ typeof errorPayload === "object" && errorPayload !== null && "detail" in errorPayload
+ ? (errorPayload as { detail?: string }).detail
+ : undefined;
+
+ throw new Error(detail ?? `智能搜索失败: ${response.status} ${response.statusText}`);
}
- return await response.json();
+ return await response.json();
} catch (error) {
console.error('智能搜索请求失败:', error);
throw error;
diff --git a/frontend/src/lib/url.ts b/frontend/src/lib/url.ts
new file mode 100644
index 0000000..e3b81f0
--- /dev/null
+++ b/frontend/src/lib/url.ts
@@ -0,0 +1,18 @@
+import { BACKEND_URL } from "@/lib/config";
+
+export function buildBackendUrl(path: string): string {
+ const normalizedPath = path.startsWith("/") ? path : `/${path}`;
+ const base = (BACKEND_URL ?? "").trim().replace(/\/$/, "");
+
+ if (base.length > 0) {
+ return `${base}${normalizedPath}`;
+ }
+
+ if (typeof window !== "undefined") {
+ const { protocol, hostname, port } = window.location;
+ const origin = `${protocol}//${hostname}${port ? `:${port}` : ""}`;
+ return `${origin}${normalizedPath}`;
+ }
+
+ return normalizedPath;
+}