
Commit 40d6eb0

feat: export with spreadsheet

1 parent a483f46 · commit 40d6eb0

11 files changed: 704 additions, 406 deletions


routers/activities_v2_router.py

Lines changed: 8 additions & 2 deletions
@@ -410,11 +410,17 @@ async def modify_activity_status_v2(
             detail="Activity status can only be modified from pending to effective or refused",
         )

-    await volunteer.validate_check_permission(user, target_activity)
+    await volunteer.validate_check_permission(user, target_activity, strict=True)

     result = await db.zvms_new.get_collection("activities").update_one(
         {"_id": validate_object_id(activity_id)},
-        {"$set": {"status": status.status}},
+        {
+            "$set": {
+                "status": status.status,
+                "approver": str(user["id"]),
+                "updatedAt": datetime.now(),
+            }
+        },
     )

     if result.modified_count == 0:
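The approval path now records who approved the activity and when, alongside the status change. For context, a minimal standalone sketch of the same update pattern, assuming a Motor-style async `activities` collection (the helper name and arguments are illustrative, not part of the commit):

from datetime import datetime
from bson import ObjectId

async def approve_activity(activities, activity_id: str, approver_id: str) -> bool:
    """Flip status to 'effective' and record an audit trail, mirroring the router change."""
    result = await activities.update_one(
        {"_id": ObjectId(activity_id)},
        {
            "$set": {
                "status": "effective",        # new status
                "approver": approver_id,      # who approved it
                "updatedAt": datetime.now(),  # when the status changed
            }
        },
    )
    return result.modified_count > 0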

routers/exports_router.py

Lines changed: 49 additions & 25 deletions
@@ -4,14 +4,24 @@
 from fastapi import APIRouter, HTTPException, Depends, Request
 import tempfile

-from config import BASE_ON_CAMPUS, ON_TO_OFF_RATE, MAX_EXCEED_DISCOUNT, BASE_OFF_CAMPUS, OFF_TO_ON_RATE
+from config import (
+    BASE_ON_CAMPUS,
+    ON_TO_OFF_RATE,
+    MAX_EXCEED_DISCOUNT,
+    BASE_OFF_CAMPUS,
+    OFF_TO_ON_RATE,
+)
 from typings.export import ExportFormat, ExportTask, ExportStatus, ExportVariant
 from util.calculate import calculate_user_time
 from fastapi.responses import FileResponse
 from io import BytesIO
 import base64
 import json
-from util.object_id import get_current_user, validate_object_id, compulsory_temporary_token
+from util.object_id import (
+    get_current_user,
+    validate_object_id,
+    compulsory_temporary_token,
+)
 from datetime import datetime
 from database import db
 import pandas as pd
@@ -26,6 +36,7 @@ class CreateExport(BaseModel):
     allow_cache: bool = False
     include_description: bool = False

+
 tokens = []


@@ -54,14 +65,22 @@ async def process_task(task_id: str):
            )
        else:
            user_time = await calculate_user_time(
-                str(user["_id"]), allow_cache=task["allow_cache"],
+                str(user["_id"]),
+                allow_cache=task["allow_cache"],
                attach_description=include_description,
            )
        more_on_campus = min(
-            round(max(user_time["off-campus"] - BASE_OFF_CAMPUS, 1) * OFF_TO_ON_RATE, 0), MAX_EXCEED_DISCOUNT
+            round(
+                max(user_time["off-campus"] - BASE_OFF_CAMPUS, 1) * OFF_TO_ON_RATE,
+                0,
+            ),
+            MAX_EXCEED_DISCOUNT,
        )
        more_off_campus = min(
-            round(max(user_time["on-campus"] - BASE_ON_CAMPUS, 1) * ON_TO_OFF_RATE, 0), MAX_EXCEED_DISCOUNT
+            round(
+                max(user_time["on-campus"] - BASE_ON_CAMPUS, 1) * ON_TO_OFF_RATE, 0
+            ),
+            MAX_EXCEED_DISCOUNT,
        )
        user_time["on-campus"] += more_on_campus
        user_time["off-campus"] += more_off_campus
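The reformatted conversion above lets excess hours in one category count toward the other at a configured rate, capped by MAX_EXCEED_DISCOUNT. A standalone sketch of the off-to-on direction with assumed constants (the real values live in config.py and may differ):

# Assumed values for illustration only; see config.py for the real ones.
BASE_OFF_CAMPUS = 30      # baseline off-campus hours before any conversion
OFF_TO_ON_RATE = 0.5      # conversion rate from off-campus to on-campus hours
MAX_EXCEED_DISCOUNT = 10  # cap on converted hours

def bonus_on_campus(off_campus_hours: float) -> float:
    """Excess off-campus time converted to on-campus hours, as in process_task."""
    excess = max(off_campus_hours - BASE_OFF_CAMPUS, 1)  # floor of 1, as in the router
    return min(round(excess * OFF_TO_ON_RATE, 0), MAX_EXCEED_DISCOUNT)

# e.g. 48 off-campus hours -> 18 excess -> 9.0 bonus on-campus hours (under the cap of 10)
print(bonus_on_campus(48))  # 9.0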
@@ -83,7 +102,9 @@ async def process_task(task_id: str):
            "On Campus": user_time["on-campus"],
            "Off Campus": user_time["off-campus"],
            "Social Practice": user_time["social-practice"],
-            "Description": "" if not include_description else user_time.get("description", ""),
+            "Description": ""
+            if not include_description
+            else user_time.get("description", ""),
        }
        result.append(doc)
        task["percentage"] = (idx + 1) / len(users) * 100
@@ -240,6 +261,7 @@ async def get_export(task_id: str):
     task["_id"] = str(task["_id"])
     return {"code": 200, "status": "ok", "data": task}

+
 @router.post("/reports")
 async def request_download_reports(
     user=Depends(compulsory_temporary_token),
@@ -248,14 +270,15 @@ async def request_download_reports(
         raise HTTPException(status_code=403, detail="Permission denied")
     global tokens
     token_data = {
-        'token': uuid.uuid4().hex,
-        'granted_to': user['id'],
-        'expires_at': datetime.now().timestamp() + 300 # 5 minutes
+        "token": uuid.uuid4().hex,
+        "granted_to": user["id"],
+        "expires_at": datetime.now().timestamp() + 300, # 5 minutes
     }
     token_data_encoded = base64.b64encode(json.dumps(token_data).encode()).decode()
     tokens.append(token_data)
     return token_data_encoded

+
 @router.get("/reports/download")
 async def download_reports(
     request: Request,
@@ -267,14 +290,14 @@ async def download_reports(
     except Exception:
         raise HTTPException(status_code=400, detail="Invalid token format")

-    if not any(t['token'] == token_data['token'] for t in tokens):
+    if not any(t["token"] == token_data["token"] for t in tokens):
         raise HTTPException(status_code=403, detail="Invalid or expired token")

-    if datetime.now().timestamp() > token_data['expires_at']:
+    if datetime.now().timestamp() > token_data["expires_at"]:
         raise HTTPException(status_code=403, detail="Token expired")

     # Remove the token after use
-    tokens = [t for t in tokens if t['token'] != token_data['token']]
+    tokens = [t for t in tokens if t["token"] != token_data["token"]]

     # response to `export.tar.gz`, which is a tar.gz file containing all reports and already exists in the server
     file_path = "./data/export.tar.gz"
@@ -284,9 +307,7 @@ async def download_reports(
         file_path,
         media_type="application/gzip",
         filename="export.tar.gz",
-        headers={
-            "Content-Disposition": "attachment; filename=export.tar.gz"
-        }
+        headers={"Content-Disposition": "attachment; filename=export.tar.gz"},
     )
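Taken together, the two endpoints above implement a short-lived, single-use download token: POST /reports issues a base64-encoded JSON token valid for five minutes, and GET /reports/download validates it, checks expiry, and removes it before serving the archive. A condensed sketch of that lifecycle outside FastAPI (function names are illustrative):

import base64
import json
import uuid
from datetime import datetime

tokens: list[dict] = []  # in-memory store, mirroring the router's module-level list

def issue_token(user_id: str, ttl_seconds: int = 300) -> str:
    """Issue a download token, as in request_download_reports."""
    token_data = {
        "token": uuid.uuid4().hex,
        "granted_to": user_id,
        "expires_at": datetime.now().timestamp() + ttl_seconds,
    }
    tokens.append(token_data)
    return base64.b64encode(json.dumps(token_data).encode()).decode()

def redeem_token(encoded: str) -> bool:
    """Validate and consume a token, as in download_reports."""
    global tokens
    token_data = json.loads(base64.b64decode(encoded))
    if not any(t["token"] == token_data["token"] for t in tokens):
        return False  # unknown or already-used token
    if datetime.now().timestamp() > token_data["expires_at"]:
        return False  # expired
    tokens = [t for t in tokens if t["token"] != token_data["token"]]  # single use
    return True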
@@ -303,16 +324,19 @@ async def get_export_file(task_id: str, language: str = "en"):
     ) as tmp:
         result = pd.DataFrame(task["result"]).sort_values("_id")
         if language == "zh-CN":
-            result.rename(columns={
-                "_id": "数据库 ID",
-                "Name": "姓名",
-                "ID": "学号",
-                "Group": "班级",
-                "On Campus": "校内义工时长",
-                "Off Campus": "校外义工时长",
-                "Social Practice": "社会实践时长",
-                "Description": "描述",
-            }, inplace=True)
+            result.rename(
+                columns={
+                    "_id": "数据库 ID",
+                    "Name": "姓名",
+                    "ID": "学号",
+                    "Group": "班级",
+                    "On Campus": "校内义工时长",
+                    "Off Campus": "校外义工时长",
+                    "Social Practice": "社会实践时长",
+                    "Description": "描述",
+                },
+                inplace=True,
+            )
         if task["format"] == "excel":
             result.to_excel(tmp.name, index=False)
         elif task["format"] == "csv":
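For reference, the export path above builds a DataFrame from the task result, optionally renames the columns for zh-CN, and writes it to a temporary file in the requested format. A minimal sketch of that write step with made-up data (writing .xlsx requires an Excel engine such as openpyxl):

import tempfile
import pandas as pd

rows = [{"_id": "2", "Name": "B"}, {"_id": "1", "Name": "A"}]  # illustrative rows
df = pd.DataFrame(rows).sort_values("_id")

fmt = "excel"  # or "csv"
suffix = ".xlsx" if fmt == "excel" else ".csv"
with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as tmp:
    if fmt == "excel":
        df.to_excel(tmp.name, index=False)
    else:
        df.to_csv(tmp.name, index=False)
print(tmp.name)  # path of the written file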

routers/times_router.py

Lines changed: 3 additions & 2 deletions
@@ -5,7 +5,8 @@
 from util.calculate import calculate_user_time
 from util.object_id import (
     optional_current_user,
-    validate_object_id, get_current_user,
+    validate_object_id,
+    get_current_user,
 )
 from database import db

@@ -19,7 +20,7 @@ async def read_times(
     perpage: int = 5,
     sort: str = "id",
     asc: bool = True,
-    user = Depends(get_current_user),
+    user=Depends(get_current_user),
 ):
     """
     Query users

routers/users_v2_router.py

Lines changed: 6 additions & 2 deletions
@@ -179,10 +179,14 @@ async def get_user_time_statistics_v2(
         "social-practice": {
             "value": user_time["social-practice"],
             "group": find_percentile_threshold(
-                group_indicators["social-practice"], user_time["social-practice"], "social-practice"
+                group_indicators["social-practice"],
+                user_time["social-practice"],
+                "social-practice",
             ),
             "grade": find_percentile_threshold(
-                grade_indicators["social-practice"], user_time["social-practice"], "social-practice"
+                grade_indicators["social-practice"],
+                user_time["social-practice"],
+                "social-practice",
             ),
         },
     }

tasks/compute_time.py

Lines changed: 97 additions & 29 deletions
@@ -15,7 +15,8 @@
 )
 from database import db
 from util.calculate import calculate_user_time
-from util.statement import create_statement_from_kernel_data
+from util.statement import create_statement_from_kernel_data, TRANSLATIONS
+from utils import validate_object_id


 async def compute_time():
@@ -35,51 +36,111 @@ async def compute_ay_time():
     users = await db.zvms.get_collection("users").find({}).to_list(None)
     now = datetime.now()
     ay = now.year if now.month >= 9 else now.year - 1
-    soy = datetime.now().replace(month=8, day=1, year=ay, hour=0, minute=0, second=0, microsecond=0)
-    eoy = datetime.now().replace(month=7, day=31, year=ay + 1, hour=23, minute=59, second=59, microsecond=999999)
+    soy = datetime.now().replace(
+        month=8, day=1, year=ay, hour=0, minute=0, second=0, microsecond=0
+    )
+    eoy = datetime.now().replace(
+        month=7, day=31, year=ay + 1, hour=23, minute=59, second=59, microsecond=999999
+    )
     for user in users:
         user_id = str(user["_id"])
         entry = user_id[:4]
-        result = await calculate_user_time(user_id, date_start=soy, date_end=eoy, allow_cache=False)
-        await db.zvms_new.get_collection("time_academic_year").delete_many({"user": user_id})
-        await db.zvms_new.get_collection("time_academic_year").insert_one({
-            "user": user_id,
-            "entry": entry,
-            "academic_year": ay,
-            "start_of_year": soy,
-            "end_of_year": eoy,
-            **result
-        })
+        result = await calculate_user_time(
+            user_id, date_start=soy, date_end=eoy, allow_cache=False
+        )
+        await db.zvms_new.get_collection("time_academic_year").delete_many(
+            {"user": user_id}
+        )
+        await db.zvms_new.get_collection("time_academic_year").insert_one(
+            {
+                "user": user_id,
+                "entry": entry,
+                "academic_year": ay,
+                "start_of_year": soy,
+                "end_of_year": eoy,
+                **result,
+            }
+        )
+

 async def generate_reports():
-    classes = await db.zvms.get_collection("groups").find({"type": "class"}).to_list(None)
-    if not os.path.exists('./export'):
-        os.makedirs('./export')
+    classes = (
+        await db.zvms.get_collection("groups").find({"type": "class"}).to_list(None)
+    )
+    await db.zvms_new.get_collection("daily_exports").delete_many({})
+    db_data = await db.zvms_new.get_collection("daily_exports").insert_one(
+        {"created_at": datetime.now(), "status": "processing", "data": []}
+    )
+    db_id = validate_object_id(db_data.inserted_id)
+    if not os.path.exists("./export"):
+        os.makedirs("./export")
     else:
-        for item in os.listdir('./export'):
-            item_path = os.path.join('./export', item)
+        for item in os.listdir("./export"):
+            item_path = os.path.join("./export", item)
             if os.path.isdir(item_path):
                 for file in os.listdir(item_path):
                     file_path = os.path.join(item_path, file)
-                    if file.endswith('.pdf'):
+                    if file.endswith(".pdf"):
                         os.remove(file_path)
                 os.rmdir(item_path)
     for class_id in classes:
-        users = await db.zvms.get_collection("users").find({"group": str(class_id["_id"])}).to_list(None)
+        users = (
+            await db.zvms.get_collection("users")
+            .find({"group": str(class_id["_id"])})
+            .to_list(None)
+        )
         os.makedirs(f'./export/{class_id["name"]}', exist_ok=True)
         for user in users:
            user_id = str(user["id"])
            entry = int(user_id[:4])
-            soy = datetime.now().replace(month=8, day=1, year=entry, hour=0, minute=0, second=0, microsecond=0)
-            eoy = datetime.now().replace(month=7, day=31, year=entry + 3, hour=23, minute=59, second=59, microsecond=999999)
-            await create_statement_from_kernel_data(str(user['_id']), (soy, eoy), filename=f'./export/{class_id['name']}/{user['id']}_{user["name"]}.pdf', language='zh')
-            print(f"Exported statement for {user['name']} in class {class_id['name']}.")
+            soy = datetime.now().replace(
+                month=8, day=1, year=entry, hour=0, minute=0, second=0, microsecond=0
+            )
+            eoy = datetime.now().replace(
+                month=7,
+                day=31,
+                year=entry + 3,
+                hour=23,
+                minute=59,
+                second=59,
+                microsecond=999999,
+            )
+            filename, spreadsheet = await create_statement_from_kernel_data(
+                str(user["_id"]),
+                (soy, eoy),
+                filename=f'./export/{class_id['name']}/{user['id']}_{user["name"]}.pdf',
+                language="zh",
+            )
+            await db.zvms_new.get_collection("daily_exports").update_one(
+                {"_id": db_id},
+                {
+                    "$push": {
+                        "data": {
+                            **spreadsheet,
+                            "filename": filename,
+                        }
+                    }
+                },
+            )
+            print(
+                f"Exported statement for {user['name']} in class {class_id['name']} (saved to {filename})."
+            )
        print(f"Exported statements for class {class_id['name']}.")
     # Create a tar.gz archive of the export directory
-    if os.path.exists('./data/export.tar.gz'):
-        os.remove('./data/export.tar.gz')
-    with tarfile.open('./data/export.tar.gz', 'w:gz') as tar:
-        tar.add('./export', arcname='export')
+    spreadsheet_items = await db.zvms_new.get_collection("daily_exports").find_one(
+        {"_id": db_id}
+    )
+    spreadsheet_items = spreadsheet_items.get("data", [])
+    df = pd.DataFrame(spreadsheet_items)
+    df.rename(columns=TRANSLATIONS["zh"]["spreadsheet"], inplace=True)
+    if os.path.exists("./export/summary.xlsx"):
+        os.remove("./export/summary.xlsx")
+    df.to_excel(f"./export/summary.xlsx", index=False)
+    if os.path.exists("./data/export.tar.gz"):
+        os.remove("./data/export.tar.gz")
+    with tarfile.open("./data/export.tar.gz", "w:gz") as tar:
+        tar.add("./export", arcname="export")
+    return "./data/export.tar.gz"


 def describe_percentile(items: np.ndarray, step: int, bound: float) -> dict[str, float]:
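generate_reports derives each student's reporting window from the entry year encoded in the first four characters of the user ID: August 1 of the entry year through July 31 three years later. A minimal sketch of that window calculation (the helper name and the default span are illustrative, taken from the task above):

from datetime import datetime

def reporting_window(entry_year: int, span_years: int = 3) -> tuple[datetime, datetime]:
    """Aug 1 of the entry year through July 31 span_years later, to the last microsecond."""
    start = datetime(entry_year, 8, 1, 0, 0, 0, 0)
    end = datetime(entry_year + span_years, 7, 31, 23, 59, 59, 999999)
    return start, end

# e.g. an ID starting with "2023" -> 2023-08-01 00:00:00 .. 2026-07-31 23:59:59.999999
soy, eoy = reporting_window(2023)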
@@ -142,7 +203,14 @@ def process_time_data(time_df: pd.DataFrame) -> pd.DataFrame:
         1,
     )
     time_df["social-practice"] = time_df["social_practice"]
-    time_df["diff"] = BASE_ON_CAMPUS + BASE_OFF_CAMPUS + BASE_SOCIAL_PRACTICE - time_df['on-campus'] - time_df['off-campus'] - time_df['social-practice']
+    time_df["diff"] = (
+        BASE_ON_CAMPUS
+        + BASE_OFF_CAMPUS
+        + BASE_SOCIAL_PRACTICE
+        - time_df["on-campus"]
+        - time_df["off-campus"]
+        - time_df["social-practice"]
+    )
     return time_df
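The new "diff" column is the vectorized gap between the combined baseline and the hours a user has accrued. A standalone sketch with assumed baselines (the real BASE_* constants come from config.py; negative values would mean the requirement is already exceeded, assuming the baselines are required minimums):

import pandas as pd

# Assumed baselines for illustration only.
BASE_ON_CAMPUS = 30
BASE_OFF_CAMPUS = 30
BASE_SOCIAL_PRACTICE = 10

time_df = pd.DataFrame(
    {
        "on-campus": [25.0, 40.0],
        "off-campus": [20.0, 35.0],
        "social-practice": [5.0, 10.0],
    }
)

time_df["diff"] = (
    BASE_ON_CAMPUS
    + BASE_OFF_CAMPUS
    + BASE_SOCIAL_PRACTICE
    - time_df["on-campus"]
    - time_df["off-campus"]
    - time_df["social-practice"]
)
print(time_df["diff"].tolist())  # [20.0, -15.0]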
