Add routes to get weekly totals
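The new endpoint is GET /statistics/totals, taking an optional granularity (only "weekly") and an optional from_week/to_week pair in ISO week form (YYYY-Www, end exclusive). A client-side sketch is below; the host, the /api prefix, and the bearer-style Authorization header are assumptions, since they depend on where api_router is mounted and on how verify_token reads credentials, and the response values are made up purely to show the WeekTotal shape:

    import requests  # hypothetical client call, not part of this commit

    resp = requests.get(
        "http://localhost:8000/api/statistics/totals",  # assumed host and prefix
        params={"granularity": "weekly", "from_week": "2026-W01", "to_week": "2026-W03"},
        headers={"Authorization": "Bearer <token>"},  # assumed auth scheme
    )
    resp.json()
    # [
    #   {
    #     "week": "2026-W01",
    #     "from_date": "2025-12-29",
    #     "to_date": "2026-01-05",
    #     "total": 4200,
    #     "by_category": [
    #       {"cid": 1, "name": "groceries", "total": 3100},
    #       {"cid": 2, "name": "transport", "total": 1100}
    #     ]
    #   },
    #   ... one entry per week in [from_week, to_week), empty weeks included
    # ]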
@@ -1,5 +1,5 @@
 from fastapi import APIRouter
-from mft.routes import auth, categories, expense, health
+from mft.routes import auth, categories, expense, health, statistics
 
 api_router = APIRouter()
 
@@ -7,3 +7,4 @@ api_router.include_router(health.router, tags=["health"])
 api_router.include_router(auth.router, prefix="/auth", tags=["auth"])
 api_router.include_router(categories.router, tags=["categories"])
 api_router.include_router(expense.router, tags=["expenses"])
+api_router.include_router(statistics.router, tags=["statistics"])
219  mft/routes/statistics.py  Normal file
@@ -0,0 +1,219 @@
import re
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, HTTPException, status, Depends, Query
from pydantic import BaseModel
from mft.auth import verify_token
from mft.database import get_db

router = APIRouter()


class CategoryTotal(BaseModel):
    """Per-category total within a time period."""

    cid: int
    name: str
    total: int


class WeekTotal(BaseModel):
    """Weekly totals and per-category breakdown."""

    week: str  # ISO week format (YYYY-Www)
    from_date: str  # ISO date format (YYYY-MM-DD)
    to_date: str  # ISO date format (YYYY-MM-DD)
    total: int
    by_category: list[CategoryTotal]


def parse_iso_week(week_str: str) -> datetime:
    """
    Parse ISO week format (YYYY-Www) to datetime at start of week (Monday).

    Args:
        week_str: String in YYYY-Www format (e.g., "2026-W01")

    Returns:
        datetime object for Monday of that week (00:00:00 UTC)

    Raises:
        ValueError: If format is invalid
    """
    if not re.match(r"^\d{4}-W\d{2}$", week_str):
        raise ValueError(f"Invalid ISO week format: {week_str}")

    try:
        # Parse ISO week: %G=ISO year, %V=ISO week, %u=day of week (1=Mon)
        return datetime.strptime(f"{week_str}-1", "%G-W%V-%u").replace(
            tzinfo=timezone.utc
        )
    except ValueError as e:
        raise ValueError(f"Invalid ISO week: {week_str}") from e


def get_iso_week_string(dt: datetime) -> str:
    """
    Convert datetime to ISO week format string.

    Args:
        dt: datetime object

    Returns:
        ISO week string (YYYY-Www)
    """
    iso_cal = dt.isocalendar()
    return f"{iso_cal.year}-W{iso_cal.week:02d}"


def get_default_week_range() -> tuple[datetime, datetime]:
    """
    Get default week range: The most recent 4 weeks, which includes the ongoing week.

    Returns:
        Tuple of (from_date, to_date) where range is [from, to)
        from_date is Monday of 3 weeks ago
        to_date is Monday of next week (start of week after current)
    """
    now = datetime.now(timezone.utc)
    current_week_monday = now - timedelta(days=now.weekday())
    current_week_monday = current_week_monday.replace(
        hour=0, minute=0, second=0, microsecond=0
    )

    from_date = current_week_monday - timedelta(days=21)
    to_date = current_week_monday + timedelta(days=7)

    return from_date, to_date


@router.get("/statistics/totals", response_model=list[WeekTotal])
def get_weekly_totals(
    granularity: str = Query(
        "weekly", description="Time granularity (only 'weekly' supported)"
    ),
    from_week: str | None = Query(None, description="Start week (YYYY-Www, inclusive)"),
    to_week: str | None = Query(None, description="End week (YYYY-Www, exclusive)"),
    uid: int = Depends(verify_token),
):
    """
    Get totals aggregated by time period.

    Currently only supports weekly granularity with ISO week format (YYYY-Www).
    If from_week and to_week are omitted, defaults to the most recent 4 weeks
    (including the current ongoing week).

    Args:
        granularity: Time granularity (must be "weekly")
        from_week: Start week in ISO format (inclusive)
        to_week: End week in ISO format (exclusive)
        uid: User ID from authentication token

    Returns:
        List of weekly totals with global and per-category breakdowns

    Raises:
        400: Invalid parameters (unsupported granularity, format errors, from >= to)
    """
    # Validate granularity
    if granularity != "weekly":
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Only 'weekly' granularity is currently supported",
        )

    # Validate both-or-neither for week parameters
    if (from_week is None) != (to_week is None):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Both 'from_week' and 'to_week' must be provided, or both omitted",
        )

    try:
        if from_week is None:
            from_date, to_date = get_default_week_range()
        else:
            from_date = parse_iso_week(from_week)
            to_date = parse_iso_week(to_week)

        if from_date >= to_date:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="'from_week' must be before 'to_week'",
            )
    except ValueError as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e

    from_ts = from_date.isoformat()
    to_ts = to_date.isoformat()

    with get_db() as conn:
        cursor = conn.cursor()

        # FIXME: this could be better with a GROUP BY/SUM query but it works
        # well enough for small datasets
        cursor.execute(
            """
            SELECT e.ts, e.value, e.cid, c.name
            FROM expense e
            JOIN category c ON e.cid = c.id
            WHERE e.ts >= ? AND e.ts < ?
            ORDER BY e.ts
            """,
            (from_ts, to_ts),
        )

        rows = cursor.fetchall()

    weeks_data = {}
    for row in rows:
        ts_str, value, cid, category_name = row
        ts = datetime.fromisoformat(ts_str)
        week_str = get_iso_week_string(ts)

        if week_str not in weeks_data:
            weeks_data[week_str] = {
                "total": 0,
                "categories": {},
            }

        weeks_data[week_str]["total"] += value

        if cid not in weeks_data[week_str]["categories"]:
            weeks_data[week_str]["categories"][cid] = {
                "name": category_name,
                "total": 0,
            }
        weeks_data[week_str]["categories"][cid]["total"] += value

    # Build response with all weeks in range (including empty ones)
    result = []
    current = from_date
    while current < to_date:
        week_str = get_iso_week_string(current)
        week_data = weeks_data.get(week_str, {"total": 0, "categories": {}})

        week_end = current + timedelta(days=7)

        by_category = [
            CategoryTotal(cid=cid, name=cat["name"], total=cat["total"])
            for cid, cat in week_data["categories"].items()
        ]
        # Sort by category name for consistent output
        by_category.sort(key=lambda x: x.name)

        result.append(
            WeekTotal(
                week=week_str,
                from_date=current.date().isoformat(),
                to_date=week_end.date().isoformat(),
                total=week_data["total"],
                by_category=by_category,
            )
        )

        current = week_end

    return result
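The ISO-week helpers above lean on strptime's %G/%V/%u directives and on isocalendar(), which agree with each other even across year boundaries where the ISO year differs from the calendar year. A standalone illustration (not part of the commit), also reproducing the default four-week window for a fixed, hypothetical "now":

    from datetime import datetime, timedelta, timezone

    # Same parsing as parse_iso_week: Monday of ISO week 2026-W01
    monday = datetime.strptime("2026-W01-1", "%G-W%V-%u").replace(tzinfo=timezone.utc)
    print(monday.date())  # 2025-12-29 -- ISO year 2026 begins in calendar year 2025

    # Same formatting as get_iso_week_string: the round trip returns "2026-W01"
    iso = monday.isocalendar()
    print(f"{iso.year}-W{iso.week:02d}")  # 2026-W01

    # Same arithmetic as get_default_week_range, with "now" pinned to Wed 2026-01-14
    now = datetime(2026, 1, 14, 15, 30, tzinfo=timezone.utc)
    week_start = (now - timedelta(days=now.weekday())).replace(
        hour=0, minute=0, second=0, microsecond=0
    )
    print(week_start.date() - timedelta(days=21))  # 2025-12-22 (from_date, inclusive)
    print(week_start.date() + timedelta(days=7))   # 2026-01-19 (to_date, exclusive)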
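On the FIXME about a GROUP BY/SUM query: assuming get_db() hands back a sqlite3 connection and ts is stored as ISO-8601 text (which the ? placeholders and isoformat() timestamps suggest, but the commit does not show), the aggregation could be pushed into SQL by bucketing each row to the Monday that starts its week and summing per category. A rough sketch, not part of this commit:

    # SQLite's 'weekday 0' modifier moves a date forward to the next Sunday
    # (or keeps it if already Sunday); stepping back 6 days then lands on the
    # Monday that starts the week, so rows can be grouped by week and category.
    AGGREGATED_TOTALS_SQL = """
        SELECT date(e.ts, 'weekday 0', '-6 days') AS week_start,
               e.cid,
               c.name,
               SUM(e.value) AS total
        FROM expense e
        JOIN category c ON e.cid = c.id
        WHERE e.ts >= ? AND e.ts < ?
        GROUP BY week_start, e.cid, c.name
        ORDER BY week_start
    """

The Python side would then only turn each week_start into its YYYY-Www label (via get_iso_week_string) and fill in empty weeks, instead of summing individual rows.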