Initial commit: MyMidas personal finance tracker
Full-stack self-hosted finance app with FastAPI backend and React frontend. Features: - Accounts, transactions, budgets, investments with GBP base currency - CSV import with auto-detection for 10 UK bank formats - ML predictions: spending forecast, net worth projection, Monte Carlo - 7 selectable themes (Obsidian, Arctic, Midnight, Vault, Terminal, Synthwave, Ledger) - Receipt/document attachments on transactions (JPEG, PNG, WebP, PDF) - AES-256-GCM field encryption, RS256 JWT, TOTP 2FA, RLS, audit log - Encrypted nightly backups + key rotation script - Mobile-responsive layout with bottom nav Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
commit
61a7884ee5
127 changed files with 13323 additions and 0 deletions
236
backend/app/api/v1/accounts.py
Normal file
236
backend/app/api/v1/accounts.py
Normal file
|
|
@ -0,0 +1,236 @@
|
|||
import uuid
|
||||
|
||||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.audit import write_audit
|
||||
from app.dependencies import get_current_user, get_db
|
||||
from app.schemas.account import AccountCreate, AccountResponse, AccountUpdate
|
||||
from app.services.account_service import (
|
||||
AccountError,
|
||||
create_account,
|
||||
delete_account,
|
||||
get_account,
|
||||
get_net_worth,
|
||||
list_accounts,
|
||||
update_account,
|
||||
)
|
||||
|
||||
MAX_IMPORT_FILE_BYTES = 10 * 1024 * 1024 # 10 MB
|
||||
MAX_IMPORT_ROWS = 50_000
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("", response_model=list[AccountResponse])
async def get_accounts(
    db: AsyncSession = Depends(get_db),
    user=Depends(get_current_user),
):
    """List every account belonging to the authenticated user."""
    accounts = await list_accounts(db, user.id)
    return accounts
|
||||
|
||||
|
||||
@router.post("", response_model=AccountResponse, status_code=201)
async def create(
    body: AccountCreate,
    db: AsyncSession = Depends(get_db),
    user=Depends(get_current_user),
):
    """Create a new account for the authenticated user.

    Writes an audit record and commits before returning the created account.
    """
    result = await create_account(db, user.id, body)
    # Record resource_type/resource_id so the audit entry matches the shape
    # used by account_update and account_delete elsewhere in this router.
    await write_audit(
        db,
        user_id=user.id,
        action="account_create",
        resource_type="account",
        resource_id=result.id,
    )
    await db.commit()
    return result
|
||||
|
||||
|
||||
@router.get("/net-worth")
async def net_worth(
    db: AsyncSession = Depends(get_db),
    user=Depends(get_current_user),
):
    """Return the user's net worth, computed in their base currency."""
    summary = await get_net_worth(db, user.id, user.base_currency)
    return summary
|
||||
|
||||
|
||||
@router.get("/{account_id}", response_model=AccountResponse)
async def get_one(
    account_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    user=Depends(get_current_user),
):
    """Fetch a single account owned by the current user.

    Raises an HTTPException mirroring the service-layer AccountError when the
    account is missing or not owned by this user.
    """
    # Serialisation helper lives in the service layer; imported lazily so this
    # module keeps no top-level dependency on a private name.
    from app.services.account_service import _to_response

    try:
        account = await get_account(db, account_id, user.id)
    except AccountError as e:
        raise HTTPException(status_code=e.status_code, detail=e.detail)
    return _to_response(account)
|
||||
|
||||
|
||||
@router.put("/{account_id}", response_model=AccountResponse)
async def update(
    account_id: uuid.UUID,
    body: AccountUpdate,
    db: AsyncSession = Depends(get_db),
    user=Depends(get_current_user),
):
    """Apply an update to one of the user's accounts, audit it, and commit."""
    try:
        updated = await update_account(db, account_id, user.id, body)
        await write_audit(
            db,
            user_id=user.id,
            action="account_update",
            resource_type="account",
            resource_id=account_id,
        )
        await db.commit()
        return updated
    except AccountError as e:
        # Translate the service-layer error into the HTTP response.
        raise HTTPException(status_code=e.status_code, detail=e.detail)
|
||||
|
||||
|
||||
@router.post("/{account_id}/import/preview")
async def import_preview(
    account_id: uuid.UUID,
    file: UploadFile = File(...),
    db: AsyncSession = Depends(get_db),
    user=Depends(get_current_user),
):
    """Upload a CSV and get back the detected format, column mapping, and a sample of parsed rows.

    Raises 413 when the file exceeds MAX_IMPORT_FILE_BYTES, 400 when the CSV
    cannot be parsed or has no headers, and the service's AccountError status
    when the account is missing or not owned by this user.
    """
    from app.services.csv_detector import parse_csv_content, detect_format

    def _parse_money(raw: str, empty_value):
        """Parse a UK-style currency string ('1,234.56', '£12').

        Returns *empty_value* for a blank cell; raises ValueError for
        non-numeric content. Shared by the split and single-amount branches
        below so the cleaning rules stay identical.
        """
        cleaned = raw.replace(",", "").replace("£", "").strip()
        return float(cleaned) if cleaned else empty_value

    # Ownership check: raises if the account does not belong to this user.
    try:
        await get_account(db, account_id, user.id)
    except AccountError as e:
        raise HTTPException(status_code=e.status_code, detail=e.detail)

    # Read one byte past the limit so "exactly at the limit" is accepted
    # without ever buffering an arbitrarily large upload.
    content = await file.read(MAX_IMPORT_FILE_BYTES + 1)
    if len(content) > MAX_IMPORT_FILE_BYTES:
        raise HTTPException(status_code=413, detail="File too large (max 10 MB)")
    try:
        headers, rows = parse_csv_content(content)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))

    if not headers:
        raise HTTPException(status_code=400, detail="Could not read CSV headers")

    mapping = detect_format(headers)

    # Build 5-row preview using the detected mapping.
    preview = []
    for row in rows[:5]:
        entry: dict = {
            "date_raw": row.get(mapping.date, ""),
            "description_raw": row.get(mapping.description, ""),
        }
        try:
            if mapping.is_split():
                # Separate debit/credit columns: blanks count as 0.0 and the
                # signed amount is credit minus debit.
                debit = _parse_money(row.get(mapping.debit or "", ""), 0.0)
                credit = _parse_money(row.get(mapping.credit or "", ""), 0.0)
                entry["amount_raw"] = credit - debit
            else:
                # Single signed amount column: a blank cell yields None.
                entry["amount_raw"] = _parse_money(row.get(mapping.amount or "", ""), None)
        except ValueError:
            entry["amount_raw"] = None
        if mapping.balance:
            entry["balance_raw"] = row.get(mapping.balance, "")
        preview.append(entry)

    return {
        "detected_format": mapping.detected_format,
        "headers": headers,
        "mapping": {
            "date": mapping.date,
            "description": mapping.description,
            "amount": mapping.amount,
            "debit": mapping.debit,
            "credit": mapping.credit,
            "balance": mapping.balance,
            "reference": mapping.reference,
        },
        "total_rows": len(rows),
        "preview": preview,
    }
|
||||
|
||||
|
||||
@router.post("/{account_id}/import")
async def import_csv_to_account(
    account_id: uuid.UUID,
    file: UploadFile = File(...),
    date_col: str = Form(...),
    description_col: str = Form(...),
    amount_col: str = Form(default=""),
    debit_col: str = Form(default=""),
    credit_col: str = Form(default=""),
    db: AsyncSession = Depends(get_db),
    user=Depends(get_current_user),
):
    """Import transactions from an uploaded CSV using a caller-supplied column mapping.

    Rows with an unparseable amount or a missing date are skipped; surviving
    rows are handed to the transaction service, audited, and committed.
    Raises 413 for oversized files and 400 for unreadable CSVs, too many rows,
    or a mapping that yields no valid rows.
    """
    # write_audit is already imported at module level; the former redundant
    # local import was removed.
    from app.services.csv_detector import parse_csv_content
    from app.services.transaction_service import import_csv

    def _parse_money(raw: str, empty_value):
        """Parse a UK-style currency string ('1,234.56', '£12').

        Returns *empty_value* for a blank cell; raises ValueError for
        non-numeric content. Shared by both mapping branches so the cleaning
        rules stay identical.
        """
        cleaned = raw.replace(",", "").replace("£", "").strip()
        return float(cleaned) if cleaned else empty_value

    # Ownership check: raises if the account does not belong to this user.
    try:
        await get_account(db, account_id, user.id)
    except AccountError as e:
        raise HTTPException(status_code=e.status_code, detail=e.detail)

    # Read one byte past the limit so "exactly at the limit" is accepted
    # without ever buffering an arbitrarily large upload.
    content = await file.read(MAX_IMPORT_FILE_BYTES + 1)
    if len(content) > MAX_IMPORT_FILE_BYTES:
        raise HTTPException(status_code=413, detail="File too large (max 10 MB)")
    try:
        _, rows = parse_csv_content(content)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))

    if len(rows) > MAX_IMPORT_ROWS:
        raise HTTPException(status_code=400, detail=f"File contains too many rows (max {MAX_IMPORT_ROWS:,})")

    use_split = bool(debit_col and credit_col)
    parsed_rows = []

    for row in rows:
        date_val = row.get(date_col, "").strip()
        if not date_val:
            # No date: the row cannot be imported, so skip before parsing.
            continue
        desc_val = row.get(description_col, "").strip() or "Imported transaction"

        try:
            if use_split:
                # Blank debit/credit cells count as 0.0; amount is signed.
                amount = _parse_money(row.get(credit_col, ""), 0.0) - _parse_money(row.get(debit_col, ""), 0.0)
            else:
                amount = _parse_money(row.get(amount_col, ""), None)
        except ValueError:
            continue  # non-numeric amount: skip the row
        if amount is None:
            continue  # blank single-amount cell: skip the row

        parsed_rows.append({"date": date_val, "description": desc_val, "amount": str(amount)})

    if not parsed_rows:
        raise HTTPException(status_code=400, detail="No valid rows found after applying column mapping")

    result = await import_csv(db, user.id, account_id, parsed_rows, user.base_currency)
    await write_audit(db, user_id=user.id, action="import_data", metadata=result)
    await db.commit()
    return result
|
||||
|
||||
|
||||
@router.delete("/{account_id}", status_code=204)
async def delete(
    account_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    user=Depends(get_current_user),
):
    """Delete one of the user's accounts, write an audit entry, and commit."""
    try:
        await delete_account(db, account_id, user.id)
        await write_audit(
            db,
            user_id=user.id,
            action="account_delete",
            resource_type="account",
            resource_id=account_id,
        )
        await db.commit()
    except AccountError as e:
        # Translate the service-layer error into the HTTP response.
        raise HTTPException(status_code=e.status_code, detail=e.detail)
|
||||
Loading…
Add table
Add a link
Reference in a new issue