Initial commit: MyMidas personal finance tracker
Full-stack self-hosted finance app with FastAPI backend and React frontend. Features: - Accounts, transactions, budgets, investments with GBP base currency - CSV import with auto-detection for 10 UK bank formats - ML predictions: spending forecast, net worth projection, Monte Carlo - 7 selectable themes (Obsidian, Arctic, Midnight, Vault, Terminal, Synthwave, Ledger) - Receipt/document attachments on transactions (JPEG, PNG, WebP, PDF) - AES-256-GCM field encryption, RS256 JWT, TOTP 2FA, RLS, audit log - Encrypted nightly backups + key rotation script - Mobile-responsive layout with bottom nav Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
commit
61a7884ee5
127 changed files with 13323 additions and 0 deletions
0
backend/app/services/__init__.py
Normal file
0
backend/app/services/__init__.py
Normal file
195
backend/app/services/account_service.py
Normal file
195
backend/app/services/account_service.py
Normal file
|
|
@ -0,0 +1,195 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.security import encrypt_field, decrypt_field
|
||||
from app.db.models.account import Account
|
||||
from app.db.models.transaction import Transaction
|
||||
from app.schemas.account import AccountCreate, AccountUpdate
|
||||
|
||||
# Account types whose balance counts against net worth: get_net_worth adds
# the absolute balance of these accounts to total_liabilities instead of assets.
LIABILITY_TYPES = {"credit_card", "loan", "mortgage"}
|
||||
|
||||
|
||||
class AccountError(Exception):
    """Domain error for account operations, carrying an HTTP-style status code."""

    def __init__(self, detail: str, status_code: int = 400):
        # detail is the human-readable message surfaced to the API layer.
        self.detail = detail
        self.status_code = status_code
|
||||
|
||||
|
||||
def _encrypt(value: str | None) -> bytes | None:
|
||||
if value is None:
|
||||
return None
|
||||
return encrypt_field(value)
|
||||
|
||||
|
||||
def _decrypt(data: bytes | None) -> str | None:
|
||||
if not data:
|
||||
return None
|
||||
return decrypt_field(data)
|
||||
|
||||
|
||||
def _to_response(account: Account) -> dict:
    """Serialise an Account ORM row into a plain dict, decrypting sensitive fields.

    Key order matches the original response shape so JSON output is stable.
    """
    display_name = _decrypt(account.name_enc)
    return {
        "id": account.id,
        "name": display_name if display_name else "",
        "institution": _decrypt(account.institution_enc),
        "type": account.type,
        "currency": account.currency,
        "current_balance": account.current_balance,
        "credit_limit": account.credit_limit,
        "interest_rate": account.interest_rate,
        "is_active": account.is_active,
        "include_in_net_worth": account.include_in_net_worth,
        "color": account.color,
        "icon": account.icon,
        "notes": _decrypt(account.notes_enc),
        "created_at": account.created_at,
        "updated_at": account.updated_at,
    }
|
||||
|
||||
|
||||
async def create_account(
    db: AsyncSession,
    user_id: uuid.UUID,
    data: AccountCreate,
) -> dict:
    """Persist a new account for *user_id* and return its serialised form.

    Sensitive free-text fields (name, institution, notes) are stored encrypted.
    The session is flushed, not committed; the caller owns the transaction.
    """
    created = datetime.now(timezone.utc)
    record = Account(
        user_id=user_id,
        name_enc=encrypt_field(data.name),
        institution_enc=_encrypt(data.institution),
        type=data.type,
        currency=data.currency,
        # The opening balance seeds the running balance.
        current_balance=data.opening_balance,
        credit_limit=data.credit_limit,
        interest_rate=data.interest_rate,
        include_in_net_worth=data.include_in_net_worth,
        color=data.color,
        icon=data.icon,
        notes_enc=_encrypt(data.notes),
        created_at=created,
        updated_at=created,
    )
    db.add(record)
    await db.flush()  # populate server-side defaults (e.g. id) before serialising
    return _to_response(record)
|
||||
|
||||
|
||||
async def list_accounts(db: AsyncSession, user_id: uuid.UUID) -> list[dict]:
    """Return all non-deleted accounts for *user_id*, oldest first."""
    stmt = (
        select(Account)
        .where(Account.user_id == user_id, Account.deleted_at.is_(None))
        .order_by(Account.created_at)
    )
    rows = await db.execute(stmt)
    return [_to_response(row) for row in rows.scalars()]
|
||||
|
||||
|
||||
async def get_account(db: AsyncSession, account_id: uuid.UUID, user_id: uuid.UUID) -> Account:
    """Fetch one non-deleted account owned by *user_id*.

    Raises:
        AccountError: with status 404 when no matching account exists.
    """
    stmt = select(Account).where(
        Account.id == account_id,
        Account.user_id == user_id,
        Account.deleted_at.is_(None),
    )
    found = (await db.execute(stmt)).scalar_one_or_none()
    if found is None:
        raise AccountError("Account not found", status_code=404)
    return found
|
||||
|
||||
|
||||
async def update_account(
    db: AsyncSession,
    account_id: uuid.UUID,
    user_id: uuid.UUID,
    data: AccountUpdate,
) -> dict:
    """Apply a partial update to an account and return its serialised form.

    Only fields explicitly provided (non-None) on *data* are changed.

    Raises:
        AccountError: with status 404 when the account does not exist.
    """
    account = await get_account(db, account_id, user_id)

    # Encrypted free-text fields must be re-encrypted on write.
    if data.name is not None:
        account.name_enc = encrypt_field(data.name)
    if data.institution is not None:
        account.institution_enc = _encrypt(data.institution)
    if data.notes is not None:
        account.notes_enc = _encrypt(data.notes)

    # A new opening balance overwrites the running balance directly.
    if data.opening_balance is not None:
        account.current_balance = data.opening_balance

    # Plain columns copy straight across when provided.
    for attr in (
        "credit_limit",
        "interest_rate",
        "include_in_net_worth",
        "is_active",
        "color",
        "icon",
    ):
        new_value = getattr(data, attr)
        if new_value is not None:
            setattr(account, attr, new_value)

    account.updated_at = datetime.now(timezone.utc)
    await db.flush()
    return _to_response(account)
|
||||
|
||||
|
||||
async def delete_account(
    db: AsyncSession,
    account_id: uuid.UUID,
    user_id: uuid.UUID,
) -> None:
    """Soft-delete an account by stamping ``deleted_at``.

    Raises:
        AccountError: with status 404 when the account does not exist
            (or is already soft-deleted).
    """
    account = await get_account(db, account_id, user_id)
    # Single timestamp so deleted_at and updated_at match exactly; the
    # previous version called datetime.now() twice, producing values that
    # differed by microseconds.
    now = datetime.now(timezone.utc)
    account.deleted_at = now
    account.updated_at = now
    await db.flush()
|
||||
|
||||
|
||||
async def recalculate_balance(db: AsyncSession, account_id: uuid.UUID) -> None:
    """Recompute an account's current_balance from its non-deleted transactions.

    SUM over zero rows yields NULL, hence the fallback to Decimal("0").
    Silently does nothing if the account row itself is missing.
    """
    stmt = select(func.sum(Transaction.amount)).where(
        Transaction.account_id == account_id,
        Transaction.deleted_at.is_(None),
    )
    total = (await db.execute(stmt)).scalar_one_or_none() or Decimal("0")

    account = await db.get(Account, account_id)
    if account is not None:
        account.current_balance = total
        account.updated_at = datetime.now(timezone.utc)
        await db.flush()
|
||||
|
||||
|
||||
async def get_net_worth(db: AsyncSession, user_id: uuid.UUID, base_currency: str) -> dict:
    """Aggregate assets and liabilities over accounts flagged include_in_net_worth.

    Accounts whose type is in LIABILITY_TYPES contribute their absolute
    balance to total_liabilities; everything else counts as an asset.
    NOTE(review): balances are summed as-is — FX conversion to base_currency
    is deferred to Phase 3.
    """
    stmt = select(Account).where(
        Account.user_id == user_id,
        Account.include_in_net_worth == True,  # noqa: E712
        Account.deleted_at.is_(None),
    )
    rows = await db.execute(stmt)

    assets = Decimal("0")
    liabilities = Decimal("0")
    for acct in rows.scalars():
        if acct.type in LIABILITY_TYPES:
            liabilities += abs(acct.current_balance)
        else:
            assets += acct.current_balance

    return {
        "total_assets": assets,
        "total_liabilities": liabilities,
        "net_worth": assets - liabilities,
        "base_currency": base_currency,
    }
|
||||
258
backend/app/services/auth_service.py
Normal file
258
backend/app/services/auth_service.py
Normal file
|
|
@ -0,0 +1,258 @@
|
|||
"""
|
||||
Authentication service: register, login, TOTP, sessions, brute-force protection.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import uuid
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from jose import JWTError
|
||||
from redis.asyncio import Redis
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import get_settings
|
||||
from app.core.security import (
|
||||
create_access_token,
|
||||
create_refresh_token,
|
||||
decrypt_field,
|
||||
decode_token,
|
||||
encrypt_field,
|
||||
generate_backup_codes,
|
||||
generate_csrf_token,
|
||||
generate_totp_qr_png,
|
||||
generate_totp_secret,
|
||||
hash_password,
|
||||
hash_token,
|
||||
verify_password,
|
||||
verify_totp,
|
||||
)
|
||||
from app.db.models.session import Session
|
||||
from app.db.models.user import User
|
||||
|
||||
|
||||
class AuthError(Exception):
    """Authentication/authorisation failure carrying an HTTP-style status code."""

    def __init__(self, detail: str, status_code: int = 401):
        # detail is the message surfaced to the API layer.
        self.detail = detail
        self.status_code = status_code
|
||||
|
||||
|
||||
async def _lockout_key(email: str) -> str:
|
||||
return f"lockout:{email}"
|
||||
|
||||
|
||||
async def _check_and_record_failure(redis: Redis, email: str, settings) -> None:
    """Record one failed login and apply exponential lockout once over the limit.

    The first failure starts a rolling window of lockout_base_seconds; once
    the attempt count reaches max_login_attempts the key's TTL doubles per
    extra failure, capped at 24 hours.
    """
    key = await _lockout_key(email)
    attempts = await redis.incr(key)
    if attempts == 1:
        # First failure: start the rolling expiry window.
        await redis.expire(key, settings.lockout_base_seconds)
    if attempts >= settings.max_login_attempts:
        overflow = attempts - settings.max_login_attempts
        ttl = settings.lockout_base_seconds * (2 ** overflow)
        await redis.expire(key, min(ttl, 86400))  # cap at 24h
|
||||
|
||||
|
||||
async def _is_locked_out(redis: Redis, email: str) -> bool:
    """True when the failed-attempt counter has reached the configured limit."""
    raw = await redis.get(await _lockout_key(email))
    if raw is None:
        return False
    return int(raw) >= get_settings().max_login_attempts
|
||||
|
||||
|
||||
async def register_user(db: AsyncSession, email: str, password: str, display_name: str) -> User:
    """Create a new user account.

    In single-user mode (allow_registration disabled) registration is blocked
    once any non-deleted user already exists.

    Raises:
        AuthError: 403 when registration is disabled, 409 on duplicate email.
    """
    settings = get_settings()

    if not settings.allow_registration:
        user_count = await db.scalar(
            select(func.count()).select_from(User).where(User.deleted_at.is_(None))
        )
        if user_count:
            raise AuthError("Registration is disabled", status_code=403)

    if await db.scalar(select(User).where(User.email == email)):
        raise AuthError("Email already registered", status_code=409)

    created = datetime.now(timezone.utc)
    user = User(
        email=email,
        password_hash=hash_password(password),
        display_name=display_name,
        base_currency=settings.base_currency,
        created_at=created,
        updated_at=created,
    )
    db.add(user)
    await db.flush()
    return user
|
||||
|
||||
|
||||
async def authenticate_user(
    db: AsyncSession,
    redis: Redis,
    email: str,
    password: str,
    ip: str | None,
    user_agent: str | None,
) -> tuple[User, str, str] | tuple[User, None, None]:
    """
    Password step of login, with Redis-backed brute-force lockout.

    Returns (user, access_token, refresh_token) if no TOTP required,
    or (user, None, None) if TOTP challenge needed.
    Raises AuthError on failure: 429 while locked out, 401 otherwise.
    """
    settings = get_settings()

    # Refuse early while the email is locked out, before any DB work.
    if await _is_locked_out(redis, email):
        raise AuthError("Account temporarily locked due to too many failed attempts", status_code=429)

    user = await db.scalar(
        select(User).where(User.email == email, User.deleted_at.is_(None))
    )
    # Unknown email and wrong password share one failure counter and one
    # error message, so responses don't reveal whether the account exists.
    if not user or not verify_password(password, user.password_hash):
        await _check_and_record_failure(redis, email, settings)
        raise AuthError("Invalid email or password")

    # Clear lockout on success
    await redis.delete(await _lockout_key(email))

    # With 2FA enabled no session is created yet; the caller issues a
    # short-lived challenge token (see create_totp_challenge_token).
    if user.totp_enabled:
        return user, None, None  # Caller creates challenge token

    tokens = await _create_session(db, user, ip, user_agent)
    return user, *tokens
|
||||
|
||||
|
||||
async def _create_session(
    db: AsyncSession,
    user: User,
    ip: str | None,
    user_agent: str | None,
) -> tuple[str, str]:
    """Issue access/refresh JWTs, persist a Session row, and stamp login info.

    NOTE(review): the Session row stores a hash of the *access* token but
    expires after refresh_token_expire_days — confirm that pairing is
    intentional.
    """
    settings = get_settings()
    access_token = create_access_token(str(user.id))
    refresh_token = create_refresh_token(str(user.id))

    issued_at = datetime.now(timezone.utc)
    db.add(Session(
        user_id=user.id,
        token_hash=hash_token(access_token),
        ip_address=ip,
        user_agent=user_agent,
        last_active_at=issued_at,
        expires_at=issued_at + timedelta(days=settings.refresh_token_expire_days),
        created_at=issued_at,
    ))
    await db.flush()

    # Record last-login metadata on the user row.
    user.last_login_at = issued_at
    user.last_login_ip = ip
    user.updated_at = issued_at

    return access_token, refresh_token
|
||||
|
||||
|
||||
async def complete_totp_login(
    db: AsyncSession,
    challenge_token: str,
    totp_code: str,
    ip: str | None,
    user_agent: str | None,
) -> tuple[str, str]:
    """Exchange a TOTP challenge token plus a valid code for a real session.

    Returns (access_token, refresh_token). Raises AuthError (401) on any
    failure: bad/expired challenge token, unknown user, TOTP not enabled,
    or a wrong code.
    """
    try:
        payload = decode_token(challenge_token, token_type="totp_challenge")
        user_id = uuid.UUID(payload["sub"])
    # Any decode/parse problem maps to one generic rejection.
    except (JWTError, ValueError, KeyError):
        raise AuthError("Invalid or expired challenge token")

    user = await db.get(User, user_id)
    if not user or not user.totp_enabled or not user.totp_secret_enc:
        raise AuthError("Invalid challenge")

    # enable_totp stores the encrypted secret hex-encoded (a str); raw bytes
    # are also accepted here in case older rows stored them directly.
    secret = decrypt_field(bytes.fromhex(user.totp_secret_enc) if isinstance(user.totp_secret_enc, str) else user.totp_secret_enc)
    if not verify_totp(secret, totp_code):
        raise AuthError("Invalid TOTP code")

    return await _create_session(db, user, ip, user_agent)
|
||||
|
||||
|
||||
def create_totp_challenge_token(user_id: uuid.UUID) -> str:
    """Mint a short-lived (5 minute) JWT proving the password step succeeded.

    The token carries type="totp_challenge" and is redeemed, together with a
    valid TOTP code, by complete_totp_login.

    Fixes: the previous version imported create_access_token without using it
    and locally re-imported datetime/timedelta/timezone/get_settings, which
    are already in scope at module level. Only the names genuinely not
    imported by the module (jose.jwt, pathlib.Path) are imported here.
    """
    from pathlib import Path

    from jose import jwt

    settings = get_settings()
    now = datetime.now(timezone.utc)
    payload = {
        "sub": str(user_id),
        "iat": now,
        "exp": now + timedelta(minutes=5),
        "type": "totp_challenge",
    }
    # Sign with the RS256 private key read from disk on each call.
    private_key = Path(settings.jwt_private_key_file).read_text()
    return jwt.encode(payload, private_key, algorithm=settings.jwt_algorithm)
|
||||
|
||||
|
||||
async def setup_totp(user: User, db: AsyncSession) -> tuple[str, str, list[str]]:
    """Generate TOTP secret, QR code, and backup codes. Does not enable TOTP yet.

    Returns (secret, base64-encoded QR PNG, backup codes). Nothing is
    persisted here; enable_totp stores the secret once the user confirms a
    valid code.
    """
    secret = generate_totp_secret()
    qr_b64 = base64.b64encode(generate_totp_qr_png(secret, user.email)).decode()
    return secret, qr_b64, generate_backup_codes(8)
|
||||
|
||||
|
||||
async def enable_totp(user: User, db: AsyncSession, secret: str, code: str) -> None:
    """Verify *code* against *secret*, then persist the secret and enable TOTP.

    The secret is encrypted and stored hex-encoded; complete_totp_login
    decodes it with bytes.fromhex.

    Raises:
        AuthError: 400 when the confirmation code does not match.
    """
    if not verify_totp(secret, code):
        raise AuthError("Invalid TOTP code — setup failed", status_code=400)

    user.totp_secret_enc = encrypt_field(secret).hex()
    user.totp_enabled = True
    user.updated_at = datetime.now(timezone.utc)
    await db.flush()
|
||||
|
||||
|
||||
async def disable_totp(user: User, db: AsyncSession, password: str) -> None:
    """Turn off TOTP after re-confirming the account password.

    Clears the stored secret and backup codes.

    Raises:
        AuthError: 400 on password mismatch.
    """
    if not verify_password(password, user.password_hash):
        raise AuthError("Incorrect password", status_code=400)

    user.totp_enabled = False
    user.totp_secret_enc = None
    user.totp_backup_codes_enc = None
    user.updated_at = datetime.now(timezone.utc)
    await db.flush()
|
||||
|
||||
|
||||
async def revoke_session(db: AsyncSession, session_id: uuid.UUID, user_id: uuid.UUID) -> None:
    """Mark one of *user_id*'s sessions as revoked.

    Raises:
        AuthError: 404 when the session is missing or owned by another user.
    """
    session = await db.get(Session, session_id)
    if session is None or session.user_id != user_id:
        raise AuthError("Session not found", status_code=404)
    session.revoked_at = datetime.now(timezone.utc)
    await db.flush()
|
||||
|
||||
|
||||
async def revoke_all_sessions(db: AsyncSession, user_id: uuid.UUID, except_token_hash: str | None = None) -> None:
    """Revoke every live session for *user_id* with a single UPDATE.

    When *except_token_hash* is given, that session (typically the caller's
    current one) is left untouched.
    """
    from sqlalchemy import update  # local import, as in the rest of this module

    conditions = [Session.user_id == user_id, Session.revoked_at.is_(None)]
    if except_token_hash:
        conditions.append(Session.token_hash != except_token_hash)

    await db.execute(
        update(Session)
        .where(*conditions)
        .values(revoked_at=datetime.now(timezone.utc))
    )
|
||||
|
||||
|
||||
async def get_sessions(db: AsyncSession, user_id: uuid.UUID) -> list[Session]:
    """Return the user's live (unrevoked, unexpired) sessions, newest first."""
    stmt = (
        select(Session)
        .where(
            Session.user_id == user_id,
            Session.revoked_at.is_(None),
            Session.expires_at > datetime.now(timezone.utc),
        )
        .order_by(Session.created_at.desc())
    )
    return list((await db.execute(stmt)).scalars())
|
||||
137
backend/app/services/budget_service.py
Normal file
137
backend/app/services/budget_service.py
Normal file
|
|
@ -0,0 +1,137 @@
|
|||
import uuid
|
||||
from datetime import date, datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from sqlalchemy import and_, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db.models.budget import Budget
|
||||
from app.db.models.category import Category
|
||||
from app.db.models.transaction import Transaction
|
||||
from app.schemas.budget import BudgetCreate, BudgetSummaryItem, BudgetUpdate
|
||||
|
||||
|
||||
def _period_bounds(period: str, ref: date) -> tuple[date, date]:
|
||||
if period == "weekly":
|
||||
start = ref - relativedelta(days=ref.weekday())
|
||||
end = start + relativedelta(days=6)
|
||||
elif period == "monthly":
|
||||
start = ref.replace(day=1)
|
||||
end = (start + relativedelta(months=1)) - relativedelta(days=1)
|
||||
elif period == "quarterly":
|
||||
q = (ref.month - 1) // 3
|
||||
start = date(ref.year, q * 3 + 1, 1)
|
||||
end = (start + relativedelta(months=3)) - relativedelta(days=1)
|
||||
else: # yearly
|
||||
start = date(ref.year, 1, 1)
|
||||
end = date(ref.year, 12, 31)
|
||||
return start, end
|
||||
|
||||
|
||||
async def create_budget(db: AsyncSession, user_id: uuid.UUID, data: BudgetCreate) -> Budget:
    """Persist a new budget for *user_id* and return the refreshed row.

    New budgets always start active; the id is generated client-side.
    """
    timestamp = datetime.now(timezone.utc)
    budget = Budget(
        id=uuid.uuid4(),
        user_id=user_id,
        category_id=data.category_id,
        name=data.name,
        amount=data.amount,
        currency=data.currency,
        period=data.period,
        start_date=data.start_date,
        end_date=data.end_date,
        rollover=data.rollover,
        alert_threshold=data.alert_threshold,
        is_active=True,
        created_at=timestamp,
        updated_at=timestamp,
    )
    db.add(budget)
    await db.flush()
    await db.refresh(budget)  # pick up any server-side defaults
    return budget
|
||||
|
||||
|
||||
async def list_budgets(db: AsyncSession, user_id: uuid.UUID, active_only: bool = True) -> list[Budget]:
    """Return the user's budgets ordered by name, optionally only active ones."""
    stmt = select(Budget).where(Budget.user_id == user_id)
    if active_only:
        stmt = stmt.where(Budget.is_active == True)  # noqa: E712
    result = await db.execute(stmt.order_by(Budget.name))
    return list(result.scalars().all())
|
||||
|
||||
|
||||
async def get_budget(db: AsyncSession, user_id: uuid.UUID, budget_id: uuid.UUID) -> Budget | None:
    """Fetch one budget owned by *user_id*, or ``None`` when absent."""
    stmt = select(Budget).where(Budget.id == budget_id, Budget.user_id == user_id)
    return (await db.execute(stmt)).scalar_one_or_none()
|
||||
|
||||
|
||||
async def update_budget(db: AsyncSession, budget: Budget, data: BudgetUpdate) -> Budget:
    """Apply the fields explicitly set on *data* to *budget* and refresh it.

    exclude_unset means only fields the client actually sent are touched.
    """
    changes = data.model_dump(exclude_unset=True)
    for attr, value in changes.items():
        setattr(budget, attr, value)
    budget.updated_at = datetime.now(timezone.utc)
    await db.flush()
    await db.refresh(budget)
    return budget
|
||||
|
||||
|
||||
async def delete_budget(db: AsyncSession, budget: Budget) -> None:
    """Hard-delete *budget* (budgets have no soft-delete column)."""
    await db.delete(budget)
    await db.flush()
|
||||
|
||||
|
||||
async def get_budget_summary(db: AsyncSession, user_id: uuid.UUID) -> list[BudgetSummaryItem]:
    """Build per-budget spending summaries for the period containing today.

    For each active budget: resolve its category name, sum the absolute
    amounts of non-void expense transactions in the current period, and
    compute remaining/percent-used figures.

    Improvement: category names are loaded in one query up front instead of
    one query per budget (previously an N+1 pattern).
    """
    budgets = await list_budgets(db, user_id, active_only=True)
    today = date.today()
    items: list[BudgetSummaryItem] = []

    # Batch-resolve category names: one query for all budgets.
    category_ids = {b.category_id for b in budgets if b.category_id is not None}
    names_by_id: dict = {}
    if category_ids:
        cat_result = await db.execute(
            select(Category).where(Category.id.in_(category_ids))
        )
        names_by_id = {c.id: c.name for c in cat_result.scalars()}

    for budget in budgets:
        period_start, period_end = _period_bounds(budget.period, today)
        cat_name = names_by_id.get(budget.category_id, "Unknown")

        # Sum of absolute expense amounts for this category in the period.
        spent_result = await db.execute(
            select(func.coalesce(func.sum(func.abs(Transaction.amount)), Decimal("0")))
            .where(
                and_(
                    Transaction.user_id == user_id,
                    Transaction.category_id == budget.category_id,
                    Transaction.type == "expense",
                    Transaction.status != "void",
                    Transaction.date >= period_start,
                    Transaction.date <= period_end,
                    Transaction.deleted_at.is_(None),
                )
            )
        )
        spent = Decimal(str(spent_result.scalar() or 0))
        remaining = budget.amount - spent
        # Guard against division by zero for zero-amount budgets.
        pct = (spent / budget.amount * 100) if budget.amount > 0 else Decimal("0")

        items.append(
            BudgetSummaryItem(
                budget_id=budget.id,
                budget_name=budget.name,
                category_id=budget.category_id,
                category_name=cat_name,
                period=budget.period,
                budget_amount=budget.amount,
                spent_amount=spent,
                remaining_amount=remaining,
                percent_used=pct.quantize(Decimal("0.01")),
                is_over_budget=spent > budget.amount,
                # NOTE(review): pct is on a 0-100 scale — confirm
                # alert_threshold uses the same scale, not a 0-1 fraction.
                alert_triggered=pct >= budget.alert_threshold,
                currency=budget.currency,
                period_start=period_start,
                period_end=period_end,
            )
        )

    return items
|
||||
135
backend/app/services/category_service.py
Normal file
135
backend/app/services/category_service.py
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db.models.category import Category
|
||||
|
||||
# Built-in category taxonomy, inserted once by seed_system_categories.
# Seeded rows are global (user_id=None) and flagged is_system=True; a
# category's position in this list becomes its sort_order.
SYSTEM_CATEGORIES = [
    # Income
    {"name": "Salary", "type": "income", "icon": "briefcase", "color": "#22c55e"},
    {"name": "Freelance", "type": "income", "icon": "laptop", "color": "#22c55e"},
    {"name": "Investment Income", "type": "income", "icon": "trending-up", "color": "#22c55e"},
    {"name": "Rental Income", "type": "income", "icon": "home", "color": "#22c55e"},
    {"name": "Benefits", "type": "income", "icon": "shield", "color": "#22c55e"},
    {"name": "Other Income", "type": "income", "icon": "plus-circle", "color": "#22c55e"},
    # Expenses — Housing
    {"name": "Rent / Mortgage", "type": "expense", "icon": "home", "color": "#6366f1"},
    {"name": "Council Tax", "type": "expense", "icon": "landmark", "color": "#6366f1"},
    {"name": "Home Insurance", "type": "expense", "icon": "shield", "color": "#6366f1"},
    {"name": "Home Maintenance", "type": "expense", "icon": "wrench", "color": "#6366f1"},
    # Utilities
    {"name": "Electricity", "type": "expense", "icon": "zap", "color": "#f59e0b"},
    {"name": "Gas", "type": "expense", "icon": "flame", "color": "#f59e0b"},
    {"name": "Water", "type": "expense", "icon": "droplets", "color": "#f59e0b"},
    {"name": "Internet", "type": "expense", "icon": "wifi", "color": "#f59e0b"},
    {"name": "Phone", "type": "expense", "icon": "smartphone", "color": "#f59e0b"},
    # Food
    {"name": "Groceries", "type": "expense", "icon": "shopping-cart", "color": "#ec4899"},
    {"name": "Eating Out", "type": "expense", "icon": "utensils", "color": "#ec4899"},
    {"name": "Coffee", "type": "expense", "icon": "coffee", "color": "#ec4899"},
    {"name": "Takeaway", "type": "expense", "icon": "package", "color": "#ec4899"},
    # Transport
    {"name": "Fuel", "type": "expense", "icon": "fuel", "color": "#0ea5e9"},
    {"name": "Public Transport", "type": "expense", "icon": "bus", "color": "#0ea5e9"},
    {"name": "Car Insurance", "type": "expense", "icon": "car", "color": "#0ea5e9"},
    {"name": "Car Maintenance", "type": "expense", "icon": "wrench", "color": "#0ea5e9"},
    {"name": "Parking", "type": "expense", "icon": "parking-circle", "color": "#0ea5e9"},
    {"name": "Taxi / Ride share", "type": "expense", "icon": "map-pin", "color": "#0ea5e9"},
    # Health
    {"name": "Healthcare", "type": "expense", "icon": "heart-pulse", "color": "#ef4444"},
    {"name": "Pharmacy", "type": "expense", "icon": "pill", "color": "#ef4444"},
    {"name": "Gym", "type": "expense", "icon": "dumbbell", "color": "#ef4444"},
    # Personal
    {"name": "Clothing", "type": "expense", "icon": "shirt", "color": "#a855f7"},
    {"name": "Personal Care", "type": "expense", "icon": "sparkles", "color": "#a855f7"},
    {"name": "Subscriptions", "type": "expense", "icon": "repeat", "color": "#a855f7"},
    {"name": "Entertainment", "type": "expense", "icon": "tv", "color": "#a855f7"},
    {"name": "Holidays", "type": "expense", "icon": "plane", "color": "#a855f7"},
    # Finance
    {"name": "Loan Repayment", "type": "expense", "icon": "credit-card", "color": "#64748b"},
    {"name": "Mortgage Payment", "type": "expense", "icon": "building", "color": "#64748b"},
    {"name": "Bank Charges", "type": "expense", "icon": "landmark", "color": "#64748b"},
    {"name": "Interest Paid", "type": "expense", "icon": "percent", "color": "#64748b"},
    # Savings
    {"name": "Savings", "type": "expense", "icon": "piggy-bank", "color": "#10b981"},
    {"name": "Investments", "type": "expense", "icon": "trending-up", "color": "#10b981"},
    # Other
    {"name": "Gifts", "type": "expense", "icon": "gift", "color": "#f97316"},
    {"name": "Education", "type": "expense", "icon": "graduation-cap", "color": "#f97316"},
    {"name": "Other Expense", "type": "expense", "icon": "more-horizontal", "color": "#64748b"},
    # Transfers
    {"name": "Transfer", "type": "transfer", "icon": "arrow-left-right", "color": "#94a3b8"},
]
|
||||
|
||||
|
||||
async def seed_system_categories(db: AsyncSession) -> None:
    """Insert the built-in category set once; no-op if any system category exists."""
    already_seeded = await db.scalar(
        select(Category).where(Category.is_system == True).limit(1)  # noqa: E712
    )
    if already_seeded:
        return

    now = datetime.now(timezone.utc)
    for position, spec in enumerate(SYSTEM_CATEGORIES):
        db.add(Category(
            user_id=None,  # global: visible to every user
            name=spec["name"],
            type=spec["type"],
            icon=spec.get("icon"),
            color=spec.get("color"),
            is_system=True,
            sort_order=position,  # list order fixes display order
            created_at=now,
        ))
    await db.flush()
|
||||
|
||||
|
||||
async def list_categories(db: AsyncSession, user_id: uuid.UUID) -> list[dict]:
    """Return the user's own categories plus the global system set, as dicts."""
    stmt = (
        select(Category)
        .where((Category.user_id == user_id) | (Category.user_id.is_(None)))
        .order_by(Category.type, Category.sort_order, Category.name)
    )
    result = await db.execute(stmt)

    serialised: list[dict] = []
    for c in result.scalars().all():
        serialised.append({
            "id": str(c.id),
            "name": c.name,
            "type": c.type,
            "icon": c.icon,
            "color": c.color,
            "is_system": c.is_system,
            "parent_id": str(c.parent_id) if c.parent_id else None,
            "sort_order": c.sort_order,
        })
    return serialised
|
||||
|
||||
|
||||
async def create_category(
    db: AsyncSession,
    user_id: uuid.UUID,
    name: str,
    type_: str,
    icon: str | None = None,
    color: str | None = None,
    parent_id: uuid.UUID | None = None,
) -> dict:
    """Create a user-owned (non-system) category and return a summary dict.

    NOTE(review): the returned dict omits parent_id/sort_order, unlike the
    shape produced by list_categories — confirm callers don't need them here.
    """
    category = Category(
        user_id=user_id,
        name=name,
        type=type_,
        icon=icon,
        color=color,
        parent_id=parent_id,
        is_system=False,
        created_at=datetime.now(timezone.utc),
    )
    db.add(category)
    await db.flush()
    return {
        "id": str(category.id),
        "name": category.name,
        "type": category.type,
        "icon": category.icon,
        "color": category.color,
        "is_system": False,
    }
|
||||
237
backend/app/services/csv_detector.py
Normal file
237
backend/app/services/csv_detector.py
Normal file
|
|
@ -0,0 +1,237 @@
|
|||
"""
|
||||
Auto-detect CSV bank export formats and produce a column mapping.
|
||||
Supports: Monzo, Starling, Revolut, Barclays, Lloyds, NatWest/RBS, HSBC, Santander.
|
||||
Falls back to a generic best-effort mapping for unknown formats.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import csv
|
||||
import io
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Literal
|
||||
|
||||
|
||||
@dataclass
|
||||
class CsvMapping:
|
||||
date: str
|
||||
description: str
|
||||
amount: str | None = None # single signed amount column
|
||||
debit: str | None = None # separate debit column (positive value = money out)
|
||||
credit: str | None = None # separate credit column (positive value = money in)
|
||||
balance: str | None = None
|
||||
reference: str | None = None
|
||||
detected_format: str | None = None
|
||||
|
||||
def is_split(self) -> bool:
|
||||
return self.debit is not None and self.credit is not None
|
||||
|
||||
|
||||
# Ordered list of known bank CSV formats. Each entry maps logical columns to
# header names and supplies a "detect" predicate over the set of normalised
# (lowercased, stripped) headers. detect_format tries entries in order and
# stops at the first match.
KNOWN_FORMATS: list[dict] = [
    {
        "name": "Monzo",
        "detect": lambda h: {"transaction id", "emoji"}.issubset(h),
        "date": "Date",
        "description": "Name",
        "amount": "Amount",
        "balance": None,
        "reference": "Notes and #tags",
    },
    {
        "name": "Starling",
        "detect": lambda h: {"counter party", "spending category"}.issubset(h),
        "date": "Date",
        "description": "Counter Party",
        "amount": "Amount (GBP)",
        "balance": "Balance (GBP)",
        "reference": "Reference",
    },
    {
        "name": "Revolut",
        "detect": lambda h: {"product", "started date", "completed date"}.issubset(h),
        "date": "Started Date",
        "description": "Description",
        "amount": "Amount",
        "balance": "Balance",
        "reference": None,
    },
    {
        "name": "Barclays",
        "detect": lambda h: {"subcategory", "memo", "number"}.issubset(h),
        "date": "Date",
        "description": "Memo",
        "amount": "Amount",
        "balance": None,
        "reference": "Subcategory",
    },
    {
        "name": "Lloyds Bank",
        "detect": lambda h: {"transaction date", "debit amount", "credit amount", "transaction description"}.issubset(h),
        "date": "Transaction Date",
        "description": "Transaction Description",
        "debit": "Debit Amount",
        "credit": "Credit Amount",
        "balance": "Balance",
        "reference": None,
    },
    # NOTE(review): this Halifax entry is unreachable — its detect predicate
    # is identical to Lloyds Bank's above, so Halifax exports are always
    # labelled "Lloyds Bank". The column mapping is the same, so only the
    # detected_format string is affected; kept for documentation purposes.
    {
        "name": "Halifax",
        "detect": lambda h: {"transaction date", "debit amount", "credit amount", "transaction description"}.issubset(h),
        "date": "Transaction Date",
        "description": "Transaction Description",
        "debit": "Debit Amount",
        "credit": "Credit Amount",
        "balance": "Balance",
        "reference": None,
    },
    {
        "name": "NatWest / RBS",
        # Fixed: dropped the redundant `and "value" in h` — "value" is already
        # required by the issubset check.
        "detect": lambda h: {"date", "type", "description", "value", "balance"}.issubset(h),
        "date": "Date",
        "description": "Description",
        "amount": "Value",
        "balance": "Balance",
        "reference": None,
    },
    {
        "name": "HSBC",
        # Exact-set match: HSBC exports carry only these columns.
        "detect": lambda h: h == {"date", "description", "amount"} or h == {"date", "description", "debit", "credit", "balance"},
        "date": "Date",
        "description": "Description",
        "amount": "Amount",
        "balance": None,
        "reference": None,
    },
    {
        "name": "Santander",
        "detect": lambda h: {"date", "description", "debit", "credit", "balance"}.issubset(h),
        "date": "Date",
        "description": "Description",
        "debit": "Debit",
        "credit": "Credit",
        "balance": "Balance",
        "reference": None,
    },
    {
        "name": "Nationwide",
        "detect": lambda h: {"date", "transaction", "payments out", "payments in", "balance"}.issubset(h),
        "date": "Date",
        "description": "Transaction",
        "debit": "Payments Out",
        "credit": "Payments In",
        "balance": "Balance",
        "reference": None,
    },
]
|
||||
|
||||
|
||||
def _normalise_headers(raw_headers: list[str]) -> dict[str, str]:
|
||||
"""Return {normalised_key: original_header}."""
|
||||
return {h.strip().lower(): h.strip() for h in raw_headers if h}
|
||||
|
||||
|
||||
def detect_format(raw_headers: list[str]) -> CsvMapping:
    """Identify a known bank CSV layout from its headers, or fall back to guessing."""
    norm = _normalise_headers(raw_headers)
    header_keys = set(norm)

    def resolve(col: str | None) -> str | None:
        # Translate a logical column name to the actual (case-preserved) header.
        if col is None:
            return None
        return norm.get(col.strip().lower(), col)

    for fmt in KNOWN_FORMATS:
        if not fmt["detect"](header_keys):
            continue
        common = {
            "date": resolve(fmt["date"]) or fmt["date"],
            "description": resolve(fmt["description"]) or fmt["description"],
            "balance": resolve(fmt.get("balance")),
            "reference": resolve(fmt.get("reference")),
            "detected_format": fmt["name"],
        }
        if "debit" in fmt:
            # Layout with separate money-out / money-in columns.
            return CsvMapping(
                debit=resolve(fmt["debit"]),
                credit=resolve(fmt["credit"]),
                **common,
            )
        # Layout with a single signed amount column.
        return CsvMapping(amount=resolve(fmt["amount"]), **common)

    # No known layout matched — guess from common column name patterns.
    return _generic_mapping(norm)
|
||||
|
||||
|
||||
def _generic_mapping(norm: dict[str, str]) -> CsvMapping:
    """Best-effort column mapping when no known bank format matches."""

    def first_match(*candidates: str) -> str | None:
        # First candidate present among the normalised headers, else None.
        return next((norm[c] for c in candidates if c in norm), None)

    headers = list(norm.values())

    date_col = first_match("date", "transaction date", "trans date", "value date", "posting date")
    desc_col = first_match("description", "narrative", "details", "memo", "payee", "merchant", "name", "counter party")
    amt_col = first_match("amount", "value", "net amount", "transaction amount")
    debit_col = first_match("debit", "debit amount", "payments out", "money out", "withdrawal")
    credit_col = first_match("credit", "credit amount", "payments in", "money in", "deposit")
    bal_col = first_match("balance", "running balance")
    ref_col = first_match("reference", "notes", "tags", "category")

    # Fall back to positional columns when the essentials weren't recognised.
    if not date_col:
        date_col = headers[0] if headers else "date"
    if not desc_col:
        desc_col = headers[1] if len(headers) > 1 else "description"

    if debit_col and credit_col:
        # Two-column (money out / money in) layout.
        return CsvMapping(
            date=date_col,
            description=desc_col,
            debit=debit_col,
            credit=credit_col,
            balance=bal_col,
            reference=ref_col,
            detected_format=None,
        )

    # Single signed-amount layout; third column is a last-resort amount guess.
    return CsvMapping(
        date=date_col,
        description=desc_col,
        amount=amt_col or (headers[2] if len(headers) > 2 else "amount"),
        balance=bal_col,
        reference=ref_col,
        detected_format=None,
    )
|
||||
|
||||
|
||||
def parse_csv_content(content: bytes) -> tuple[list[str], list[dict]]:
    """Decode CSV bytes and return (headers, rows), skipping bank preamble lines."""
    text = None
    for encoding in ("utf-8-sig", "utf-8", "latin-1"):
        try:
            text = content.decode(encoding)
        except UnicodeDecodeError:
            continue
        break
    if text is None:
        raise ValueError("Cannot decode file — try saving as UTF-8")

    # Some bank exports (Lloyds, Barclays) prepend preamble lines before the
    # header row: treat the first comma-containing line as the header.
    lines = text.splitlines()
    header_idx = 0
    for i, line in enumerate(lines):
        if "," in line and len(line.split(",")) >= 2:
            header_idx = i
            break

    reader = csv.DictReader(io.StringIO("\n".join(lines[header_idx:])))
    headers = [h.strip() for h in (reader.fieldnames or []) if h and h.strip()]

    rows: list[dict] = []
    for raw_row in reader:
        cleaned = {
            k.strip(): (v.strip() if v else "")
            for k, v in raw_row.items()
            if k and k.strip()
        }
        # Drop fully blank rows.
        if any(cleaned.values()):
            rows.append(cleaned)

    return headers, rows
|
||||
300
backend/app/services/investment_service.py
Normal file
300
backend/app/services/investment_service.py
Normal file
|
|
@ -0,0 +1,300 @@
|
|||
import uuid
|
||||
from datetime import date, datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db.models.asset import Asset
|
||||
from app.db.models.asset_price import AssetPrice
|
||||
from app.db.models.investment_holding import InvestmentHolding
|
||||
from app.db.models.investment_transaction import InvestmentTransaction
|
||||
from app.schemas.investment import (
|
||||
HoldingCreate,
|
||||
HoldingResponse,
|
||||
InvestmentTxnCreate,
|
||||
PerformanceMetrics,
|
||||
PortfolioSummary,
|
||||
)
|
||||
|
||||
|
||||
async def _get_asset(db: AsyncSession, asset_id: uuid.UUID) -> Asset | None:
    """Fetch an asset by primary key, or None if it does not exist."""
    row = await db.execute(select(Asset).where(Asset.id == asset_id))
    return row.scalar_one_or_none()
|
||||
|
||||
|
||||
def _holding_to_response(holding: InvestmentHolding, asset: Asset) -> HoldingResponse:
    """Combine a holding with its asset's latest quote into an API response."""
    cost_total = holding.quantity * holding.avg_cost_basis
    price = asset.last_price
    # A missing (or zero) last price means no market value can be stated.
    value = holding.quantity * price if price else None
    gain = (value - cost_total) if value is not None else None
    gain_pct = None
    if gain is not None and cost_total > 0:
        gain_pct = (gain / cost_total * 100).quantize(Decimal("0.01"))

    return HoldingResponse(
        id=holding.id,
        account_id=holding.account_id,
        asset_id=holding.asset_id,
        symbol=asset.symbol,
        asset_name=asset.name,
        asset_type=asset.type,
        quantity=holding.quantity,
        avg_cost_basis=holding.avg_cost_basis,
        current_price=price,
        current_value=value,
        cost_basis_total=cost_total,
        unrealised_gain=gain,
        unrealised_gain_pct=gain_pct,
        currency=holding.currency,
        price_change_24h=asset.price_change_24h,
    )
|
||||
|
||||
|
||||
async def get_portfolio(db: AsyncSession, user_id: uuid.UUID) -> PortfolioSummary:
    """Aggregate all open (quantity > 0) holdings into a portfolio summary.

    Assets are loaded in a single batched ``IN`` query instead of one query
    per holding (fixes the previous N+1 access pattern). Holdings whose
    asset row is missing are skipped, as before.
    """
    result = await db.execute(
        select(InvestmentHolding).where(
            InvestmentHolding.user_id == user_id,
            InvestmentHolding.quantity > 0,
        )
    )
    holdings = result.scalars().all()

    # Batch-load every referenced asset in one round trip.
    asset_ids = {h.asset_id for h in holdings}
    assets_by_id: dict = {}
    if asset_ids:
        asset_result = await db.execute(select(Asset).where(Asset.id.in_(asset_ids)))
        assets_by_id = {a.id: a for a in asset_result.scalars().all()}

    responses = []
    total_value = Decimal("0")
    total_cost = Decimal("0")

    for h in holdings:
        asset = assets_by_id.get(h.asset_id)
        if not asset:
            # Orphaned holding — asset row removed; exclude from totals.
            continue
        r = _holding_to_response(h, asset)
        responses.append(r)
        total_cost += r.cost_basis_total
        if r.current_value is not None:
            total_value += r.current_value

    total_gain = total_value - total_cost
    total_gain_pct = (
        (total_gain / total_cost * 100).quantize(Decimal("0.01"))
        if total_cost > 0
        else Decimal("0")
    )

    return PortfolioSummary(
        total_value=total_value,
        total_cost=total_cost,
        total_gain=total_gain,
        total_gain_pct=total_gain_pct,
        currency="GBP",
        holdings=responses,
    )
|
||||
|
||||
|
||||
async def get_holding(db: AsyncSession, user_id: uuid.UUID, holding_id: uuid.UUID) -> InvestmentHolding | None:
    """Fetch one holding scoped to its owner, or None when absent."""
    stmt = select(InvestmentHolding).where(
        InvestmentHolding.id == holding_id,
        InvestmentHolding.user_id == user_id,
    )
    result = await db.execute(stmt)
    return result.scalar_one_or_none()
|
||||
|
||||
|
||||
async def create_holding(db: AsyncSession, user_id: uuid.UUID, data: HoldingCreate) -> InvestmentHolding:
    """Create a holding for (account, asset); return the existing one if present."""
    # Idempotency: one holding row per account+asset pair.
    result = await db.execute(
        select(InvestmentHolding).where(
            InvestmentHolding.user_id == user_id,
            InvestmentHolding.account_id == data.account_id,
            InvestmentHolding.asset_id == data.asset_id,
        )
    )
    current = result.scalar_one_or_none()
    if current is not None:
        return current

    now = datetime.now(timezone.utc)
    holding = InvestmentHolding(
        id=uuid.uuid4(),
        user_id=user_id,
        account_id=data.account_id,
        asset_id=data.asset_id,
        quantity=data.quantity,
        avg_cost_basis=data.avg_cost_basis,
        currency=data.currency,
        created_at=now,
        updated_at=now,
    )
    db.add(holding)
    await db.flush()
    await db.refresh(holding)
    return holding
|
||||
|
||||
|
||||
async def add_investment_transaction(
    db: AsyncSession, user_id: uuid.UUID, data: InvestmentTxnCreate
) -> InvestmentTransaction:
    """Record an investment transaction and update the holding's position.

    Side effects by type:
      - buy / transfer_in: quantity grows; avg cost basis is re-weighted.
      - sell / transfer_out: quantity shrinks (floored at zero).
      - split: quantity multiplied by the ratio, cost basis divided by it.
      - dividend / fee: no position change.

    Raises ValueError when the holding does not belong to the user.
    """
    holding = await get_holding(db, user_id, data.holding_id)
    if not holding:
        raise ValueError("Holding not found")

    # Gross consideration including fees.
    total = data.quantity * data.price + data.fees

    txn = InvestmentTransaction(
        id=uuid.uuid4(),
        user_id=user_id,
        holding_id=data.holding_id,
        type=data.type,
        quantity=data.quantity,
        price=data.price,
        fees=data.fees,
        total_amount=total,
        currency=data.currency,
        date=data.date,
        created_at=datetime.now(timezone.utc),
    )
    db.add(txn)

    if data.type in ("buy", "transfer_in"):
        new_qty = holding.quantity + data.quantity
        if new_qty > 0:
            # Weighted-average cost basis across the old position and new lot.
            holding.avg_cost_basis = (
                (holding.quantity * holding.avg_cost_basis + data.quantity * data.price)
                / new_qty
            )
        holding.quantity = new_qty
    elif data.type in ("sell", "transfer_out"):
        holding.quantity = max(Decimal("0"), holding.quantity - data.quantity)
    elif data.type == "split":
        # BUG FIX: guard on the split ratio (data.quantity) itself. The old
        # code checked `data.price > 0`, which is unrelated to a split and
        # allowed a ZeroDivisionError when the ratio was zero.
        if data.quantity > 0:
            holding.quantity = holding.quantity * data.quantity
            holding.avg_cost_basis = holding.avg_cost_basis / data.quantity
    # dividend and fee types intentionally leave quantity/cost basis untouched

    holding.updated_at = datetime.now(timezone.utc)
    await db.flush()
    await db.refresh(txn)
    return txn
|
||||
|
||||
|
||||
async def list_investment_transactions(
    db: AsyncSession, user_id: uuid.UUID, holding_id: uuid.UUID
) -> list[InvestmentTransaction]:
    """All transactions for one holding owned by the user, newest first."""
    stmt = (
        select(InvestmentTransaction)
        .where(
            InvestmentTransaction.user_id == user_id,
            InvestmentTransaction.holding_id == holding_id,
        )
        .order_by(InvestmentTransaction.date.desc())
    )
    result = await db.execute(stmt)
    return list(result.scalars().all())
|
||||
|
||||
|
||||
async def get_performance(db: AsyncSession, user_id: uuid.UUID) -> PerformanceMetrics:
    """Portfolio-level performance summary (simple total return, GBP)."""
    summary = await get_portfolio(db, user_id)
    return PerformanceMetrics(
        twrr=None,  # true TWRR needs a snapshot history — not tracked yet
        total_return=summary.total_gain,
        total_return_pct=summary.total_gain_pct,
        currency="GBP",
    )
|
||||
|
||||
|
||||
async def get_or_create_asset(
    db: AsyncSession, symbol: str, name: str, asset_type: str,
    currency: str, data_source: str, data_source_id: str | None,
    exchange: str | None = None,
) -> Asset:
    """Return the asset for (symbol, data_source), creating it if absent.

    Symbols are stored upper-cased so lookups are case-insensitive.
    """
    ticker = symbol.upper()
    found = await db.execute(
        select(Asset).where(Asset.symbol == ticker, Asset.data_source == data_source)
    )
    asset = found.scalar_one_or_none()
    if asset is not None:
        return asset

    now = datetime.now(timezone.utc)
    asset = Asset(
        id=uuid.uuid4(),
        symbol=ticker,
        name=name,
        type=asset_type,
        currency=currency,
        exchange=exchange,
        data_source=data_source,
        data_source_id=data_source_id,
        is_active=True,
        created_at=now,
        updated_at=now,
    )
    db.add(asset)
    await db.flush()
    await db.refresh(asset)
    return asset
|
||||
|
||||
|
||||
async def update_asset_price(
    db: AsyncSession, asset: Asset, price: Decimal, change_24h: Decimal | None
) -> None:
    """Store the latest quote on the asset row.

    A single ``now`` is captured so ``last_price_at`` and ``updated_at`` are
    exactly equal (previously two separate ``datetime.now`` calls could
    produce microsecond-skewed timestamps).
    """
    now = datetime.now(timezone.utc)
    asset.last_price = price
    asset.price_change_24h = change_24h
    asset.last_price_at = now
    asset.updated_at = now
    await db.flush()
|
||||
|
||||
|
||||
async def upsert_price_history(db: AsyncSession, asset_id: uuid.UUID, rows: list[dict]) -> int:
    """Insert or update daily OHLCV rows for an asset; returns rows processed.

    Optional fields (open/high/low/volume) are read with ``.get`` on BOTH
    paths — the update path previously used ``row["open"]`` etc. and raised
    KeyError on partial rows, while the insert path tolerated them.
    ``date`` and ``close`` remain required.
    """
    count = 0
    for row in rows:
        result = await db.execute(
            select(AssetPrice).where(AssetPrice.asset_id == asset_id, AssetPrice.date == row["date"])
        )
        existing = result.scalar_one_or_none()
        if existing:
            existing.open = row.get("open")
            existing.high = row.get("high")
            existing.low = row.get("low")
            existing.close = row["close"]
            existing.volume = row.get("volume")
        else:
            db.add(AssetPrice(
                id=uuid.uuid4(),
                asset_id=asset_id,
                date=row["date"],
                open=row.get("open"),
                high=row.get("high"),
                low=row.get("low"),
                close=row["close"],
                volume=row.get("volume"),
                created_at=datetime.now(timezone.utc),
            ))
        count += 1
    await db.flush()
    return count
|
||||
|
||||
|
||||
async def get_price_history(
    db: AsyncSession, asset_id: uuid.UUID, days: int = 365
) -> list[AssetPrice]:
    """Daily price rows for an asset over the trailing *days*, oldest first."""
    from datetime import timedelta

    earliest = date.today() - timedelta(days=days)
    rows = await db.execute(
        select(AssetPrice)
        .where(AssetPrice.asset_id == asset_id, AssetPrice.date >= earliest)
        .order_by(AssetPrice.date.asc())
    )
    return list(rows.scalars().all())
|
||||
|
||||
|
||||
async def search_assets(db: AsyncSession, query: str) -> list[Asset]:
    """Case-insensitive substring search over asset symbol and name (max 10)."""
    from sqlalchemy import func, or_

    needle = query.strip().upper()
    rows = await db.execute(
        select(Asset).where(
            or_(
                func.upper(Asset.symbol).contains(needle),
                func.upper(Asset.name).contains(needle),
            )
        ).limit(10)
    )
    return list(rows.scalars().all())
|
||||
116
backend/app/services/price_feed_service.py
Normal file
116
backend/app/services/price_feed_service.py
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
"""
|
||||
Live price fetching: yfinance for stocks/ETFs, CoinGecko for crypto.
|
||||
Falls back gracefully — never raises, always returns None on failure.
|
||||
"""
|
||||
import asyncio
|
||||
from datetime import date, datetime, timezone, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
async def _run_sync(fn, *args):
|
||||
loop = asyncio.get_event_loop()
|
||||
return await loop.run_in_executor(None, fn, *args)
|
||||
|
||||
|
||||
def _fetch_yahoo(symbol: str) -> dict | None:
    """Fetch the latest quote for *symbol* via yfinance; None on any failure."""
    try:
        import yfinance as yf

        fast = yf.Ticker(symbol).fast_info
        last = getattr(fast, "last_price", None) or getattr(fast, "regularMarketPrice", None)
        previous = getattr(fast, "previous_close", None)
        if last is None:
            return None
        pct_change = None
        if previous and previous > 0:
            pct_change = round((last - previous) / previous * 100, 4)
        return {
            "price": Decimal(str(round(last, 8))),
            "change_24h": None if pct_change is None else Decimal(str(pct_change)),
            "currency": (getattr(fast, "currency", None) or "USD").upper(),
            "name": getattr(fast, "long_name", None) or symbol,
            "exchange": getattr(fast, "exchange", None),
        }
    except Exception as exc:
        logger.warning("yahoo_fetch_failed", symbol=symbol, error=str(exc))
        return None
|
||||
|
||||
|
||||
def _fetch_coingecko(coin_id: str) -> dict | None:
    """Fetch a crypto spot price from CoinGecko; None on any failure.

    Prefers the GBP quote; falls back to USD and reports the MATCHING
    currency code and 24h-change key. (Previously the USD fallback price
    was still labelled "GBP" and the change always read the GBP key.)
    """
    try:
        import requests

        r = requests.get(
            "https://api.coingecko.com/api/v3/simple/price",
            params={"ids": coin_id, "vs_currencies": "usd,gbp", "include_24hr_change": "true"},
            timeout=10,
        )
        r.raise_for_status()
        data = r.json().get(coin_id, {})
        if not data:
            return None
        if "gbp" in data:
            price, currency, change_key = data["gbp"], "GBP", "gbp_24h_change"
        else:
            price, currency, change_key = data.get("usd", 0), "USD", "usd_24h_change"
        return {
            "price": Decimal(str(price)),
            "change_24h": Decimal(str(round(data.get(change_key, 0), 4))),
            "currency": currency,
            "name": coin_id,
        }
    except Exception as exc:
        logger.warning("coingecko_fetch_failed", coin_id=coin_id, error=str(exc))
        return None
|
||||
|
||||
|
||||
def _fetch_yahoo_history(symbol: str, days: int = 365) -> list[dict]:
    """Daily OHLCV history for *symbol* via yfinance; empty list on failure."""
    try:
        import yfinance as yf

        frame = yf.Ticker(symbol).history(period=f"{days}d", interval="1d")
        out: list[dict] = []
        for stamp, bar in frame.iterrows():
            out.append({
                "date": stamp.date(),
                "open": Decimal(str(round(float(bar["Open"]), 8))),
                "high": Decimal(str(round(float(bar["High"]), 8))),
                "low": Decimal(str(round(float(bar["Low"]), 8))),
                "close": Decimal(str(round(float(bar["Close"]), 8))),
                # Volume may be missing or NaN-ish; coerce to a whole number.
                "volume": Decimal(str(int(bar.get("Volume", 0) or 0))),
            })
        return out
    except Exception as exc:
        logger.warning("yahoo_history_failed", symbol=symbol, error=str(exc))
        return []
|
||||
|
||||
|
||||
async def fetch_price(symbol: str, data_source: str, data_source_id: str | None) -> dict | None:
    """Dispatch a live-quote fetch to the right provider off the event loop."""
    if data_source == "coingecko":
        coin = data_source_id or symbol.lower()
        return await _run_sync(_fetch_coingecko, coin)
    return await _run_sync(_fetch_yahoo, symbol)
|
||||
|
||||
|
||||
async def fetch_history(symbol: str, days: int = 365) -> list[dict]:
    """Fetch daily OHLCV history in the executor, off the event loop."""
    return await _run_sync(_fetch_yahoo_history, symbol, days)
|
||||
|
||||
|
||||
def search_yahoo(query: str) -> list[dict]:
    """Look up *query* as a ticker on Yahoo; one-element list if it resolves.

    Always returns a list (possibly empty). Failures are now logged like the
    other fetchers in this module instead of being silently swallowed by a
    bare ``pass``.
    """
    try:
        import yfinance as yf

        info = yf.Ticker(query).fast_info
        price = getattr(info, "last_price", None)
        if price:
            return [{
                "symbol": query.upper(),
                "name": getattr(info, "long_name", None) or query.upper(),
                "type": "stock",
                "currency": (getattr(info, "currency", None) or "USD").upper(),
                "exchange": getattr(info, "exchange", None),
                "data_source": "yahoo_finance",
                "data_source_id": None,
            }]
    except Exception as exc:
        # Keep the best-effort contract but make failures diagnosable.
        logger.warning("yahoo_search_failed", query=query, error=str(exc))
    return []
|
||||
356
backend/app/services/report_service.py
Normal file
356
backend/app/services/report_service.py
Normal file
|
|
@ -0,0 +1,356 @@
|
|||
import uuid
|
||||
from datetime import date, datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from sqlalchemy import and_, func, select, text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db.models.account import Account
|
||||
from app.db.models.budget import Budget
|
||||
from app.db.models.category import Category
|
||||
from app.db.models.net_worth_snapshot import NetWorthSnapshot
|
||||
from app.db.models.transaction import Transaction
|
||||
from app.schemas.report import (
|
||||
BudgetVsActualItem,
|
||||
BudgetVsActualReport,
|
||||
CashFlowPoint,
|
||||
CashFlowReport,
|
||||
CategoryBreakdownItem,
|
||||
CategoryBreakdownReport,
|
||||
IncomeExpensePoint,
|
||||
IncomeExpenseReport,
|
||||
NetWorthPoint,
|
||||
NetWorthReport,
|
||||
SpendingTrendPoint,
|
||||
SpendingTrendsReport,
|
||||
)
|
||||
|
||||
LIABILITY_TYPES = {"credit_card", "loan", "mortgage"}
|
||||
|
||||
|
||||
async def _current_net_worth(db: AsyncSession, user_id: uuid.UUID) -> tuple[Decimal, Decimal]:
    """Sum active, net-worth-included account balances into (assets, liabilities)."""
    result = await db.execute(
        select(Account).where(
            Account.user_id == user_id,
            Account.include_in_net_worth == True,  # noqa: E712
            Account.is_active == True,  # noqa: E712
            Account.deleted_at.is_(None),
        )
    )
    assets = Decimal("0")
    liabilities = Decimal("0")
    for account in result.scalars().all():
        balance = account.current_balance or Decimal("0")
        # Credit cards, loans and mortgages count on the liability side.
        if account.type in LIABILITY_TYPES:
            liabilities += balance
        else:
            assets += balance
    return assets, liabilities
|
||||
|
||||
|
||||
async def get_net_worth_report(
    db: AsyncSession, user_id: uuid.UUID, base_currency: str, months: int = 12
) -> NetWorthReport:
    """Historical net-worth series plus current totals and 30-day change.

    BUG FIX: ``change_30d`` is now measured against the most recent snapshot
    at least 30 days old (falling back to the oldest available point).
    Previously it compared against the first point of the whole *months*-long
    window, so "change_30d" was really "change over *months* months".
    """
    cutoff = date.today() - relativedelta(months=months)
    result = await db.execute(
        select(NetWorthSnapshot)
        .where(NetWorthSnapshot.user_id == user_id, NetWorthSnapshot.date >= cutoff)
        .order_by(NetWorthSnapshot.date.asc())
    )
    snapshots = result.scalars().all()

    points = [
        NetWorthPoint(
            date=s.date,
            total_assets=s.total_assets,
            total_liabilities=s.total_liabilities,
            net_worth=s.net_worth,
            base_currency=s.base_currency,
        )
        for s in snapshots
    ]

    assets, liabilities = await _current_net_worth(db, user_id)
    current_nw = assets - liabilities

    change_30d = Decimal("0")
    change_30d_pct = Decimal("0")
    if points:
        threshold = date.today() - relativedelta(days=30)
        # Latest snapshot dated at least 30 days ago; else the oldest we have.
        baseline = next((p for p in reversed(points) if p.date <= threshold), points[0])
        change_30d = current_nw - baseline.net_worth
        if baseline.net_worth != 0:
            change_30d_pct = (change_30d / abs(baseline.net_worth) * 100).quantize(Decimal("0.01"))

    return NetWorthReport(
        points=points,
        current_net_worth=current_nw,
        change_30d=change_30d,
        change_30d_pct=change_30d_pct,
        base_currency=base_currency,
    )
|
||||
|
||||
|
||||
async def get_income_expense_report(
    db: AsyncSession, user_id: uuid.UUID, months: int = 12
) -> IncomeExpenseReport:
    """Monthly income vs expense totals for the trailing *months* calendar months.

    Voided and soft-deleted transactions are excluded. Uses raw SQL with
    TO_CHAR month bucketing, so this query is PostgreSQL-specific.
    """
    # First day of the earliest month in the window (months - 1 back from this month).
    cutoff = (date.today().replace(day=1) - relativedelta(months=months - 1))
    result = await db.execute(
        text("""
            SELECT
                TO_CHAR(date, 'YYYY-MM') AS month,
                SUM(CASE WHEN type = 'income' THEN amount ELSE 0 END) AS income,
                SUM(CASE WHEN type = 'expense' THEN ABS(amount) ELSE 0 END) AS expenses
            FROM transactions
            WHERE user_id = CAST(:uid AS uuid)
              AND status != 'void'
              AND deleted_at IS NULL
              AND date >= :cutoff
            GROUP BY TO_CHAR(date, 'YYYY-MM')
            ORDER BY month ASC
        """).bindparams(uid=str(user_id), cutoff=cutoff)
    )
    rows = result.fetchall()

    points = []
    total_income = Decimal("0")
    total_expenses = Decimal("0")
    for row in rows:
        # SUM can come back as None/numeric driver type; go via str for Decimal.
        inc = Decimal(str(row.income or 0))
        exp = Decimal(str(row.expenses or 0))
        points.append(IncomeExpensePoint(month=row.month, income=inc, expenses=exp, net=inc - exp))
        total_income += inc
        total_expenses += exp

    # Average only over months that had activity; guard against empty results.
    n = len(points) or 1
    return IncomeExpenseReport(
        points=points,
        total_income=total_income,
        total_expenses=total_expenses,
        avg_monthly_income=(total_income / n).quantize(Decimal("0.01")),
        avg_monthly_expenses=(total_expenses / n).quantize(Decimal("0.01")),
        currency="GBP",
    )
|
||||
|
||||
|
||||
async def get_cash_flow_report(
    db: AsyncSession, user_id: uuid.UUID, date_from: date, date_to: date
) -> CashFlowReport:
    """Daily inflow/outflow with a running balance over [date_from, date_to].

    Only income/expense transactions are considered (transfers excluded);
    voided and soft-deleted rows are filtered out. PostgreSQL-specific SQL.
    """
    result = await db.execute(
        text("""
            SELECT
                date,
                SUM(CASE WHEN amount > 0 THEN amount ELSE 0 END) AS inflow,
                SUM(CASE WHEN amount < 0 THEN ABS(amount) ELSE 0 END) AS outflow
            FROM transactions
            WHERE user_id = CAST(:uid AS uuid)
              AND status != 'void'
              AND deleted_at IS NULL
              AND date BETWEEN :df AND :dt
              AND type IN ('income', 'expense')
            GROUP BY date
            ORDER BY date ASC
        """).bindparams(uid=str(user_id), df=date_from, dt=date_to)
    )
    rows = result.fetchall()

    points = []
    # Running balance starts at zero at date_from (not the account balance).
    running = Decimal("0")
    total_inflow = Decimal("0")
    total_outflow = Decimal("0")
    for row in rows:
        inflow = Decimal(str(row.inflow or 0))
        outflow = Decimal(str(row.outflow or 0))
        running += inflow - outflow
        total_inflow += inflow
        total_outflow += outflow
        points.append(
            CashFlowPoint(
                date=row.date,
                inflow=inflow,
                outflow=outflow,
                net=inflow - outflow,
                running_balance=running,
            )
        )

    return CashFlowReport(
        points=points,
        total_inflow=total_inflow,
        total_outflow=total_outflow,
        currency="GBP",
    )
|
||||
|
||||
|
||||
async def get_category_breakdown(
    db: AsyncSession,
    user_id: uuid.UUID,
    date_from: date,
    date_to: date,
    txn_type: str = "expense",
) -> CategoryBreakdownReport:
    """Per-category totals and percentages for *txn_type* over a date range.

    Category names are resolved in ONE batched query (previously one query
    per aggregated row — an N+1 pattern). Rows without a category, or whose
    category row is missing, are labelled "Uncategorised".
    """
    result = await db.execute(
        select(
            Transaction.category_id,
            func.sum(func.abs(Transaction.amount)).label("total"),
            func.count(Transaction.id).label("cnt"),
        )
        .where(
            Transaction.user_id == user_id,
            Transaction.type == txn_type,
            Transaction.status != "void",
            Transaction.date >= date_from,
            Transaction.date <= date_to,
            Transaction.deleted_at.is_(None),
        )
        .group_by(Transaction.category_id)
        .order_by(func.sum(func.abs(Transaction.amount)).desc())
    )
    rows = result.fetchall()

    # Batch-load category names in a single round trip.
    cat_ids = [row.category_id for row in rows if row.category_id]
    names: dict = {}
    if cat_ids:
        cat_result = await db.execute(select(Category).where(Category.id.in_(cat_ids)))
        names = {c.id: c.name for c in cat_result.scalars().all()}

    grand_total = Decimal("0")
    raw = []
    for row in rows:
        amt = Decimal(str(row.total or 0))
        grand_total += amt
        if row.category_id:
            cat_name = names.get(row.category_id, "Uncategorised")
        else:
            cat_name = "Uncategorised"
        raw.append((row.category_id, cat_name, amt, row.cnt))

    items = [
        CategoryBreakdownItem(
            category_id=str(cat_id) if cat_id else None,
            category_name=name,
            amount=amt,
            percent=(amt / grand_total * 100).quantize(Decimal("0.01")) if grand_total > 0 else Decimal("0"),
            transaction_count=cnt,
        )
        for cat_id, name, amt, cnt in raw
    ]

    return CategoryBreakdownReport(
        items=items,
        total=grand_total,
        currency="GBP",
        date_from=date_from,
        date_to=date_to,
    )
|
||||
|
||||
|
||||
async def get_budget_vs_actual(db: AsyncSession, user_id: uuid.UUID) -> BudgetVsActualReport:
    """Compare each active budget against actual spending in its current period.

    For every budget, the spend window comes from budget_service._period_bounds
    (NOTE(review): this reaches into a private helper of another module —
    consider exporting it). Only non-void, non-deleted expense transactions in
    the budget's category count towards "actual".
    """
    from app.services.budget_service import list_budgets, _period_bounds
    today = date.today()
    budgets = await list_budgets(db, user_id, active_only=True)

    items = []
    total_budgeted = Decimal("0")
    total_actual = Decimal("0")

    for budget in budgets:
        # Current period window (e.g. this month/week) for this budget's cadence.
        period_start, period_end = _period_bounds(budget.period, today)
        cat_result = await db.execute(select(Category).where(Category.id == budget.category_id))
        category = cat_result.scalar_one_or_none()
        cat_name = category.name if category else "Unknown"

        # Sum of absolute expense amounts in the category over the period.
        spent_result = await db.execute(
            select(func.coalesce(func.sum(func.abs(Transaction.amount)), Decimal("0")))
            .where(
                and_(
                    Transaction.user_id == user_id,
                    Transaction.category_id == budget.category_id,
                    Transaction.type == "expense",
                    Transaction.status != "void",
                    Transaction.date >= period_start,
                    Transaction.date <= period_end,
                    Transaction.deleted_at.is_(None),
                )
            )
        )
        actual = Decimal(str(spent_result.scalar() or 0))
        # Positive variance = under budget; negative = overspent.
        variance = budget.amount - actual
        pct = (actual / budget.amount * 100).quantize(Decimal("0.01")) if budget.amount > 0 else Decimal("0")

        items.append(
            BudgetVsActualItem(
                budget_id=str(budget.id),
                budget_name=budget.name,
                category_name=cat_name,
                budgeted=budget.amount,
                actual=actual,
                variance=variance,
                percent_used=pct,
            )
        )
        total_budgeted += budget.amount
        total_actual += actual

    return BudgetVsActualReport(
        items=items,
        total_budgeted=total_budgeted,
        total_actual=total_actual,
        currency="GBP",
    )
|
||||
|
||||
|
||||
async def get_spending_trends(
    db: AsyncSession, user_id: uuid.UUID, months: int = 6
) -> SpendingTrendsReport:
    """Monthly expense totals per category over the trailing *months* months.

    Returns one point per (month, category) pair plus the distinct category
    list in first-seen order. PostgreSQL-specific SQL (TO_CHAR bucketing).
    """
    # First day of the earliest month in the window.
    cutoff = (date.today().replace(day=1) - relativedelta(months=months - 1))
    result = await db.execute(
        text("""
            SELECT
                TO_CHAR(t.date, 'YYYY-MM') AS month,
                COALESCE(c.name, 'Uncategorised') AS category_name,
                SUM(ABS(t.amount)) AS amount
            FROM transactions t
            LEFT JOIN categories c ON c.id = t.category_id
            WHERE t.user_id = CAST(:uid AS uuid)
              AND t.type = 'expense'
              AND t.status != 'void'
              AND t.deleted_at IS NULL
              AND t.date >= :cutoff
            GROUP BY TO_CHAR(t.date, 'YYYY-MM'), c.name
            ORDER BY month ASC, amount DESC
        """).bindparams(uid=str(user_id), cutoff=cutoff)
    )
    rows = result.fetchall()

    points = [
        SpendingTrendPoint(month=row.month, category_name=row.category_name, amount=Decimal(str(row.amount or 0)))
        for row in rows
    ]
    # dict.fromkeys preserves insertion order while de-duplicating names.
    categories = list(dict.fromkeys(p.category_name for p in points))

    return SpendingTrendsReport(points=points, categories=categories, currency="GBP")
|
||||
|
||||
|
||||
async def take_net_worth_snapshot(db: AsyncSession, user_id: uuid.UUID, base_currency: str) -> None:
    """Persist today's net-worth snapshot; no-op if one already exists (idempotent)."""
    today = date.today()
    dup_check = await db.execute(
        select(NetWorthSnapshot).where(
            NetWorthSnapshot.user_id == user_id,
            NetWorthSnapshot.date == today,
        )
    )
    if dup_check.scalar_one_or_none() is not None:
        return

    assets, liabilities = await _current_net_worth(db, user_id)
    db.add(NetWorthSnapshot(
        id=uuid.uuid4(),
        user_id=user_id,
        date=today,
        total_assets=assets,
        total_liabilities=liabilities,
        net_worth=assets - liabilities,
        base_currency=base_currency,
        breakdown={},
        created_at=datetime.now(timezone.utc),
    ))
    await db.flush()
||||
308
backend/app/services/transaction_service.py
Normal file
308
backend/app/services/transaction_service.py
Normal file
|
|
@ -0,0 +1,308 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
from sqlalchemy import and_, or_, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.core.security import decrypt_field, encrypt_field
|
||||
from app.db.models.transaction import Transaction
|
||||
from app.schemas.transaction import TransactionCreate, TransactionFilter, TransactionUpdate
|
||||
from app.services.account_service import recalculate_balance
|
||||
|
||||
|
||||
class TransactionError(Exception):
    """Domain error raised by transaction operations.

    Carries an HTTP-style status code so the API layer can map it directly
    to a response.
    """

    def __init__(self, detail: str, status_code: int = 400):
        # Pass the message to Exception so str(exc) and exc.args are useful
        # (previously the base initialiser was never called).
        super().__init__(detail)
        self.detail = detail
        self.status_code = status_code
|
||||
|
||||
|
||||
def _enc(v: str | None) -> bytes | None:
    """Encrypt an optional plaintext field.

    Checks ``is None`` (matching account_service._encrypt) so an explicit
    empty string is still encrypted rather than silently stored as NULL —
    the previous truthiness check collapsed "" into None.
    """
    if v is None:
        return None
    return encrypt_field(v)
|
||||
|
||||
|
||||
def _dec(v: bytes | None) -> str | None:
    """Decrypt an optional ciphertext field; empty/None passes through as None."""
    if not v:
        return None
    return decrypt_field(v)
|
||||
|
||||
|
||||
def _to_response(t: Transaction) -> dict:
    """Serialize a Transaction ORM row into a plain API dict.

    Encrypted columns are decrypted here; a missing description is returned
    as ``""`` and missing tags as ``[]``.
    """
    description = _dec(t.description_enc) or ""
    merchant = _dec(t.merchant_enc)
    notes = _dec(t.notes_enc)
    return {
        "id": t.id,
        "account_id": t.account_id,
        "transfer_account_id": t.transfer_account_id,
        "category_id": t.category_id,
        "type": t.type,
        "status": t.status,
        "amount": t.amount,
        "amount_base": t.amount_base,
        "currency": t.currency,
        "base_currency": t.base_currency,
        "exchange_rate": t.exchange_rate,
        "date": t.date,
        "description": description,
        "merchant": merchant,
        "notes": notes,
        "tags": t.tags or [],
        "is_recurring": t.is_recurring,
        "created_at": t.created_at,
        "updated_at": t.updated_at,
    }
|
||||
|
||||
|
||||
async def create_transaction(
    db: AsyncSession,
    user_id: uuid.UUID,
    data: TransactionCreate,
    base_currency: str,
) -> dict:
    """Create a transaction and refresh the affected account balance(s).

    For transfers, a mirror entry with the opposite sign is created on the
    destination account and its balance is recalculated too. Returns the
    serialized primary transaction. ``amount_base`` simply mirrors
    ``amount`` for now — FX conversion is deferred to Phase 3.
    """
    now = datetime.now(timezone.utc)
    amount = data.amount

    # Column values identical between the primary entry and a transfer
    # mirror (encrypted fields stay per-entry so each gets its own nonce).
    shared = dict(
        user_id=user_id,
        category_id=data.category_id,
        status=data.status,
        currency=data.currency,
        base_currency=base_currency,
        exchange_rate=Decimal("1") if data.currency == base_currency else None,
        date=data.date,
        tags=data.tags,
        created_at=now,
        updated_at=now,
    )

    txn = Transaction(
        account_id=data.account_id,
        transfer_account_id=data.transfer_account_id,
        type=data.type,
        amount=amount,
        amount_base=amount,  # Phase 3: convert via FX rate
        description_enc=encrypt_field(data.description),
        merchant_enc=_enc(data.merchant),
        notes_enc=_enc(data.notes),
        is_recurring=data.is_recurring,
        recurring_rule=data.recurring_rule,
        **shared,
    )
    db.add(txn)
    await db.flush()

    if data.type == "transfer" and data.transfer_account_id:
        # Mirror entry on the destination account with the opposite sign.
        mirror = Transaction(
            account_id=data.transfer_account_id,
            transfer_account_id=data.account_id,
            type="transfer",
            amount=-amount,
            amount_base=-amount,
            description_enc=encrypt_field(data.description),
            merchant_enc=_enc(data.merchant),
            notes_enc=_enc(data.notes),
            is_recurring=False,
            **shared,
        )
        db.add(mirror)
        await db.flush()
        await recalculate_balance(db, data.transfer_account_id)

    await recalculate_balance(db, data.account_id)
    return _to_response(txn)
|
||||
|
||||
|
||||
async def list_transactions(
    db: AsyncSession,
    user_id: uuid.UUID,
    filters: TransactionFilter,
) -> dict:
    """Return one page of the user's transactions, honouring all filters.

    Structured filters (account, category, type, status, date range, amount
    range) are applied in SQL. Free-text search can only run after
    decryption (description/merchant are encrypted at rest), so when
    ``filters.search`` is set the whole filtered set is loaded, searched in
    Python, and then paginated — keeping ``total`` and ``pages`` consistent
    with what the caller actually sees. Phase 3 will replace this with FTS.

    Returns a dict with keys: items, total, page, page_size, pages.
    """
    conditions = [
        Transaction.user_id == user_id,
        Transaction.deleted_at.is_(None),
    ]
    if filters.account_id:
        conditions.append(Transaction.account_id == filters.account_id)
    if filters.category_id:
        conditions.append(Transaction.category_id == filters.category_id)
    if filters.type:
        conditions.append(Transaction.type == filters.type)
    if filters.status:
        conditions.append(Transaction.status == filters.status)
    if filters.date_from:
        conditions.append(Transaction.date >= filters.date_from)
    if filters.date_to:
        conditions.append(Transaction.date <= filters.date_to)
    if filters.min_amount is not None:
        conditions.append(Transaction.amount >= filters.min_amount)
    if filters.max_amount is not None:
        conditions.append(Transaction.amount <= filters.max_amount)

    query = (
        select(Transaction)
        .where(and_(*conditions))
        .order_by(Transaction.date.desc(), Transaction.created_at.desc())
    )
    offset = (filters.page - 1) * filters.page_size

    if filters.search:
        # BUG FIX: search previously ran AFTER SQL pagination, so matches on
        # later pages were silently dropped and `total` counted unsearched
        # rows. Load the filtered set, search, then paginate in Python.
        result = await db.execute(query)
        term = filters.search.lower()
        matched = []
        for t in result.scalars():
            item = _to_response(t)
            if term in item["description"].lower() or (
                item["merchant"] and term in item["merchant"].lower()
            ):
                matched.append(item)
        total = len(matched)
        items = matched[offset : offset + filters.page_size]
    else:
        from sqlalchemy import func

        # Count directly against the conditions — no need to wrap the
        # ordered query in a subquery just to count rows.
        total = await db.scalar(
            select(func.count()).select_from(Transaction).where(and_(*conditions))
        )
        result = await db.execute(query.offset(offset).limit(filters.page_size))
        items = [_to_response(t) for t in result.scalars()]

    return {
        "items": items,
        "total": total,
        "page": filters.page,
        "page_size": filters.page_size,
        "pages": max(1, -(-total // filters.page_size)),  # ceiling division
    }
|
||||
|
||||
|
||||
async def get_transaction(db: AsyncSession, txn_id: uuid.UUID, user_id: uuid.UUID) -> Transaction:
    """Fetch a single non-deleted transaction owned by *user_id*.

    Raises ``TransactionError`` with status 404 when no matching row exists.
    """
    row = await db.scalar(
        select(Transaction).where(
            Transaction.id == txn_id,
            Transaction.user_id == user_id,
            Transaction.deleted_at.is_(None),
        )
    )
    if row is None:
        raise TransactionError("Transaction not found", status_code=404)
    return row
|
||||
|
||||
|
||||
async def update_transaction(
    db: AsyncSession,
    txn_id: uuid.UUID,
    user_id: uuid.UUID,
    data: TransactionUpdate,
    base_currency: str,
) -> dict:
    """Apply a partial update to a transaction and refresh its balance.

    Only fields explicitly provided (non-None) on *data* are touched; the
    owning account's balance is recalculated afterwards.
    NOTE(review): a transfer's mirror entry is not updated here — confirm
    whether the caller keeps the counterpart in sync.
    """
    txn = await get_transaction(db, txn_id, user_id)
    owning_account = txn.account_id

    # Plain columns copied over verbatim when supplied.
    for attr in ("category_id", "status", "date", "tags"):
        value = getattr(data, attr)
        if value is not None:
            setattr(txn, attr, value)

    if data.amount is not None:
        txn.amount = data.amount
        txn.amount_base = data.amount  # FX conversion deferred (Phase 3)

    # Encrypted columns are re-encrypted from the plaintext payload.
    if data.description is not None:
        txn.description_enc = encrypt_field(data.description)
    if data.merchant is not None:
        txn.merchant_enc = _enc(data.merchant)
    if data.notes is not None:
        txn.notes_enc = _enc(data.notes)

    txn.updated_at = datetime.now(timezone.utc)
    await db.flush()
    await recalculate_balance(db, owning_account)
    return _to_response(txn)
|
||||
|
||||
|
||||
async def delete_transaction(db: AsyncSession, txn_id: uuid.UUID, user_id: uuid.UUID) -> None:
    """Soft-delete a transaction and refresh its account balance.

    The row is kept (``deleted_at`` stamped) so history and audit stay
    intact; list/get queries already exclude soft-deleted rows.
    NOTE(review): a transfer's mirror entry is not soft-deleted here —
    confirm whether the caller handles the counterpart.
    """
    txn = await get_transaction(db, txn_id, user_id)
    account_id = txn.account_id
    # Fix: use one clock read so deleted_at and updated_at are the exact
    # same instant (two now() calls differed by microseconds).
    stamp = datetime.now(timezone.utc)
    txn.deleted_at = stamp
    txn.updated_at = stamp
    await db.flush()
    await recalculate_balance(db, account_id)
|
||||
|
||||
|
||||
async def import_csv(
    db: AsyncSession,
    user_id: uuid.UUID,
    account_id: uuid.UUID,
    rows: list[dict],
    base_currency: str,
) -> dict:
    """Import transactions from parsed CSV rows.

    Each row must have: date, description, amount.
    Optional: merchant, notes, currency.
    Rows are de-duplicated by a SHA-256 hash of ``date|description|amount``,
    so re-importing the same file is idempotent; rows with an unparsable
    amount or date are skipped rather than aborting the import.

    Returns ``{"imported": n, "skipped": n}``.
    """
    # Fix: this import used to run inside the per-row loop (along with an
    # unused `from datetime import date as date_type`); hoisted once here.
    import dateutil.parser  # third-party: python-dateutil

    imported = 0
    skipped = 0
    now = datetime.now(timezone.utc)

    for row in rows:
        # Dedup hash built from the raw (pre-parse) CSV values.
        raw = f"{row['date']}|{row['description']}|{row['amount']}"
        import_hash = hashlib.sha256(raw.encode()).hexdigest()

        # Skip rows already imported for this user (any account).
        exists = await db.scalar(
            select(Transaction.id).where(
                Transaction.user_id == user_id,
                Transaction.import_hash == import_hash,
            )
        )
        if exists:
            skipped += 1
            continue

        try:
            amount = Decimal(str(row["amount"]))
            txn_date = dateutil.parser.parse(str(row["date"])).date()
        except Exception:
            # Malformed amount/date: skip the row, keep importing the rest.
            skipped += 1
            continue

        # Sign convention: positive = income, zero/negative = expense.
        txn_type = "income" if amount > 0 else "expense"

        db.add(
            Transaction(
                user_id=user_id,
                account_id=account_id,
                type=txn_type,
                status="cleared",
                amount=amount,
                amount_base=amount,  # FX conversion deferred (Phase 3)
                currency=row.get("currency", base_currency),
                base_currency=base_currency,
                exchange_rate=Decimal("1"),
                date=txn_date,
                description_enc=encrypt_field(str(row.get("description", ""))),
                merchant_enc=_enc(row.get("merchant")),
                notes_enc=_enc(row.get("notes")),
                tags=[],
                is_recurring=False,
                import_hash=import_hash,
                created_at=now,
                updated_at=now,
            )
        )
        imported += 1

    await db.flush()
    if imported > 0:
        await recalculate_balance(db, account_id)

    return {"imported": imported, "skipped": skipped}
|
||||
Loading…
Add table
Add a link
Reference in a new issue