Add public demo mode with auto-seeding, hourly reset, and Portainer deploy guide
- DEMO_MODE=true env flag: disables password changes and backup endpoints (403), exposes GET /demo/status for frontend detection - Auto-seed on first startup: creates demo user (demo@mymidas.app / demo123) with 6 months of transactions, investments, budgets, subscriptions, and tax payslips; takes a pg_dump snapshot immediately after for hourly restore - Hourly reset: resetter Alpine container with cron restores DB from snapshot and purges uploaded attachments every hour on the hour - Frontend: amber demo banner on all pages, login page shows credentials, password change disabled with notice, backups section replaced with notice - demo/ directory: self-contained docker-compose.yml (ports 4001/8091), .env.example, reset.sh, and step-by-step Portainer DEPLOY.md Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
afb5e99bb2
commit
9897d03d91
17 changed files with 975 additions and 2 deletions
|
|
@ -8,9 +8,12 @@ from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
from fastapi.responses import FileResponse
|
from fastapi.responses import FileResponse
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from app.config import get_settings
|
||||||
from app.dependencies import get_current_user
|
from app.dependencies import get_current_user
|
||||||
from app.db.models.user import User
|
from app.db.models.user import User
|
||||||
|
|
||||||
|
_DEMO_DISABLED = "Backups are disabled in demo mode"
|
||||||
|
|
||||||
router = APIRouter(prefix="/admin", tags=["admin"])
|
router = APIRouter(prefix="/admin", tags=["admin"])
|
||||||
|
|
||||||
BACKUP_DIR = Path(os.environ.get("BACKUP_DIR", "/app/backups"))
|
BACKUP_DIR = Path(os.environ.get("BACKUP_DIR", "/app/backups"))
|
||||||
|
|
@ -44,11 +47,15 @@ def _list_backup_files() -> list[BackupFile]:
|
||||||
|
|
||||||
@router.get("/backups", response_model=list[BackupFile])
|
@router.get("/backups", response_model=list[BackupFile])
|
||||||
async def list_backups(current_user: User = Depends(get_current_user)):
|
async def list_backups(current_user: User = Depends(get_current_user)):
|
||||||
|
if get_settings().is_demo:
|
||||||
|
raise HTTPException(status_code=403, detail=_DEMO_DISABLED)
|
||||||
return _list_backup_files()
|
return _list_backup_files()
|
||||||
|
|
||||||
|
|
||||||
@router.post("/backup", response_model=BackupResult)
|
@router.post("/backup", response_model=BackupResult)
|
||||||
async def trigger_backup(current_user: User = Depends(get_current_user)):
|
async def trigger_backup(current_user: User = Depends(get_current_user)):
|
||||||
|
if get_settings().is_demo:
|
||||||
|
raise HTTPException(status_code=403, detail=_DEMO_DISABLED)
|
||||||
try:
|
try:
|
||||||
proc = await asyncio.create_subprocess_exec(
|
proc = await asyncio.create_subprocess_exec(
|
||||||
"bash", "/app/scripts/backup.sh",
|
"bash", "/app/scripts/backup.sh",
|
||||||
|
|
@ -71,6 +78,8 @@ async def download_backup(
|
||||||
filename: str,
|
filename: str,
|
||||||
current_user: User = Depends(get_current_user),
|
current_user: User = Depends(get_current_user),
|
||||||
):
|
):
|
||||||
|
if get_settings().is_demo:
|
||||||
|
raise HTTPException(status_code=403, detail=_DEMO_DISABLED)
|
||||||
if not BACKUP_PATTERN.match(filename):
|
if not BACKUP_PATTERN.match(filename):
|
||||||
raise HTTPException(status_code=400, detail="Invalid filename")
|
raise HTTPException(status_code=400, detail="Invalid filename")
|
||||||
path = BACKUP_DIR / filename
|
path = BACKUP_DIR / filename
|
||||||
|
|
@ -88,6 +97,8 @@ async def restore_backup(
|
||||||
filename: str,
|
filename: str,
|
||||||
current_user: User = Depends(get_current_user),
|
current_user: User = Depends(get_current_user),
|
||||||
):
|
):
|
||||||
|
if get_settings().is_demo:
|
||||||
|
raise HTTPException(status_code=403, detail=_DEMO_DISABLED)
|
||||||
if not BACKUP_PATTERN.match(filename):
|
if not BACKUP_PATTERN.match(filename):
|
||||||
raise HTTPException(status_code=400, detail="Invalid filename")
|
raise HTTPException(status_code=400, detail="Invalid filename")
|
||||||
path = BACKUP_DIR / filename
|
path = BACKUP_DIR / filename
|
||||||
|
|
|
||||||
|
|
@ -8,6 +8,7 @@ from pydantic import BaseModel, Field
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from app.config import get_settings
|
||||||
from app.core.audit import write_audit
|
from app.core.audit import write_audit
|
||||||
from app.core.security import hash_password, verify_password
|
from app.core.security import hash_password, verify_password
|
||||||
from app.dependencies import get_current_user, get_db
|
from app.dependencies import get_current_user, get_db
|
||||||
|
|
@ -41,6 +42,8 @@ async def change_password(
|
||||||
db: AsyncSession = Depends(get_db),
|
db: AsyncSession = Depends(get_db),
|
||||||
user=Depends(get_current_user),
|
user=Depends(get_current_user),
|
||||||
):
|
):
|
||||||
|
if get_settings().is_demo:
|
||||||
|
raise HTTPException(status_code=403, detail="Password changes are disabled in demo mode")
|
||||||
if not verify_password(body.current_password, user.password_hash):
|
if not verify_password(body.current_password, user.password_hash):
|
||||||
raise HTTPException(status_code=400, detail="Current password is incorrect")
|
raise HTTPException(status_code=400, detail="Current password is incorrect")
|
||||||
user.password_hash = hash_password(body.new_password)
|
user.password_hash = hash_password(body.new_password)
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,7 @@ class Settings(BaseSettings):
|
||||||
environment: str = "production"
|
environment: str = "production"
|
||||||
allow_registration: bool = False
|
allow_registration: bool = False
|
||||||
base_currency: str = "GBP"
|
base_currency: str = "GBP"
|
||||||
|
demo_mode: bool = False
|
||||||
|
|
||||||
# JWT — keys read from /run/secrets/ at runtime
|
# JWT — keys read from /run/secrets/ at runtime
|
||||||
jwt_private_key_file: str = "/run/secrets/jwt_private.pem"
|
jwt_private_key_file: str = "/run/secrets/jwt_private.pem"
|
||||||
|
|
@ -48,6 +49,10 @@ class Settings(BaseSettings):
|
||||||
def is_development(self) -> bool:
|
def is_development(self) -> bool:
|
||||||
return self.environment == "development"
|
return self.environment == "development"
|
||||||
|
|
||||||
|
    @property
    def is_demo(self) -> bool:
        """True when the ``demo_mode`` setting is enabled (public demo deployment)."""
        return self.demo_mode
|
||||||
|
|
||||||
|
|
||||||
@lru_cache
|
@lru_cache
|
||||||
def get_settings() -> Settings:
|
def get_settings() -> Settings:
|
||||||
|
|
|
||||||
0
backend/app/demo/__init__.py
Normal file
0
backend/app/demo/__init__.py
Normal file
492
backend/app/demo/seed.py
Normal file
492
backend/app/demo/seed.py
Normal file
|
|
@ -0,0 +1,492 @@
|
||||||
|
"""Demo seed — runs automatically on first startup when DEMO_MODE=true."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import uuid
|
||||||
|
from datetime import date, datetime, timezone
|
||||||
|
from decimal import Decimal
|
||||||
|
|
||||||
|
from sqlalchemy import select
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from app.core.security import encrypt_field, hash_password
|
||||||
|
from app.db.models import (
|
||||||
|
Account, Asset, Budget, Category, InvestmentHolding,
|
||||||
|
InvestmentTransaction, ManualCGTDisposal, Payslip,
|
||||||
|
TaxProfile, Transaction, User,
|
||||||
|
)
|
||||||
|
|
||||||
|
DEMO_EMAIL = "demo@mymidas.app"
|
||||||
|
DEMO_PASSWORD = "demo123"
|
||||||
|
|
||||||
|
|
||||||
|
def _now() -> datetime:
|
||||||
|
return datetime.now(timezone.utc)
|
||||||
|
|
||||||
|
|
||||||
|
def _h(*parts) -> str:
|
||||||
|
return hashlib.sha256("|".join(str(p) for p in parts).encode()).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
async def is_seeded(db: AsyncSession) -> bool:
    """Return True when the demo user row already exists (seed already ran)."""
    existing = await db.scalar(select(User).where(User.email == DEMO_EMAIL))
    return existing is not None
|
||||||
|
|
||||||
|
|
||||||
|
async def seed_demo(db: AsyncSession) -> None:
    """Populate the database with a full demo dataset for the demo user.

    Idempotent: returns immediately if the demo user already exists (see
    :func:`is_seeded`). Creates, in order: the demo user, four accounts,
    six months of transactions (Oct 2025 – Mar 2026), budgets, assets,
    holdings, investment transactions, a UK tax profile with payslips, and
    one manual CGT disposal. Flushes but does NOT commit — the caller owns
    the transaction boundary.

    Args:
        db: An open async SQLAlchemy session; all rows are added to it.
    """
    if await is_seeded(db):
        return

    now = _now()

    # ── User ──────────────────────────────────────────────────────────────
    # Name/PII-style fields elsewhere are encrypted via encrypt_field; the
    # login email itself is stored in clear so auth lookups work.
    user = User(
        id=uuid.uuid4(),
        email=DEMO_EMAIL,
        password_hash=hash_password(DEMO_PASSWORD),
        display_name="Alex Demo",
        base_currency="GBP",
        theme="obsidian",
        locale="en-GB",
        created_at=now,
        updated_at=now,
    )
    db.add(user)
    await db.flush()  # flush so user.id is available for FK references below
    uid = user.id

    # ── System categories ──────────────────────────────────────────────────
    # Assumes seed_system_categories already ran (see app lifespan) — cid()
    # degrades to None for any category name not present.
    res = await db.execute(select(Category).where(Category.is_system == True))
    cats = {c.name: c for c in res.scalars().all()}

    def cid(name: str):
        # Category id by system-category name, or None if that name is missing.
        return cats[name].id if name in cats else None

    # ── Accounts ──────────────────────────────────────────────────────────
    def mk_acc(name, institution, kind, balance, color, currency="GBP", credit_limit=None):
        # Build an Account with encrypted name/institution; balances are
        # passed as strings and routed through Decimal to avoid float drift.
        return Account(
            id=uuid.uuid4(),
            user_id=uid,
            name_enc=encrypt_field(name),
            institution_enc=encrypt_field(institution) if institution else None,
            type=kind,
            currency=currency,
            current_balance=Decimal(str(balance)),
            credit_limit=Decimal(str(credit_limit)) if credit_limit else None,
            is_active=True,
            include_in_net_worth=True,
            color=color,
            meta={},
            created_at=now,
            updated_at=now,
        )

    monzo = mk_acc("Monzo Current Account", "Monzo", "checking", "2847.32", "#ff6b35")
    marcus = mk_acc("Marcus Savings", "Goldman Sachs", "savings", "6234.50", "#22c55e")
    amex = mk_acc("Amex Gold", "American Express", "credit_card", "-342.18", "#f59e0b", credit_limit=5000)
    freetrade = mk_acc("Freetrade Stocks & Shares ISA", "Freetrade", "investment", "0", "#6366f1")

    for acc in [monzo, marcus, amex, freetrade]:
        db.add(acc)
    await db.flush()

    # ── Transaction helpers ────────────────────────────────────────────────
    def txn(account, txn_type, amount, desc, merchant, cat_name, txn_date,
            is_recurring=False, recurring_rule=None):
        # Single-currency (GBP) transaction; import_hash keeps re-seeding /
        # CSV-import dedup stable for identical rows.
        return Transaction(
            id=uuid.uuid4(),
            user_id=uid,
            account_id=account.id,
            type=txn_type,
            status="cleared",
            amount=Decimal(str(amount)),
            amount_base=Decimal(str(amount)),
            currency="GBP",
            base_currency="GBP",
            date=txn_date,
            description_enc=encrypt_field(desc),
            merchant_enc=encrypt_field(merchant) if merchant else None,
            category_id=cid(cat_name),
            tags=[],
            is_recurring=is_recurring,
            recurring_rule=recurring_rule,
            attachment_refs=[],
            import_hash=_h(uid, account.id, txn_date, amount, desc),
            meta={},
            created_at=now,
            updated_at=now,
        )

    def rr(freq, amount, next_exp, last_paid):
        # Recurring-rule payload stored as JSON on the transaction.
        # NOTE(review): shape presumably mirrors the app's recurring-detection
        # schema — confirm against the Transaction.recurring_rule consumer.
        return {
            "frequency": freq,
            "typical_amount": float(amount),
            "next_expected": next_exp,
            "last_paid": last_paid,
            "confidence": 0.97,
            "manually_set": False,
            "detected_at": now.isoformat(),
        }

    def next_month(y, m):
        # (year, month) of the month after (y, m), rolling over December.
        return (y + 1, 1) if m == 12 else (y, m + 1)

    def d(y, m, day):
        # Clamp `day` to the month's length (Feb treated as 28 days — no
        # leap-year handling; acceptable for synthetic demo dates).
        return date(y, m, min(day, [31,28,31,30,31,30,31,31,30,31,30,31][m-1]))

    txns = []

    # ── Monthly recurring transactions (Oct 2025 – Mar 2026) ──────────────
    months = [(2025, 10), (2025, 11), (2025, 12), (2026, 1), (2026, 2), (2026, 3)]

    for year, month in months:
        ny, nm = next_month(year, month)
        n1 = d(ny, nm, 1).isoformat()       # next expected date for 1st-of-month items
        this1 = d(year, month, 1).isoformat()  # this month's payment date

        # Salary
        txns.append(txn(monzo, "income", 3489.00,
            "Demo Corp Ltd - Salary", "Demo Corp Ltd", "Salary",
            d(year, month, 1), True, rr("monthly", 3489.00, n1, this1)))

        # Rent
        txns.append(txn(monzo, "expense", -1250.00,
            "DIRECT DEBIT ANYLETS PROPERTY MGMT", "Anylets", "Rent / Mortgage",
            d(year, month, 1), True, rr("monthly", -1250.00, n1, this1)))

        # Council tax
        txns.append(txn(monzo, "expense", -145.00,
            "DIRECT DEBIT LONDON BOROUGH COUNCIL TAX", "London Borough", "Council Tax",
            d(year, month, 1), True, rr("monthly", -145.00, n1, this1)))

        # Internet
        txns.append(txn(monzo, "expense", -35.00,
            "DIRECT DEBIT BT BROADBAND", "BT", "Internet",
            d(year, month, 2), True, rr("monthly", -35.00, d(ny, nm, 2).isoformat(), d(year, month, 2).isoformat())))

        # Phone
        txns.append(txn(monzo, "expense", -25.00,
            "DIRECT DEBIT EE MOBILE", "EE", "Phone",
            d(year, month, 3), True, rr("monthly", -25.00, d(ny, nm, 3).isoformat(), d(year, month, 3).isoformat())))

        # Energy (variable) — one fixed amount per month of the window
        energy = [-102, -98, -115, -108, -103, -95][months.index((year, month))]
        txns.append(txn(monzo, "expense", energy,
            "DIRECT DEBIT OVO ENERGY", "OVO Energy", "Electricity",
            d(year, month, 5), True, rr("monthly", energy, d(ny, nm, 5).isoformat(), d(year, month, 5).isoformat())))

        # Subscriptions — two billed on the 5th, three on the 6th
        for sub_desc, sub_merch, sub_amt in [
            ("DIRECT DEBIT NETFLIX", "Netflix", -17.99),
            ("DIRECT DEBIT SPOTIFY", "Spotify", -11.99),
        ]:
            txns.append(txn(monzo, "expense", sub_amt, sub_desc, sub_merch, "Subscriptions",
                d(year, month, 5), True, rr("monthly", sub_amt, d(ny, nm, 5).isoformat(), d(year, month, 5).isoformat())))

        for sub_desc, sub_merch, sub_amt in [
            ("DIRECT DEBIT AMAZON PRIME", "Amazon Prime", -8.99),
            ("DIRECT DEBIT APPLE ICLOUD", "Apple iCloud", -2.99),
            ("DIRECT DEBIT GITHUB", "GitHub", -3.99),
        ]:
            txns.append(txn(monzo, "expense", sub_amt, sub_desc, sub_merch, "Subscriptions",
                d(year, month, 6), True, rr("monthly", sub_amt, d(ny, nm, 6).isoformat(), d(year, month, 6).isoformat())))

        # Gym
        txns.append(txn(monzo, "expense", -35.00,
            "DIRECT DEBIT PUREGYM", "PureGym", "Gym",
            d(year, month, 7), True, rr("monthly", -35.00, d(ny, nm, 7).isoformat(), d(year, month, 7).isoformat())))

        # TfL top-ups (2/month): (amount1, day1, amount2, day2) per month
        tfl = [(-40, 17, -30, 29), (-35, 15, -40, 28), (-40, 16, -35, 30),
               (-40, 17, -30, 28), (-40, 14, -35, 27), (-40, 17, -30, 29)][months.index((year, month))]
        txns.append(txn(monzo, "expense", tfl[0], "TfL Travel Top-Up", "Transport for London",
            "Public Transport", d(year, month, tfl[1])))
        txns.append(txn(monzo, "expense", tfl[2], "TfL Travel Top-Up", "Transport for London",
            "Public Transport", d(year, month, tfl[3])))

        # Transfer to savings — modelled as a linked pair of "transfer" rows,
        # one outgoing on Monzo and one incoming on Marcus, same date/amount.
        out_id = uuid.uuid4()
        inn_id = uuid.uuid4()
        sav_rr = rr("monthly", -300.00, d(ny, nm, 15).isoformat(), d(year, month, 15).isoformat())
        txns.append(Transaction(
            id=out_id, user_id=uid, account_id=monzo.id,
            transfer_account_id=marcus.id, type="transfer", status="cleared",
            amount=Decimal("-300.00"), amount_base=Decimal("-300.00"),
            currency="GBP", base_currency="GBP", date=d(year, month, 15),
            description_enc=encrypt_field("Transfer to Marcus Savings"),
            category_id=cid("Transfer"), tags=[], is_recurring=True, recurring_rule=sav_rr,
            attachment_refs=[], import_hash=_h(uid, monzo.id, year, month, "transfer_out"),
            meta={}, created_at=now, updated_at=now,
        ))
        txns.append(Transaction(
            id=inn_id, user_id=uid, account_id=marcus.id,
            transfer_account_id=monzo.id, type="transfer", status="cleared",
            amount=Decimal("300.00"), amount_base=Decimal("300.00"),
            currency="GBP", base_currency="GBP", date=d(year, month, 15),
            description_enc=encrypt_field("Transfer from Monzo"),
            category_id=cid("Transfer"), tags=[], is_recurring=True, recurring_rule=sav_rr,
            attachment_refs=[], import_hash=_h(uid, marcus.id, year, month, "transfer_in"),
            meta={}, created_at=now, updated_at=now,
        ))

    # ── Groceries ─────────────────────────────────────────────────────────
    # (year, month, day, amount, merchant) — hand-tuned for realistic spread.
    groceries = [
        (2025,10, 9, -71.43, "Tesco"), (2025,10, 16, -63.21, "Sainsbury's"),
        (2025,10, 23, -58.76, "Tesco"), (2025,10, 28, -47.30, "Lidl"),
        (2025,10, 30, -65.80, "Tesco"), (2025,11, 8, -68.92, "Tesco"),
        (2025,11, 15, -72.10, "Sainsbury's"), (2025,11, 21, -55.40, "Lidl"),
        (2025,11, 27, -61.33, "Tesco"), (2025,12, 6, -82.14, "Waitrose"),
        (2025,12, 13, -65.20, "Sainsbury's"), (2025,12, 20, -71.50, "Tesco"),
        (2025,12, 27, -90.30, "Waitrose"), (2026, 1, 10, -66.45, "Tesco"),
        (2026, 1, 17, -59.80, "Lidl"), (2026, 1, 24, -74.20, "Sainsbury's"),
        (2026, 1, 31, -68.90, "Tesco"), (2026, 2, 7, -73.15, "Tesco"),
        (2026, 2, 14, -61.40, "Sainsbury's"), (2026, 2, 21, -55.90, "Lidl"),
        (2026, 2, 28, -80.45, "Waitrose"), (2026, 3, 7, -69.30, "Tesco"),
        (2026, 3, 14, -58.75, "Lidl"), (2026, 3, 21, -76.20, "Sainsbury's"),
        (2026, 3, 28, -65.80, "Tesco"),
    ]
    for y, m, day, amt, merch in groceries:
        txns.append(txn(monzo, "expense", amt, f"{merch} Groceries", merch, "Groceries", date(y, m, day)))

    # ── Eating out ────────────────────────────────────────────────────────
    eating_out = [
        (2025,10, 14, -42.00, "Wagamama"), (2025,10, 19, -28.50, "Deliveroo"),
        (2025,10, 26, -55.00, "Dishoom"), (2025,11, 8, -35.00, "Nando's"),
        (2025,11, 22, -48.50, "Carluccio's"), (2025,11, 28, -22.00, "Deliveroo"),
        (2025,12, 12, -65.00, "Gaucho"), (2025,12, 19, -38.00, "Pizza Express"),
        (2025,12, 23, -52.00, "Dishoom"), (2026, 1, 10, -29.00, "Deliveroo"),
        (2026, 1, 17, -45.00, "Wagamama"), (2026, 1, 25, -38.50, "Nando's"),
        (2026, 2, 6, -42.00, "Dishoom"), (2026, 2, 14, -78.00, "Restaurant"),
        (2026, 2, 22, -31.50, "Deliveroo"), (2026, 3, 8, -38.00, "Wagamama"),
        (2026, 3, 15, -25.00, "Deliveroo"), (2026, 3, 22, -47.00, "Nando's"),
    ]
    for y, m, day, amt, merch in eating_out:
        txns.append(txn(monzo, "expense", amt, merch, merch, "Eating Out", date(y, m, day)))

    # ── Coffee ────────────────────────────────────────────────────────────
    coffee = [
        (2025,10, 12, -4.80, "Costa Coffee"), (2025,10, 17, -8.50, "Pret a Manger"),
        (2025,10, 24, -5.20, "Starbucks"), (2025,11, 5, -4.50, "Costa Coffee"),
        (2025,11, 13, -5.20, "Starbucks"), (2025,11, 20, -4.80, "Pret a Manger"),
        (2025,11, 27, -6.50, "Blank Street"), (2025,12, 4, -5.20, "Starbucks"),
        (2025,12, 11, -4.80, "Costa Coffee"), (2025,12, 18, -5.00, "Pret a Manger"),
        (2026, 1, 9, -4.50, "Costa Coffee"), (2026, 1, 16, -5.20, "Starbucks"),
        (2026, 1, 23, -4.80, "Pret a Manger"), (2026, 2, 5, -5.20, "Starbucks"),
        (2026, 2, 12, -4.80, "Costa Coffee"), (2026, 2, 20, -5.50, "Blank Street"),
        (2026, 2, 27, -4.80, "Pret a Manger"), (2026, 3, 5, -4.80, "Costa Coffee"),
        (2026, 3, 12, -5.20, "Starbucks"), (2026, 3, 19, -4.50, "Pret a Manger"),
    ]
    for y, m, day, amt, merch in coffee:
        txns.append(txn(monzo, "expense", amt, merch, merch, "Coffee", date(y, m, day)))

    # ── Shopping / other ──────────────────────────────────────────────────
    # (year, month, day, amount, account, merchant, description, category).
    # Positive amounts (savings interest) become "income" rows below.
    shopping = [
        (2025,10, 21, -34.99, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2025,10, 31, -12.99, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2025,10, 25, -89.00, monzo, "ASOS", "ASOS", "Clothing"),
        (2025,11, 14, -89.99, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2025,11, 18, -85.00, monzo, "Dental Practice", "Dental Practice", "Healthcare"),
        (2025,11, 28, -22.50, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2025,12, 3, -156.00, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2025,12, 5, -650.00, monzo, "EasyJet", "EasyJet", "Holidays"),
        (2025,12, 5, -380.00, monzo, "Booking.com", "Booking.com", "Holidays"),
        (2025,12, 15, -210.00, monzo, "Amazon", "AMAZON.CO.UK", "Gifts"),
        (2025,12, 16, -85.00, monzo, "John Lewis", "John Lewis", "Gifts"),
        (2025,12, 18, -45.00, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2025,12, 20, -120.00, monzo, "Airbnb", "Airbnb", "Holidays"),
        (2026, 1, 12, -28.99, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2026, 1, 26, -67.00, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2026, 2, 10, -44.99, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2026, 2, 18, -75.00, monzo, "Uniqlo", "Uniqlo", "Clothing"),
        (2026, 2, 25, -19.99, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2026, 3, 8, -38.50, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        (2026, 3, 22, -55.00, monzo, "Amazon", "AMAZON.CO.UK", "Other Expense"),
        # Car insurance (October, annual)
        (2025,10, 15, -485.00, monzo, "Aviva", "AVIVA CAR INSURANCE ANNUAL", "Car Insurance"),
        # Amex purchases
        (2025,10, 20, -320.00, amex, "M&S", "MARKS AND SPENCER", "Groceries"),
        (2025,11, 8, -145.00, amex, "Restaurant", "RESTAURANT GORDON RAMSAY", "Eating Out"),
        (2025,12, 22, -89.00, amex, "Apple", "APPLE STORE", "Other Expense"),
        (2026, 1, 25, -215.00, amex, "Sofitel", "SOFITEL HOTEL", "Holidays"),
        (2026, 2, 14, -178.50, amex, "Selfridges", "SELFRIDGES", "Clothing"),
        (2026, 3, 8, -95.00, amex, "Harvey Nichols", "HARVEY NICHOLS", "Personal Care"),
        # Savings interest (quarterly)
        (2025,10, 28, 12.50, marcus, "Goldman Sachs", "Marcus Savings Interest", "Investment Income"),
        (2026, 1, 28, 13.20, marcus, "Goldman Sachs", "Marcus Savings Interest", "Investment Income"),
    ]
    for row in shopping:
        y, m, day, amt, acc, merch, desc, cat = row
        txn_type = "income" if amt > 0 else "expense"
        txns.append(txn(acc, txn_type, amt, desc, merch, cat, date(y, m, day)))

    for t in txns:
        db.add(t)
    await db.flush()

    # ── Budgets ───────────────────────────────────────────────────────────
    # (budget name, monthly amount, system-category name). Rows whose
    # category is missing are silently skipped.
    budget_defs = [
        ("Groceries", 300.00, "Groceries"),
        ("Eating Out", 200.00, "Eating Out"),
        ("Transport", 100.00, "Public Transport"),
        ("Entertainment", 80.00, "Entertainment"),
        ("Utilities", 180.00, "Electricity"),
        ("Subscriptions", 60.00, "Subscriptions"),
        ("Shopping", 200.00, "Clothing"),
    ]
    for bname, amount, cat_name in budget_defs:
        cat_id = cid(cat_name)
        if not cat_id:
            continue
        db.add(Budget(
            id=uuid.uuid4(),
            user_id=uid,
            category_id=cat_id,
            name=bname,
            amount=Decimal(str(amount)),
            currency="GBP",
            period="monthly",
            start_date=date(2025, 10, 1),
            rollover=False,
            alert_threshold=Decimal("80"),  # alert at 80% of budget
            is_active=True,
            created_at=now,
            updated_at=now,
        ))
    await db.flush()

    # ── Assets ────────────────────────────────────────────────────────────
    # Prices are static seed values; live quotes come from the configured
    # data_source afterwards.
    vwrp = Asset(
        id=uuid.uuid4(), symbol="VWRP.L",
        name="Vanguard FTSE All-World UCITS ETF", type="etf",
        currency="GBP", exchange="LSE", data_source="yahoo_finance",
        last_price=Decimal("107.50"), last_price_at=now,
        price_change_24h=Decimal("0.85"), is_active=True,
        created_at=now, updated_at=now,
    )
    aapl = Asset(
        id=uuid.uuid4(), symbol="AAPL", name="Apple Inc.", type="stock",
        currency="USD", exchange="NASDAQ", data_source="yahoo_finance",
        last_price=Decimal("212.50"), last_price_at=now,
        price_change_24h=Decimal("-0.45"), is_active=True,
        created_at=now, updated_at=now,
    )
    btc = Asset(
        id=uuid.uuid4(), symbol="BTC-USD", name="Bitcoin", type="crypto",
        currency="USD", exchange=None, data_source="coingecko",
        last_price=Decimal("84500.00"), last_price_at=now,
        price_change_24h=Decimal("2.30"), is_active=True,
        created_at=now, updated_at=now,
    )
    for a in [vwrp, aapl, btc]:
        db.add(a)
    await db.flush()

    # ── Holdings ──────────────────────────────────────────────────────────
    # Quantities match investment transactions below — do not double-count.
    vwrp_h = InvestmentHolding(
        id=uuid.uuid4(), user_id=uid, account_id=freetrade.id, asset_id=vwrp.id,
        quantity=Decimal("50"), avg_cost_basis=Decimal("101.21"),
        currency="GBP", created_at=now, updated_at=now,
    )
    aapl_h = InvestmentHolding(
        id=uuid.uuid4(), user_id=uid, account_id=freetrade.id, asset_id=aapl.id,
        quantity=Decimal("10"), avg_cost_basis=Decimal("228.50"),
        currency="USD", created_at=now, updated_at=now,
    )
    btc_h = InvestmentHolding(
        id=uuid.uuid4(), user_id=uid, account_id=freetrade.id, asset_id=btc.id,
        quantity=Decimal("0.05"), avg_cost_basis=Decimal("69500.00"),
        currency="USD", created_at=now, updated_at=now,
    )
    for h in [vwrp_h, aapl_h, btc_h]:
        db.add(h)
    await db.flush()

    # ── Investment transactions (history only — holding quantities already set) ──
    inv_txns = [
        # VWRP: 3 buys totalling 50 shares
        InvestmentTransaction(
            id=uuid.uuid4(), user_id=uid, holding_id=vwrp_h.id,
            type="buy", quantity=Decimal("15"), price=Decimal("98.50"),
            fees=Decimal("0"), total_amount=Decimal("1477.50"),
            currency="GBP", date=date(2025, 10, 5), created_at=now,
        ),
        InvestmentTransaction(
            id=uuid.uuid4(), user_id=uid, holding_id=vwrp_h.id,
            type="buy", quantity=Decimal("20"), price=Decimal("102.30"),
            fees=Decimal("0"), total_amount=Decimal("2046.00"),
            currency="GBP", date=date(2025, 12, 10), created_at=now,
        ),
        InvestmentTransaction(
            id=uuid.uuid4(), user_id=uid, holding_id=vwrp_h.id,
            type="buy", quantity=Decimal("15"), price=Decimal("105.80"),
            fees=Decimal("0"), total_amount=Decimal("1587.00"),
            currency="GBP", date=date(2026, 2, 14), created_at=now,
        ),
        # VWRP dividend
        InvestmentTransaction(
            id=uuid.uuid4(), user_id=uid, holding_id=vwrp_h.id,
            type="dividend", quantity=Decimal("0"), price=Decimal("0"),
            fees=Decimal("0"), total_amount=Decimal("62.50"),
            currency="GBP", date=date(2025, 12, 20), created_at=now,
        ),
        # AAPL buy
        InvestmentTransaction(
            id=uuid.uuid4(), user_id=uid, holding_id=aapl_h.id,
            type="buy", quantity=Decimal("10"), price=Decimal("228.50"),
            fees=Decimal("0"), total_amount=Decimal("2285.00"),
            currency="USD", date=date(2025, 10, 15), created_at=now,
        ),
        # AAPL dividend
        InvestmentTransaction(
            id=uuid.uuid4(), user_id=uid, holding_id=aapl_h.id,
            type="dividend", quantity=Decimal("0"), price=Decimal("0"),
            fees=Decimal("0"), total_amount=Decimal("23.00"),
            currency="USD", date=date(2026, 2, 15), created_at=now,
        ),
        # BTC buy
        InvestmentTransaction(
            id=uuid.uuid4(), user_id=uid, holding_id=btc_h.id,
            type="buy", quantity=Decimal("0.05"), price=Decimal("69500.00"),
            fees=Decimal("0"), total_amount=Decimal("3475.00"),
            currency="USD", date=date(2025, 11, 1), created_at=now,
        ),
    ]
    for it in inv_txns:
        db.add(it)
    await db.flush()

    # ── Tax profile & payslips (2025/26 = tax_year 2026) ─────────────────
    tax_profile = TaxProfile(
        id=uuid.uuid4(), user_id=uid, tax_year=2026,
        employer_name_enc=encrypt_field("Demo Corp Ltd"),
        tax_code="1257L", is_cumulative=True,
        created_at=now, updated_at=now,
    )
    db.add(tax_profile)
    await db.flush()

    # 6 payslips: April – September 2025
    # Net pay (3489.00) matches the monthly salary transaction above.
    for month in range(4, 10):
        db.add(Payslip(
            id=uuid.uuid4(), user_id=uid,
            tax_profile_id=tax_profile.id,
            period_month=month, period_year=2025,
            gross_pay=Decimal("4500.00"),
            income_tax_withheld=Decimal("753.00"),
            ni_withheld=Decimal("258.00"),
            net_pay=Decimal("3489.00"),
            is_p60=False,
            created_at=now,
        ))

    # Manual CGT disposal — employee share scheme, gain exceeds annual exempt
    db.add(ManualCGTDisposal(
        id=uuid.uuid4(), user_id=uid, tax_year=2026,
        disposal_date=date(2025, 9, 15),
        asset_description_enc=encrypt_field("Tech Corp Ltd — Employee Share Scheme"),
        proceeds=Decimal("8500.00"),
        cost_basis=Decimal("4200.00"),
        created_at=now,
    ))

    await db.flush()
|
||||||
51
backend/app/demo/snapshot.py
Normal file
51
backend/app/demo/snapshot.py
Normal file
|
|
@ -0,0 +1,51 @@
|
||||||
|
"""Creates and restores the demo database snapshot used for hourly resets."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
SNAPSHOT_PATH = Path(os.environ.get("DEMO_SNAPSHOT_PATH", "/app/demo_snapshot.sql.gz"))
|
||||||
|
|
||||||
|
|
||||||
|
async def create_snapshot() -> None:
    """pg_dump the current DB to SNAPSHOT_PATH (gzip compressed).

    Reads the connection URL from ``DATABASE_URL`` (the ``+asyncpg`` driver
    suffix is stripped so the libpq tools accept it).

    Raises:
        RuntimeError: if DATABASE_URL is unset or pg_dump exits non-zero.

    Fixes vs. previous version:
    - Runs pg_dump via exec (no shell), so a failing pg_dump is detected
      directly; the old ``pg_dump | gzip`` shell pipeline reported gzip's
      exit status and silently produced an empty snapshot on dump failure.
    - Dumps with ``--clean --if-exists`` so restoring the snapshot onto an
      existing schema drops objects first instead of aborting on
      "already exists" under ON_ERROR_STOP.
    """
    import gzip  # local import: only needed for snapshot handling

    db_url = os.environ.get("DATABASE_URL", "")
    if not db_url:
        raise RuntimeError("DATABASE_URL is not set; cannot create demo snapshot")
    pg_url = db_url.replace("postgresql+asyncpg", "postgresql")

    proc = await asyncio.create_subprocess_exec(
        "pg_dump", "--no-owner", "--no-acl", "--clean", "--if-exists", pg_url,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    out, err = await proc.communicate()
    if proc.returncode != 0:
        raise RuntimeError(f"Snapshot failed: {err.decode()}")

    # Write atomically: compress to a temp file, then rename into place so a
    # concurrent restore never sees a half-written snapshot.
    tmp_path = SNAPSHOT_PATH.with_suffix(SNAPSHOT_PATH.suffix + ".tmp")
    with gzip.open(tmp_path, "wb") as fh:
        fh.write(out)
    tmp_path.replace(SNAPSHOT_PATH)
|
||||||
|
|
||||||
|
|
||||||
|
async def restore_snapshot() -> None:
|
||||||
|
"""Restore DB from SNAPSHOT_PATH, dropping and recreating all user data."""
|
||||||
|
if not SNAPSHOT_PATH.exists():
|
||||||
|
raise FileNotFoundError(f"Snapshot not found: {SNAPSHOT_PATH}")
|
||||||
|
|
||||||
|
db_url = os.environ.get("DATABASE_URL", "")
|
||||||
|
pg_url = db_url.replace("postgresql+asyncpg", "postgresql")
|
||||||
|
|
||||||
|
# Truncate all user-data tables in dependency order, then restore
|
||||||
|
truncate_sql = """
|
||||||
|
TRUNCATE TABLE
|
||||||
|
manual_cgt_disposals, payslips, tax_profiles, tax_rate_configs,
|
||||||
|
investment_transactions, investment_holdings, assets,
|
||||||
|
audit_logs, net_worth_snapshots,
|
||||||
|
transactions, budgets, accounts, categories,
|
||||||
|
sessions, users
|
||||||
|
RESTART IDENTITY CASCADE;
|
||||||
|
"""
|
||||||
|
|
||||||
|
proc = await asyncio.create_subprocess_shell(
|
||||||
|
f'gunzip -c "{SNAPSHOT_PATH}" | psql --single-transaction -v ON_ERROR_STOP=1 "{pg_url}"',
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
_, err = await proc.communicate()
|
||||||
|
if proc.returncode != 0:
|
||||||
|
raise RuntimeError(f"Restore failed: {err.decode()}")
|
||||||
|
|
@ -35,6 +35,18 @@ async def lifespan(app: FastAPI):
|
||||||
await seed_system_categories(db)
|
await seed_system_categories(db)
|
||||||
await db.commit()
|
await db.commit()
|
||||||
|
|
||||||
|
# Demo mode: seed demo data on first startup, then snapshot
|
||||||
|
if settings.is_demo:
|
||||||
|
from app.demo.seed import is_seeded, seed_demo
|
||||||
|
from app.demo.snapshot import create_snapshot
|
||||||
|
async with session_factory() as db:
|
||||||
|
if not await is_seeded(db):
|
||||||
|
await seed_demo(db)
|
||||||
|
await db.commit()
|
||||||
|
logger.info("demo_seed_complete")
|
||||||
|
await create_snapshot()
|
||||||
|
logger.info("demo_snapshot_created")
|
||||||
|
|
||||||
# Background scheduler
|
# Background scheduler
|
||||||
from app.workers.scheduler import start_scheduler, stop_scheduler
|
from app.workers.scheduler import start_scheduler, stop_scheduler
|
||||||
await start_scheduler()
|
await start_scheduler()
|
||||||
|
|
@ -78,6 +90,10 @@ def create_app() -> FastAPI:
|
||||||
async def health():
|
async def health():
|
||||||
return {"status": "ok"}
|
return {"status": "ok"}
|
||||||
|
|
||||||
|
@app.get("/demo/status")
|
||||||
|
async def demo_status():
|
||||||
|
return {"demo_mode": settings.is_demo}
|
||||||
|
|
||||||
# API routers
|
# API routers
|
||||||
from app.api.router import router
|
from app.api.router import router
|
||||||
app.include_router(router, prefix="/api/v1")
|
app.include_router(router, prefix="/api/v1")
|
||||||
|
|
|
||||||
18
demo/.env.example
Normal file
18
demo/.env.example
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
# MyMidas Demo — environment variables
|
||||||
|
# Copy to .env and fill in every value before deploying.
|
||||||
|
|
||||||
|
# ── Encryption ────────────────────────────────────────────────────────────────
|
||||||
|
# 32-byte hex key for AES-256-GCM field encryption.
|
||||||
|
# Generate with: python3 -c "import secrets; print(secrets.token_hex(32))"
|
||||||
|
ENCRYPTION_KEY=
|
||||||
|
|
||||||
|
# ── Database ──────────────────────────────────────────────────────────────────
|
||||||
|
# Strong random password for the demo Postgres user.
|
||||||
|
DB_PASSWORD=
|
||||||
|
|
||||||
|
# ── Redis ─────────────────────────────────────────────────────────────────────
|
||||||
|
REDIS_PASSWORD=
|
||||||
|
|
||||||
|
# ── Environment ───────────────────────────────────────────────────────────────
|
||||||
|
# Keep as "production" (hides /docs, enforces security headers).
|
||||||
|
ENVIRONMENT=production
|
||||||
152
demo/DEPLOY.md
Normal file
152
demo/DEPLOY.md
Normal file
|
|
@ -0,0 +1,152 @@
|
||||||
|
# MyMidas Demo — Deployment Guide (Portainer)
|
||||||
|
|
||||||
|
This guide deploys the public demo instance on a separate server using Portainer Stacks.
|
||||||
|
|
||||||
|
**What you get:** a fully seeded MyMidas instance at port 4001, with demo data and an hourly auto-reset. No manual steps after initial deploy.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- Docker and Portainer installed on the demo server
|
||||||
|
- SSH or console access to the demo server (for the initial clone and key generation)
|
||||||
|
- Your reverse proxy pointing a public domain at port `4001` on this server
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 1 — Clone the repo
|
||||||
|
|
||||||
|
On the demo server, clone into your preferred location:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://git.rdx4.com/megaproxy/MyMidas.git
|
||||||
|
cd MyMidas
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 2 — Generate JWT keys
|
||||||
|
|
||||||
|
The demo shares the `secrets/` directory with the main app structure. If you've already generated keys on this server you can skip this.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mkdir -p secrets
|
||||||
|
openssl genrsa -out secrets/jwt_private.pem 4096
|
||||||
|
openssl rsa -in secrets/jwt_private.pem -pubout -out secrets/jwt_public.pem
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 3 — Create the demo .env file
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd demo
|
||||||
|
cp .env.example .env
|
||||||
|
```
|
||||||
|
|
||||||
|
Open `.env` and fill in the three required values:
|
||||||
|
|
||||||
|
| Variable | How to generate |
|
||||||
|
|---|---|
|
||||||
|
| `ENCRYPTION_KEY` | `python3 -c "import secrets; print(secrets.token_hex(32))"` |
|
||||||
|
| `DB_PASSWORD` | Any strong random string |
|
||||||
|
| `REDIS_PASSWORD` | Any strong random string |
|
||||||
|
|
||||||
|
Leave `ENVIRONMENT=production`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 4 — Deploy the stack in Portainer
|
||||||
|
|
||||||
|
1. Open Portainer → **Stacks** → **Add stack**
|
||||||
|
2. Name it `mymidas-demo`
|
||||||
|
3. Select **Repository** as the build method
|
||||||
|
4. Set the repository URL to your Gitea URL and branch `main`
|
||||||
|
5. Set **Compose path** to `demo/docker-compose.yml`
|
||||||
|
6. Under **Environment variables**, add the four variables from your `.env`:
|
||||||
|
- `ENCRYPTION_KEY`
|
||||||
|
- `DB_PASSWORD`
|
||||||
|
- `REDIS_PASSWORD`
|
||||||
|
- `ENVIRONMENT` = `production`
|
||||||
|
7. Click **Deploy the stack**
|
||||||
|
|
||||||
|
> **Alternative (upload method):** If you prefer to upload the compose file directly, paste the contents of `demo/docker-compose.yml` into Portainer's web editor and add the environment variables manually.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 5 — Wait for first-time seeding
|
||||||
|
|
||||||
|
On first startup, the backend will:
|
||||||
|
1. Run database migrations
|
||||||
|
2. Detect that `DEMO_MODE=true` and no users exist
|
||||||
|
3. Seed the full demo dataset (~180 transactions, investments, budgets, tax data)
|
||||||
|
4. Save a compressed snapshot (`demo_snapshot.sql.gz`) for hourly resets
|
||||||
|
|
||||||
|
This takes about 30–60 seconds. Watch progress in Portainer → **Containers** → `mymidas-demo-backend-1` → **Logs**.
|
||||||
|
|
||||||
|
You'll see these log lines when ready:
|
||||||
|
```
|
||||||
|
demo_seed_complete
|
||||||
|
demo_snapshot_created
|
||||||
|
Uvicorn running on http://0.0.0.0:8000
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 6 — Configure your reverse proxy
|
||||||
|
|
||||||
|
Point your public domain at `http://<demo-server-ip>:4001`.
|
||||||
|
|
||||||
|
The frontend serves the React app and proxies all `/api/` calls to the backend internally — so you only need to expose port `4001`.
|
||||||
|
|
||||||
|
**nginx proxy manager example:**
|
||||||
|
- Scheme: `http`
|
||||||
|
- Forward hostname/IP: `<demo-server-ip>`
|
||||||
|
- Forward port: `4001`
|
||||||
|
- Enable websockets: off
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 7 — Verify
|
||||||
|
|
||||||
|
Open your domain in a browser. You should see the MyMidas login page with a yellow demo credentials banner:
|
||||||
|
|
||||||
|
```
|
||||||
|
Email: demo@mymidas.app
|
||||||
|
Password: demo123
|
||||||
|
```
|
||||||
|
|
||||||
|
Log in and confirm the data is populated (accounts, transactions, investments, tax page).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Hourly reset
|
||||||
|
|
||||||
|
The `resetter` container runs a cron job at the top of every hour that:
|
||||||
|
1. Restores the database from the snapshot taken on first boot
|
||||||
|
2. Deletes any files uploaded by demo users
|
||||||
|
|
||||||
|
No action needed — it runs automatically. You can check reset logs in Portainer → **Containers** → `mymidas-demo-resetter-1` → **Logs**.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Updating the demo
|
||||||
|
|
||||||
|
When you push code changes to main:
|
||||||
|
|
||||||
|
1. In Portainer → **Stacks** → `mymidas-demo` → **Editor** → **Update the stack**
|
||||||
|
2. This rebuilds the images and restarts all containers
|
||||||
|
3. On restart, migrations run automatically; the seed check runs and skips if already seeded (snapshot is preserved on the `demo_snapshot` volume)
|
||||||
|
|
||||||
|
> If you want to **force a full re-seed** (e.g. after adding more demo data): in Portainer, delete the `demo_snapshot` volume, then redeploy. The backend will re-seed and take a new snapshot on next startup.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
| Symptom | Check |
|
||||||
|
|---|---|
|
||||||
|
| Blank page / 502 | Backend still starting — wait 60 s and refresh |
|
||||||
|
| Login fails | Seeding still in progress — check backend logs |
|
||||||
|
| Data not resetting | Check resetter logs; confirm `demo_snapshot` volume has the `.sql.gz` file |
|
||||||
|
| "Snapshot not found" in resetter log | Backend may not have finished first-time seed — redeploy backend only |
|
||||||
128
demo/docker-compose.yml
Normal file
128
demo/docker-compose.yml
Normal file
|
|
@ -0,0 +1,128 @@
|
||||||
|
services:
|
||||||
|
backend:
|
||||||
|
build:
|
||||||
|
context: ../backend
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
target: production
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "8091:8000"
|
||||||
|
environment:
|
||||||
|
DATABASE_URL: "postgresql+asyncpg://demo_app:${DB_PASSWORD}@postgres:5432/demodb"
|
||||||
|
REDIS_URL: "redis://:${REDIS_PASSWORD}@redis:6379/0"
|
||||||
|
ENCRYPTION_KEY: "${ENCRYPTION_KEY}"
|
||||||
|
BACKUP_PASSPHRASE: "not-used-in-demo"
|
||||||
|
ENVIRONMENT: "${ENVIRONMENT:-production}"
|
||||||
|
ALLOW_REGISTRATION: "false"
|
||||||
|
BASE_CURRENCY: "GBP"
|
||||||
|
DEMO_MODE: "true"
|
||||||
|
DEMO_SNAPSHOT_PATH: "/app/demo_snapshot.sql.gz"
|
||||||
|
volumes:
|
||||||
|
- ../secrets:/run/secrets:ro
|
||||||
|
- demo_snapshot:/app/demo_snapshot.sql.gz:rw
|
||||||
|
- demo_uploads:/app/uploads
|
||||||
|
depends_on:
|
||||||
|
postgres:
|
||||||
|
condition: service_healthy
|
||||||
|
redis:
|
||||||
|
condition: service_healthy
|
||||||
|
networks:
|
||||||
|
- frontend_net
|
||||||
|
- backend_net
|
||||||
|
security_opt:
|
||||||
|
- no-new-privileges:true
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
start_period: 90s
|
||||||
|
|
||||||
|
frontend:
|
||||||
|
build:
|
||||||
|
context: ../frontend
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
target: production
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "4001:3000"
|
||||||
|
networks:
|
||||||
|
- frontend_net
|
||||||
|
security_opt:
|
||||||
|
- no-new-privileges:true
|
||||||
|
read_only: true
|
||||||
|
tmpfs:
|
||||||
|
- /tmp
|
||||||
|
- /var/cache/nginx
|
||||||
|
- /var/run
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:16-alpine
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
POSTGRES_DB: demodb
|
||||||
|
POSTGRES_USER: demo_app
|
||||||
|
POSTGRES_PASSWORD: "${DB_PASSWORD}"
|
||||||
|
volumes:
|
||||||
|
- demo_postgres:/var/lib/postgresql/data
|
||||||
|
- ../postgres/init:/docker-entrypoint-initdb.d:ro
|
||||||
|
networks:
|
||||||
|
- backend_net
|
||||||
|
security_opt:
|
||||||
|
- no-new-privileges:true
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "pg_isready -U demo_app -d demodb"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 5
|
||||||
|
|
||||||
|
redis:
|
||||||
|
image: redis:7-alpine
|
||||||
|
restart: unless-stopped
|
||||||
|
command: redis-server --requirepass "${REDIS_PASSWORD}"
|
||||||
|
networks:
|
||||||
|
- backend_net
|
||||||
|
security_opt:
|
||||||
|
- no-new-privileges:true
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "redis-cli", "-a", "${REDIS_PASSWORD}", "ping"]
|
||||||
|
interval: 10s
|
||||||
|
timeout: 5s
|
||||||
|
retries: 3
|
||||||
|
|
||||||
|
resetter:
|
||||||
|
image: alpine:3.19
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
DATABASE_URL: "postgresql://demo_app:${DB_PASSWORD}@postgres:5432/demodb"
|
||||||
|
DEMO_SNAPSHOT_PATH: "/snapshot/demo_snapshot.sql.gz"
|
||||||
|
BACKEND_URL: "http://backend:8000"
|
||||||
|
volumes:
|
||||||
|
- demo_snapshot:/snapshot:ro
|
||||||
|
- demo_uploads:/uploads:rw
|
||||||
|
- ./reset.sh:/reset.sh:ro
|
||||||
|
networks:
|
||||||
|
- backend_net
|
||||||
|
depends_on:
|
||||||
|
backend:
|
||||||
|
condition: service_healthy
|
||||||
|
entrypoint: >
|
||||||
|
sh -c "
|
||||||
|
apk add --no-cache postgresql-client curl &&
|
||||||
|
echo '0 * * * * sh /reset.sh >> /var/log/reset.log 2>&1' | crontab - &&
|
||||||
|
crond -f -l 6
|
||||||
|
"
|
||||||
|
security_opt:
|
||||||
|
- no-new-privileges:true
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
demo_postgres:
|
||||||
|
demo_snapshot:
|
||||||
|
demo_uploads:
|
||||||
|
|
||||||
|
networks:
|
||||||
|
frontend_net:
|
||||||
|
driver: bridge
|
||||||
|
backend_net:
|
||||||
|
driver: bridge
|
||||||
|
internal: true
|
||||||
25
demo/reset.sh
Normal file
25
demo/reset.sh
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
#!/bin/sh
|
||||||
|
# Hourly demo reset — restore DB from snapshot, purge uploads, bounce backend.
|
||||||
|
set -e
|
||||||
|
|
||||||
|
SNAPSHOT="${DEMO_SNAPSHOT_PATH:-/snapshot/demo_snapshot.sql.gz}"
|
||||||
|
UPLOADS_DIR="${UPLOADS_DIR:-/uploads}"
|
||||||
|
DB_URL="${DATABASE_URL}"
|
||||||
|
BACKEND="${BACKEND_URL:-http://backend:8000}"
|
||||||
|
|
||||||
|
echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] Starting demo reset"
|
||||||
|
|
||||||
|
# 1. Restore database from snapshot
|
||||||
|
if [ ! -f "$SNAPSHOT" ]; then
|
||||||
|
echo "ERROR: Snapshot not found at $SNAPSHOT — skipping reset"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
gunzip -c "$SNAPSHOT" | psql --single-transaction -v ON_ERROR_STOP=1 "$DB_URL"
|
||||||
|
echo " DB restored from snapshot"
|
||||||
|
|
||||||
|
# 2. Purge uploaded files (attachments added by demo users)
|
||||||
|
find "$UPLOADS_DIR" -type f -not -name ".gitkeep" -delete 2>/dev/null || true
|
||||||
|
echo " Uploads purged"
|
||||||
|
|
||||||
|
echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] Demo reset complete"
|
||||||
6
frontend/src/api/demo.ts
Normal file
6
frontend/src/api/demo.ts
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
import { api } from "./client";
|
||||||
|
|
||||||
|
export async function getDemoStatus(): Promise<{ demo_mode: boolean }> {
|
||||||
|
const r = await api.get<{ demo_mode: boolean }>("/demo/status");
|
||||||
|
return r.data;
|
||||||
|
}
|
||||||
16
frontend/src/components/DemoBanner.tsx
Normal file
16
frontend/src/components/DemoBanner.tsx
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
import { FlaskConical, RefreshCw } from "lucide-react";
|
||||||
|
import { useDemoMode } from "@/hooks/useDemoMode";
|
||||||
|
|
||||||
|
export default function DemoBanner() {
|
||||||
|
const isDemo = useDemoMode();
|
||||||
|
if (!isDemo) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="w-full bg-amber-500/15 border-b border-amber-500/30 px-4 py-2 flex items-center gap-2 text-amber-400 text-xs font-medium shrink-0">
|
||||||
|
<FlaskConical className="w-3.5 h-3.5 shrink-0" />
|
||||||
|
<span>Demo mode — all data is synthetic and resets hourly.</span>
|
||||||
|
<RefreshCw className="w-3 h-3 shrink-0 ml-0.5" />
|
||||||
|
<span className="text-amber-400/70">Password changes and backups are disabled.</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
@ -3,6 +3,7 @@ import Sidebar from "./Sidebar";
|
||||||
import TopBar from "./TopBar";
|
import TopBar from "./TopBar";
|
||||||
import MobileNav from "./MobileNav";
|
import MobileNav from "./MobileNav";
|
||||||
import ErrorBoundary from "@/components/ErrorBoundary";
|
import ErrorBoundary from "@/components/ErrorBoundary";
|
||||||
|
import DemoBanner from "@/components/DemoBanner";
|
||||||
|
|
||||||
interface AppShellProps {
|
interface AppShellProps {
|
||||||
children: React.ReactNode;
|
children: React.ReactNode;
|
||||||
|
|
@ -24,6 +25,7 @@ export default function AppShell({ children }: AppShellProps) {
|
||||||
}`}
|
}`}
|
||||||
>
|
>
|
||||||
<TopBar />
|
<TopBar />
|
||||||
|
<DemoBanner />
|
||||||
{/* Extra bottom padding on mobile so content clears the nav bar */}
|
{/* Extra bottom padding on mobile so content clears the nav bar */}
|
||||||
<main className="flex-1 overflow-y-auto p-4 md:p-6 lg:p-8 pb-24 lg:pb-8">
|
<main className="flex-1 overflow-y-auto p-4 md:p-6 lg:p-8 pb-24 lg:pb-8">
|
||||||
<ErrorBoundary>{children}</ErrorBoundary>
|
<ErrorBoundary>{children}</ErrorBoundary>
|
||||||
|
|
|
||||||
11
frontend/src/hooks/useDemoMode.ts
Normal file
11
frontend/src/hooks/useDemoMode.ts
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import { getDemoStatus } from "@/api/demo";
|
||||||
|
|
||||||
|
export function useDemoMode() {
|
||||||
|
const { data } = useQuery({
|
||||||
|
queryKey: ["demo-status"],
|
||||||
|
queryFn: getDemoStatus,
|
||||||
|
staleTime: Infinity,
|
||||||
|
});
|
||||||
|
return data?.demo_mode ?? false;
|
||||||
|
}
|
||||||
|
|
@ -2,11 +2,13 @@ import { useState } from "react";
|
||||||
import { useNavigate } from "react-router-dom";
|
import { useNavigate } from "react-router-dom";
|
||||||
import { login, loginTotp, getMe } from "@/api/auth";
|
import { login, loginTotp, getMe } from "@/api/auth";
|
||||||
import { useAuthStore } from "@/store/authStore";
|
import { useAuthStore } from "@/store/authStore";
|
||||||
import { Coins, Eye, EyeOff, Loader2, ShieldCheck } from "lucide-react";
|
import { Coins, Eye, EyeOff, FlaskConical, Loader2, ShieldCheck } from "lucide-react";
|
||||||
|
import { useDemoMode } from "@/hooks/useDemoMode";
|
||||||
|
|
||||||
export default function LoginPage() {
|
export default function LoginPage() {
|
||||||
const navigate = useNavigate();
|
const navigate = useNavigate();
|
||||||
const { setToken, setTotpEnabled } = useAuthStore();
|
const { setToken, setTotpEnabled } = useAuthStore();
|
||||||
|
const isDemo = useDemoMode();
|
||||||
|
|
||||||
const [email, setEmail] = useState("");
|
const [email, setEmail] = useState("");
|
||||||
const [password, setPassword] = useState("");
|
const [password, setPassword] = useState("");
|
||||||
|
|
@ -83,6 +85,18 @@ export default function LoginPage() {
|
||||||
<span className="text-2xl font-bold">MyMidas</span>
|
<span className="text-2xl font-bold">MyMidas</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{isDemo && (
|
||||||
|
<div className="mb-4 rounded-xl border border-amber-500/30 bg-amber-500/10 px-4 py-3 flex items-start gap-3">
|
||||||
|
<FlaskConical className="w-4 h-4 text-amber-400 mt-0.5 shrink-0" />
|
||||||
|
<div>
|
||||||
|
<p className="text-sm font-semibold text-amber-400 mb-1">Demo instance</p>
|
||||||
|
<p className="text-xs text-amber-400/80 font-mono">Email: <span className="text-amber-300">demo@mymidas.app</span></p>
|
||||||
|
<p className="text-xs text-amber-400/80 font-mono">Password: <span className="text-amber-300">demo123</span></p>
|
||||||
|
<p className="text-xs text-amber-400/60 mt-1">Data resets hourly. Password changes disabled.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
<div className="bg-card border border-border rounded-xl p-8 shadow-xl">
|
<div className="bg-card border border-border rounded-xl p-8 shadow-xl">
|
||||||
{!challengeToken ? (
|
{!challengeToken ? (
|
||||||
<>
|
<>
|
||||||
|
|
|
||||||
|
|
@ -15,6 +15,7 @@ import type { AiSettings } from "@/api/settings";
|
||||||
import type { BackupFile } from "@/api/admin";
|
import type { BackupFile } from "@/api/admin";
|
||||||
import { cn } from "@/utils/cn";
|
import { cn } from "@/utils/cn";
|
||||||
import { format } from "date-fns";
|
import { format } from "date-fns";
|
||||||
|
import { useDemoMode } from "@/hooks/useDemoMode";
|
||||||
import {
|
import {
|
||||||
User, Shield, MonitorSmartphone, Download, HardDrive,
|
User, Shield, MonitorSmartphone, Download, HardDrive,
|
||||||
Loader2, CheckCircle, Eye, EyeOff, Trash2,
|
Loader2, CheckCircle, Eye, EyeOff, Trash2,
|
||||||
|
|
@ -169,6 +170,7 @@ function SecuritySection() {
|
||||||
}
|
}
|
||||||
|
|
||||||
function PasswordCard() {
|
function PasswordCard() {
|
||||||
|
const isDemo = useDemoMode();
|
||||||
const [current, setCurrent] = useState("");
|
const [current, setCurrent] = useState("");
|
||||||
const [next, setNext] = useState("");
|
const [next, setNext] = useState("");
|
||||||
const [confirm, setConfirm] = useState("");
|
const [confirm, setConfirm] = useState("");
|
||||||
|
|
@ -187,7 +189,7 @@ function PasswordCard() {
|
||||||
|
|
||||||
const mismatch = next.length > 0 && confirm.length > 0 && next !== confirm;
|
const mismatch = next.length > 0 && confirm.length > 0 && next !== confirm;
|
||||||
const tooShort = next.length > 0 && next.length < 10;
|
const tooShort = next.length > 0 && next.length < 10;
|
||||||
const canSubmit = current && next && confirm && next === confirm && next.length >= 10;
|
const canSubmit = !isDemo && current && next && confirm && next === confirm && next.length >= 10;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className={cardCls}>
|
<div className={cardCls}>
|
||||||
|
|
@ -196,6 +198,12 @@ function PasswordCard() {
|
||||||
<SectionTitle>Change Password</SectionTitle>
|
<SectionTitle>Change Password</SectionTitle>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{isDemo && (
|
||||||
|
<div className="rounded-lg border border-amber-500/30 bg-amber-500/10 px-3 py-2 text-xs text-amber-400">
|
||||||
|
Password changes are disabled in demo mode.
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
{success && <SuccessBanner message="Password changed successfully" />}
|
{success && <SuccessBanner message="Password changed successfully" />}
|
||||||
{mutation.isError && <ErrorBanner message={(mutation.error as any)?.response?.data?.detail ?? "Password change failed"} />}
|
{mutation.isError && <ErrorBanner message={(mutation.error as any)?.response?.data?.detail ?? "Password change failed"} />}
|
||||||
|
|
||||||
|
|
@ -498,6 +506,21 @@ function SessionsSection() {
|
||||||
// ─── Backups ──────────────────────────────────────────────────────────────────
|
// ─── Backups ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function BackupsSection() {
|
function BackupsSection() {
|
||||||
|
const isDemo = useDemoMode();
|
||||||
|
if (isDemo) {
|
||||||
|
return (
|
||||||
|
<div className={cardCls}>
|
||||||
|
<div className="flex items-center gap-2 mb-1">
|
||||||
|
<HardDrive className="w-4 h-4 text-muted-foreground" />
|
||||||
|
<SectionTitle>Backups</SectionTitle>
|
||||||
|
</div>
|
||||||
|
<div className="rounded-lg border border-amber-500/30 bg-amber-500/10 px-4 py-3 text-sm text-amber-400">
|
||||||
|
Backups are disabled in this demo instance. In a real installation, encrypted nightly backups run automatically and can be downloaded or restored here.
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
const qc = useQueryClient();
|
const qc = useQueryClient();
|
||||||
const [restoreTarget, setRestoreTarget] = useState<string | null>(null);
|
const [restoreTarget, setRestoreTarget] = useState<string | null>(null);
|
||||||
const [restoreSuccess, setRestoreSuccess] = useState("");
|
const [restoreSuccess, setRestoreSuccess] = useState("");
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue