commit 4b98219bf7c411be4b7b3cee4562347c371c09b5 Author: mega Date: Thu Mar 19 11:32:17 2026 +0000 first commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9952bba --- /dev/null +++ b/.gitignore @@ -0,0 +1,42 @@ +# Dependencies +node_modules/ +.pnp +.pnp.js + +# Build outputs +.next/ +out/ +dist/ +build/ +__pycache__/ +*.pyc +*.pyo +*.egg-info/ +.eggs/ + +# Environment files — NEVER commit these +.env +.env.local +.env.*.local + +# IDE +.vscode/ +.idea/ +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db + +# Docker +*.log + +# Claude Code local settings +.claude/ + +# TypeScript build cache +*.tsbuildinfo + +# Next.js auto-generated type file +next-env.d.ts diff --git a/ACTION_PLAN.md b/ACTION_PLAN.md new file mode 100644 index 0000000..2027141 --- /dev/null +++ b/ACTION_PLAN.md @@ -0,0 +1,149 @@ +# BMS Action Plan — Post-Audit Fixes + +> Generated: 2026-03-10 +> Based on cross-referencing live site (bmsdemo.rdx4.com) against IMPROVEMENTS.md and source audit. +> Excludes: Clerk auth (intentional public demo), scheduled PDF email (deferred). + +--- + +## Phase 1 — Trivial Fixes (< 30 min, no backend, no context changes) + +These are single-file, low-risk changes. 
+ +| # | File | Issue | Fix | +|---|------|-------|-----| +| 1.1 | `components/layout/sidebar.tsx` | Network is in the "Safety" group alongside Leak and Fire, which is semantically wrong | Move `/network` into the "Infrastructure" group, below Generator | +| 1.2 | `lib/api.ts` | Base path is `/api/backend` and all fetch paths start `/api/...`, resulting in `/api/backend/api/...` — looks like a double-prefix at first glance | Add a one-line comment above `const BASE` explaining the proxy path convention | +| 1.3 | `IMPROVEMENTS.md` | Settings page (`/settings`) is fully built but completely absent from the plan | Add a new entry to Phase 6 or a new Phase 7 "Untracked Additions" section to document it | +| 1.4 | `IMPROVEMENTS.md` | Item 6.11 "mini floor map thumbnail" is marked `[x]` but what was built is a `RoomStatusGrid` (tabular room stats), not a visual rack-grid thumbnail | Update the description to clarify what was actually delivered, or un-tick and add to backlog | + +--- + +## Phase 2 — Settings Page: Wire Up Persistence + +Currently all Settings save buttons have no `onClick` handler and threshold edits are never written anywhere. All pages read from the static `THRESHOLDS` constant in `lib/thresholds.ts`, making the entire Settings page cosmetic. + +This phase makes Settings functional without a backend API — using `localStorage` and a React context so changes persist across refreshes and propagate to all pages at runtime. 

### 2.1 — Create a `ThresholdContext`

**New file:** `lib/threshold-context.tsx`

- Wraps the static `THRESHOLDS` object as default values
- On mount, reads overrides from `localStorage` key `bms_thresholds` and merges
- Exposes `thresholds` (the merged values) and `setThresholds(patch)` (writes to localStorage and re-renders)
- Re-export a `useThresholds()` hook

### 2.2 — Add the provider to the dashboard layout

**File:** `app/(dashboard)/layout.tsx`

- Wrap children in `<ThresholdProvider>` so all dashboard pages share the same context

### 2.3 — Update pages to read from context

Pages that currently import and use `THRESHOLDS` directly need to call `useThresholds()` instead. A grep for `THRESHOLDS` in `app/(dashboard)/` will give the full list. Likely candidates:
- `environmental/page.tsx` (temp/humidity thresholds for heatmap + ASHRAE table)
- `cooling/page.tsx` (filter ΔP, COP, compressor thresholds)
- `power/page.tsx` (rack power warn/crit lines on bar chart)
- `capacity/page.tsx` (radial gauge colour bands)
- `floor-map/page.tsx` (rack tile colour scale)

`lib/thresholds.ts` stays as the canonical defaults — no change needed there.

### 2.4 — Wire the Save buttons in `settings/page.tsx`

**File:** `app/(dashboard)/settings/page.tsx`

- `ThresholdsTab`: call `setThresholds({ temp: { warn, critical }, humidity: { ... }, power: { ... } })` in the Save onClick
- `ProfileTab` / `NotificationsTab`: these are cosmetic for a demo — write to `localStorage` under `bms_profile` / `bms_notifications` keys so at least the values survive a refresh, even if they don't affect anything functional

### 2.5 — Add a visible "saved" confirmation

Currently there is no feedback when Save is clicked. Add a brief `sonner` toast (the project already uses it) on successful save. The `Toaster` is already mounted in the dashboard layout. 
+ +--- + +## Phase 3 — Dashboard: True Mini Floor Map Thumbnail + +Item 6.11 was marked done but delivered a room status table rather than a visual map. + +**File:** `app/(dashboard)/dashboard/page.tsx` + new component `components/dashboard/mini-floor-map.tsx` + +- Replace (or sit alongside) `RoomStatusGrid` in the bottom row with a compact rack-grid tile +- Re-use the colour logic already written in `floor-map/page.tsx` (temp overlay colours by default) +- Each rack tile is a small coloured square (~12×16px), labelled with rack ID on hover tooltip +- CRAC units shown as labelled strips at room edge (same pattern as full floor map) +- Click navigates to `/floor-map` +- Room tabs (Hall A / Hall B) if both rooms are present +- Read from the same `/api/readings/rack-status` data already fetched on dashboard + +This requires no backend changes — just a new presentational component that reuses existing API data. + +--- + +## Phase 4 — Floor Map: Zoom/Pan + CRAC Coverage Shading + +These are the two remaining open items from 6.12. 

### 4.1 — Zoom / Pan

**File:** `app/(dashboard)/floor-map/page.tsx`

- Add `react-zoom-pan-pinch` (or equivalent) as a dependency: `pnpm add react-zoom-pan-pinch`
- Wrap the rack grid `div` in a `<TransformWrapper>` / `<TransformComponent>` block
- Add zoom controls (+ / − / reset buttons) in the map header, above the legend
- Pinch-to-zoom should work on touch devices automatically via the library

### 4.2 — CRAC Coverage Shading

**File:** `app/(dashboard)/floor-map/page.tsx`

- Add a 5th overlay option to the overlay selector: **CRAC Coverage**
- When active, colour each rack tile according to which CRAC unit is its nearest thermal neighbour (assign by row proximity — CRACs at room ends serve the rows closest to them, split at the midpoint)
- Use a per-CRAC colour palette (4–6 distinct hues, low-opacity background fill)
- Show CRAC ID in the legend with its assigned colour
- No backend required — assignment is purely spatial, computed client-side from rack row index and CRAC position

---

## Phase 5 — Environmental: Particle Count (ISO 14644)

Item 6.10. This is the only remaining `[ ]` item that hasn't been deferred. 

### 5.1 — Simulator

**File:** backend simulator (not in frontend repo — coordinate with backend)

- Add a `ParticleBot` (or extend an existing env bot) that emits:
  - `particles_0_5um` — count/m³, particles ≥0.5 µm
  - `particles_5um` — count/m³, particles ≥5 µm
  - Derived ISO class (1–9) per room, changing slowly with occasional spikes

### 5.2 — Backend API

- New endpoint: `GET /api/environmental/particles?site_id=&room_id=`
- Returns current counts + derived ISO class per room

### 5.3 — Frontend

**File:** `app/(dashboard)/environmental/page.tsx` + `lib/api.ts`

- Add `fetchParticleStatus(siteId)` to `lib/api.ts`
- Add a new panel on the Environmental page: **"Air Quality — ISO 14644"**
  - Per-room ISO class badge (ISO 1–9, colour-coded: green ≤7, amber 8, red 9)
  - 0.5 µm and 5 µm count bars with threshold lines (ISO 8 limits: 3,520,000 and 29,300 /m³)
  - A small note that the DC target is ISO 8 (≤3,520,000 particles ≥0.5 µm/m³, per ISO 14644-1 Class 8)
  - Trend sparkline (last 24h)

---

## Execution Order Summary

| Phase | Scope | Backend needed? | Effort estimate |
|-------|-------|-----------------|-----------------|
| 1 — Trivial fixes | IMPROVEMENTS.md + sidebar + api.ts comment | No | ~20 min |
| 2 — Settings persistence | New context + localStorage + toast feedback | No | ~2–3 h |
| 3 — Mini floor map | New dashboard component | No | ~2–3 h |
| 4 — Floor map zoom/pan + CRAC shading | react-zoom-pan-pinch + overlay logic | No | ~3–4 h |
| 5 — Particle count | New simulator bot + API endpoint + env panel | Yes | ~3–4 h total | diff --git a/IMPROVEMENTS.md b/IMPROVEMENTS.md new file mode 100644 index 0000000..b108ccd --- /dev/null +++ b/IMPROVEMENTS.md @@ -0,0 +1,271 @@ +# BMS Improvement Plan — Singapore DC01 + +> Read this file at the start of the next session to restore context. +> Generated from full page review (all 9 pages read and analysed). 
+ +--- + +## Phased Execution Plan + +### Phase 1 — Frontend Quick Wins (no backend/simulator changes) +| # | Page | Improvement | Status | +|---|------|-------------|--------| +| 1.1 | Alarms | Escalation timer — colour-ramping counter for unacknowledged critical alarms | [x] | +| 1.2 | Alarms | MTTR stat card — derived from triggered_at → resolved_at | deferred to Phase 3 (needs resolved_at from backend) | +| 1.3 | Assets | Sortable inventory table columns | [x] | +| 1.4 | Environmental | Humidity overlay toggle on heatmap | [x] | +| 1.5 | Environmental | Dew point derived client-side (Magnus formula from temp + humidity) | [x] | +| 1.6 | Environmental | ASHRAE A1 compliance table per rack | [x] | +| 1.8 | Capacity | Stranded power total kW shown prominently | [x] | +| 1.9 | Environmental | Dew point vs. supply air temp chart (client-side derived) | [x] | +| 1.10 | Floor Map | Alarm badge overlay option | [x] | + +### Phase 2 — Simulator Expansion (new bots + topology) +| # | Bot | Status | +|---|-----|--------| +| 2.1 | GeneratorBot — fuel_pct, load_kw, run_hours, state, scenarios: GENERATOR_FAILURE / LOW_FUEL | [x] | +| 2.2 | AtsBot — active_feed, transfer_count, last_transfer_ms, scenario: ATS_TRANSFER | [x] | +| 2.3 | ChillerBot — chw_supply/return_c, flow_gpm, cop, condenser_pressure_bar, scenario: CHILLER_FAULT | [x] | +| 2.4 | VesdaBot — level (normal/alert/action/fire), obscuration_pct, zone_id, scenarios: VESDA_ALERT / VESDA_FIRE | [x] | +| 2.5 | Extend PduBot — per-phase kW + amps (A/B/C), imbalance_pct, scenario: PHASE_IMBALANCE | [x] | +| 2.6 | Extend WaterLeakBot — floor_zone, under_floor, near_crac metadata | [x] | +| 2.7 | Topology update — generators, ats, chillers, vesda zones, extra leak sensors | [x] | + +### Phase 3 — Backend API Expansion +| # | Endpoint | Status | +|---|----------|--------| +| 3.1 | GET /api/generator/status | [x] | +| 3.2 | GET /api/power/ats | [x] | +| 3.3 | GET /api/power/phase | [x] | +| 3.4 | GET /api/power/redundancy | 
[x] | +| 3.5 | GET /api/cooling/status (chiller) | [x] | +| 3.6 | GET /api/cooling/history (COP + capacity over time) | [x] | +| 3.7 | GET /api/fire/status (VESDA zones) | [x] | +| 3.8 | GET /api/leak/status (with location metadata) | [x] | +| 3.9 | GET /api/power/utility (grid import, tariff, monthly kWh) | [x] | +| 3.10 | GET /api/reports/energy (kWh cost, PUE 30-day trend) | [x] | +| 3.11 | Extend cooling/{crac_id} detail — add airflow_cfm | [x] (was already done in env.py) | + +### Phase 4 — Existing Pages Wired Up (uses Phase 2+3 data) +| # | Page | Improvement | Status | +|---|------|-------------|--------| +| 4.1 | Dashboard | Generator status KPI card | [x] | +| 4.2 | Dashboard | Leak detection KPI card | [x] | +| 4.3 | Dashboard | UPS worst-case runtime card | deferred (UPS runtime already shown on Power page) | +| 4.4 | Power | Generator section | [x] | +| 4.5 | Power | ATS transfer switch panel | [x] | +| 4.6 | Power | PDU branch circuit section | [x] phase imbalance table | +| 4.7 | Power | Phase imbalance warning on UPS cards | [x] | +| 4.8 | Power | Power redundancy level indicator | [x] | +| 4.9 | Cooling | COP trend chart per CRAC | [x] (in CRAC detail sheet) | +| 4.10 | Cooling | Chiller plant summary panel | [x] | +| 4.11 | Cooling | Predictive filter replacement estimate | [x] | +| 4.12 | Cooling | Airflow CFM tile in fleet summary | [x] | +| 4.13 | Environmental | Leak sensor map panel | [x] | +| 4.14 | Environmental | VESDA/smoke status panel | [x] | +| 4.15 | Floor Map | Leak sensor overlay layer | [x] (panel below map) | +| 4.16 | Floor Map | Power feed (A/B) overlay layer | [x] | +| 4.17 | Floor Map | Humidity 3rd overlay | [x] (done in Phase 1) | +| 4.18 | Capacity | N+1 cooling margin indicator | [x] | +| 4.19 | Capacity | Capacity runway chart | [x] | +| 4.20 | Alarms | Generator alarm category | [x] (alarm engine raises gen alarms automatically) | +| 4.21 | Alarms | Leak alarm category with floor map link | [x] (alarm engine already 
handles leak) | +| 4.22 | Alarms | Fire/VESDA alarm category | [x] (alarm engine raises vesda_level alarms) | +| 4.23 | Assets | PDU as asset type | [x] (PDU phase monitoring section in assets grid) | +| 4.24 | Assets | Rack elevation diagram in RackDetailSheet | [x] (already implemented as RackDiagram) | +| 4.25 | Reports | PUE 30-day trend graph | [x] (daily IT kW trend + PUE estimated) | +| 4.26 | Reports | Energy cost section | [x] | + +### Phase 5 — New Pages +| # | Page | Status | +|---|------|--------| +| 5.1 | Generator & Power Path | [x] | +| 5.2 | Leak Detection | [x] | +| 5.3 | Fire & Life Safety | [x] | + +### Phase 6 — Low Priority & Polish +| # | Item | Status | +|---|------|--------| +| 6.1 | Alarms: assigned-to column + maintenance window suppression | [x] (assigned-to with localStorage) | +| 6.2 | Alarms: root cause correlation | [x] (5-rule RootCausePanel above stat cards) | +| 6.3 | Assets: warranty expiry + lifecycle status | [x] (lifecycle status column added) | +| 6.4 | Assets: CSV import/export for CMDB | [x] (CSV export added) | +| 6.5 | Reports: comparison period (this week vs last) | [x] | +| 6.6 | Reports: scheduled PDF email | [ ] | +| 6.7 | New page: Network Infrastructure | [x] | +| 6.8 | New page: Energy & Sustainability | [x] | +| 6.9 | New page: Maintenance windows | [x] | +| 6.10 | Environmental: particle count (ISO 14644) | [ ] | +| 6.11 | Dashboard: room quick-status grid (Hall A / Hall B avg temp, power, CRAC state) — visual rack-grid thumbnail deferred to backlog | [x] | +| 6.12 | Floor Map: zoom/pan + CRAC coverage shading | [ ] | + +### Phase 7 — Untracked Additions +| # | Item | Status | +|---|------|--------| +| 7.1 | Settings page — Profile, Notifications, Thresholds, Site Config tabs | [x] | +| 7.2 | Floor layout editor — server-side persistence via site_config table (PUT/GET /api/floor-layout) | [x] | +| 7.3 | Rack naming convention updated to SG1A01.xx / SG1B01.xx format across all topology files | [x] | +| 7.4 | 
80-rack topology — Hall A and Hall B each have 2 rows × 20 racks | [x] | + +--- + +--- + +## Dashboard (`/dashboard`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Sensor | Add Generator status KPI card (fuel %, run-hours, transfer state) | High | +| 2 | Sensor | Add Water/Leak Detection KPI card — badge showing any active leaks | High | +| 3 | Sensor | Add Raised floor differential pressure widget | Medium | +| 4 | Sensor | Show UPS state in KPI row (mains vs. battery, worst-case runtime) | High | +| 5 | Visual | Dashboard KPI row: add 5th card or replace PUE with site health score | Medium | +| 6 | Visual | Add mini floor map thumbnail as 4th bottom-row panel | Medium | +| 7 | Info | Show carbon intensity / CO2e alongside PUE | Low | +| 8 | Info | Add MTBF / uptime streak counter for critical infrastructure | Low | + +--- + +## Cooling (`/cooling`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Sensor | Add Chiller plant metrics — CHW supply/return temps, flow rate, chiller COP, condenser pressure | High | +| 2 | Sensor | Add Cooling tower stats — approach temp, basin level, blow-down rate, fan speed | Medium | +| 3 | Sensor | Glycol/refrigerant level indicator per CRAC | High | +| 4 | Sensor | Airflow (CFM) per CRAC — not just fan % | Medium | +| 5 | Sensor | Condenser water inlet/outlet temperature for water-cooled units | Medium | +| 6 | Sensor | Raised floor tile differential pressure — 0.04–0.08 in. W.C. target range | High | +| 7 | Sensor | Hot/cold aisle containment breach indicator — door open, blanking panels | Medium | +| 8 | Sensor | Chilled water flow rate (GPM) and heat rejection kW | Medium | +| 9 | Visual | COP trend chart over time per unit (currently only static value) | High | +| 10 | Visual | Fleet summary: add total fleet airflow (CFM) tile | Medium | +| 11 | Visual | Add cooling efficiency vs. 
IT load scatter/trend chart | Medium | +| 12 | Info | Predictive filter replacement — estimated days until change-out based on dP rate of rise | Medium | + +--- + +## Power (`/power`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Sensor | Add Generator status section — active/standby, fuel %, last test date, load kW | High | +| 2 | Sensor | Add ATS/STS transfer switch status — which feed active (Utility A/B), transfer time | High | +| 3 | Sensor | Add PDU branch circuit monitoring — per-phase kW, amps, trip status | High | +| 4 | Sensor | Power quality metrics — THD, voltage sag/swell events, neutral current | Medium | +| 5 | Sensor | Busway / overhead busbar load per tap-off box | Medium | +| 6 | Sensor | Utility metering — grid import kW, tariff period, cost/kWh, monthly kWh | Medium | +| 7 | Sensor | Phase imbalance per panel/UPS — flag >5% imbalance | High | +| 8 | Visual | UPS cards: add input voltage/frequency per phase, bypass mode status | Medium | +| 9 | Info | Add power redundancy level indicator — N, N+1, 2N — highlight single points of failure | High | +| 10 | Info | Annualised energy cost projection alongside kWh | Low | + +--- + +## Environmental (`/environmental`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Sensor | Add Dew point derived value per room — approaching supply temp = condensation risk | High | +| 2 | Sensor | Add Water/leak detection sensors map — floor, under-floor, drip trays, pipe runs | High | +| 3 | Sensor | Smoke detector / VESDA status panel — aspirating detector alarm levels | High | +| 4 | Sensor | Raised floor pressure differential trend chart | Medium | +| 5 | Sensor | Hot aisle inlet temperature per rack row (return air) | Medium | +| 6 | Sensor | Server inlet temperature sensors from IPMI per device | Medium | +| 7 | Sensor | Particle count (ISO 14644 class) | Low | +| 8 | Visual | Heatmap: add humidity overlay toggle (currently separate 
chart only) | High | +| 9 | Visual | Add ASHRAE compliance table per rack — flag racks outside A1/A2 envelope | Medium | +| 10 | Visual | Add dew point vs. supply air temp chart with condensation risk zone | Medium | +| 11 | Info | Show absolute humidity (g/kg) alongside RH for ASHRAE compliance | Low | + +--- + +## Floor Map (`/floor-map`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Sensor | Add leak sensor overlay — highlight tiles where water sensors are placed | High | +| 2 | Sensor | Add smoke/VESDA zone overlay | Medium | +| 3 | Sensor | Add PDU/power path overlay — show which feed (A/B) each rack is on | High | +| 4 | Visual | Add 3rd overlay: humidity | Medium | +| 5 | Visual | Add airflow arrows showing cold aisle → rack → hot aisle direction | Low | +| 6 | Visual | Show blank rack slots count on each rack tile (U available) | Medium | +| 7 | Visual | Add rack-level alarm badge as an overlay option | High | +| 8 | Visual | Add zoom/pan for larger floor plans | Medium | +| 9 | Info | Add CRAC coverage radius shading showing which racks each CRAC thermally serves | Medium | + +--- + +## Capacity (`/capacity`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Visual | Add capacity runway chart — at current growth rate, weeks until power/cooling capacity hit | High | +| 2 | Sensor | Add U-space utilisation per rack — units occupied vs. total 42U | Medium | +| 3 | Sensor | Generator fuel capacity as a capacity dimension | Medium | +| 4 | Info | Thermal capacity per CRAC vs. 
current IT load — N+1 cooling margin | High | +| 5 | Info | Add growth projection input — operator enters expected kW/month to forecast capacity date | Medium | +| 6 | Visual | Cross-room comparison radar chart (Power %, Cooling %, Space %) | Medium | +| 7 | Visual | Show stranded power total in kW (not just per-rack list) | Medium | +| 8 | Sensor | Weight capacity per rack — floor load (kg/m2) | Low | + +--- + +## Alarms (`/alarms`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Sensor | Add Generator alarm category (fuel low, start fail, overload) | High | +| 2 | Sensor | Add Leak alarm category with direct link to leak sensor on floor map | High | +| 3 | Sensor | Add Fire/VESDA alarm category with severity escalation | High | +| 4 | Sensor | Add Network device alarm category (switch down, link fault, LACP failure) | Medium | +| 5 | Visual | Add escalation timer — how long critical alarm unacknowledged, colour ramp | High | +| 6 | Visual | Add MTTR stat card alongside existing stat cards | Medium | +| 7 | Visual | Alarm table: add "Assigned to" column | Low | +| 8 | Visual | Add alarm suppression / maintenance window toggle | Medium | +| 9 | Info | Root cause correlation — surface linked alarms (e.g. 
rack temp high + CRAC fan low) | Medium | + +--- + +## Assets (`/assets`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Sensor | Per-device power draw from PDU outlet monitoring (not estimated) | High | +| 2 | Sensor | Server inlet temperature from IPMI/iDRAC per device | High | +| 3 | Sensor | Add PDUs as asset type with per-outlet monitoring | High | +| 4 | Sensor | Network device status (switch uptime, port count, active links) | Medium | +| 5 | Visual | Inventory table: add sortable columns (currently unsortable) | High | +| 6 | Visual | Add rack elevation diagram (visual U-space view) in RackDetailSheet | High | +| 7 | Visual | Add device age / warranty expiry column in inventory | Medium | +| 8 | Info | Add DCIM-style lifecycle status — Active / Decomm / Planned | Low | +| 9 | Info | Add asset import/export (CSV) for CMDB sync | Medium | + +--- + +## Reports (`/reports`) + +| # | Type | Improvement | Priority | +|---|------|-------------|----------| +| 1 | Sensor | Add energy cost report — kWh, estimated cost at tariff, month-to-date | High | +| 2 | Visual | Add PUE trend graph — 30-day rolling PUE vs. target | High | +| 3 | Visual | Add cooling efficiency (kW IT / kW cooling) over time | Medium | +| 4 | Visual | Add alarm MTTR and alarm volume trend per week | Medium | +| 5 | Info | Add scheduled report configuration — email PDF daily/weekly | Medium | +| 6 | Info | Add comparison period — this week vs. 
last week | Medium | +| 7 | Info | Add sustainability section — CO2e, renewable fraction, WUE | Low | +| 8 | Info | Add SLA compliance section — uptime %, incidents, breach risk | Medium | +| 9 | Info | Expand CSV exports: PDU branch data, CRAC detailed logs, humidity history | Medium | + +--- + +## New Pages to Build + +| Page | Description | Priority | +|------|-------------|----------| +| Generator & Power Path | ATS status, generator load, fuel level, transfer switch history | High | +| Leak Detection | Site-wide leak sensor map, sensor status, historical events | High | +| Fire & Life Safety | VESDA levels, smoke detector zones, suppression system status | High | +| Network Infrastructure | Core/edge switch health, port utilisation, link status | Medium | +| Energy & Sustainability | kWh cost, PUE trend, CO2e, WUE | Medium | +| Maintenance | Planned outages, maintenance windows, alarm suppression | Low | diff --git a/README.md b/README.md new file mode 100644 index 0000000..db78414 --- /dev/null +++ b/README.md @@ -0,0 +1,112 @@ +# DemoBMS + +Intelligent Data Center Infrastructure Management platform. + +## Stack + +- **Frontend:** Next.js 16 + TypeScript + shadcn/ui + Recharts +- **Backend:** Python FastAPI +- **Database:** PostgreSQL + TimescaleDB +- **Auth:** Clerk +- **Runtime:** Docker Compose + +--- + +## Ports + +| Service | Port | +|----------|------| +| Frontend | 5646 | +| Backend | 8000 (internal — not exposed publicly) | +| Database | 5432 (internal) | + +The frontend is the only service that needs to be reachable. Point your reverse proxy at port **5646**. + +--- + +## API Calls & Reverse Proxy + +The frontend never hardcodes a backend hostname. All API calls use the relative path `/api/backend/*`, which Next.js rewrites to the backend on the internal Docker network (`BACKEND_INTERNAL_URL`). From the browser's perspective everything is same-origin — your reverse proxy only needs to forward to port 5646. 
+ +``` +Browser → Reverse Proxy → :5646 (Next.js) + ↓ server-side rewrite + :8000 (FastAPI) — internal only +``` + +--- + +## Quick Start + +### 1. Configure Clerk (required for auth) + +Create a free account at https://clerk.com, create an application, then fill in the keys: + +**`frontend/.env.local`** +``` +NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=pk_test_... +CLERK_SECRET_KEY=sk_test_... +``` + +**`backend/.env`** +``` +CLERK_SECRET_KEY=sk_test_... +CLERK_JWKS_URL=https://your-app.clerk.accounts.dev/.well-known/jwks.json +``` + +### 2. Run with Docker Compose + +```bash +docker compose up --build +``` + +- Frontend: http://your-server:5646 +- API Docs: http://your-server:5646/api/backend/docs + +### 3. Run locally (development) + +**Frontend** +```bash +cd frontend +pnpm install +pnpm dev --port 5646 +``` + +**Backend** +```bash +cd backend +python -m venv .venv +source .venv/bin/activate # Windows: .venv\Scripts\activate +pip install -r requirements.txt +uvicorn main:app --reload +``` + +**Database only (via Docker)** +```bash +docker compose up db +``` + +--- + +## Project Structure + +``` +/bms + /frontend Next.js app + /backend FastAPI app + /simulators Sensor bots (Phase 2) + docker-compose.yml +``` + +## Phase Progress + +- [x] Phase 1 — Foundation (current) +- [ ] Phase 2 — Data Pipeline & Simulator Bots +- [ ] Phase 3 — Core Dashboard (live data) +- [ ] Phase 4 — Environmental Monitoring +- [ ] Phase 5 — Power Management +- [ ] Phase 6 — Cooling & AI Panel +- [ ] Phase 7 — Asset Management +- [ ] Phase 8 — Alarms & Events +- [ ] Phase 9 — Reports +- [ ] Phase 10 — Polish & Hardening diff --git a/UI_UX_PLAN.md b/UI_UX_PLAN.md new file mode 100644 index 0000000..9036e18 --- /dev/null +++ b/UI_UX_PLAN.md @@ -0,0 +1,367 @@ +# DemoBMS — UI/UX Improvement Plan + +> Generated from full page-by-page review (16 pages + shared layout). +> Work through phases in order — each phase builds on the previous. 
+ +--- + +## Phase 1 — Foundation Fixes +**Goal:** Fix broken/incomplete global chrome and shared infrastructure before touching individual pages. +**Effort:** ~1–2 days | **Risk:** Low + +### 1.1 Topbar title map — complete it +- Add missing entries for: generator, leak, fire, network, energy, maintenance +- Replace pathname-based map with a route config object shared with sidebar + +### 1.2 Topbar — replace placeholder avatar with Clerk UserButton +- Swap the blue circle for `` from `@clerk/nextjs` +- Removes the hardcoded placeholder and provides sign-out, profile link for free + +### 1.3 Topbar — remove or disable the site selector +- The dropdown is hardcoded with 3 fake sites and does nothing +- Either wire it to real site context, or remove it entirely until it's real +- A non-functional control erodes trust + +### 1.4 Topbar — consolidate the alarm badge +- Remove the alarm count badge from the sidebar nav item +- Keep it only on the topbar bell icon (single canonical location) + +### 1.5 Sidebar — add section groupings with dividers +- Split 15 nav items into labelled groups: + - **OVERVIEW** — Dashboard, Floor Map + - **INFRASTRUCTURE** — Power, Generator, Cooling, Environmental + - **SAFETY** — Leak Detection, Fire & Safety, Network + - **OPERATIONS** — Assets, Alarms, Capacity + - **MANAGEMENT** — Reports, Energy & CO₂, Maintenance, Settings +- Render section headers as small ALL-CAPS muted labels with a horizontal rule above +- Collapsed sidebar: show only icons, hide section headers + +### 1.6 Sidebar — move collapse toggle to bottom +- Currently the toggle is an absolutely-positioned button that's easy to miss +- Move it to the bottom of the sidebar as a regular icon button above Settings +- Add a tooltip: "Collapse menu" / "Expand menu" + +### 1.7 Centralise threshold constants +- Create `/lib/thresholds.ts` exporting a single const object: + ```ts + export const THRESHOLDS = { + temp: { warn: 26, critical: 28 }, + humidity: { warn: 65, critical: 80 
}, 
    power: { warn: 0.75, critical: 0.85 }, // fraction of capacity
    filter: { warn: 80, critical: 120 }, // Pa
    compressor: { warn: 0.80, critical: 0.95 },
    battery: { warn: 30, critical: 20 },
    fuel: { warn: 30, critical: 15 },
  }
  ```
- Replace all hardcoded threshold values across every page with imports from this file

### 1.8 Global spacing standardisation
- Audit and enforce: all page wrappers use `space-y-6`, all cards use `p-6`, all card headers use `pb-3`
- Create a `<PageContainer>` wrapper component used by every page to ensure consistent top padding and title rendering

### 1.9 Empty state and error boundary
- Create a reusable `<ErrorState>` component shown when a fetch fails
- Create a reusable `<EmptyState>` for no-data states
- Wrap every data-dependent card in a try/catch that renders `<ErrorState>` instead of crashing
- Add a top-level React error boundary in the dashboard layout

---

## Phase 2 — Alarms Page
**Goal:** Make the alarm page genuinely usable under operational pressure.
**Effort:** ~1 day | **Risk:** Low

### 2.1 Sticky filter bar
- Make the filter row (state tabs + severity dropdown) `sticky top-0 z-10` so it stays visible while scrolling through long alarm lists

### 2.2 Bulk action at top
- Move the "Bulk Resolve" button into the filter/action row at the top, not below the table
- Add a "Select All" checkbox in the table header

### 2.3 Swap Critical stat card for Avg Age
- Replace the "Critical" count card with "Avg Age" — the mean time alarms have been open
- Display as "Xh Ym" format
- Colour red if avg age > 1h, amber if > 15 min, green if < 15 min

### 2.4 Column priority on small screens
- On mobile/tablet, keep: Severity | Message | Escalation timer | Actions
- Drop: Sensor ID, State (already conveyed by row colour)
- Escalation timer must always be visible — it is the most operationally critical column

### 2.5 Pagination
- Add simple page controls: Previous / Page X of Y / Next
- Default page size: 25 rows
- Show total count 
above the table: "Showing 1–25 of 142 alarms" + +### 2.6 Embed sparkline in stat cards +- Remove the standalone 24-hour sparkline chart +- Embed a micro line chart (24 data points, 40px tall) inside each stat card below the number +- Net result: less vertical space used, same information + +--- + +## Phase 3 — Dashboard Page +**Goal:** Reduce clutter, improve at-a-glance legibility. +**Effort:** ~1 day | **Risk:** Low + +### 3.1 Uniform KPI card grid +- Standardise all 6 KPI cards to the same height and same layout template +- Use a 3×2 grid (3 cols on lg, 2 on md, 1 on sm) consistently + +### 3.2 Replace mini floor map +- The mini floor map widget is too small to be useful +- Replace with a "Room Quick Status" card: + - Two rows: Hall A / Hall B + - Each row: health badge (OK / Warning / Critical), avg temp, total power, CRAC state + - Link to /floor-map for full view + +### 3.3 Data freshness pill in topbar +- Move the "last updated" indicator from the dashboard page into the topbar (right side, before the bell) +- Make it global — show the last successful API poll timestamp for any page +- Colour: green if < 60s, amber if 60–120s, red if > 120s (stale) + +### 3.4 Alarm feed / Room status layout +- Change the bottom row from 2-col + 1-col to 50/50 +- Alarm Feed: left panel +- Room Quick Status: right panel (replaces mini floor map) + +--- + +## Phase 4 — Power Page +**Goal:** Tame the long scroll, improve information hierarchy. 
+**Effort:** ~1.5 days | **Risk:** Low–Medium + +### 4.1 Page-internal anchor navigation +- Add a sticky sub-nav bar below the topbar with anchor links: + `Site Overview | UPS | Generator | Transfer Switch | Phase Analysis` +- Each section gets an `id=""` and smooth-scroll on click + +### 4.2 Power path diagram +- Add a simple horizontal flow diagram at the top of the page: + `Grid → [ATS: feed A/B] → [UPS: battery/online] → Rack Distribution` +- Use coloured nodes (green = live, amber = degraded, red = fault) +- No library needed — a flex row of icon + connector line components + +### 4.3 Always-visible Phase Summary card +- Show a collapsed "Phase Balance" summary card at all times (Phase A / B / C current kW in a 3-col grid) +- Expand to full Phase Imbalance Table on click or if violations exist + +### 4.4 Per-rack bar chart — full width +- The per-rack chart needs more horizontal room to show 10 bars legibly +- Move it to full width above the history chart (stack them, not side by side) + +--- + +## Phase 5 — Cooling Page +**Goal:** Reduce card density, surface critical maintenance items earlier. 
+**Effort:** ~1 day | **Risk:** Low + +### 5.1 Standardise fleet summary bar +- Replace the horizontal KPI tile flex row with proper KPI cards matching dashboard style + +### 5.2 Promote filter replacement alert +- If any CRAC unit is within 14 days of filter replacement threshold, show a dismissible alert banner at the top of the page +- Move the Predictive Filter Replacement card to the top of the page (above CRAC cards) + +### 5.3 CRAC card — progressive disclosure +- The current CRAC card has 6 stacked sections +- Keep: thermal hero (supply/return/ΔT), capacity bar, fan speed +- Collapse into a "Details" accordion: compressor pressures + electrical readings +- The thermal hero section should be 30% larger — it is the most important readout + +### 5.4 Thermal hero — increase visual weight +- Increase supply/return temp font to text-3xl +- ΔT value in a coloured pill (green/amber/red based on threshold) +- Add a small up/down arrow showing trend (last 5 min) + +--- + +## Phase 6 — Environmental Page +**Goal:** Unify interactive state across sections, reduce redundancy. 
+**Effort:** ~1 day | **Risk:** Low
+
+### 6.1 Shared room tab state
+- All sections on the page (heatmap, dew point, trend chart) should react to a single room tab selector at the top of the page, not per-section tabs
+- Add one prominent "Hall A / Hall B" tab switcher at the page level
+
+### 6.2 Dual-axis chart — clarify axes
+- Add unit labels on the Y-axis: left axis "Temperature (°C)", right axis "Humidity (%)"
+- Change the humidity line to a dashed style (already done) but also add a subtle fill under it to visually distinguish it from the temperature line
+- Add a brief legend note: "Shaded areas = ASHRAE A1 safe zone"
+
+### 6.3 VESDA and Leak panels — link to dedicated pages
+- Label both panels clearly as "Summary — see Leak Detection / Fire & Safety for detail"
+- Add a "View full page →" link in each panel header
+- This avoids duplicating full detail here
+
+---
+
+## Phase 7 — Floor Map Page
+**Goal:** Improve overlay controls and map legibility.
+**Effort:** ~1 day | **Risk:** Low–Medium
+
+### 7.1 Overlay controls — proper segmented control
+- Replace the 4 overlay buttons with a shadcn `Tabs` style segmented control
+- Active tab: filled background, not just a subtle border change
+
+### 7.2 Hot/cold aisle labels — improve visibility
+- Increase aisle divider label font size and weight
+- Add icon: 🔴 Hot Aisle / 🔵 Cold Aisle (or Lucide Flame / Snowflake)
+- Increase divider bar height slightly
+
+### 7.3 Rack tile — secondary metric on hover only
+- In Temperature overlay: show °C as main value; power bar appears on hover tooltip only
+- In Power overlay: show % as main value; temp appears in tooltip
+- Reduces visual clutter in the tile grid
+
+### 7.4 CRAC strip — move above rack grid
+- The CRAC strip is currently below the rack grid and easy to miss
+- Move it above the grid with a stronger visual separator (border + label "Cooling Unit")
+
+### 7.5 Leak sensor panel — add zone labels
+- Add a brief location description to each sensor: "Hall A — 
under floor, near CRAC" etc. +- Use consistent zone label chips rather than free text + +--- + +## Phase 8 — Assets Page +**Goal:** Make each tab genuinely useful and distinct from other pages. +**Effort:** ~1 day | **Risk:** Low + +### 8.1 Rack Grid tab → Rack Summary Table +- Replace the visual rack grid (duplicates Floor Map) with a sortable table: + - Columns: Rack ID | Room | Temp (°C) | Power (kW) | Power % | Alarms | Status + - Sortable by any column + - Row click opens RackDetailSheet + +### 8.2 Device List — sort headers +- Add click-to-sort on every column header with a sort direction indicator (chevron icon) +- Default sort: Type then Name + +### 8.3 Device type legend +- Add a compact colour legend row above the device table explaining the dot colours +- One row of: ● Server ● Switch ● PDU ● Storage ● Firewall ● KVM + +### 8.4 CRAC / UPS cards → inventory rows +- Replace the large card components with compact inventory table rows: + - ID | Type | Status | Room | Rack | Last Maintenance +- Link to full detail on click (CracDetailSheet / UpsCard modal) + +--- + +## Phase 9 — Remaining Pages +**Goal:** Individual page improvements that don't interact with other phases. +**Effort:** ~1.5 days | **Risk:** Low + +### 9.1 Generator page — add fuel runtime estimate +- Add "Est. runtime at current load: X hours" as the hero stat in each generator card +- Show a small fuel consumption trend chart (last 24h) if data is available +- Clearly differentiate this page from the generator section in Power page by adding: last start log, maintenance schedule table + +### 9.2 Capacity page — radial gauge values +- Ensure a large centered text value (e.g. 
"74%") is always visible inside the gauge arc
+- Add "Headroom: X kW" as a sub-label below the gauge
+
+### 9.3 Capacity page — runway in months
+- Display runway as "~2.3 months" alongside weeks
+- Add a 90-day forecast line on the per-rack chart (dotted line extrapolating current growth)
+
+### 9.4 Fire & Safety page — fire state improvements
+- Fire-level cards: increase border to 4px, add a very faint red background overlay (`bg-red-950/30`)
+- Replace small status dots with a proper status row: icon + label + state text (e.g. ✓ Detector 1 — Online)
+- Show raw obscuration value (%/m) on the bar, not just the bar fill
+
+### 9.5 Leak Detection page — add history
+- Add "Last triggered: X days ago" to each sensor card
+- Add a 30-day trigger count badge: "0 events" / "3 events"
+- This keeps the page useful when all sensors are clear
+
+### 9.6 Network page — improve port display
+- Change port headline from "72%" to "36 / 48 ports active" — then show % as sub-label
+- Group card metrics: top 3 bold (state, ports, bandwidth) + secondary row (CPU, memory, temp)
+
+### 9.7 Reports page — promote export to top
+- Move the 3 export buttons to a prominent action bar at the top of the page
+- Add a page-level date range picker that controls all KPIs on the page simultaneously
+- Always show numeric % labels on CRAC/UPS uptime bars
+
+### 9.8 Maintenance page — modal for create form
+- Move "Create window" form into a shadcn `Dialog` modal triggered by the header button
+- In the target selector, group options: Site / Hall A racks / Hall B racks / Cooling / Power
+- Add a 7-day horizontal timeline strip below the active windows list showing when each window falls
+
+### 9.9 Settings page — skeleton structure
+- Replace "Coming soon" with a tabbed layout: Profile | Notifications | Thresholds | Site Config
+- Thresholds tab: shows the values from `lib/thresholds.ts` as editable fields (even if not persisted to backend yet)
+- This makes the page look intentional rather than 
unfinished
+
+---
+
+## Phase 10 — Polish & Accessibility
+**Goal:** Final consistency pass, mobile, keyboard navigation.
+**Effort:** ~1–2 days | **Risk:** Low
+
+### 10.1 Mobile audit
+- Test every page at 375px and 768px width
+- Fix broken chart widths (use `ResponsiveContainer` everywhere, check it's set)
+- Ensure touch targets are ≥44px tall
+- Test sidebar sheet on mobile
+
+### 10.2 Focus rings and keyboard nav
+- Add `focus-visible:ring-2 focus-visible:ring-primary` to all interactive elements that are missing it
+- Verify logical tab order on every page (left-to-right, top-to-bottom)
+- Add `aria-label` to icon-only buttons (alarm bell, collapse toggle, overlay buttons)
+
+### 10.3 Chart skeleton height matching
+- Measure actual rendered chart heights for each chart type
+- Set skeleton `h-[]` to match exactly, preventing layout shift on load
+
+### 10.4 Dark/light mode toggle
+- Add a theme toggle button in the topbar (Moon / Sun icon)
+- The ThemeProvider is already wired — just needs a toggle button and `localStorage` persistence
+
+### 10.5 Loading state audit
+- Every card that fetches data must show a `Skeleton` during initial load
+- No card should show an empty white box or flash unstyled content
+
+### 10.6 Toast notification consistency
+- Audit all `toast.error()` / `toast.success()` calls across pages
+- Ensure every user action (acknowledge alarm, bulk resolve, create maintenance window, export) has a corresponding success/error toast
+
+---
+
+## Summary Table
+
+| Phase | Focus | Pages Touched | Est. 
Effort | +|-------|-------|---------------|-------------| +| 1 | Foundation fixes (nav, thresholds, errors) | All (shared) | 1–2 days | +| 2 | Alarms page | Alarms | 1 day | +| 3 | Dashboard page | Dashboard | 1 day | +| 4 | Power page | Power | 1.5 days | +| 5 | Cooling page | Cooling | 1 day | +| 6 | Environmental page | Environmental | 1 day | +| 7 | Floor Map page | Floor Map | 1 day | +| 8 | Assets page | Assets | 1 day | +| 9 | Remaining pages | Generator, Capacity, Fire, Leak, Network, Reports, Maintenance, Settings | 1.5 days | +| 10 | Polish & accessibility | All | 1–2 days | +| **Total** | | | **~11–13 days** | + +--- + +## Dependency Order + +``` +Phase 1 (foundation) + └─► Phase 2 (alarms) + └─► Phase 3 (dashboard) + └─► Phase 4 (power) + └─► Phase 5 (cooling) + └─► Phase 6 (environmental) + └─► Phase 7 (floor map) + └─► Phase 8 (assets) + └─► Phase 9 (remaining pages) + └─► Phase 10 (polish — last, after all pages stable) +``` + +Phases 2–9 are independent of each other and can be parallelised once Phase 1 is complete. diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..9ec1b83 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,17 @@ +FROM python:3.12-slim + +WORKDIR /app + +# curl needed for Docker healthcheck +RUN apt-get update && apt-get install -y --no-install-recommends curl && rm -rf /var/lib/apt/lists/* + +# Install Python dependencies +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy app code +COPY . . 
+ +EXPOSE 8000 + +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/backend/__init__.py b/backend/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/api/__init__.py b/backend/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/api/routes/__init__.py b/backend/api/routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/api/routes/alarms.py b/backend/api/routes/alarms.py new file mode 100644 index 0000000..db71647 --- /dev/null +++ b/backend/api/routes/alarms.py @@ -0,0 +1,82 @@ +from fastapi import APIRouter, Depends, Query, HTTPException +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + + +@router.get("") +async def list_alarms( + site_id: str = Query(...), + state: str = Query("active", description="active | resolved | acknowledged | all"), + limit: int = Query(50, ge=1, le=200), + session: AsyncSession = Depends(get_session), +): + where = "WHERE site_id = :site_id" + if state != "all": + where += " AND state = :state" + + result = await session.execute(text(f""" + SELECT id, sensor_id, site_id, room_id, rack_id, + severity, message, state, triggered_at, + acknowledged_at, resolved_at + FROM alarms + {where} + ORDER BY triggered_at DESC + LIMIT :limit + """), {"site_id": site_id, "state": state, "limit": limit}) + return [dict(r) for r in result.mappings().all()] + + +@router.post("/{alarm_id}/acknowledge") +async def acknowledge_alarm( + alarm_id: int, + session: AsyncSession = Depends(get_session), +): + result = await session.execute(text(""" + UPDATE alarms + SET state = 'acknowledged', acknowledged_at = NOW() + WHERE id = :id AND state = 'active' + RETURNING id + """), {"id": alarm_id}) + await session.commit() + if not result.fetchone(): + raise HTTPException(status_code=404, detail="Alarm not found or not active") + return {"id": alarm_id, "state": 
"acknowledged"} + + +@router.post("/{alarm_id}/resolve") +async def resolve_alarm( + alarm_id: int, + session: AsyncSession = Depends(get_session), +): + result = await session.execute(text(""" + UPDATE alarms + SET state = 'resolved', resolved_at = NOW() + WHERE id = :id AND state IN ('active', 'acknowledged') + RETURNING id + """), {"id": alarm_id}) + await session.commit() + if not result.fetchone(): + raise HTTPException(status_code=404, detail="Alarm not found or already resolved") + return {"id": alarm_id, "state": "resolved"} + + +@router.get("/stats") +async def alarm_stats( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + result = await session.execute(text(""" + SELECT + COUNT(*) FILTER (WHERE state = 'active') AS active, + COUNT(*) FILTER (WHERE state = 'acknowledged') AS acknowledged, + COUNT(*) FILTER (WHERE state = 'resolved') AS resolved, + COUNT(*) FILTER (WHERE state = 'active' AND severity = 'critical') AS critical, + COUNT(*) FILTER (WHERE state = 'active' AND severity = 'warning') AS warning + FROM alarms + WHERE site_id = :site_id + """), {"site_id": site_id}) + row = result.mappings().one() + return {k: int(v) for k, v in row.items()} diff --git a/backend/api/routes/assets.py b/backend/api/routes/assets.py new file mode 100644 index 0000000..d81a0d3 --- /dev/null +++ b/backend/api/routes/assets.py @@ -0,0 +1,344 @@ +import hashlib +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +# Mirrors the simulator topology — single source of truth for site layout +TOPOLOGY = { + "sg-01": { + "rooms": [ + {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"}, + {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"}, 
+ ], + "ups_units": ["ups-01", "ups-02"], + "leak_sensors": ["leak-01"], + } +} + +# ── Device catalog ──────────────────────────────────────────────────────────── +# Each tuple: (name, u_height, power_draw_w) + +_SERVERS = [ + ("Dell PowerEdge R750", 2, 420), + ("HPE ProLiant DL380 Gen10 Plus", 2, 380), + ("Supermicro SuperServer 2029P", 2, 350), + ("Dell PowerEdge R650xs", 1, 280), + ("HPE ProLiant DL360 Gen10 Plus", 1, 260), +] +_SWITCHES = [ + ("Cisco Catalyst C9300-48P", 1, 60), + ("Arista 7050CX3-32S", 1, 180), + ("Juniper EX4300-48T", 1, 75), +] +_PATCHES = [ + ("Leviton 24-Port Cat6A Patch Panel", 1, 5), + ("Panduit 48-Port Cat6A Patch Panel", 1, 5), +] +_PDUS = [ + ("APC AP8888 Metered Rack PDU", 1, 10), + ("Raritan PX3-5190R Metered PDU", 1, 10), +] +_STORAGE = [ + ("Dell EMC PowerVault ME5024", 2, 280), + ("NetApp AFF C190", 2, 200), +] +_FIREWALL = [ + ("Palo Alto PA-5220", 2, 150), + ("Fortinet FortiGate 3000F",2, 180), +] +_KVM = [("Raritan KX III-464", 1, 15)] + + +def _serial(rack_id: str, u: int) -> str: + return hashlib.md5(f"{rack_id}-u{u}".encode()).hexdigest()[:10].upper() + + +def _rack_seq(rack_id: str) -> int: + """SG1A01.05 → 5, SG1A02.05 → 25, SG1B01.05 → 5""" + # Format: SG1A01.05 — row at [4:6], rack num after dot + row = int(rack_id[4:6]) # "01" or "02" + num = int(rack_id[7:]) # "01" to "20" + return (row - 1) * 20 + num + + +def _generate_devices(site_id: str, room_id: str, rack_id: str) -> list[dict]: + s = _rack_seq(rack_id) + room_oct = "1" if room_id == "hall-a" else "2" + devices: list[dict] = [] + u = 1 + + def add(name: str, dtype: str, u_start: int, u_height: int, power_w: int, ip: str = "-"): + devices.append({ + "device_id": f"{rack_id}-u{u_start:02d}", + "name": name, + "type": dtype, + "rack_id": rack_id, + "room_id": room_id, + "site_id": site_id, + "u_start": u_start, + "u_height": u_height, + "ip": ip, + "serial": _serial(rack_id, u_start), + "model": name, + "status": "online", + "power_draw_w": power_w, + }) + + # U1: 
Patch panel + p = _PATCHES[s % len(_PATCHES)] + add(p[0], "patch_panel", u, p[1], p[2]); u += p[1] + + # U2: Switch + sw = _SWITCHES[s % len(_SWITCHES)] + add(sw[0], "switch", u, sw[1], sw[2], f"10.10.{room_oct}.{s}"); u += sw[1] + + # KVM in rack 5 / 15 + if s in (5, 15): + kvm = _KVM[0] + add(kvm[0], "kvm", u, kvm[1], kvm[2], f"10.10.{room_oct}.{s + 100}"); u += kvm[1] + + # Firewall in first rack of each room + if rack_id in ("SG1A01.01", "SG1B01.01"): + fw = _FIREWALL[s % len(_FIREWALL)] + add(fw[0], "firewall", u, fw[1], fw[2], f"10.10.{room_oct}.254"); u += fw[1] + + # Storage in rack 3 and 13 + if s in (3, 13): + stor = _STORAGE[s % len(_STORAGE)] + add(stor[0], "storage", u, stor[1], stor[2], f"10.10.{room_oct}.{s + 50}"); u += stor[1] + + # Servers filling U slots up to U41 + srv_pool = (_SERVERS * 3) + ip_counter = (s - 1) * 15 + 10 + for idx, (name, u_h, pwr) in enumerate(srv_pool): + if u + u_h > 41: + break + # Occasional empty gap for realism + if idx > 0 and (s + idx) % 8 == 0 and u + u_h + 1 <= 41: + u += 1 + if u + u_h > 41: + break + add(name, "server", u, u_h, pwr, f"10.10.{room_oct}.{ip_counter}"); u += u_h + ip_counter += 1 + + # U42: PDU + pdu = _PDUS[s % len(_PDUS)] + add(pdu[0], "pdu", 42, pdu[1], pdu[2]) + + return devices + + +# ── Endpoints ───────────────────────────────────────────────────────────────── + +@router.get("") +async def get_assets( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + site = TOPOLOGY.get(site_id) + if not site: + return {"site_id": site_id, "rooms": [], "ups_units": []} + + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, room_id, rack_id, value + FROM readings + WHERE site_id = :site_id + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + readings = result.mappings().all() + + alarm_result = await session.execute(text(""" + SELECT rack_id, COUNT(*) AS cnt, 
MAX(severity) AS worst + FROM alarms + WHERE site_id = :site_id AND state = 'active' AND rack_id IS NOT NULL + GROUP BY rack_id + """), {"site_id": site_id}) + alarm_map: dict[str, tuple[int, str]] = { + r["rack_id"]: (int(r["cnt"]), r["worst"]) + for r in alarm_result.mappings().all() + } + + by_sensor: dict[str, float] = {r["sensor_id"]: float(r["value"]) for r in readings} + + def rack_reading(site: str, room: str, rack: str, suffix: str) -> float | None: + return by_sensor.get(f"{site}/{room}/{rack}/{suffix}") + + def cooling_reading(site: str, crac: str, suffix: str) -> float | None: + return by_sensor.get(f"{site}/cooling/{crac}/{suffix}") + + def ups_reading(site: str, ups: str, suffix: str) -> float | None: + return by_sensor.get(f"{site}/power/{ups}/{suffix}") + + rooms = [] + for room in site["rooms"]: + room_id = room["room_id"] + crac_id = room["crac_id"] + + supply = cooling_reading(site_id, crac_id, "supply_temp") + return_t = cooling_reading(site_id, crac_id, "return_temp") + fan = cooling_reading(site_id, crac_id, "fan_pct") + crac_has_data = any(sid.startswith(f"{site_id}/cooling/{crac_id}") for sid in by_sensor) + if supply is not None: + crac_state = "online" + elif crac_has_data: + crac_state = "fault" + else: + crac_state = "unknown" + + racks = [] + for rack_id in room["racks"]: + temp = rack_reading(site_id, room_id, rack_id, "temperature") + power = rack_reading(site_id, room_id, rack_id, "power_kw") + alarm_cnt, worst_sev = alarm_map.get(rack_id, (0, None)) + + status = "ok" + if worst_sev == "critical" or (temp is not None and temp >= 30): + status = "critical" + elif worst_sev == "warning" or (temp is not None and temp >= 26): + status = "warning" + elif temp is None and power is None: + status = "unknown" + + racks.append({ + "rack_id": rack_id, + "temp": round(temp, 1) if temp is not None else None, + "power_kw": round(power, 2) if power is not None else None, + "status": status, + "alarm_count": alarm_cnt, + }) + + rooms.append({ + 
"room_id": room_id, + "crac": { + "crac_id": crac_id, + "state": crac_state, + "supply_temp": round(supply, 1) if supply is not None else None, + "return_temp": round(return_t, 1) if return_t is not None else None, + "fan_pct": round(fan, 1) if fan is not None else None, + }, + "racks": racks, + }) + + ups_units = [] + for ups_id in site["ups_units"]: + charge = ups_reading(site_id, ups_id, "charge_pct") + load = ups_reading(site_id, ups_id, "load_pct") + runtime = ups_reading(site_id, ups_id, "runtime_min") + state_raw = ups_reading(site_id, ups_id, "state") + if state_raw is not None: + state = "battery" if state_raw == 1.0 else "online" + elif charge is not None: + state = "battery" if charge < 20.0 else "online" + else: + state = "unknown" + ups_units.append({ + "ups_id": ups_id, + "state": state, + "charge_pct": round(charge, 1) if charge is not None else None, + "load_pct": round(load, 1) if load is not None else None, + "runtime_min": round(runtime, 0) if runtime is not None else None, + }) + + return {"site_id": site_id, "rooms": rooms, "ups_units": ups_units} + + +@router.get("/devices") +async def get_all_devices(site_id: str = Query(...)): + """All devices across all racks for the site.""" + site = TOPOLOGY.get(site_id) + if not site: + return [] + devices = [] + for room in site["rooms"]: + for rack_id in room["racks"]: + devices.extend(_generate_devices(site_id, room["room_id"], rack_id)) + return devices + + +@router.get("/rack-devices") +async def get_rack_devices(site_id: str = Query(...), rack_id: str = Query(...)): + """Devices in a specific rack.""" + site = TOPOLOGY.get(site_id) + if not site: + return [] + for room in site["rooms"]: + if rack_id in room["racks"]: + return _generate_devices(site_id, room["room_id"], rack_id) + return [] + + +@router.get("/pdus") +async def get_pdus( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Per-rack PDU live phase data.""" + site = TOPOLOGY.get(site_id) + if not site: 
+ return [] + + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, room_id, rack_id, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ( + 'power_kw', 'pdu_phase_a_kw', 'pdu_phase_b_kw', 'pdu_phase_c_kw', + 'pdu_phase_a_a', 'pdu_phase_b_a', 'pdu_phase_c_a', 'pdu_imbalance' + ) + AND recorded_at > NOW() - INTERVAL '5 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + # Build per-rack dict keyed by rack_id + rack_data: dict[str, dict] = {} + for row in result.mappings().all(): + rack_id = row["rack_id"] + if not rack_id: + continue + if rack_id not in rack_data: + rack_data[rack_id] = {"rack_id": rack_id, "room_id": row["room_id"]} + field_map = { + "power_kw": "total_kw", + "pdu_phase_a_kw": "phase_a_kw", + "pdu_phase_b_kw": "phase_b_kw", + "pdu_phase_c_kw": "phase_c_kw", + "pdu_phase_a_a": "phase_a_a", + "pdu_phase_b_a": "phase_b_a", + "pdu_phase_c_a": "phase_c_a", + "pdu_imbalance": "imbalance_pct", + } + field = field_map.get(row["sensor_type"]) + if field: + rack_data[rack_id][field] = round(float(row["value"]), 2) + + # Emit rows for every rack in topology order, filling in None for missing data + out = [] + for room in site["rooms"]: + for rack_id in room["racks"]: + d = rack_data.get(rack_id, {"rack_id": rack_id, "room_id": room["room_id"]}) + imb = d.get("imbalance_pct") + status = ( + "critical" if imb is not None and imb >= 10 + else "warning" if imb is not None and imb >= 5 + else "ok" + ) + out.append({ + "rack_id": rack_id, + "room_id": d.get("room_id", room["room_id"]), + "total_kw": d.get("total_kw"), + "phase_a_kw": d.get("phase_a_kw"), + "phase_b_kw": d.get("phase_b_kw"), + "phase_c_kw": d.get("phase_c_kw"), + "phase_a_a": d.get("phase_a_a"), + "phase_b_a": d.get("phase_b_a"), + "phase_c_a": d.get("phase_c_a"), + "imbalance_pct": imb, + "status": status, + }) + return out diff --git a/backend/api/routes/capacity.py b/backend/api/routes/capacity.py new 
file mode 100644 index 0000000..0327e6b --- /dev/null +++ b/backend/api/routes/capacity.py @@ -0,0 +1,110 @@ +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +ROOMS = { + "sg-01": [ + {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"}, + {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"}, + ] +} + +# Rated capacity config — would be per-asset configurable in production +RACK_POWER_CAPACITY_KW = 10.0 # max kW per rack +ROOM_POWER_CAPACITY_KW = 400.0 # 40 racks × 10 kW +CRAC_COOLING_CAPACITY_KW = 160.0 # rated cooling per CRAC +RACK_U_TOTAL = 42 + + +@router.get("/summary") +async def capacity_summary( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Per-rack and per-room capacity: power used vs rated, cooling load vs rated, rack space.""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + rack_id, room_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ('power_kw', 'temperature') + AND rack_id IS NOT NULL + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + rows = result.mappings().all() + + # Index: rack_id → {power_kw, temperature, room_id} + rack_idx: dict[str, dict] = {} + for row in rows: + rid = row["rack_id"] + if rid not in rack_idx: + rack_idx[rid] = {"room_id": row["room_id"]} + if row["sensor_type"] == "power_kw": + rack_idx[rid]["power_kw"] = round(float(row["value"]), 2) + elif row["sensor_type"] == "temperature": + rack_idx[rid]["temperature"] = round(float(row["value"]), 1) + + rooms_out = [] + racks_out = [] + + for room in ROOMS.get(site_id, []): + room_id = 
room["room_id"] + room_power = 0.0 + populated = 0 + + for rack_id in room["racks"]: + d = rack_idx.get(rack_id, {}) + power = d.get("power_kw") + temp = d.get("temperature") + + if power is not None: + room_power += power + populated += 1 + + power_pct = round((power / RACK_POWER_CAPACITY_KW) * 100, 1) if power is not None else None + racks_out.append({ + "rack_id": rack_id, + "room_id": room_id, + "power_kw": power, + "power_capacity_kw": RACK_POWER_CAPACITY_KW, + "power_pct": power_pct, + "temp": temp, + }) + + room_power = round(room_power, 2) + rooms_out.append({ + "room_id": room_id, + "power": { + "used_kw": room_power, + "capacity_kw": ROOM_POWER_CAPACITY_KW, + "pct": round((room_power / ROOM_POWER_CAPACITY_KW) * 100, 1), + "headroom_kw": round(ROOM_POWER_CAPACITY_KW - room_power, 2), + }, + "cooling": { + "load_kw": room_power, # IT power ≈ heat generated + "capacity_kw": CRAC_COOLING_CAPACITY_KW, + "pct": round(min(100.0, (room_power / CRAC_COOLING_CAPACITY_KW) * 100), 1), + "headroom_kw": round(max(0.0, CRAC_COOLING_CAPACITY_KW - room_power), 2), + }, + "space": { + "racks_total": len(room["racks"]), + "racks_populated": populated, + "pct": round((populated / len(room["racks"])) * 100, 1), + }, + }) + + return { + "site_id": site_id, + "config": { + "rack_power_kw": RACK_POWER_CAPACITY_KW, + "room_power_kw": ROOM_POWER_CAPACITY_KW, + "crac_cooling_kw": CRAC_COOLING_CAPACITY_KW, + "rack_u_total": RACK_U_TOTAL, + }, + "rooms": rooms_out, + "racks": racks_out, + } diff --git a/backend/api/routes/cooling.py b/backend/api/routes/cooling.py new file mode 100644 index 0000000..c1f3b3a --- /dev/null +++ b/backend/api/routes/cooling.py @@ -0,0 +1,131 @@ +from datetime import datetime, timezone, timedelta +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +CHILLERS = {"sg-01": ["chiller-01"]} + +CHILLER_FIELD_MAP = { + 
"chiller_chw_supply": "chw_supply_c", + "chiller_chw_return": "chw_return_c", + "chiller_chw_delta": "chw_delta_c", + "chiller_flow_gpm": "flow_gpm", + "chiller_load_kw": "cooling_load_kw", + "chiller_load_pct": "cooling_load_pct", + "chiller_cop": "cop", + "chiller_comp_load": "compressor_load_pct", + "chiller_cond_press": "condenser_pressure_bar", + "chiller_evap_press": "evaporator_pressure_bar", + "chiller_cw_supply": "cw_supply_c", + "chiller_cw_return": "cw_return_c", + "chiller_run_hours": "run_hours", +} + + +@router.get("/status") +async def chiller_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest chiller plant readings.""" + types_sql = ", ".join(f"'{t}'" for t in [*CHILLER_FIELD_MAP.keys(), "chiller_state"]) + result = await session.execute(text(f""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ({types_sql}) + AND recorded_at > NOW() - INTERVAL '5 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + chiller_data: dict[str, dict] = {} + for row in result.mappings().all(): + parts = row["sensor_id"].split("/") + # sensor_id: {site}/{cooling/chiller}/{chiller_id}/{key} → parts[3] + if len(parts) < 4: + continue + chiller_id = parts[3] + if chiller_id not in chiller_data: + chiller_data[chiller_id] = {"chiller_id": chiller_id} + field = CHILLER_FIELD_MAP.get(row["sensor_type"]) + if field: + chiller_data[chiller_id][field] = round(float(row["value"]), 2) + elif row["sensor_type"] == "chiller_state": + chiller_data[chiller_id]["state"] = "online" if float(row["value"]) > 0.5 else "fault" + + out = [] + for chiller_id in CHILLERS.get(site_id, []): + d = chiller_data.get(chiller_id, {"chiller_id": chiller_id, "state": "unknown"}) + d.setdefault("state", "online") + out.append(d) + return out + + +@router.get("/history") +async def chiller_history( + site_id: str = Query(...), + chiller_id: str = 
Query(...), + hours: int = Query(6, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """Time-series COP, load kW, and CHW temps for a chiller.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + METRICS = ("chiller_cop", "chiller_load_kw", "chiller_load_pct", + "chiller_chw_supply", "chiller_chw_return", "chiller_comp_load") + types_sql = ", ".join(f"'{t}'" for t in METRICS) + try: + result = await session.execute(text(f""" + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 3) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ({types_sql}) + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, + "pattern": f"{site_id}/cooling/chiller/{chiller_id}/%", + "from_time": from_time}) + except Exception: + result = await session.execute(text(f""" + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 3) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ({types_sql}) + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, + "pattern": f"{site_id}/cooling/chiller/{chiller_id}/%", + "from_time": from_time}) + + bucket_map: dict[str, dict] = {} + for row in result.mappings().all(): + b = str(row["bucket"]) + if b not in bucket_map: + bucket_map[b] = {"bucket": b} + bucket_map[b][row["sensor_type"]] = float(row["avg_val"]) + + points = [] + for b, vals in sorted(bucket_map.items()): + points.append({ + "bucket": b, + "cop": vals.get("chiller_cop"), + "load_kw": vals.get("chiller_load_kw"), + "load_pct": vals.get("chiller_load_pct"), + "chw_supply_c": vals.get("chiller_chw_supply"), + "chw_return_c": vals.get("chiller_chw_return"), + "comp_load": vals.get("chiller_comp_load"), + }) + return points diff --git 
a/backend/api/routes/env.py b/backend/api/routes/env.py new file mode 100644 index 0000000..4bfefe3 --- /dev/null +++ b/backend/api/routes/env.py @@ -0,0 +1,440 @@ +from datetime import datetime, timezone, timedelta +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +ROOMS = { + "sg-01": [ + {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"}, + {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"}, + ] +} + + +@router.get("/rack-readings") +async def rack_env_readings( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest temperature and humidity per rack, grouped by room.""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + rack_id, room_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ('temperature', 'humidity') + AND rack_id IS NOT NULL + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + rows = result.mappings().all() + + # Index by (rack_id, sensor_type) + data: dict[tuple, float] = {(r["rack_id"], r["sensor_type"]): float(r["value"]) for r in rows} + + rooms = [] + for room in ROOMS.get(site_id, []): + racks = [] + for rack_id in room["racks"]: + temp = data.get((rack_id, "temperature")) + hum = data.get((rack_id, "humidity")) + racks.append({ + "rack_id": rack_id, + "temperature": round(temp, 1) if temp is not None else None, + "humidity": round(hum, 1) if hum is not None else None, + }) + rooms.append({"room_id": room["room_id"], "racks": racks}) + return rooms + + +@router.get("/humidity-history") +async def humidity_history( + site_id: str = Query(...), + hours: int = Query(6, 
ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """Average humidity per room bucketed by 5 minutes.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + try: + result = await session.execute(text(""" + SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 1) AS avg_humidity + FROM ( + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_id, room_id, + AVG(value) AS avg_per_rack + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'humidity' + AND room_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, sensor_id, room_id + ) per_rack + GROUP BY bucket, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + except Exception: + result = await session.execute(text(""" + SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 1) AS avg_humidity + FROM ( + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_id, room_id, + AVG(value) AS avg_per_rack + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'humidity' + AND room_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, sensor_id, room_id + ) per_rack + GROUP BY bucket, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + return [dict(r) for r in result.mappings().all()] + + +# All CRAC sensor types stored in the readings table +CRAC_SENSOR_TYPES = ( + "cooling_supply", "cooling_return", "cooling_fan", + "cooling_supply_hum", "cooling_return_hum", "cooling_airflow", "cooling_filter_dp", + "cooling_cap_kw", "cooling_cap_pct", "cooling_cop", "cooling_shr", + "cooling_comp_state", "cooling_comp_load", "cooling_comp_power", "cooling_comp_hours", + "cooling_high_press", "cooling_low_press", "cooling_superheat", "cooling_subcooling", + "cooling_fan_rpm", "cooling_fan_power", "cooling_fan_hours", + "cooling_unit_power", "cooling_voltage", "cooling_current", "cooling_pf", +) + +# sensor_type → response field name +CRAC_FIELD_MAP = { + 
"cooling_supply": "supply_temp", + "cooling_return": "return_temp", + "cooling_fan": "fan_pct", + "cooling_supply_hum": "supply_humidity", + "cooling_return_hum": "return_humidity", + "cooling_airflow": "airflow_cfm", + "cooling_filter_dp": "filter_dp_pa", + "cooling_cap_kw": "cooling_capacity_kw", + "cooling_cap_pct": "cooling_capacity_pct", + "cooling_cop": "cop", + "cooling_shr": "sensible_heat_ratio", + "cooling_comp_state": "compressor_state", + "cooling_comp_load": "compressor_load_pct", + "cooling_comp_power": "compressor_power_kw", + "cooling_comp_hours": "compressor_run_hours", + "cooling_high_press": "high_pressure_bar", + "cooling_low_press": "low_pressure_bar", + "cooling_superheat": "discharge_superheat_c", + "cooling_subcooling": "liquid_subcooling_c", + "cooling_fan_rpm": "fan_rpm", + "cooling_fan_power": "fan_power_kw", + "cooling_fan_hours": "fan_run_hours", + "cooling_unit_power": "total_unit_power_kw", + "cooling_voltage": "input_voltage_v", + "cooling_current": "input_current_a", + "cooling_pf": "power_factor", +} + +RATED_CAPACITY_KW = 80.0 + + +@router.get("/crac-status") +async def crac_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest CRAC readings — full sensor set.""" + types_sql = ", ".join(f"'{t}'" for t in CRAC_SENSOR_TYPES) + result = await session.execute(text(f""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ({types_sql}) + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + crac_data: dict[str, dict] = {} + for row in result.mappings().all(): + parts = row["sensor_id"].split("/") + if len(parts) < 3: + continue + crac_id = parts[2] + if crac_id not in crac_data: + crac_data[crac_id] = {"crac_id": crac_id} + field = CRAC_FIELD_MAP.get(row["sensor_type"]) + if field: + crac_data[crac_id][field] = round(float(row["value"]), 3) + + 
room_map = {room["crac_id"]: room["room_id"] for room in ROOMS.get(site_id, [])} + + result_list = [] + for crac_id, d in sorted(crac_data.items()): + supply = d.get("supply_temp") + ret = d.get("return_temp") + delta = round(ret - supply, 1) if (ret is not None and supply is not None) else None + state = "online" if supply is not None else "fault" + result_list.append({ + "crac_id": crac_id, + "room_id": room_map.get(crac_id), + "state": state, + "delta": delta, + "rated_capacity_kw": RATED_CAPACITY_KW, + **{k: round(v, 2) if isinstance(v, float) else v for k, v in d.items() if k != "crac_id"}, + }) + + # Surface CRACs with no recent readings as faulted + known = set(crac_data.keys()) + for room in ROOMS.get(site_id, []): + if room["crac_id"] not in known: + result_list.append({ + "crac_id": room["crac_id"], + "room_id": room["room_id"], + "state": "fault", + "delta": None, + "rated_capacity_kw": RATED_CAPACITY_KW, + }) + + return sorted(result_list, key=lambda x: x["crac_id"]) + + +@router.get("/crac-history") +async def crac_history( + site_id: str = Query(...), + crac_id: str = Query(...), + hours: int = Query(6, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """Time-series history for a single CRAC unit — capacity, COP, compressor load, filter ΔP, temps.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + METRICS = ( + "cooling_supply", "cooling_return", "cooling_cap_kw", + "cooling_cap_pct", "cooling_cop", "cooling_comp_load", + "cooling_filter_dp", "cooling_fan", + ) + types_sql = ", ".join(f"'{t}'" for t in METRICS) + try: + result = await session.execute(text(f""" + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 3) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ({types_sql}) + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, "pattern": 
f"{site_id}/cooling/{crac_id}/%", "from_time": from_time}) + except Exception: + result = await session.execute(text(f""" + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 3) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ({types_sql}) + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time}) + + bucket_map: dict[str, dict] = {} + for row in result.mappings().all(): + b = str(row["bucket"]) + if b not in bucket_map: + bucket_map[b] = {"bucket": b} + bucket_map[b][row["sensor_type"]] = float(row["avg_val"]) + + points = [] + for b, vals in sorted(bucket_map.items()): + supply = vals.get("cooling_supply") + ret = vals.get("cooling_return") + points.append({ + "bucket": b, + "supply_temp": round(supply, 1) if supply is not None else None, + "return_temp": round(ret, 1) if ret is not None else None, + "delta_t": round(ret - supply, 1) if (supply is not None and ret is not None) else None, + "capacity_kw": vals.get("cooling_cap_kw"), + "capacity_pct": vals.get("cooling_cap_pct"), + "cop": vals.get("cooling_cop"), + "comp_load": vals.get("cooling_comp_load"), + "filter_dp": vals.get("cooling_filter_dp"), + "fan_pct": vals.get("cooling_fan"), + }) + return points + + +@router.get("/crac-delta-history") +async def crac_delta_history( + site_id: str = Query(...), + crac_id: str = Query(...), + hours: int = Query(1, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """ΔT (return - supply) over time for a single CRAC unit.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + try: + result = await session.execute(text(""" + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_type, + AVG(value) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN 
('cooling_supply', 'cooling_return') + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time}) + except Exception: + result = await session.execute(text(""" + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_type, + AVG(value) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ('cooling_supply', 'cooling_return') + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time}) + + rows = result.mappings().all() + bucket_map: dict[str, dict] = {} + for row in rows: + b = str(row["bucket"]) + if b not in bucket_map: + bucket_map[b] = {"bucket": b} + bucket_map[b][row["sensor_type"]] = float(row["avg_val"]) + + points = [] + for b, vals in bucket_map.items(): + supply = vals.get("cooling_supply") + ret = vals.get("cooling_return") + if supply is not None and ret is not None: + points.append({"bucket": b, "delta": round(ret - supply, 2)}) + + return sorted(points, key=lambda x: x["bucket"]) + + +@router.get("/rack-history") +async def rack_history( + site_id: str = Query(...), + rack_id: str = Query(...), + hours: int = Query(6, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """Temperature and power history for a single rack.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + try: + result = await session.execute(text(""" + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 2) AS avg_value + FROM readings + WHERE site_id = :site_id + AND rack_id = :rack_id + AND sensor_type IN ('temperature', 'humidity', 'power_kw') + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, "rack_id": rack_id, "from_time": from_time}) + 
except Exception: + result = await session.execute(text(""" + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 2) AS avg_value + FROM readings + WHERE site_id = :site_id + AND rack_id = :rack_id + AND sensor_type IN ('temperature', 'humidity', 'power_kw') + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, "rack_id": rack_id, "from_time": from_time}) + + rows = result.mappings().all() + + # Pivot into {bucket, temperature, humidity, power_kw} + bucket_map: dict[str, dict] = {} + for row in rows: + b = str(row["bucket"]) + if b not in bucket_map: + bucket_map[b] = {"bucket": b} + bucket_map[b][row["sensor_type"]] = float(row["avg_value"]) + + # Fetch active alarms for this rack + alarms = await session.execute(text(""" + SELECT id, severity, message, state, triggered_at + FROM alarms + WHERE site_id = :site_id AND rack_id = :rack_id + ORDER BY triggered_at DESC + LIMIT 10 + """), {"site_id": site_id, "rack_id": rack_id}) + + return { + "rack_id": rack_id, + "site_id": site_id, + "history": list(bucket_map.values()), + "alarms": [dict(r) for r in alarms.mappings().all()], + } + + +@router.get("/particles") +async def particle_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest particle counts per room.""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + room_id, sensor_type, value, recorded_at + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ('particles_0_5um', 'particles_5um') + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + room_data: dict[str, dict] = {} + for row in result.mappings().all(): + rid = row["room_id"] + if rid not in room_data: + room_data[rid] = {} + room_data[rid][row["sensor_type"]] = round(float(row["value"])) + + rooms_cfg = ROOMS.get(site_id, []) + out = [] + for room in 
rooms_cfg: + rid = room["room_id"] + d = room_data.get(rid, {}) + p05 = d.get("particles_0_5um") + p5 = d.get("particles_5um") + # Derive ISO 14644-1 class (simplified: class 8 = 3.52M @ 0.5µm) + iso_class = None + if p05 is not None: + if p05 <= 10_000: iso_class = 5 + elif p05 <= 100_000: iso_class = 6 + elif p05 <= 1_000_000: iso_class = 7 + elif p05 <= 3_520_000: iso_class = 8 + else: iso_class = 9 + out.append({ + "room_id": rid, + "particles_0_5um": p05, + "particles_5um": p5, + "iso_class": iso_class, + }) + return out diff --git a/backend/api/routes/fire.py b/backend/api/routes/fire.py new file mode 100644 index 0000000..cd7f203 --- /dev/null +++ b/backend/api/routes/fire.py @@ -0,0 +1,75 @@ +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +VESDA_ZONES = { + "sg-01": [ + {"zone_id": "vesda-hall-a", "room_id": "hall-a"}, + {"zone_id": "vesda-hall-b", "room_id": "hall-b"}, + ] +} + +LEVEL_MAP = {0: "normal", 1: "alert", 2: "action", 3: "fire"} + +VESDA_TYPES = ("vesda_level", "vesda_obscuration", "vesda_det1", "vesda_det2", + "vesda_power", "vesda_flow") + + +@router.get("/status") +async def fire_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest VESDA readings per fire zone.""" + types_sql = ", ".join(f"'{t}'" for t in VESDA_TYPES) + result = await session.execute(text(f""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ({types_sql}) + AND recorded_at > NOW() - INTERVAL '2 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + zone_data: dict[str, dict] = {} + for row in result.mappings().all(): + parts = row["sensor_id"].split("/") + if len(parts) < 3: + continue + zone_id = parts[2] + if zone_id not in zone_data: + zone_data[zone_id] = {"zone_id": zone_id} + v 
= float(row["value"]) + s_type = row["sensor_type"] + if s_type == "vesda_level": + zone_data[zone_id]["level"] = LEVEL_MAP.get(round(v), "normal") + elif s_type == "vesda_obscuration": + zone_data[zone_id]["obscuration_pct_m"] = round(v, 3) + elif s_type == "vesda_det1": + zone_data[zone_id]["detector_1_ok"] = v > 0.5 + elif s_type == "vesda_det2": + zone_data[zone_id]["detector_2_ok"] = v > 0.5 + elif s_type == "vesda_power": + zone_data[zone_id]["power_ok"] = v > 0.5 + elif s_type == "vesda_flow": + zone_data[zone_id]["flow_ok"] = v > 0.5 + + zone_room_map = {z["zone_id"]: z["room_id"] for z in VESDA_ZONES.get(site_id, [])} + + out = [] + for zone_cfg in VESDA_ZONES.get(site_id, []): + zone_id = zone_cfg["zone_id"] + d = zone_data.get(zone_id, {"zone_id": zone_id}) + d.setdefault("level", "normal") + d.setdefault("obscuration_pct_m", None) + d.setdefault("detector_1_ok", True) + d.setdefault("detector_2_ok", True) + d.setdefault("power_ok", True) + d.setdefault("flow_ok", True) + d["room_id"] = zone_room_map.get(zone_id) + out.append(d) + return out diff --git a/backend/api/routes/floor_layout.py b/backend/api/routes/floor_layout.py new file mode 100644 index 0000000..c6f3eda --- /dev/null +++ b/backend/api/routes/floor_layout.py @@ -0,0 +1,33 @@ +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import text +from core.database import get_session + +router = APIRouter() + + +@router.get("") +async def get_floor_layout(site_id: str, db: AsyncSession = Depends(get_session)): + row = await db.execute( + text("SELECT value FROM site_config WHERE site_id = :site_id AND key = 'floor_layout'"), + {"site_id": site_id}, + ) + result = row.fetchone() + if result is None: + raise HTTPException(status_code=404, detail="No floor layout saved for this site") + return result[0] + + +@router.put("") +async def save_floor_layout(site_id: str, layout: dict, db: AsyncSession = Depends(get_session)): + await 
db.execute( + text(""" + INSERT INTO site_config (site_id, key, value, updated_at) + VALUES (:site_id, 'floor_layout', CAST(:value AS jsonb), NOW()) + ON CONFLICT (site_id, key) + DO UPDATE SET value = EXCLUDED.value, updated_at = NOW() + """), + {"site_id": site_id, "value": __import__("json").dumps(layout)}, + ) + await db.commit() + return {"ok": True} diff --git a/backend/api/routes/generator.py b/backend/api/routes/generator.py new file mode 100644 index 0000000..a2cdf66 --- /dev/null +++ b/backend/api/routes/generator.py @@ -0,0 +1,138 @@ +from datetime import datetime, timezone, timedelta +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +GENERATORS = {"sg-01": ["gen-01"]} + +GEN_FIELD_MAP = { + "gen_fuel_pct": "fuel_pct", + "gen_fuel_l": "fuel_litres", + "gen_fuel_rate": "fuel_rate_lph", + "gen_load_kw": "load_kw", + "gen_load_pct": "load_pct", + "gen_run_hours": "run_hours", + "gen_voltage_v": "voltage_v", + "gen_freq_hz": "frequency_hz", + "gen_rpm": "engine_rpm", + "gen_oil_press": "oil_pressure_bar", + "gen_coolant_c": "coolant_temp_c", + "gen_exhaust_c": "exhaust_temp_c", + "gen_alt_temp_c": "alternator_temp_c", + "gen_pf": "power_factor", + "gen_batt_v": "battery_v", +} + +STATE_MAP = {-1.0: "fault", 0.0: "standby", 1.0: "running", 2.0: "test"} + + +@router.get("/status") +async def generator_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest reading for each generator.""" + types_sql = ", ".join(f"'{t}'" for t in [*GEN_FIELD_MAP.keys(), "gen_state"]) + result = await session.execute(text(f""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ({types_sql}) + AND recorded_at > NOW() - INTERVAL '5 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + gen_data: 
dict[str, dict] = {} + for row in result.mappings().all(): + parts = row["sensor_id"].split("/") + if len(parts) < 3: + continue + gen_id = parts[2] + if gen_id not in gen_data: + gen_data[gen_id] = {"gen_id": gen_id} + field = GEN_FIELD_MAP.get(row["sensor_type"]) + if field: + gen_data[gen_id][field] = round(float(row["value"]), 2) + elif row["sensor_type"] == "gen_state": + v = round(float(row["value"])) + gen_data[gen_id]["state"] = STATE_MAP.get(v, "standby") + + out = [] + for gen_id in GENERATORS.get(site_id, []): + d = gen_data.get(gen_id, {"gen_id": gen_id, "state": "unknown"}) + if "state" not in d: + d["state"] = "standby" + out.append(d) + return out + + +HISTORY_METRICS = ( + "gen_load_pct", "gen_fuel_pct", "gen_coolant_c", + "gen_exhaust_c", "gen_freq_hz", "gen_alt_temp_c", +) + +@router.get("/history") +async def generator_history( + site_id: str = Query(...), + gen_id: str = Query(...), + hours: int = Query(6, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """5-minute bucketed time-series for a single generator.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + types_sql = ", ".join(f"'{t}'" for t in HISTORY_METRICS) + try: + result = await session.execute(text(f""" + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 2) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ({types_sql}) + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, + "pattern": f"{site_id}/generator/{gen_id}/%", + "from_time": from_time}) + except Exception: + result = await session.execute(text(f""" + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 2) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ({types_sql}) + AND recorded_at > :from_time + GROUP BY bucket, 
sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, + "pattern": f"{site_id}/generator/{gen_id}/%", + "from_time": from_time}) + + # Pivot: bucket → {metric: value} + buckets: dict[str, dict] = {} + for row in result.mappings().all(): + b = row["bucket"].isoformat() + buckets.setdefault(b, {"bucket": b}) + key_map = { + "gen_load_pct": "load_pct", + "gen_fuel_pct": "fuel_pct", + "gen_coolant_c": "coolant_temp_c", + "gen_exhaust_c": "exhaust_temp_c", + "gen_freq_hz": "frequency_hz", + "gen_alt_temp_c":"alternator_temp_c", + } + field = key_map.get(row["sensor_type"]) + if field: + buckets[b][field] = float(row["avg_val"]) + + return list(buckets.values()) diff --git a/backend/api/routes/health.py b/backend/api/routes/health.py new file mode 100644 index 0000000..673045a --- /dev/null +++ b/backend/api/routes/health.py @@ -0,0 +1,13 @@ +from fastapi import APIRouter +from datetime import datetime, timezone + +router = APIRouter() + + +@router.get("/health") +async def health_check(): + return { + "status": "ok", + "service": "DemoBMS API", + "timestamp": datetime.now(timezone.utc).isoformat(), + } diff --git a/backend/api/routes/leak.py b/backend/api/routes/leak.py new file mode 100644 index 0000000..167fad0 --- /dev/null +++ b/backend/api/routes/leak.py @@ -0,0 +1,57 @@ +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +# Static topology metadata — mirrors simulator config +LEAK_SENSORS = { + "sg-01": [ + {"sensor_id": "leak-01", "floor_zone": "crac-zone-a", "under_floor": True, "near_crac": True, "room_id": "hall-a"}, + {"sensor_id": "leak-02", "floor_zone": "server-row-b1", "under_floor": True, "near_crac": False, "room_id": "hall-b"}, + {"sensor_id": "leak-03", "floor_zone": "ups-room", "under_floor": False, "near_crac": False, "room_id": None}, + ] +} + + +@router.get("/status") +async def leak_status( + site_id: 
str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest state for all leak sensors, enriched with location metadata.""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, value, recorded_at + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'leak' + AND recorded_at > NOW() - INTERVAL '5 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + # sensor_id format: {site_id}/leak/{sensor_id} + state_map: dict[str, dict] = {} + for row in result.mappings().all(): + parts = row["sensor_id"].split("/") + if len(parts) < 3: + continue + sid = parts[2] + state_map[sid] = { + "state": "detected" if float(row["value"]) > 0.5 else "clear", + "recorded_at": str(row["recorded_at"]), + } + + out = [] + for cfg in LEAK_SENSORS.get(site_id, []): + sid = cfg["sensor_id"] + entry = {**cfg} + if sid in state_map: + entry["state"] = state_map[sid]["state"] + entry["recorded_at"] = state_map[sid]["recorded_at"] + else: + entry["state"] = "unknown" + entry["recorded_at"] = None + out.append(entry) + return out diff --git a/backend/api/routes/maintenance.py b/backend/api/routes/maintenance.py new file mode 100644 index 0000000..7373af0 --- /dev/null +++ b/backend/api/routes/maintenance.py @@ -0,0 +1,77 @@ +import uuid +from datetime import datetime, timezone +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel + +router = APIRouter() + +# In-memory store (demo — resets on restart) +_windows: list[dict] = [] + + +class WindowCreate(BaseModel): + site_id: str + title: str + target: str # "all", a room_id like "hall-a", or a rack_id like "rack-A01" + target_label: str # human-readable label + start_dt: str # ISO 8601 + end_dt: str # ISO 8601 + suppress_alarms: bool = True + notes: str = "" + + +def _window_status(w: dict) -> str: + now = datetime.now(timezone.utc).isoformat() + if w["end_dt"] < now: + return "expired" + if w["start_dt"] <= now: + return "active" + 
return "scheduled" + + +@router.get("") +async def list_windows(site_id: str = "sg-01"): + return [ + {**w, "status": _window_status(w)} + for w in _windows + if w["site_id"] == site_id + ] + + +@router.post("", status_code=201) +async def create_window(body: WindowCreate): + window = { + "id": str(uuid.uuid4())[:8], + "site_id": body.site_id, + "title": body.title, + "target": body.target, + "target_label": body.target_label, + "start_dt": body.start_dt, + "end_dt": body.end_dt, + "suppress_alarms": body.suppress_alarms, + "notes": body.notes, + "created_at": datetime.now(timezone.utc).isoformat(), + } + _windows.append(window) + return {**window, "status": _window_status(window)} + + +@router.delete("/{window_id}", status_code=204) +async def delete_window(window_id: str): + global _windows + before = len(_windows) + _windows = [w for w in _windows if w["id"] != window_id] + if len(_windows) == before: + raise HTTPException(status_code=404, detail="Window not found") + + +@router.get("/active") +async def active_windows(site_id: str = "sg-01"): + """Returns only currently active windows — used by alarm page for suppression check.""" + now = datetime.now(timezone.utc).isoformat() + return [ + w for w in _windows + if w["site_id"] == site_id + and w["start_dt"] <= now <= w["end_dt"] + and w["suppress_alarms"] + ] diff --git a/backend/api/routes/network.py b/backend/api/routes/network.py new file mode 100644 index 0000000..2ed2519 --- /dev/null +++ b/backend/api/routes/network.py @@ -0,0 +1,69 @@ +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +SWITCHES = { + "sg-01": [ + {"switch_id": "sw-core-01", "name": "Core Switch — Hall A", "model": "Cisco Catalyst C9300-48P", "room_id": "hall-a", "rack_id": "SG1A01.01", "port_count": 48, "role": "core"}, + {"switch_id": "sw-core-02", "name": "Core Switch — Hall B", "model": "Arista 
7050CX3-32S", "room_id": "hall-b", "rack_id": "SG1B01.01", "port_count": 32, "role": "core"}, + {"switch_id": "sw-edge-01", "name": "Edge / Uplink Switch", "model": "Juniper EX4300-48T", "room_id": "hall-a", "rack_id": "SG1A01.05", "port_count": 48, "role": "edge"}, + ] +} + +NET_FIELD_MAP = { + "net_uptime_s": "uptime_s", + "net_active_ports": "active_ports", + "net_bw_in_mbps": "bandwidth_in_mbps", + "net_bw_out_mbps": "bandwidth_out_mbps", + "net_cpu_pct": "cpu_pct", + "net_mem_pct": "mem_pct", + "net_temp_c": "temperature_c", + "net_pkt_loss_pct": "packet_loss_pct", +} + +STATE_MAP = {0.0: "up", 1.0: "degraded", 2.0: "down"} + + +@router.get("/status") +async def network_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest reading for each network switch.""" + types_sql = ", ".join(f"'{t}'" for t in [*NET_FIELD_MAP.keys(), "net_state"]) + result = await session.execute(text(f""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ({types_sql}) + AND recorded_at > NOW() - INTERVAL '5 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + sw_data: dict[str, dict] = {} + for row in result.mappings().all(): + parts = row["sensor_id"].split("/") + if len(parts) < 3: + continue + sw_id = parts[2] + if sw_id not in sw_data: + sw_data[sw_id] = {} + field = NET_FIELD_MAP.get(row["sensor_type"]) + if field: + sw_data[sw_id][field] = round(float(row["value"]), 2) + elif row["sensor_type"] == "net_state": + v = round(float(row["value"])) + sw_data[sw_id]["state"] = STATE_MAP.get(v, "unknown") + + out = [] + for sw_cfg in SWITCHES.get(site_id, []): + sw_id = sw_cfg["switch_id"] + d = {**sw_cfg, **sw_data.get(sw_id, {})} + if "state" not in d: + d["state"] = "unknown" + out.append(d) + return out diff --git a/backend/api/routes/power.py b/backend/api/routes/power.py new file mode 100644 index 0000000..494ca3b --- /dev/null 
+++ b/backend/api/routes/power.py @@ -0,0 +1,460 @@ +from datetime import datetime, timezone, timedelta +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + +# Topology — mirrors simulator config +ROOMS = { + "sg-01": [ + {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)]}, + {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)]}, + ] +} +ATS_UNITS = {"sg-01": ["ats-01"]} +GENERATORS = {"sg-01": ["gen-01"]} + +ACTIVE_FEED_MAP = {0.0: "utility-a", 1.0: "utility-b", 2.0: "generator"} + +# Singapore commercial electricity tariff (SGD / kWh, approximate) +TARIFF_SGD_KWH = 0.298 + + +@router.get("/rack-breakdown") +async def rack_power_breakdown( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest kW reading per rack, grouped by room.""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + rack_id, room_id, value AS power_kw + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND rack_id IS NOT NULL + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + rows = result.mappings().all() + + rack_map: dict[str, dict] = {r["rack_id"]: dict(r) for r in rows} + + rooms = [] + for room in ROOMS.get(site_id, []): + racks = [] + for rack_id in room["racks"]: + reading = rack_map.get(rack_id) + racks.append({ + "rack_id": rack_id, + "power_kw": round(float(reading["power_kw"]), 2) if reading else None, + }) + rooms.append({"room_id": room["room_id"], "racks": racks}) + + return rooms + + +@router.get("/room-history") +async def room_power_history( + site_id: str = Query(...), + hours: int = Query(6, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + 
"""Total power per room bucketed by 5 minutes — for a multi-line trend chart.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + try: + result = await session.execute(text(""" + SELECT bucket, room_id, ROUND(SUM(avg_per_rack)::numeric, 1) AS total_kw + FROM ( + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_id, room_id, + AVG(value) AS avg_per_rack + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND room_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, sensor_id, room_id + ) per_rack + GROUP BY bucket, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + except Exception: + result = await session.execute(text(""" + SELECT bucket, room_id, ROUND(SUM(avg_per_rack)::numeric, 1) AS total_kw + FROM ( + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_id, room_id, + AVG(value) AS avg_per_rack + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND room_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, sensor_id, room_id + ) per_rack + GROUP BY bucket, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + return [dict(r) for r in result.mappings().all()] + + +@router.get("/ups") +async def ups_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest UPS readings.""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ('ups_charge', 'ups_load', 'ups_runtime', 'ups_state', 'ups_voltage') + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + rows = result.mappings().all() + + # sensor_id format: sg-01/power/ups-01/charge_pct + ups_data: dict[str, dict] = {} + for row in rows: + parts = row["sensor_id"].split("/") + if len(parts) < 3: + continue + 
ups_id = parts[2] + if ups_id not in ups_data: + ups_data[ups_id] = {"ups_id": ups_id} + key_map = { + "ups_charge": "charge_pct", + "ups_load": "load_pct", + "ups_runtime": "runtime_min", + "ups_state": "_state_raw", + "ups_voltage": "voltage_v", + } + field = key_map.get(row["sensor_type"]) + if field: + ups_data[ups_id][field] = round(float(row["value"]), 1) + + STATE_MAP = {0.0: "online", 1.0: "battery", 2.0: "overload"} + result_list = [] + for ups_id, d in sorted(ups_data.items()): + # Use stored state if available; fall back to charge heuristic only if state never arrived + state_raw = d.get("_state_raw") + if state_raw is not None: + state = STATE_MAP.get(round(state_raw), "online") + else: + charge = d.get("charge_pct") + state = "battery" if (charge is not None and charge < 20.0) else "online" + result_list.append({ + "ups_id": ups_id, + "state": state, + "charge_pct": d.get("charge_pct"), + "load_pct": d.get("load_pct"), + "runtime_min": d.get("runtime_min"), + "voltage_v": d.get("voltage_v"), + }) + return result_list + + +@router.get("/ups/history") +async def ups_history( + site_id: str = Query(...), + ups_id: str = Query(...), + hours: int = Query(6, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """5-minute bucketed trend for a single UPS: charge, load, runtime, voltage.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + types_sql = "'ups_charge', 'ups_load', 'ups_runtime', 'ups_voltage'" + try: + result = await session.execute(text(f""" + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 2) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ({types_sql}) + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, + "pattern": f"{site_id}/power/{ups_id}/%", + "from_time": from_time}) + except Exception: + result = await session.execute(text(f""" + 
SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_type, + ROUND(AVG(value)::numeric, 2) AS avg_val + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type IN ({types_sql}) + AND recorded_at > :from_time + GROUP BY bucket, sensor_type + ORDER BY bucket ASC + """), {"site_id": site_id, + "pattern": f"{site_id}/power/{ups_id}/%", + "from_time": from_time}) + + KEY_MAP = { + "ups_charge": "charge_pct", + "ups_load": "load_pct", + "ups_runtime": "runtime_min", + "ups_voltage": "voltage_v", + } + buckets: dict[str, dict] = {} + for row in result.mappings().all(): + b = row["bucket"].isoformat() + buckets.setdefault(b, {"bucket": b}) + field = KEY_MAP.get(row["sensor_type"]) + if field: + buckets[b][field] = float(row["avg_val"]) + + return list(buckets.values()) + + +@router.get("/ats") +async def ats_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Latest ATS transfer switch readings.""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ('ats_active', 'ats_state', 'ats_xfer_count', + 'ats_xfer_ms', 'ats_ua_v', 'ats_ub_v', 'ats_gen_v') + AND recorded_at > NOW() - INTERVAL '2 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + ats_data: dict[str, dict] = {} + for row in result.mappings().all(): + parts = row["sensor_id"].split("/") + # sensor_id: {site}/power/ats/{ats_id}/{key} → parts[3] + if len(parts) < 4: + continue + ats_id = parts[3] + if ats_id not in ats_data: + ats_data[ats_id] = {"ats_id": ats_id} + v = float(row["value"]) + s_type = row["sensor_type"] + if s_type == "ats_active": + ats_data[ats_id]["active_feed"] = ACTIVE_FEED_MAP.get(round(v), "utility-a") + elif s_type == "ats_state": + ats_data[ats_id]["state"] = "transferring" if v > 0.5 else "stable" + elif s_type == "ats_xfer_count": + 
ats_data[ats_id]["transfer_count"] = int(v) + elif s_type == "ats_xfer_ms": + ats_data[ats_id]["last_transfer_ms"] = round(v, 0) if v > 0 else None + elif s_type == "ats_ua_v": + ats_data[ats_id]["utility_a_v"] = round(v, 1) + elif s_type == "ats_ub_v": + ats_data[ats_id]["utility_b_v"] = round(v, 1) + elif s_type == "ats_gen_v": + ats_data[ats_id]["generator_v"] = round(v, 1) + + out = [] + for ats_id in ATS_UNITS.get(site_id, []): + d = ats_data.get(ats_id, {"ats_id": ats_id}) + d.setdefault("state", "stable") + d.setdefault("active_feed", "utility-a") + d.setdefault("transfer_count", 0) + d.setdefault("last_transfer_ms", None) + out.append(d) + return out + + +@router.get("/phase") +async def pdu_phase_breakdown( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Per-phase kW, amps, and imbalance % for every rack PDU.""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + rack_id, room_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ('pdu_phase_a_kw', 'pdu_phase_b_kw', 'pdu_phase_c_kw', + 'pdu_phase_a_a', 'pdu_phase_b_a', 'pdu_phase_c_a', + 'pdu_imbalance') + AND rack_id IS NOT NULL + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + + FIELD_MAP = { + "pdu_phase_a_kw": "phase_a_kw", + "pdu_phase_b_kw": "phase_b_kw", + "pdu_phase_c_kw": "phase_c_kw", + "pdu_phase_a_a": "phase_a_a", + "pdu_phase_b_a": "phase_b_a", + "pdu_phase_c_a": "phase_c_a", + "pdu_imbalance": "imbalance_pct", + } + + rack_map: dict[tuple, float] = {} + rack_rooms: dict[str, str] = {} + for row in result.mappings().all(): + rack_id = row["rack_id"] + room_id = row["room_id"] + s_type = row["sensor_type"] + if rack_id: + rack_map[(rack_id, s_type)] = round(float(row["value"]), 2) + if room_id: + rack_rooms[rack_id] = room_id + + rooms = [] + for room in ROOMS.get(site_id, []): + racks = [] + for rack_id in room["racks"]: + entry: 
dict = {"rack_id": rack_id, "room_id": room["room_id"]} + for s_type, field in FIELD_MAP.items(): + entry[field] = rack_map.get((rack_id, s_type)) + racks.append(entry) + rooms.append({"room_id": room["room_id"], "racks": racks}) + return rooms + + +@router.get("/redundancy") +async def power_redundancy( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Compute power redundancy level: 2N, N+1, or N.""" + # Count UPS units online + ups_result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, value + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'ups_charge' + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + ups_rows = ups_result.mappings().all() + ups_online = len([r for r in ups_rows if float(r["value"]) > 10.0]) + ups_total = len(ups_rows) + + # ATS active feed + ats_result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, value + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'ats_active' + AND recorded_at > NOW() - INTERVAL '2 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + ats_rows = ats_result.mappings().all() + ats_active_feed = None + if ats_rows: + ats_active_feed = ACTIVE_FEED_MAP.get(round(float(ats_rows[0]["value"])), "utility-a") + + # Generator available (not fault) + gen_result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, value + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'gen_state' + AND recorded_at > NOW() - INTERVAL '5 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + gen_rows = gen_result.mappings().all() + gen_available = len([r for r in gen_rows if float(r["value"]) >= 0.0]) > 0 + + # Derive level + if ups_total >= 2 and ups_online >= 2 and gen_available: + level = "2N" + elif ups_online >= 1 and gen_available: + level = "N+1" + else: + level 
= "N" + + return { + "site_id": site_id, + "level": level, + "ups_total": ups_total, + "ups_online": ups_online, + "generator_ok": gen_available, + "ats_active_feed": ats_active_feed, + "notes": ( + "Dual UPS + generator = 2N" if level == "2N" else + "Single path active — reduced redundancy" if level == "N" else + "N+1 — one redundant path available" + ), + } + + +@router.get("/utility") +async def utility_power( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Current total IT load and estimated monthly energy cost.""" + # Latest total IT load + kw_result = await session.execute(text(""" + SELECT ROUND(SUM(value)::numeric, 2) AS total_kw + FROM ( + SELECT DISTINCT ON (sensor_id) sensor_id, value + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + ) latest + """), {"site_id": site_id}) + kw_row = kw_result.mappings().first() + total_kw = float(kw_row["total_kw"] or 0) if kw_row else 0.0 + + # Estimated month-to-date kWh (from readings since start of month) + from_month = datetime.now(timezone.utc).replace(day=1, hour=0, minute=0, second=0, microsecond=0) + kwh_result = await session.execute(text(""" + SELECT ROUND((SUM(value) * 5.0 / 60.0)::numeric, 1) AS kwh_mtd + FROM ( + SELECT DISTINCT ON (sensor_id, date_trunc('minute', recorded_at)) + sensor_id, value + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND recorded_at > :from_month + ORDER BY sensor_id, date_trunc('minute', recorded_at), recorded_at DESC + ) bucketed + """), {"site_id": site_id, "from_month": from_month}) + kwh_row = kwh_result.mappings().first() + kwh_mtd = float(kwh_row["kwh_mtd"] or 0) if kwh_row else 0.0 + + cost_mtd = round(kwh_mtd * TARIFF_SGD_KWH, 2) + # Annualised from month-to-date pace + now = datetime.now(timezone.utc) + day_of_month = now.day + days_in_month = 30 + if day_of_month > 0: + kwh_annual_est = 
round(kwh_mtd / day_of_month * 365, 0) + cost_annual_est = round(kwh_annual_est * TARIFF_SGD_KWH, 2) + else: + kwh_annual_est = 0.0 + cost_annual_est = 0.0 + + return { + "site_id": site_id, + "total_kw": total_kw, + "tariff_sgd_kwh": TARIFF_SGD_KWH, + "kwh_month_to_date": kwh_mtd, + "cost_sgd_mtd": cost_mtd, + "kwh_annual_est": kwh_annual_est, + "cost_sgd_annual_est": cost_annual_est, + "currency": "SGD", + } diff --git a/backend/api/routes/readings.py b/backend/api/routes/readings.py new file mode 100644 index 0000000..fe00f16 --- /dev/null +++ b/backend/api/routes/readings.py @@ -0,0 +1,229 @@ +from datetime import datetime, timezone, timedelta +from fastapi import APIRouter, Depends, Query +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession +from core.database import get_session + +router = APIRouter() + + +@router.get("/latest") +async def get_latest_readings( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Most recent reading per sensor for a site (last 10 minutes).""" + result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_id) + sensor_id, sensor_type, site_id, room_id, rack_id, value, unit, recorded_at + FROM readings + WHERE site_id = :site_id + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + """), {"site_id": site_id}) + return [dict(r) for r in result.mappings().all()] + + +@router.get("/kpis") +async def get_site_kpis( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Aggregate KPIs for the overview dashboard.""" + power = await session.execute(text(""" + SELECT COALESCE(SUM(value), 0) AS total_power_kw + FROM ( + SELECT DISTINCT ON (sensor_id) sensor_id, value + FROM readings + WHERE site_id = :site_id AND sensor_type = 'power_kw' + AND recorded_at > NOW() - INTERVAL '5 minutes' + ORDER BY sensor_id, recorded_at DESC + ) latest + """), {"site_id": site_id}) + + temp = await session.execute(text(""" 
+ SELECT COALESCE(AVG(value), 0) AS avg_temp + FROM ( + SELECT DISTINCT ON (sensor_id) sensor_id, value + FROM readings + WHERE site_id = :site_id AND sensor_type = 'temperature' + AND recorded_at > NOW() - INTERVAL '5 minutes' + ORDER BY sensor_id, recorded_at DESC + ) latest + """), {"site_id": site_id}) + + alarms = await session.execute(text(""" + SELECT COUNT(*) AS alarm_count + FROM alarms + WHERE site_id = :site_id AND state = 'active' + """), {"site_id": site_id}) + + total_kw = float(power.mappings().one()["total_power_kw"]) + avg_temp = float(temp.mappings().one()["avg_temp"]) + alarm_cnt = int(alarms.mappings().one()["alarm_count"]) + pue = round(total_kw / (total_kw * 0.87), 2) if total_kw > 0 else 0.0 + + return { + "total_power_kw": round(total_kw, 1), + "pue": pue, + "avg_temperature": round(avg_temp, 1), + "active_alarms": alarm_cnt, + } + + +@router.get("/site-power-history") +async def get_site_power_history( + site_id: str = Query(...), + hours: int = Query(1, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """Total power (kW) bucketed by 5 minutes — for the power trend chart.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + try: + result = await session.execute(text(""" + SELECT bucket, ROUND(SUM(avg_per_sensor)::numeric, 1) AS total_kw + FROM ( + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_id, + AVG(value) AS avg_per_sensor + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND recorded_at > :from_time + GROUP BY bucket, sensor_id + ) per_sensor + GROUP BY bucket + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + except Exception: + result = await session.execute(text(""" + SELECT bucket, ROUND(SUM(avg_per_sensor)::numeric, 1) AS total_kw + FROM ( + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_id, + AVG(value) AS avg_per_sensor + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND 
recorded_at > :from_time + GROUP BY bucket, sensor_id + ) per_sensor + GROUP BY bucket + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + return [dict(r) for r in result.mappings().all()] + + +@router.get("/room-temp-history") +async def get_room_temp_history( + site_id: str = Query(...), + hours: int = Query(1, ge=1, le=24), + session: AsyncSession = Depends(get_session), +): + """Average temperature per room bucketed by 5 minutes — for the temp trend chart.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + try: + result = await session.execute(text(""" + SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 2) AS avg_temp + FROM ( + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + sensor_id, room_id, + AVG(value) AS avg_per_rack + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'temperature' + AND room_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, sensor_id, room_id + ) per_rack + GROUP BY bucket, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + except Exception: + result = await session.execute(text(""" + SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 2) AS avg_temp + FROM ( + SELECT + date_trunc('minute', recorded_at) AS bucket, + sensor_id, room_id, + AVG(value) AS avg_per_rack + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'temperature' + AND room_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, sensor_id, room_id + ) per_rack + GROUP BY bucket, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + return [dict(r) for r in result.mappings().all()] + + +@router.get("/room-status") +async def get_room_status( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Current per-room summary: avg temp, total power, rack count, alarm count.""" + temp = await session.execute(text(""" + SELECT room_id, ROUND(AVG(value)::numeric, 1) AS 
avg_temp + FROM ( + SELECT DISTINCT ON (sensor_id) sensor_id, room_id, value + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'temperature' + AND room_id IS NOT NULL + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + ) latest + GROUP BY room_id + """), {"site_id": site_id}) + + power = await session.execute(text(""" + SELECT room_id, ROUND(SUM(value)::numeric, 1) AS total_kw + FROM ( + SELECT DISTINCT ON (sensor_id) sensor_id, room_id, value + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND room_id IS NOT NULL + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + ) latest + GROUP BY room_id + """), {"site_id": site_id}) + + alarm_counts = await session.execute(text(""" + SELECT room_id, COUNT(*) AS alarm_count, MAX(severity) AS worst_severity + FROM alarms + WHERE site_id = :site_id AND state = 'active' AND room_id IS NOT NULL + GROUP BY room_id + """), {"site_id": site_id}) + + temp_map = {r["room_id"]: float(r["avg_temp"]) for r in temp.mappings().all()} + power_map = {r["room_id"]: float(r["total_kw"]) for r in power.mappings().all()} + alarm_map = {r["room_id"]: (int(r["alarm_count"]), r["worst_severity"]) + for r in alarm_counts.mappings().all()} + + rooms = sorted(set(list(temp_map.keys()) + list(power_map.keys()))) + result = [] + for room_id in rooms: + avg_temp = temp_map.get(room_id, 0.0) + alarm_cnt, ws = alarm_map.get(room_id, (0, None)) + status = "ok" + if ws == "critical" or avg_temp >= 30: + status = "critical" + elif ws == "warning" or avg_temp >= 26: + status = "warning" + result.append({ + "room_id": room_id, + "avg_temp": avg_temp, + "total_kw": power_map.get(room_id, 0.0), + "alarm_count": alarm_cnt, + "status": status, + }) + return result diff --git a/backend/api/routes/reports.py b/backend/api/routes/reports.py new file mode 100644 index 0000000..ea9f209 --- /dev/null +++ b/backend/api/routes/reports.py @@ -0,0 +1,356 @@ 
+import csv +import io +from datetime import datetime, timezone, timedelta + +from fastapi import APIRouter, Depends, Query +from fastapi.responses import StreamingResponse +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +from core.database import get_session + +router = APIRouter() + +TARIFF_SGD_KWH = 0.298 + +ROOMS = { + "sg-01": [ + {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"}, + {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"}, + ] +} +UPS_IDS = {"sg-01": ["ups-01", "ups-02"]} + + +@router.get("/energy") +async def energy_report( + site_id: str = Query(...), + days: int = Query(30, ge=1, le=90), + session: AsyncSession = Depends(get_session), +): + """kWh consumption, cost, and 30-day PUE trend.""" + from_time = datetime.now(timezone.utc) - timedelta(days=days) + + # Total kWh over period (5-min buckets × kW / 12 = kWh per bucket) + try: + kwh_result = await session.execute(text(""" + SELECT ROUND((SUM(avg_kw) / 12.0)::numeric, 1) AS kwh_total + FROM ( + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + AVG(value) AS avg_kw + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND recorded_at > :from_time + GROUP BY bucket + ) bucketed + """), {"site_id": site_id, "from_time": from_time}) + except Exception: + kwh_result = await session.execute(text(""" + SELECT ROUND((SUM(avg_kw) / 12.0)::numeric, 1) AS kwh_total + FROM ( + SELECT + date_trunc('minute', recorded_at) AS bucket, + AVG(value) AS avg_kw + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND recorded_at > :from_time + GROUP BY bucket + ) bucketed + """), {"site_id": site_id, "from_time": from_time}) + + kwh_row = kwh_result.mappings().first() + kwh_total = float(kwh_row["kwh_total"] or 0) if kwh_row else 0.0 + cost_sgd = 
round(kwh_total * TARIFF_SGD_KWH, 2) + + # PUE daily average (IT load / total facility load — approximated as IT load / 0.85 overhead) + # Since we only have IT load, estimate PUE = total_facility / it_load ≈ 1.4–1.6 + # For a proper PUE we'd need facility meter — use a day-by-day IT load trend instead + try: + pue_result = await session.execute(text(""" + SELECT + time_bucket('1 day', recorded_at) AS day, + ROUND(AVG(value)::numeric, 2) AS avg_it_kw + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND recorded_at > :from_time + GROUP BY day + ORDER BY day ASC + """), {"site_id": site_id, "from_time": from_time}) + except Exception: + pue_result = await session.execute(text(""" + SELECT + date_trunc('day', recorded_at) AS day, + ROUND(AVG(value)::numeric, 2) AS avg_it_kw + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND recorded_at > :from_time + GROUP BY day + ORDER BY day ASC + """), {"site_id": site_id, "from_time": from_time}) + + # Estimated PUE: assume ~40% overhead (cooling + lighting + UPS losses) + OVERHEAD_FACTOR = 1.40 + pue_trend = [ + { + "day": str(r["day"]), + "avg_it_kw": float(r["avg_it_kw"]), + "pue_est": round(OVERHEAD_FACTOR, 2), + } + for r in pue_result.mappings().all() + ] + + return { + "site_id": site_id, + "period_days": days, + "from_date": from_time.date().isoformat(), + "to_date": datetime.now(timezone.utc).date().isoformat(), + "kwh_total": kwh_total, + "cost_sgd": cost_sgd, + "tariff_sgd_kwh": TARIFF_SGD_KWH, + "currency": "SGD", + "pue_estimated": OVERHEAD_FACTOR, + "pue_trend": pue_trend, + } + + +@router.get("/summary") +async def site_summary( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Site-level summary: KPIs, alarm stats, CRAC uptime%, UPS uptime%.""" + # KPIs + kpi_res = await session.execute(text(""" + SELECT + ROUND(SUM(CASE WHEN sensor_type = 'power_kw' THEN value END)::numeric, 2) AS total_power_kw, + ROUND(AVG(CASE WHEN 
sensor_type = 'temperature' THEN value END)::numeric, 1) AS avg_temperature + FROM ( + SELECT DISTINCT ON (sensor_id) sensor_id, sensor_type, value + FROM readings + WHERE site_id = :site_id + AND sensor_type IN ('power_kw', 'temperature') + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_id, recorded_at DESC + ) latest + """), {"site_id": site_id}) + kpi_row = kpi_res.mappings().first() or {} + + # Alarm stats (all-time by state/severity) + alarm_res = await session.execute(text(""" + SELECT state, severity, COUNT(*) AS cnt + FROM alarms + WHERE site_id = :site_id + GROUP BY state, severity + """), {"site_id": site_id}) + alarm_stats: dict = {"active": 0, "acknowledged": 0, "resolved": 0, "critical": 0, "warning": 0} + for row in alarm_res.mappings().all(): + if row["state"] in alarm_stats: + alarm_stats[row["state"]] += int(row["cnt"]) + if row["severity"] in ("critical", "warning"): + alarm_stats[row["severity"]] += int(row["cnt"]) + + # CRAC uptime % over last 24h + from_24h = datetime.now(timezone.utc) - timedelta(hours=24) + total_buckets = 24 * 12 # one 5-min bucket per 5 minutes + cracs = [] + for room in ROOMS.get(site_id, []): + crac_id = room["crac_id"] + try: + r = await session.execute(text(""" + SELECT COUNT(DISTINCT time_bucket('5 minutes', recorded_at)) AS buckets + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type = 'cooling_supply' + AND recorded_at > :from_time + """), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_24h}) + except Exception: + r = await session.execute(text(""" + SELECT COUNT(DISTINCT date_trunc('minute', recorded_at)) AS buckets + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type = 'cooling_supply' + AND recorded_at > :from_time + """), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_24h}) + row = r.mappings().first() + buckets = int(row["buckets"]) if row and 
row["buckets"] else 0 + cracs.append({ + "crac_id": crac_id, + "room_id": room["room_id"], + "uptime_pct": round(min(100.0, buckets / total_buckets * 100), 1), + }) + + # UPS uptime % over last 24h + ups_units = [] + for ups_id in UPS_IDS.get(site_id, []): + try: + r = await session.execute(text(""" + SELECT COUNT(DISTINCT time_bucket('5 minutes', recorded_at)) AS buckets + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type = 'ups_charge' + AND recorded_at > :from_time + """), {"site_id": site_id, "pattern": f"{site_id}/ups/{ups_id}/%", "from_time": from_24h}) + except Exception: + r = await session.execute(text(""" + SELECT COUNT(DISTINCT date_trunc('minute', recorded_at)) AS buckets + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND sensor_type = 'ups_charge' + AND recorded_at > :from_time + """), {"site_id": site_id, "pattern": f"{site_id}/ups/{ups_id}/%", "from_time": from_24h}) + row = r.mappings().first() + buckets = int(row["buckets"]) if row and row["buckets"] else 0 + ups_units.append({ + "ups_id": ups_id, + "uptime_pct": round(min(100.0, buckets / total_buckets * 100), 1), + }) + + return { + "site_id": site_id, + "generated_at": datetime.now(timezone.utc).isoformat(), + "kpis": { + "total_power_kw": float(kpi_row.get("total_power_kw") or 0), + "avg_temperature": float(kpi_row.get("avg_temperature") or 0), + }, + "alarm_stats": alarm_stats, + "crac_uptime": cracs, + "ups_uptime": ups_units, + } + + +@router.get("/export/power") +async def export_power( + site_id: str = Query(...), + hours: int = Query(24, ge=1, le=168), + session: AsyncSession = Depends(get_session), +): + """Download power history as CSV.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + try: + result = await session.execute(text(""" + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + room_id, + ROUND(SUM(value)::numeric, 2) AS total_kw + FROM readings + WHERE site_id = :site_id + AND 
sensor_type = 'power_kw' + AND room_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + except Exception: + result = await session.execute(text(""" + SELECT + date_trunc('minute', recorded_at) AS bucket, + room_id, + ROUND(SUM(value)::numeric, 2) AS total_kw + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'power_kw' + AND room_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + + output = io.StringIO() + writer = csv.writer(output) + writer.writerow(["timestamp", "room_id", "total_kw"]) + for row in result.mappings().all(): + writer.writerow([row["bucket"], row["room_id"], row["total_kw"]]) + output.seek(0) + return StreamingResponse( + iter([output.getvalue()]), + media_type="text/csv", + headers={"Content-Disposition": f"attachment; filename=power_{site_id}_{hours}h.csv"}, + ) + + +@router.get("/export/temperature") +async def export_temperature( + site_id: str = Query(...), + hours: int = Query(24, ge=1, le=168), + session: AsyncSession = Depends(get_session), +): + """Download temperature history per rack as CSV.""" + from_time = datetime.now(timezone.utc) - timedelta(hours=hours) + try: + result = await session.execute(text(""" + SELECT + time_bucket('5 minutes', recorded_at) AS bucket, + rack_id, room_id, + ROUND(AVG(value)::numeric, 1) AS avg_temp + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'temperature' + AND rack_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, rack_id, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + except Exception: + result = await session.execute(text(""" + SELECT + date_trunc('minute', recorded_at) AS bucket, + rack_id, room_id, + ROUND(AVG(value)::numeric, 1) AS avg_temp + FROM readings + WHERE site_id = :site_id + AND sensor_type = 'temperature' + AND 
rack_id IS NOT NULL + AND recorded_at > :from_time + GROUP BY bucket, rack_id, room_id + ORDER BY bucket ASC + """), {"site_id": site_id, "from_time": from_time}) + + output = io.StringIO() + writer = csv.writer(output) + writer.writerow(["timestamp", "room_id", "rack_id", "avg_temp_c"]) + for row in result.mappings().all(): + writer.writerow([row["bucket"], row["room_id"], row["rack_id"], row["avg_temp"]]) + output.seek(0) + return StreamingResponse( + iter([output.getvalue()]), + media_type="text/csv", + headers={"Content-Disposition": f"attachment; filename=temperature_{site_id}_{hours}h.csv"}, + ) + + +@router.get("/export/alarms") +async def export_alarms( + site_id: str = Query(...), + session: AsyncSession = Depends(get_session), +): + """Download full alarm log as CSV.""" + result = await session.execute(text(""" + SELECT id, severity, message, state, room_id, rack_id, triggered_at + FROM alarms + WHERE site_id = :site_id + ORDER BY triggered_at DESC + """), {"site_id": site_id}) + + output = io.StringIO() + writer = csv.writer(output) + writer.writerow(["id", "severity", "message", "state", "room_id", "rack_id", "triggered_at"]) + for row in result.mappings().all(): + writer.writerow([ + row["id"], row["severity"], row["message"], row["state"], + row["room_id"], row["rack_id"], row["triggered_at"], + ]) + output.seek(0) + return StreamingResponse( + iter([output.getvalue()]), + media_type="text/csv", + headers={"Content-Disposition": f"attachment; filename=alarms_{site_id}.csv"}, + ) diff --git a/backend/api/routes/scenarios.py b/backend/api/routes/scenarios.py new file mode 100644 index 0000000..a7cf6a5 --- /dev/null +++ b/backend/api/routes/scenarios.py @@ -0,0 +1,248 @@ +""" +Scenario control API — proxies trigger/reset commands to the MQTT broker +so the frontend can fire simulator scenarios over HTTP. 
+""" +import json +import asyncio +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel +from typing import Optional +import aiomqtt + +from core.config import settings + +router = APIRouter() + +# ── Scenario catalogue ─────────────────────────────────────────────────────── +# Mirrors the definitions in simulators/scenarios/runner.py and compound.py. +# Kept here so the frontend has a single typed source of truth. + +SCENARIOS = [ + # ── Compound (multi-bot, time-sequenced) ───────────────────────────────── + { + "name": "HOT_NIGHT", + "label": "Hot Night", + "description": "CRAC-01 compressor trips silently. The backup unit overworks itself. Rack temps climb, power draw rises, VESDA alert fires.", + "duration": "~10 min", + "compound": True, + "default_target": None, + "targets": [], + }, + { + "name": "GENERATOR_TEST_GONE_WRONG", + "label": "Generator Test Gone Wrong", + "description": "Planned ATS transfer to generator. Generator was low on fuel and faults after 15 min. UPS must carry full site load alone.", + "duration": "~16 min", + "compound": True, + "default_target": None, + "targets": [], + }, + { + "name": "SLOW_BURN", + "label": "Slow Burn", + "description": "A dirty filter nobody noticed. Airflow degrades for 30 min. Temps creep, humidity climbs, VESDA alerts, then CRAC trips on thermal protection.", + "duration": "~30 min", + "compound": True, + "default_target": None, + "targets": [], + }, + { + "name": "LAST_RESORT", + "label": "Last Resort", + "description": "Utility fails. Generator starts then faults after 2 minutes. 
UPS absorbs the full load, overheats, and VESDA escalates to fire.", + "duration": "~9 min", + "compound": True, + "default_target": None, + "targets": [], + }, + # ── Cooling ────────────────────────────────────────────────────────────── + { + "name": "COOLING_FAILURE", + "label": "Cooling Failure", + "description": "CRAC unit goes offline — rack temperatures rise rapidly.", + "duration": "ongoing", + "compound": False, + "default_target": "crac-01", + "targets": ["crac-01", "crac-02"], + }, + { + "name": "FAN_DEGRADATION", + "label": "Fan Degradation", + "description": "CRAC fan bearing wear — fan speed drops, ΔT rises over ~25 min.", + "duration": "~25 min", + "compound": False, + "default_target": "crac-01", + "targets": ["crac-01", "crac-02"], + }, + { + "name": "COMPRESSOR_FAULT", + "label": "Compressor Fault", + "description": "Compressor trips — unit drops to fan-only, cooling capacity collapses to ~8%.", + "duration": "ongoing", + "compound": False, + "default_target": "crac-01", + "targets": ["crac-01", "crac-02"], + }, + { + "name": "DIRTY_FILTER", + "label": "Dirty Filter", + "description": "Filter fouling — ΔP rises, airflow and capacity degrade over time.", + "duration": "ongoing", + "compound": False, + "default_target": "crac-01", + "targets": ["crac-01", "crac-02"], + }, + { + "name": "HIGH_TEMPERATURE", + "label": "High Temperature", + "description": "Gradual ambient heat rise — slower than a full cooling failure.", + "duration": "ongoing", + "compound": False, + "default_target": "hall-a", + "targets": ["hall-a", "hall-b"], + }, + { + "name": "HUMIDITY_SPIKE", + "label": "Humidity Spike", + "description": "Humidity climbs — condensation / humidifier fault risk.", + "duration": "ongoing", + "compound": False, + "default_target": "hall-a", + "targets": ["hall-a", "hall-b"], + }, + { + "name": "CHILLER_FAULT", + "label": "Chiller Fault", + "description": "Chiller plant trips — chilled water supply lost.", + "duration": "ongoing", + "compound": 
False, + "default_target": "chiller-01", + "targets": ["chiller-01"], + }, + # ── Power ──────────────────────────────────────────────────────────────── + { + "name": "UPS_MAINS_FAILURE", + "label": "UPS Mains Failure", + "description": "Mains power lost — UPS switches to battery and drains.", + "duration": "~60 min", + "compound": False, + "default_target": "ups-01", + "targets": ["ups-01", "ups-02"], + }, + { + "name": "POWER_SPIKE", + "label": "Power Spike", + "description": "PDU load surges across a room by up to 50%.", + "duration": "ongoing", + "compound": False, + "default_target": "hall-a", + "targets": ["hall-a", "hall-b"], + }, + { + "name": "RACK_OVERLOAD", + "label": "Rack Overload", + "description": "Single rack redlines at ~85–95% of rated 10 kW capacity.", + "duration": "ongoing", + "compound": False, + "default_target": "SG1A01.10", + "targets": ["SG1A01.10", "SG1B01.10"], + }, + { + "name": "PHASE_IMBALANCE", + "label": "Phase Imbalance", + "description": "PDU phase A overloads, phase C drops — imbalance flag triggers.", + "duration": "ongoing", + "compound": False, + "default_target": "SG1A01.10/pdu", + "targets": ["SG1A01.10/pdu", "SG1B01.10/pdu"], + }, + { + "name": "ATS_TRANSFER", + "label": "ATS Transfer", + "description": "Utility feed lost — ATS transfers load to generator.", + "duration": "ongoing", + "compound": False, + "default_target": "ats-01", + "targets": ["ats-01"], + }, + { + "name": "GENERATOR_FAILURE", + "label": "Generator Running", + "description": "Generator starts and runs under load following a utility failure.", + "duration": "ongoing", + "compound": False, + "default_target": "gen-01", + "targets": ["gen-01"], + }, + { + "name": "GENERATOR_LOW_FUEL", + "label": "Generator Low Fuel", + "description": "Generator fuel level drains to critical low.", + "duration": "ongoing", + "compound": False, + "default_target": "gen-01", + "targets": ["gen-01"], + }, + { + "name": "GENERATOR_FAULT", + "label": "Generator Fault", + 
"description": "Generator fails — fault state, no output.", + "duration": "ongoing", + "compound": False, + "default_target": "gen-01", + "targets": ["gen-01"], + }, + # ── Environmental / Life Safety ────────────────────────────────────────── + { + "name": "LEAK_DETECTED", + "label": "Leak Detected", + "description": "Water leak sensor triggers a critical alarm.", + "duration": "ongoing", + "compound": False, + "default_target": "leak-01", + "targets": ["leak-01", "leak-02", "leak-03"], + }, + { + "name": "VESDA_ALERT", + "label": "VESDA Alert", + "description": "Smoke obscuration rises into the Alert/Action band.", + "duration": "ongoing", + "compound": False, + "default_target": "vesda-hall-a", + "targets": ["vesda-hall-a", "vesda-hall-b"], + }, + { + "name": "VESDA_FIRE", + "label": "VESDA Fire", + "description": "Smoke obscuration escalates to Fire level.", + "duration": "ongoing", + "compound": False, + "default_target": "vesda-hall-a", + "targets": ["vesda-hall-a", "vesda-hall-b"], + }, +] + + +# ── Request / Response models ──────────────────────────────────────────────── + +class TriggerRequest(BaseModel): + scenario: str + target: Optional[str] = None + + +# ── Endpoints ──────────────────────────────────────────────────────────────── + +@router.get("") +async def list_scenarios(): + return SCENARIOS + + +@router.post("/trigger") +async def trigger_scenario(body: TriggerRequest): + payload = json.dumps({"scenario": body.scenario, "target": body.target}) + try: + async with aiomqtt.Client(settings.MQTT_HOST, port=settings.MQTT_PORT) as client: + await client.publish("bms/control/scenario", payload, qos=1) + except Exception as e: + raise HTTPException(status_code=503, detail=f"MQTT unavailable: {e}") + return {"ok": True, "scenario": body.scenario, "target": body.target} diff --git a/backend/api/routes/settings.py b/backend/api/routes/settings.py new file mode 100644 index 0000000..f2c38b3 --- /dev/null +++ b/backend/api/routes/settings.py @@ -0,0 +1,465 
@@ +import json +import logging +from typing import Any +from fastapi import APIRouter, Depends, HTTPException, Query +from pydantic import BaseModel +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +from core.database import get_session +from services.alarm_engine import invalidate_threshold_cache +from services.seed import THRESHOLD_SEED_DATA, DEFAULT_SETTINGS, SITE_ID as DEFAULT_SITE + +router = APIRouter() +logger = logging.getLogger(__name__) + + +# ── Pydantic models ──────────────────────────────────────────────────────────── + +class SensorCreate(BaseModel): + device_id: str + name: str + device_type: str + room_id: str | None = None + rack_id: str | None = None + protocol: str = "mqtt" + protocol_config: dict[str, Any] = {} + enabled: bool = True + +class SensorUpdate(BaseModel): + name: str | None = None + device_type: str | None = None + room_id: str | None = None + rack_id: str | None = None + protocol: str | None = None + protocol_config: dict[str, Any] | None = None + enabled: bool | None = None + +class ThresholdUpdate(BaseModel): + threshold_value: float | None = None + severity: str | None = None + enabled: bool | None = None + +class ThresholdCreate(BaseModel): + sensor_type: str + threshold_value: float + direction: str + severity: str + message_template: str + +class SettingsUpdate(BaseModel): + value: dict[str, Any] + + +# ── Sensors ──────────────────────────────────────────────────────────────────── + +@router.get("/sensors") +async def list_sensors( + site_id: str = Query(DEFAULT_SITE), + device_type: str | None = Query(None), + room_id: str | None = Query(None), + protocol: str | None = Query(None), + session: AsyncSession = Depends(get_session), +): + """List all sensor devices, with optional filters.""" + conditions = ["site_id = :site_id"] + params: dict = {"site_id": site_id} + + if device_type: + conditions.append("device_type = :device_type") + params["device_type"] = device_type + if room_id: + 
conditions.append("room_id = :room_id") + params["room_id"] = room_id + if protocol: + conditions.append("protocol = :protocol") + params["protocol"] = protocol + + where = " AND ".join(conditions) + result = await session.execute(text(f""" + SELECT id, site_id, device_id, name, device_type, room_id, rack_id, + protocol, protocol_config, enabled, created_at, updated_at + FROM sensors + WHERE {where} + ORDER BY device_type, room_id NULLS LAST, device_id + """), params) + return [dict(r) for r in result.mappings().all()] + + +@router.post("/sensors", status_code=201) +async def create_sensor( + body: SensorCreate, + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + """Register a new sensor device.""" + result = await session.execute(text(""" + INSERT INTO sensors + (site_id, device_id, name, device_type, room_id, rack_id, + protocol, protocol_config, enabled) + VALUES + (:site_id, :device_id, :name, :device_type, :room_id, :rack_id, + :protocol, :protocol_config, :enabled) + RETURNING id, site_id, device_id, name, device_type, room_id, rack_id, + protocol, protocol_config, enabled, created_at, updated_at + """), { + "site_id": site_id, + "device_id": body.device_id, + "name": body.name, + "device_type": body.device_type, + "room_id": body.room_id, + "rack_id": body.rack_id, + "protocol": body.protocol, + "protocol_config": json.dumps(body.protocol_config), + "enabled": body.enabled, + }) + await session.commit() + return dict(result.mappings().first()) + + +@router.get("/sensors/{sensor_id}") +async def get_sensor( + sensor_id: int, + session: AsyncSession = Depends(get_session), +): + """Get a single sensor device plus its most recent readings.""" + result = await session.execute(text(""" + SELECT id, site_id, device_id, name, device_type, room_id, rack_id, + protocol, protocol_config, enabled, created_at, updated_at + FROM sensors WHERE id = :id + """), {"id": sensor_id}) + row = result.mappings().first() + if not row: + 
raise HTTPException(status_code=404, detail="Sensor not found") + + sensor = dict(row) + + # Fetch latest readings for this device + readings_result = await session.execute(text(""" + SELECT DISTINCT ON (sensor_type) + sensor_type, value, unit, recorded_at + FROM readings + WHERE site_id = :site_id + AND sensor_id LIKE :pattern + AND recorded_at > NOW() - INTERVAL '10 minutes' + ORDER BY sensor_type, recorded_at DESC + """), { + "site_id": sensor["site_id"], + "pattern": f"{sensor['site_id']}%{sensor['device_id']}%", + }) + sensor["recent_readings"] = [dict(r) for r in readings_result.mappings().all()] + + return sensor + + +@router.put("/sensors/{sensor_id}") +async def update_sensor( + sensor_id: int, + body: SensorUpdate, + session: AsyncSession = Depends(get_session), +): + """Update a sensor device's config or toggle enabled.""" + updates = [] + params: dict = {"id": sensor_id} + + if body.name is not None: + updates.append("name = :name") + params["name"] = body.name + if body.device_type is not None: + updates.append("device_type = :device_type") + params["device_type"] = body.device_type + if body.room_id is not None: + updates.append("room_id = :room_id") + params["room_id"] = body.room_id + if body.rack_id is not None: + updates.append("rack_id = :rack_id") + params["rack_id"] = body.rack_id + if body.protocol is not None: + updates.append("protocol = :protocol") + params["protocol"] = body.protocol + if body.protocol_config is not None: + updates.append("protocol_config = :protocol_config") + params["protocol_config"] = json.dumps(body.protocol_config) + if body.enabled is not None: + updates.append("enabled = :enabled") + params["enabled"] = body.enabled + + if not updates: + raise HTTPException(status_code=400, detail="No fields to update") + + updates.append("updated_at = NOW()") + set_clause = ", ".join(updates) + + result = await session.execute(text(f""" + UPDATE sensors SET {set_clause} + WHERE id = :id + RETURNING id, site_id, device_id, name, 
device_type, room_id, rack_id, + protocol, protocol_config, enabled, created_at, updated_at + """), params) + row = result.mappings().first() + if not row: + raise HTTPException(status_code=404, detail="Sensor not found") + await session.commit() + return dict(row) + + +@router.delete("/sensors/{sensor_id}", status_code=204) +async def delete_sensor( + sensor_id: int, + session: AsyncSession = Depends(get_session), +): + """Remove a sensor device from the registry.""" + result = await session.execute( + text("DELETE FROM sensors WHERE id = :id RETURNING id"), + {"id": sensor_id}, + ) + if not result.fetchone(): + raise HTTPException(status_code=404, detail="Sensor not found") + await session.commit() + + +# ── Alarm thresholds ─────────────────────────────────────────────────────────── + +@router.get("/thresholds") +async def list_thresholds( + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + """Return all user-editable threshold rules (locked=false).""" + result = await session.execute(text(""" + SELECT id, site_id, sensor_type, threshold_value, direction, + severity, message_template, enabled, locked, created_at, updated_at + FROM alarm_thresholds + WHERE site_id = :site_id AND locked = false + ORDER BY id + """), {"site_id": site_id}) + return [dict(r) for r in result.mappings().all()] + + +@router.put("/thresholds/{threshold_id}") +async def update_threshold( + threshold_id: int, + body: ThresholdUpdate, + session: AsyncSession = Depends(get_session), +): + """Update a threshold value, severity, or enabled state.""" + # Refuse to update locked rules + locked_result = await session.execute( + text("SELECT locked, site_id FROM alarm_thresholds WHERE id = :id"), + {"id": threshold_id}, + ) + row = locked_result.mappings().first() + if not row: + raise HTTPException(status_code=404, detail="Threshold not found") + if row["locked"]: + raise HTTPException(status_code=403, detail="Cannot modify locked threshold") + + updates = 
[] + params: dict = {"id": threshold_id} + + if body.threshold_value is not None: + updates.append("threshold_value = :threshold_value") + params["threshold_value"] = body.threshold_value + if body.severity is not None: + if body.severity not in ("warning", "critical"): + raise HTTPException(status_code=400, detail="severity must be warning or critical") + updates.append("severity = :severity") + params["severity"] = body.severity + if body.enabled is not None: + updates.append("enabled = :enabled") + params["enabled"] = body.enabled + + if not updates: + raise HTTPException(status_code=400, detail="No fields to update") + + updates.append("updated_at = NOW()") + set_clause = ", ".join(updates) + + result = await session.execute(text(f""" + UPDATE alarm_thresholds SET {set_clause} + WHERE id = :id + RETURNING id, site_id, sensor_type, threshold_value, direction, + severity, message_template, enabled, locked, updated_at + """), params) + await session.commit() + invalidate_threshold_cache(row["site_id"]) + return dict(result.mappings().first()) + + +@router.post("/thresholds", status_code=201) +async def create_threshold( + body: ThresholdCreate, + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + """Add a custom threshold rule.""" + if body.direction not in ("above", "below"): + raise HTTPException(status_code=400, detail="direction must be above or below") + if body.severity not in ("warning", "critical"): + raise HTTPException(status_code=400, detail="severity must be warning or critical") + + result = await session.execute(text(""" + INSERT INTO alarm_thresholds + (site_id, sensor_type, threshold_value, direction, severity, message_template, enabled, locked) + VALUES + (:site_id, :sensor_type, :threshold_value, :direction, :severity, :message_template, true, false) + RETURNING id, site_id, sensor_type, threshold_value, direction, + severity, message_template, enabled, locked, created_at, updated_at + """), { + "site_id": 
site_id, + "sensor_type": body.sensor_type, + "threshold_value": body.threshold_value, + "direction": body.direction, + "severity": body.severity, + "message_template": body.message_template, + }) + await session.commit() + invalidate_threshold_cache(site_id) + return dict(result.mappings().first()) + + +@router.delete("/thresholds/{threshold_id}", status_code=204) +async def delete_threshold( + threshold_id: int, + session: AsyncSession = Depends(get_session), +): + """Delete a custom (non-locked) threshold rule.""" + locked_result = await session.execute( + text("SELECT locked, site_id FROM alarm_thresholds WHERE id = :id"), + {"id": threshold_id}, + ) + row = locked_result.mappings().first() + if not row: + raise HTTPException(status_code=404, detail="Threshold not found") + if row["locked"]: + raise HTTPException(status_code=403, detail="Cannot delete locked threshold") + + await session.execute( + text("DELETE FROM alarm_thresholds WHERE id = :id"), + {"id": threshold_id}, + ) + await session.commit() + invalidate_threshold_cache(row["site_id"]) + + +@router.post("/thresholds/reset") +async def reset_thresholds( + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + """Delete all thresholds for a site and re-seed from defaults.""" + await session.execute( + text("DELETE FROM alarm_thresholds WHERE site_id = :site_id"), + {"site_id": site_id}, + ) + for st, tv, direction, severity, msg, locked in THRESHOLD_SEED_DATA: + await session.execute(text(""" + INSERT INTO alarm_thresholds + (site_id, sensor_type, threshold_value, direction, severity, message_template, enabled, locked) + VALUES + (:site_id, :sensor_type, :threshold_value, :direction, :severity, :message_template, true, :locked) + """), { + "site_id": site_id, "sensor_type": st, "threshold_value": tv, + "direction": direction, "severity": severity, + "message_template": msg, "locked": locked, + }) + await session.commit() + invalidate_threshold_cache(site_id) + 
logger.info(f"Alarm thresholds reset to defaults for {site_id}") + return {"ok": True, "count": len(THRESHOLD_SEED_DATA)} + + +# ── Generic settings (site / notifications / integrations / page_prefs) ──────── + +async def _get_settings(session: AsyncSession, site_id: str, category: str) -> dict: + result = await session.execute(text(""" + SELECT value FROM site_settings + WHERE site_id = :site_id AND category = :category AND key = 'config' + """), {"site_id": site_id, "category": category}) + row = result.mappings().first() + if row: + return row["value"] if isinstance(row["value"], dict) else json.loads(row["value"]) + return DEFAULT_SETTINGS.get(category, {}) + + +async def _put_settings( + session: AsyncSession, site_id: str, category: str, updates: dict +) -> dict: + current = await _get_settings(session, site_id, category) + merged = {**current, **updates} + await session.execute(text(""" + INSERT INTO site_settings (site_id, category, key, value, updated_at) + VALUES (:site_id, :category, 'config', :value, NOW()) + ON CONFLICT (site_id, category, key) + DO UPDATE SET value = :value, updated_at = NOW() + """), {"site_id": site_id, "category": category, "value": json.dumps(merged)}) + await session.commit() + return merged + + +@router.get("/site") +async def get_site_settings( + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + return await _get_settings(session, site_id, "site") + + +@router.put("/site") +async def update_site_settings( + body: SettingsUpdate, + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + return await _put_settings(session, site_id, "site", body.value) + + +@router.get("/notifications") +async def get_notifications( + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + return await _get_settings(session, site_id, "notifications") + + +@router.put("/notifications") +async def update_notifications( + body: SettingsUpdate, 
+ site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + return await _put_settings(session, site_id, "notifications", body.value) + + +@router.get("/integrations") +async def get_integrations( + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + return await _get_settings(session, site_id, "integrations") + + +@router.put("/integrations") +async def update_integrations( + body: SettingsUpdate, + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + return await _put_settings(session, site_id, "integrations", body.value) + + +@router.get("/page-prefs") +async def get_page_prefs( + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + return await _get_settings(session, site_id, "page_prefs") + + +@router.put("/page-prefs") +async def update_page_prefs( + body: SettingsUpdate, + site_id: str = Query(DEFAULT_SITE), + session: AsyncSession = Depends(get_session), +): + return await _put_settings(session, site_id, "page_prefs", body.value) diff --git a/backend/api/routes/sites.py b/backend/api/routes/sites.py new file mode 100644 index 0000000..ebb36df --- /dev/null +++ b/backend/api/routes/sites.py @@ -0,0 +1,36 @@ +from fastapi import APIRouter +from pydantic import BaseModel + +router = APIRouter() + + +class Site(BaseModel): + id: str + name: str + location: str + status: str + rack_count: int + total_power_kw: float + pue: float + + +# Static stub data — will be replaced by DB queries in Phase 2 +SITES: list[Site] = [ + Site(id="sg-01", name="Singapore DC01", location="Singapore", status="ok", rack_count=128, total_power_kw=847.0, pue=1.42), + Site(id="sg-02", name="Singapore DC02", location="Singapore", status="warning", rack_count=64, total_power_kw=412.0, pue=1.51), + Site(id="lon-01", name="London DC01", location="London", status="ok", rack_count=96, total_power_kw=631.0, pue=1.38), +] + + +@router.get("", 
response_model=list[Site]) +async def list_sites(): + return SITES + + +@router.get("/{site_id}", response_model=Site) +async def get_site(site_id: str): + for site in SITES: + if site.id == site_id: + return site + from fastapi import HTTPException + raise HTTPException(status_code=404, detail="Site not found") diff --git a/backend/api/routes/ws.py b/backend/api/routes/ws.py new file mode 100644 index 0000000..e6bfa48 --- /dev/null +++ b/backend/api/routes/ws.py @@ -0,0 +1,16 @@ +from fastapi import APIRouter, WebSocket, WebSocketDisconnect +from services.ws_manager import manager + +router = APIRouter() + + +@router.websocket("/ws") +async def websocket_endpoint(ws: WebSocket): + await manager.connect(ws) + try: + while True: + # We only push from server → client. + # receive_text() keeps the connection alive. + await ws.receive_text() + except WebSocketDisconnect: + manager.disconnect(ws) diff --git a/backend/core/__init__.py b/backend/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/core/config.py b/backend/core/config.py new file mode 100644 index 0000000..70f3408 --- /dev/null +++ b/backend/core/config.py @@ -0,0 +1,27 @@ +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Settings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore") + + # App + APP_NAME: str = "DemoBMS API" + DEBUG: bool = False + + # Database + DATABASE_URL: str = "postgresql+asyncpg://dcim:dcim_pass@db:5432/dcim" + + # MQTT broker + MQTT_HOST: str = "localhost" + MQTT_PORT: int = 1883 + + # CORS + CORS_ORIGINS: list[str] = [] + + # Clerk + CLERK_PUBLISHABLE_KEY: str = "" + CLERK_SECRET_KEY: str = "" + CLERK_JWKS_URL: str = "" + + +settings = Settings() diff --git a/backend/core/database.py b/backend/core/database.py new file mode 100644 index 0000000..238cc58 --- /dev/null +++ b/backend/core/database.py @@ -0,0 +1,130 @@ +import logging +from sqlalchemy.ext.asyncio import create_async_engine, 
AsyncSession, async_sessionmaker +from sqlalchemy import text +from core.config import settings + +logger = logging.getLogger(__name__) + +engine = create_async_engine(settings.DATABASE_URL, echo=False, pool_size=10, max_overflow=20) +AsyncSessionLocal = async_sessionmaker(engine, expire_on_commit=False) + + +async def init_db() -> None: + async with engine.begin() as conn: + # Enable TimescaleDB + await conn.execute(text("CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE")) + + # Sensor readings — core time-series table + await conn.execute(text(""" + CREATE TABLE IF NOT EXISTS readings ( + recorded_at TIMESTAMPTZ NOT NULL, + sensor_id VARCHAR(120) NOT NULL, + sensor_type VARCHAR(50) NOT NULL, + site_id VARCHAR(50) NOT NULL, + room_id VARCHAR(50), + rack_id VARCHAR(50), + value DOUBLE PRECISION NOT NULL, + unit VARCHAR(20) + ) + """)) + + # Convert to hypertable — no-op if already one + try: + await conn.execute(text( + "SELECT create_hypertable('readings', by_range('recorded_at'), if_not_exists => TRUE)" + )) + except Exception: + try: + await conn.execute(text( + "SELECT create_hypertable('readings', 'recorded_at', if_not_exists => TRUE)" + )) + except Exception as e: + logger.warning(f"Hypertable setup skipped (table still works): {e}") + + await conn.execute(text(""" + CREATE INDEX IF NOT EXISTS idx_readings_sensor_time + ON readings (sensor_id, recorded_at DESC) + """)) + + # Alarms table + await conn.execute(text(""" + CREATE TABLE IF NOT EXISTS alarms ( + id BIGSERIAL PRIMARY KEY, + sensor_id VARCHAR(120), + site_id VARCHAR(50), + room_id VARCHAR(50), + rack_id VARCHAR(50), + severity VARCHAR(20) NOT NULL, + message TEXT NOT NULL, + state VARCHAR(20) NOT NULL DEFAULT 'active', + triggered_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + acknowledged_at TIMESTAMPTZ, + resolved_at TIMESTAMPTZ + ) + """)) + + # Site config — generic key/value JSON store (used for floor layout etc.) 
+ await conn.execute(text(""" + CREATE TABLE IF NOT EXISTS site_config ( + site_id VARCHAR(50) NOT NULL, + key VARCHAR(100) NOT NULL, + value JSONB NOT NULL, + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + PRIMARY KEY (site_id, key) + ) + """)) + + # Sensor device registry + await conn.execute(text(""" + CREATE TABLE IF NOT EXISTS sensors ( + id SERIAL PRIMARY KEY, + site_id VARCHAR(50) NOT NULL, + device_id VARCHAR(100) NOT NULL, + name VARCHAR(200) NOT NULL, + device_type VARCHAR(50) NOT NULL, + room_id VARCHAR(50), + rack_id VARCHAR(50), + protocol VARCHAR(30) NOT NULL DEFAULT 'mqtt', + protocol_config JSONB NOT NULL DEFAULT '{}', + enabled BOOLEAN NOT NULL DEFAULT true, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(site_id, device_id) + ) + """)) + + # Configurable alarm thresholds (replaces hard-coded list at runtime) + await conn.execute(text(""" + CREATE TABLE IF NOT EXISTS alarm_thresholds ( + id SERIAL PRIMARY KEY, + site_id VARCHAR(50) NOT NULL, + sensor_type VARCHAR(50) NOT NULL, + threshold_value FLOAT NOT NULL, + direction VARCHAR(10) NOT NULL, + severity VARCHAR(20) NOT NULL, + message_template TEXT NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT true, + locked BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ) + """)) + + # Site-level settings (profile, notifications, integrations, page prefs) + await conn.execute(text(""" + CREATE TABLE IF NOT EXISTS site_settings ( + site_id VARCHAR(50) NOT NULL, + category VARCHAR(50) NOT NULL, + key VARCHAR(100) NOT NULL, + value JSONB NOT NULL, + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + PRIMARY KEY (site_id, category, key) + ) + """)) + + logger.info("Database initialised") + + +async def get_session(): + async with AsyncSessionLocal() as session: + yield session diff --git a/backend/main.py b/backend/main.py new file mode 100644 index 0000000..a85203c --- 
/dev/null +++ b/backend/main.py @@ -0,0 +1,73 @@ +import asyncio +import logging +from contextlib import asynccontextmanager +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from core.config import settings +from core.database import init_db, AsyncSessionLocal +from services.mqtt_subscriber import run_subscriber +from services.seed import run_all_seeds +from api.routes import ( + health, sites, readings, alarms, ws, assets, + power, env, reports, capacity, + generator, fire, cooling, leak, network, maintenance, floor_layout, + scenarios, settings as settings_router, +) + +logging.basicConfig(level=logging.INFO, format="%(asctime)s %(name)s %(levelname)s %(message)s") +logger = logging.getLogger(__name__) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + logger.info(f"Starting {settings.APP_NAME}") + await init_db() + async with AsyncSessionLocal() as session: + await run_all_seeds(session) + # Start MQTT subscriber as a background task + task = asyncio.create_task(run_subscriber()) + yield + task.cancel() + try: + await task + except asyncio.CancelledError: + pass + logger.info("Shutdown complete") + + +app = FastAPI( + title=settings.APP_NAME, + version="0.2.0", + docs_url="/docs", + redoc_url="/redoc", + lifespan=lifespan, +) + +app.add_middleware( + CORSMiddleware, + allow_origins=settings.CORS_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.include_router(health.router, prefix="/api", tags=["health"]) +app.include_router(sites.router, prefix="/api/sites", tags=["sites"]) +app.include_router(readings.router, prefix="/api/readings", tags=["readings"]) +app.include_router(alarms.router, prefix="/api/alarms", tags=["alarms"]) +app.include_router(ws.router, prefix="/api", tags=["websocket"]) +app.include_router(assets.router, prefix="/api/assets", tags=["assets"]) +app.include_router(power.router, prefix="/api/power", tags=["power"]) +app.include_router(env.router, 
prefix="/api/env", tags=["env"]) +app.include_router(reports.router, prefix="/api/reports", tags=["reports"]) +app.include_router(capacity.router, prefix="/api/capacity", tags=["capacity"]) +app.include_router(generator.router, prefix="/api/generator", tags=["generator"]) +app.include_router(fire.router, prefix="/api/fire", tags=["fire"]) +app.include_router(cooling.router, prefix="/api/cooling", tags=["cooling"]) +app.include_router(leak.router, prefix="/api/leak", tags=["leak"]) +app.include_router(network.router, prefix="/api/network", tags=["network"]) +app.include_router(maintenance.router, prefix="/api/maintenance", tags=["maintenance"]) +app.include_router(floor_layout.router, prefix="/api/floor-layout", tags=["floor-layout"]) +app.include_router(scenarios.router, prefix="/api/scenarios", tags=["scenarios"]) +app.include_router(settings_router.router, prefix="/api/settings", tags=["settings"]) diff --git a/backend/models/__init__.py b/backend/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 0000000..ef45c98 --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,11 @@ +fastapi==0.115.6 +uvicorn[standard]==0.32.1 +pydantic==2.10.4 +pydantic-settings==2.7.0 +python-dotenv==1.0.1 +asyncpg==0.30.0 +sqlalchemy[asyncio]==2.0.36 +aiomqtt==2.3.0 +httpx==0.28.1 +python-jose[cryptography]==3.3.0 +python-multipart==0.0.20 diff --git a/backend/services/__init__.py b/backend/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/services/alarm_engine.py b/backend/services/alarm_engine.py new file mode 100644 index 0000000..60af8e0 --- /dev/null +++ b/backend/services/alarm_engine.py @@ -0,0 +1,152 @@ +import logging +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +logger = logging.getLogger(__name__) + +# ── In-memory threshold cache ────────────────────────────────────────────────── +# Loaded from DB on 
first use; invalidated by settings API after updates. +# Falls back to hard-coded defaults if DB has no rows yet (pre-seed). + +_caches: dict[str, list[dict]] = {} +_dirty_sites: set[str] = {"sg-01"} # start dirty so first request loads from DB + + +def invalidate_threshold_cache(site_id: str = "sg-01") -> None: + """Mark a site's cache as stale. Called by settings API after threshold changes.""" + _dirty_sites.add(site_id) + + +async def _ensure_cache(session: AsyncSession, site_id: str) -> None: + if site_id not in _dirty_sites and site_id in _caches: + return + + result = await session.execute(text(""" + SELECT sensor_type, threshold_value, direction, severity, message_template + FROM alarm_thresholds + WHERE site_id = :site_id AND enabled = true + ORDER BY id + """), {"site_id": site_id}) + rows = result.mappings().all() + + if rows: + _caches[site_id] = [dict(r) for r in rows] + else: + # DB not yet seeded — fall back to hard-coded defaults + _caches[site_id] = _FALLBACK_RULES + + _dirty_sites.discard(site_id) + logger.info(f"Loaded {len(_caches[site_id])} threshold rules for {site_id}") + + +async def check_and_update_alarms( + session: AsyncSession, + sensor_id: str, + sensor_type: str, + site_id: str, + room_id: str | None, + rack_id: str | None, + value: float, +) -> None: + await _ensure_cache(session, site_id) + + for rule in _caches.get(site_id, []): + if rule["sensor_type"] != sensor_type: + continue + + threshold = rule["threshold_value"] + direction = rule["direction"] + severity = rule["severity"] + msg_tpl = rule["message_template"] + + breached = ( + (direction == "above" and value > threshold) or + (direction == "below" and value < threshold) + ) + + if breached: + existing = await session.execute(text(""" + SELECT id FROM alarms + WHERE sensor_id = :sid AND severity = :sev AND state = 'active' + LIMIT 1 + """), {"sid": sensor_id, "sev": severity}) + + if not existing.fetchone(): + message = msg_tpl.format(value=value, sensor_id=sensor_id) + 
await session.execute(text(""" + INSERT INTO alarms + (sensor_id, site_id, room_id, rack_id, severity, message, state, triggered_at) + VALUES + (:sensor_id, :site_id, :room_id, :rack_id, :severity, :message, 'active', NOW()) + """), { + "sensor_id": sensor_id, "site_id": site_id, + "room_id": room_id, "rack_id": rack_id, + "severity": severity, "message": message, + }) + logger.info(f"Alarm raised [{severity}]: {message}") + else: + await session.execute(text(""" + UPDATE alarms + SET state = 'resolved', resolved_at = NOW() + WHERE sensor_id = :sid AND severity = :sev AND state = 'active' + """), {"sid": sensor_id, "sev": severity}) + + +# ── Hard-coded fallback (used before DB seed runs) ───────────────────────────── + +_FALLBACK_RULES: list[dict] = [ + {"sensor_type": st, "threshold_value": tv, "direction": d, "severity": s, "message_template": m} + for st, tv, d, s, m in [ + ("temperature", 28.0, "above", "warning", "Temperature elevated at {sensor_id}: {value:.1f}°C"), + ("temperature", 32.0, "above", "critical", "Temperature critical at {sensor_id}: {value:.1f}°C"), + ("humidity", 65.0, "above", "warning", "Humidity elevated at {sensor_id}: {value:.0f}%"), + ("power_kw", 7.5, "above", "warning", "PDU load elevated at {sensor_id}: {value:.1f} kW"), + ("power_kw", 9.5, "above", "critical", "PDU load critical at {sensor_id}: {value:.1f} kW"), + ("ups_charge", 80.0, "below", "warning", "UPS battery low at {sensor_id}: {value:.0f}%"), + ("ups_charge", 50.0, "below", "critical", "UPS battery critical at {sensor_id}: {value:.0f}%"), + ("ups_state", 0.5, "above", "critical", "UPS switched to battery at {sensor_id} — mains power lost"), + ("ups_state", 1.5, "above", "critical", "UPS overloaded at {sensor_id} — immediate risk of failure"), + ("ups_load", 85.0, "above", "warning", "UPS load high at {sensor_id}: {value:.0f}%"), + ("ups_load", 95.0, "above", "critical", "UPS load critical at {sensor_id}: {value:.0f}% — overload"), + ("ups_runtime", 15.0, "below", 
"warning", "UPS runtime low at {sensor_id}: {value:.0f} min remaining"), + ("ups_runtime", 5.0, "below", "critical", "UPS runtime critical at {sensor_id}: {value:.0f} min — imminent shutdown"), + ("leak", 0.5, "above", "critical", "Water leak detected at {sensor_id}!"), + ("cooling_cap_pct", 90.0, "above", "warning", "CRAC near capacity limit at {sensor_id}: {value:.1f}%"), + ("cooling_cop", 1.5, "below", "warning", "CRAC running inefficiently at {sensor_id}: COP {value:.2f}"), + ("cooling_comp_load", 95.0, "above", "warning", "CRAC compressor overloaded at {sensor_id}: {value:.1f}%"), + ("cooling_high_press", 22.0, "above", "critical", "CRAC high refrigerant pressure at {sensor_id}: {value:.1f} bar"), + ("cooling_low_press", 3.0, "below", "critical", "CRAC low refrigerant pressure at {sensor_id}: {value:.1f} bar — possible leak"), + ("cooling_superheat", 16.0, "above", "warning", "CRAC discharge superheat high at {sensor_id}: {value:.1f}°C"), + ("cooling_filter_dp", 80.0, "above", "warning", "CRAC filter requires attention at {sensor_id}: {value:.0f} Pa"), + ("cooling_filter_dp", 120.0, "above", "critical", "CRAC filter critically blocked at {sensor_id}: {value:.0f} Pa — replace now"), + ("cooling_return", 36.0, "above", "warning", "CRAC return air temperature high at {sensor_id}: {value:.1f}°C"), + ("cooling_return", 42.0, "above", "critical", "CRAC return air temperature critical at {sensor_id}: {value:.1f}°C"), + ("gen_fuel_pct", 25.0, "below", "warning", "Generator fuel low at {sensor_id}: {value:.1f}%"), + ("gen_fuel_pct", 10.0, "below", "critical", "Generator fuel critical at {sensor_id}: {value:.1f}%"), + ("gen_state", 0.5, "above", "warning", "Generator running at {sensor_id} — site is on standby power"), + ("gen_state", -0.5, "below", "critical", "Generator fault at {sensor_id} — no standby power available"), + ("gen_load_pct", 85.0, "above", "warning", "Generator load high at {sensor_id}: {value:.1f}%"), + ("gen_load_pct", 95.0, "above", "critical", 
"Generator overloaded at {sensor_id}: {value:.1f}%"), + ("gen_coolant_c", 95.0, "above", "warning", "Generator coolant temperature high at {sensor_id}: {value:.1f}°C"), + ("gen_coolant_c", 105.0, "above", "critical", "Generator coolant critical at {sensor_id}: {value:.1f}°C — risk of shutdown"), + ("gen_oil_press", 2.0, "below", "critical", "Generator oil pressure low at {sensor_id}: {value:.1f} bar"), + ("pdu_imbalance", 5.0, "above", "warning", "PDU phase imbalance at {sensor_id}: {value:.1f}%"), + ("pdu_imbalance", 15.0, "above", "critical", "PDU phase imbalance critical at {sensor_id}: {value:.1f}%"), + ("ats_active", 1.5, "above", "warning", "ATS transferred to generator at {sensor_id} — utility power lost"), + ("ats_ua_v", 50.0, "below", "critical", "Utility A power failure at {sensor_id} — supply lost"), + ("chiller_state", 0.5, "below", "critical", "Chiller fault at {sensor_id} — CHW supply lost"), + ("chiller_cop", 2.5, "below", "warning", "Chiller running inefficiently at {sensor_id}: COP {value:.2f}"), + ("vesda_level", 0.5, "above", "warning", "VESDA smoke detected at {sensor_id}: level elevated"), + ("vesda_level", 1.5, "above", "warning", "VESDA action threshold reached at {sensor_id}"), + ("vesda_level", 2.5, "above", "critical", "VESDA FIRE ALARM at {sensor_id}!"), + ("vesda_flow", 0.5, "below", "critical", "VESDA aspirator flow fault at {sensor_id} — detector may be compromised"), + ("vesda_det1", 0.5, "below", "warning", "VESDA detector 1 fault at {sensor_id}"), + ("vesda_det2", 0.5, "below", "warning", "VESDA detector 2 fault at {sensor_id}"), + ("net_state", 0.5, "above", "warning", "Network switch degraded at {sensor_id}"), + ("net_state", 1.5, "above", "critical", "Network switch down at {sensor_id} — connectivity lost"), + ("net_pkt_loss_pct", 1.0, "above", "warning", "Packet loss detected at {sensor_id}: {value:.1f}%"), + ("net_pkt_loss_pct", 5.0, "above", "critical", "High packet loss at {sensor_id}: {value:.1f}%"), + ("net_temp_c", 65.0, 
"above", "warning", "Switch temperature high at {sensor_id}: {value:.1f}°C"), + ("net_temp_c", 75.0, "above", "critical", "Switch temperature critical at {sensor_id}: {value:.1f}°C"), + ] +] diff --git a/backend/services/mqtt_subscriber.py b/backend/services/mqtt_subscriber.py new file mode 100644 index 0000000..381ca9f --- /dev/null +++ b/backend/services/mqtt_subscriber.py @@ -0,0 +1,328 @@ +import asyncio +import json +import logging +from datetime import datetime, timezone + +import aiomqtt +from sqlalchemy import text + +from core.config import settings +from core.database import AsyncSessionLocal +from services.alarm_engine import check_and_update_alarms +from services.ws_manager import manager as ws_manager + +logger = logging.getLogger(__name__) + + +def parse_topic(topic: str) -> dict | None: + """ + Topic formats: + bms/{site_id}/{room_id}/{rack_id}/env — rack environment + bms/{site_id}/{room_id}/{rack_id}/power — rack PDU power + bms/{site_id}/cooling/{crac_id} — CRAC unit + bms/{site_id}/cooling/chiller/{chiller_id} — chiller plant + bms/{site_id}/power/{ups_id} — UPS unit + bms/{site_id}/power/ats/{ats_id} — ATS transfer switch + bms/{site_id}/generator/{gen_id} — diesel generator + bms/{site_id}/fire/{zone_id} — VESDA fire zone + bms/{site_id}/leak/{sensor_id} — water leak sensor + """ + parts = topic.split("/") + if len(parts) < 4 or parts[0] != "bms": + return None + + site_id = parts[1] + + # 5-part: rack env/power OR cooling/chiller/{id} OR power/ats/{id} + if len(parts) == 5: + if parts[4] in ("env", "power"): + return { + "site_id": site_id, "room_id": parts[2], + "rack_id": parts[3], "device_id": None, "msg_type": parts[4], + } + if parts[2] == "cooling" and parts[3] == "chiller": + return { + "site_id": site_id, "room_id": None, "rack_id": None, + "device_id": parts[4], "msg_type": "chiller", + } + if parts[2] == "power" and parts[3] == "ats": + return { + "site_id": site_id, "room_id": None, "rack_id": None, + "device_id": parts[4], 
"msg_type": "ats", + } + + # 4-part: bms/{site_id}/{room_id}/particles + if len(parts) == 4 and parts[3] == "particles": + return { + "site_id": site_id, "room_id": parts[2], "rack_id": None, + "device_id": None, "msg_type": "particles", + } + + # 4-part: known subsystem topics + if len(parts) == 4 and parts[2] in ("cooling", "power", "leak", "generator", "fire", "network"): + return { + "site_id": site_id, "room_id": None, "rack_id": None, + "device_id": parts[3], "msg_type": parts[2], + } + return None + + +async def process_message(topic: str, payload: dict) -> None: + meta = parse_topic(topic) + if not meta: + return + + site_id = meta["site_id"] + room_id = meta["room_id"] + rack_id = meta["rack_id"] + device_id = meta["device_id"] + msg_type = meta["msg_type"] + now = datetime.now(timezone.utc) + + # Build list of (sensor_id, sensor_type, value, unit) tuples + readings: list[tuple[str, str, float, str]] = [] + + if msg_type == "env" and rack_id: + base = f"{site_id}/{room_id}/{rack_id}" + if "temperature" in payload: + readings.append((f"{base}/temperature", "temperature", float(payload["temperature"]), "°C")) + if "humidity" in payload: + readings.append((f"{base}/humidity", "humidity", float(payload["humidity"]), "%")) + + elif msg_type == "power" and rack_id: + base = f"{site_id}/{room_id}/{rack_id}" + if "load_kw" in payload: + readings.append((f"{base}/power_kw", "power_kw", float(payload["load_kw"]), "kW")) + # Per-phase PDU data + for key, s_type, unit in [ + ("phase_a_kw", "pdu_phase_a_kw", "kW"), + ("phase_b_kw", "pdu_phase_b_kw", "kW"), + ("phase_c_kw", "pdu_phase_c_kw", "kW"), + ("phase_a_a", "pdu_phase_a_a", "A"), + ("phase_b_a", "pdu_phase_b_a", "A"), + ("phase_c_a", "pdu_phase_c_a", "A"), + ("imbalance_pct", "pdu_imbalance", "%"), + ]: + if payload.get(key) is not None: + readings.append((f"{base}/{key}", s_type, float(payload[key]), unit)) + + elif msg_type == "cooling" and device_id: + base = f"{site_id}/cooling/{device_id}" + crac_fields = [ 
+ # (payload_key, sensor_type, unit) + ("supply_temp", "cooling_supply", "°C"), + ("return_temp", "cooling_return", "°C"), + ("fan_pct", "cooling_fan", "%"), + ("supply_humidity", "cooling_supply_hum", "%"), + ("return_humidity", "cooling_return_hum", "%"), + ("airflow_cfm", "cooling_airflow", "CFM"), + ("filter_dp_pa", "cooling_filter_dp", "Pa"), + ("cooling_capacity_kw", "cooling_cap_kw", "kW"), + ("cooling_capacity_pct", "cooling_cap_pct", "%"), + ("cop", "cooling_cop", ""), + ("sensible_heat_ratio", "cooling_shr", ""), + ("compressor_state", "cooling_comp_state", ""), + ("compressor_load_pct", "cooling_comp_load", "%"), + ("compressor_power_kw", "cooling_comp_power", "kW"), + ("compressor_run_hours", "cooling_comp_hours", "h"), + ("high_pressure_bar", "cooling_high_press", "bar"), + ("low_pressure_bar", "cooling_low_press", "bar"), + ("discharge_superheat_c", "cooling_superheat", "°C"), + ("liquid_subcooling_c", "cooling_subcooling", "°C"), + ("fan_rpm", "cooling_fan_rpm", "RPM"), + ("fan_power_kw", "cooling_fan_power", "kW"), + ("fan_run_hours", "cooling_fan_hours", "h"), + ("total_unit_power_kw", "cooling_unit_power", "kW"), + ("input_voltage_v", "cooling_voltage", "V"), + ("input_current_a", "cooling_current", "A"), + ("power_factor", "cooling_pf", ""), + ] + for key, s_type, unit in crac_fields: + if payload.get(key) is not None: + readings.append((f"{base}/{key}", s_type, float(payload[key]), unit)) + + elif msg_type == "power" and device_id: + base = f"{site_id}/power/{device_id}" + for key, s_type, unit in [ + ("charge_pct", "ups_charge", "%"), + ("load_pct", "ups_load", "%"), + ("runtime_min", "ups_runtime", "min"), + ("voltage", "ups_voltage", "V"), + ]: + if key in payload: + readings.append((f"{base}/{key}", s_type, float(payload[key]), unit)) + # Store state explicitly: 0.0 = online, 1.0 = on_battery, 2.0 = overload + if "state" in payload: + state_val = {"online": 0.0, "on_battery": 1.0, "overload": 2.0}.get(payload["state"], 0.0) + 
readings.append((f"{base}/state", "ups_state", state_val, "")) + + elif msg_type == "generator" and device_id: + base = f"{site_id}/generator/{device_id}" + state_map = {"standby": 0.0, "running": 1.0, "test": 2.0, "fault": -1.0} + for key, s_type, unit in [ + ("fuel_pct", "gen_fuel_pct", "%"), + ("fuel_litres", "gen_fuel_l", "L"), + ("fuel_rate_lph", "gen_fuel_rate", "L/h"), + ("load_kw", "gen_load_kw", "kW"), + ("load_pct", "gen_load_pct", "%"), + ("run_hours", "gen_run_hours", "h"), + ("voltage_v", "gen_voltage_v", "V"), + ("frequency_hz", "gen_freq_hz", "Hz"), + ("engine_rpm", "gen_rpm", "RPM"), + ("oil_pressure_bar", "gen_oil_press", "bar"), + ("coolant_temp_c", "gen_coolant_c", "°C"), + ("exhaust_temp_c", "gen_exhaust_c", "°C"), + ("alternator_temp_c", "gen_alt_temp_c", "°C"), + ("power_factor", "gen_pf", ""), + ("battery_v", "gen_batt_v", "V"), + ]: + if payload.get(key) is not None: + readings.append((f"{base}/{key}", s_type, float(payload[key]), unit)) + if "state" in payload: + readings.append((f"{base}/state", "gen_state", state_map.get(payload["state"], 0.0), "")) + + elif msg_type == "ats" and device_id: + base = f"{site_id}/power/ats/{device_id}" + feed_map = {"utility-a": 0.0, "utility-b": 1.0, "generator": 2.0} + for key, s_type, unit in [ + ("transfer_count", "ats_xfer_count", ""), + ("last_transfer_ms", "ats_xfer_ms", "ms"), + ("utility_a_v", "ats_ua_v", "V"), + ("utility_b_v", "ats_ub_v", "V"), + ("generator_v", "ats_gen_v", "V"), + ]: + if payload.get(key) is not None: + readings.append((f"{base}/{key}", s_type, float(payload[key]), unit)) + if "active_feed" in payload: + readings.append((f"{base}/active_feed", "ats_active", + feed_map.get(payload["active_feed"], 0.0), "")) + if "state" in payload: + readings.append((f"{base}/state", "ats_state", + 1.0 if payload["state"] == "transferring" else 0.0, "")) + + elif msg_type == "chiller" and device_id: + base = f"{site_id}/cooling/chiller/{device_id}" + for key, s_type, unit in [ + ("chw_supply_c", 
"chiller_chw_supply", "°C"), + ("chw_return_c", "chiller_chw_return", "°C"), + ("chw_delta_c", "chiller_chw_delta", "°C"), + ("flow_gpm", "chiller_flow_gpm", "GPM"), + ("cooling_load_kw", "chiller_load_kw", "kW"), + ("cooling_load_pct", "chiller_load_pct", "%"), + ("cop", "chiller_cop", ""), + ("compressor_load_pct", "chiller_comp_load", "%"), + ("condenser_pressure_bar", "chiller_cond_press", "bar"), + ("evaporator_pressure_bar", "chiller_evap_press", "bar"), + ("cw_supply_c", "chiller_cw_supply", "°C"), + ("cw_return_c", "chiller_cw_return", "°C"), + ("run_hours", "chiller_run_hours", "h"), + ]: + if payload.get(key) is not None: + readings.append((f"{base}/{key}", s_type, float(payload[key]), unit)) + if "state" in payload: + readings.append((f"{base}/state", "chiller_state", + 1.0 if payload["state"] == "online" else 0.0, "")) + + elif msg_type == "fire" and device_id: + base = f"{site_id}/fire/{device_id}" + level_map = {"normal": 0.0, "alert": 1.0, "action": 2.0, "fire": 3.0} + if "level" in payload: + readings.append((f"{base}/level", "vesda_level", + level_map.get(payload["level"], 0.0), "")) + if "obscuration_pct_m" in payload: + readings.append((f"{base}/obscuration", "vesda_obscuration", + float(payload["obscuration_pct_m"]), "%/m")) + for key, s_type in [ + ("detector_1_ok", "vesda_det1"), + ("detector_2_ok", "vesda_det2"), + ("power_ok", "vesda_power"), + ("flow_ok", "vesda_flow"), + ]: + if key in payload: + readings.append((f"{base}/{key}", s_type, + 1.0 if payload[key] else 0.0, "")) + + elif msg_type == "network" and device_id: + base = f"{site_id}/network/{device_id}" + state_map = {"up": 0.0, "degraded": 1.0, "down": 2.0} + for key, s_type, unit in [ + ("uptime_s", "net_uptime_s", "s"), + ("active_ports", "net_active_ports", ""), + ("bandwidth_in_mbps", "net_bw_in_mbps", "Mbps"), + ("bandwidth_out_mbps","net_bw_out_mbps", "Mbps"), + ("cpu_pct", "net_cpu_pct", "%"), + ("mem_pct", "net_mem_pct", "%"), + ("temperature_c", "net_temp_c", "°C"), + 
("packet_loss_pct", "net_pkt_loss_pct", "%"), + ]: + if payload.get(key) is not None: + readings.append((f"{base}/{key}", s_type, float(payload[key]), unit)) + if "state" in payload: + readings.append((f"{base}/state", "net_state", + state_map.get(payload["state"], 0.0), "")) + + elif msg_type == "leak" and device_id: + state = payload.get("state", "clear") + readings.append(( + f"{site_id}/leak/{device_id}", "leak", + 1.0 if state == "detected" else 0.0, "", + )) + + elif msg_type == "particles": + base = f"{site_id}/{room_id}/particles" + if "particles_0_5um" in payload: + readings.append((f"{base}/0_5um", "particles_0_5um", float(payload["particles_0_5um"]), "/m³")) + if "particles_5um" in payload: + readings.append((f"{base}/5um", "particles_5um", float(payload["particles_5um"]), "/m³")) + + if not readings: + return + + async with AsyncSessionLocal() as session: + for sensor_id, sensor_type, value, unit in readings: + await session.execute(text(""" + INSERT INTO readings + (recorded_at, sensor_id, sensor_type, site_id, room_id, rack_id, value, unit) + VALUES + (:ts, :sid, :stype, :site, :room, :rack, :val, :unit) + """), { + "ts": now, "sid": sensor_id, "stype": sensor_type, + "site": site_id, "room": room_id, "rack": rack_id, + "val": value, "unit": unit, + }) + await check_and_update_alarms( + session, sensor_id, sensor_type, site_id, room_id, rack_id, value + ) + await session.commit() + + # Push to any connected WebSocket clients + await ws_manager.broadcast({ + "topic": topic, + "site_id": site_id, + "room_id": room_id, + "rack_id": rack_id, + "readings": [ + {"sensor_id": s, "type": t, "value": v, "unit": u} + for s, t, v, u in readings + ], + "timestamp": now.isoformat(), + }) + + +async def run_subscriber() -> None: + """Runs forever, reconnecting on any failure.""" + while True: + try: + logger.info(f"Connecting to MQTT at {settings.MQTT_HOST}:{settings.MQTT_PORT}") + async with aiomqtt.Client(settings.MQTT_HOST, port=settings.MQTT_PORT) as client: + 
logger.info("MQTT connected — subscribing to bms/#") + await client.subscribe("bms/#") + async for message in client.messages: + try: + payload = json.loads(message.payload.decode()) + await process_message(str(message.topic), payload) + except Exception as e: + logger.error(f"Error processing message on {message.topic}: {e}") + except Exception as e: + logger.error(f"MQTT connection failed: {e} — retrying in 5s") + await asyncio.sleep(5) diff --git a/backend/services/seed.py b/backend/services/seed.py new file mode 100644 index 0000000..435047a --- /dev/null +++ b/backend/services/seed.py @@ -0,0 +1,234 @@ +""" +Seed the database with default sensor registry and alarm threshold rules. +Runs on startup if tables are empty — subsequent restarts are no-ops. +""" +import json +import logging +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +logger = logging.getLogger(__name__) + +SITE_ID = "sg-01" + +# ── Threshold seed data ──────────────────────────────────────────────────────── +# (sensor_type, threshold_value, direction, severity, message_template, locked) +# locked=True → state-machine encoding, hidden from UI +# locked=False → numeric setpoint, user-editable + +THRESHOLD_SEED_DATA: list[tuple] = [ + # Rack environment + ("temperature", 28.0, "above", "warning", "Temperature elevated at {sensor_id}: {value:.1f}°C", False), + ("temperature", 32.0, "above", "critical", "Temperature critical at {sensor_id}: {value:.1f}°C", False), + ("humidity", 65.0, "above", "warning", "Humidity elevated at {sensor_id}: {value:.0f}%", False), + # PDU / rack power + ("power_kw", 7.5, "above", "warning", "PDU load elevated at {sensor_id}: {value:.1f} kW", False), + ("power_kw", 9.5, "above", "critical", "PDU load critical at {sensor_id}: {value:.1f} kW", False), + # UPS — numeric setpoints + ("ups_charge", 80.0, "below", "warning", "UPS battery low at {sensor_id}: {value:.0f}%", False), + ("ups_charge", 50.0, "below", "critical", "UPS battery critical 
at {sensor_id}: {value:.0f}%", False), + ("ups_load", 85.0, "above", "warning", "UPS load high at {sensor_id}: {value:.0f}%", False), + ("ups_load", 95.0, "above", "critical", "UPS load critical at {sensor_id}: {value:.0f}% — overload", False), + ("ups_runtime", 15.0, "below", "warning", "UPS runtime low at {sensor_id}: {value:.0f} min remaining", False), + ("ups_runtime", 5.0, "below", "critical", "UPS runtime critical at {sensor_id}: {value:.0f} min — imminent shutdown", False), + # UPS — state transitions (locked) + ("ups_state", 0.5, "above", "critical", "UPS switched to battery at {sensor_id} — mains power lost", True), + ("ups_state", 1.5, "above", "critical", "UPS overloaded at {sensor_id} — immediate risk of failure", True), + # Leak (locked — binary) + ("leak", 0.5, "above", "critical", "Water leak detected at {sensor_id}!", True), + # CRAC capacity & efficiency + ("cooling_cap_pct", 90.0, "above", "warning", "CRAC near capacity limit at {sensor_id}: {value:.1f}%", False), + ("cooling_cop", 1.5, "below", "warning", "CRAC running inefficiently at {sensor_id}: COP {value:.2f}", False), + # CRAC compressor + ("cooling_comp_load", 95.0, "above", "warning", "CRAC compressor overloaded at {sensor_id}: {value:.1f}%", False), + ("cooling_high_press", 22.0, "above", "critical", "CRAC high refrigerant pressure at {sensor_id}: {value:.1f} bar", False), + ("cooling_low_press", 3.0, "below", "critical", "CRAC low refrigerant pressure at {sensor_id}: {value:.1f} bar — possible leak", False), + ("cooling_superheat", 16.0, "above", "warning", "CRAC discharge superheat high at {sensor_id}: {value:.1f}°C", False), + # CRAC filter + ("cooling_filter_dp", 80.0, "above", "warning", "CRAC filter requires attention at {sensor_id}: {value:.0f} Pa", False), + ("cooling_filter_dp", 120.0, "above", "critical", "CRAC filter critically blocked at {sensor_id}: {value:.0f} Pa — replace now", False), + # CRAC return air + ("cooling_return", 36.0, "above", "warning", "CRAC return air 
temperature high at {sensor_id}: {value:.1f}°C", False), + ("cooling_return", 42.0, "above", "critical", "CRAC return air temperature critical at {sensor_id}: {value:.1f}°C", False), + # Generator — numeric setpoints + ("gen_fuel_pct", 25.0, "below", "warning", "Generator fuel low at {sensor_id}: {value:.1f}%", False), + ("gen_fuel_pct", 10.0, "below", "critical", "Generator fuel critical at {sensor_id}: {value:.1f}%", False), + ("gen_load_pct", 85.0, "above", "warning", "Generator load high at {sensor_id}: {value:.1f}%", False), + ("gen_load_pct", 95.0, "above", "critical", "Generator overloaded at {sensor_id}: {value:.1f}%", False), + ("gen_coolant_c", 95.0, "above", "warning", "Generator coolant temperature high at {sensor_id}: {value:.1f}°C", False), + ("gen_coolant_c", 105.0, "above", "critical", "Generator coolant critical at {sensor_id}: {value:.1f}°C — risk of shutdown", False), + ("gen_oil_press", 2.0, "below", "critical", "Generator oil pressure low at {sensor_id}: {value:.1f} bar", False), + # Generator — state transitions (locked) + ("gen_state", 0.5, "above", "warning", "Generator running at {sensor_id} — site is on standby power", True), + ("gen_state", -0.5, "below", "critical", "Generator fault at {sensor_id} — no standby power available", True), + # PDU phase imbalance + ("pdu_imbalance", 5.0, "above", "warning", "PDU phase imbalance at {sensor_id}: {value:.1f}%", False), + ("pdu_imbalance", 15.0, "above", "critical", "PDU phase imbalance critical at {sensor_id}: {value:.1f}%", False), + # ATS — numeric + ("ats_ua_v", 50.0, "below", "critical", "Utility A power failure at {sensor_id} — supply lost", False), + # ATS — state (locked) + ("ats_active", 1.5, "above", "warning", "ATS transferred to generator at {sensor_id} — utility power lost", True), + # Chiller — numeric + ("chiller_cop", 2.5, "below", "warning", "Chiller running inefficiently at {sensor_id}: COP {value:.2f}", False), + # Chiller — state (locked) + ("chiller_state", 0.5, "below", 
"critical", "Chiller fault at {sensor_id} — CHW supply lost", True), + # VESDA fire — state (all locked) + ("vesda_level", 0.5, "above", "warning", "VESDA smoke detected at {sensor_id}: level elevated", True), + ("vesda_level", 1.5, "above", "warning", "VESDA action threshold reached at {sensor_id}", True), + ("vesda_level", 2.5, "above", "critical", "VESDA FIRE ALARM at {sensor_id}!", True), + ("vesda_flow", 0.5, "below", "critical", "VESDA aspirator flow fault at {sensor_id} — detector may be compromised", True), + ("vesda_det1", 0.5, "below", "warning", "VESDA detector 1 fault at {sensor_id}", True), + ("vesda_det2", 0.5, "below", "warning", "VESDA detector 2 fault at {sensor_id}", True), + # Network — numeric + ("net_pkt_loss_pct", 1.0, "above", "warning", "Packet loss detected at {sensor_id}: {value:.1f}%", False), + ("net_pkt_loss_pct", 5.0, "above", "critical", "High packet loss at {sensor_id}: {value:.1f}%", False), + ("net_temp_c", 65.0, "above", "warning", "Switch temperature high at {sensor_id}: {value:.1f}°C", False), + ("net_temp_c", 75.0, "above", "critical", "Switch temperature critical at {sensor_id}: {value:.1f}°C", False), + # Network — state (locked) + ("net_state", 0.5, "above", "warning", "Network switch degraded at {sensor_id}", True), + ("net_state", 1.5, "above", "critical", "Network switch down at {sensor_id} — connectivity lost", True), +] + +# ── Sensor seed data ──────────────────────────────────────────────────────────── + +def _build_sensor_list() -> list[dict]: + sensors = [ + {"device_id": "gen-01", "name": "Diesel Generator 1", "device_type": "generator", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/generator/gen-01"}}, + {"device_id": "ups-01", "name": "UPS Unit 1", "device_type": "ups", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/power/ups-01"}}, + {"device_id": "ups-02", "name": "UPS Unit 2", "device_type": "ups", "room_id": None, 
"rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/power/ups-02"}}, + {"device_id": "ats-01", "name": "Transfer Switch 1", "device_type": "ats", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/power/ats/ats-01"}}, + {"device_id": "chiller-01", "name": "Chiller Plant 1", "device_type": "chiller", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/cooling/chiller/chiller-01"}}, + {"device_id": "crac-01", "name": "CRAC Unit — Hall A", "device_type": "crac", "room_id": "hall-a", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/cooling/crac-01"}}, + {"device_id": "crac-02", "name": "CRAC Unit — Hall B", "device_type": "crac", "room_id": "hall-b", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/cooling/crac-02"}}, + {"device_id": "vesda-hall-a","name": "VESDA Fire Zone — Hall A","device_type": "fire_zone", "room_id": "hall-a", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/fire/vesda-hall-a"}}, + {"device_id": "vesda-hall-b","name": "VESDA Fire Zone — Hall B","device_type": "fire_zone", "room_id": "hall-b", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/fire/vesda-hall-b"}}, + {"device_id": "leak-01", "name": "Leak Sensor — CRAC Zone A","device_type": "leak", "room_id": "hall-a", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/leak/leak-01"}}, + {"device_id": "leak-02", "name": "Leak Sensor — Server Row B1","device_type": "leak", "room_id": "hall-b", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/leak/leak-02"}}, + {"device_id": "leak-03", "name": "Leak Sensor — UPS Room", "device_type": "leak", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/leak/leak-03"}}, + {"device_id": "sw-core-01", "name": "Core Switch — Hall A", "device_type": 
"network_switch","room_id": "hall-a", "rack_id": "SG1A01.01", "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/network/sw-core-01"}}, + {"device_id": "sw-core-02", "name": "Core Switch — Hall B", "device_type": "network_switch","room_id": "hall-b", "rack_id": "SG1B01.01", "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/network/sw-core-02"}}, + {"device_id": "sw-edge-01", "name": "Edge / Uplink Switch", "device_type": "network_switch","room_id": "hall-a", "rack_id": "SG1A01.05", "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/network/sw-edge-01"}}, + ] + # Generate racks + for room_id, row_prefix in [("hall-a", "SG1A"), ("hall-b", "SG1B")]: + for row_num in ["01", "02"]: + for rack_num in range(1, 21): + rack_id = f"{row_prefix}{row_num}.{rack_num:02d}" + sensors.append({ + "device_id": rack_id, + "name": f"Rack PDU — {rack_id}", + "device_type": "rack", + "room_id": room_id, + "rack_id": rack_id, + "protocol": "mqtt", + "protocol_config": { + "env_topic": f"bms/sg-01/{room_id}/{rack_id}/env", + "pdu_topic": f"bms/sg-01/{room_id}/{rack_id}/power", + }, + }) + return sensors + +SENSOR_SEED_DATA = _build_sensor_list() + +# ── Default settings ──────────────────────────────────────────────────────────── + +DEFAULT_SETTINGS: dict[str, dict] = { + "site": { + "name": "Singapore DC01", + "timezone": "Asia/Singapore", + "description": "Production data centre — Singapore", + }, + "notifications": { + "critical_alarms": True, + "warning_alarms": True, + "generator_events": True, + "maintenance_reminders": True, + "webhook_url": "", + "email_recipients": "", + }, + "integrations": { + "mqtt_host": "mqtt", + "mqtt_port": 1883, + }, + "page_prefs": { + "default_time_range_hours": 6, + "refresh_interval_seconds": 30, + }, +} + + +# ── Seed functions ────────────────────────────────────────────────────────────── + +async def seed_thresholds(session: AsyncSession) -> None: + result = await session.execute( + text("SELECT COUNT(*) FROM 
alarm_thresholds WHERE site_id = :s"), + {"s": SITE_ID}, + ) + if result.scalar() > 0: + return + + for st, tv, direction, severity, msg, locked in THRESHOLD_SEED_DATA: + await session.execute(text(""" + INSERT INTO alarm_thresholds + (site_id, sensor_type, threshold_value, direction, severity, message_template, enabled, locked) + VALUES + (:site_id, :sensor_type, :threshold_value, :direction, :severity, :message_template, true, :locked) + """), { + "site_id": SITE_ID, "sensor_type": st, "threshold_value": tv, + "direction": direction, "severity": severity, + "message_template": msg, "locked": locked, + }) + await session.commit() + logger.info(f"Seeded {len(THRESHOLD_SEED_DATA)} alarm threshold rules") + + +async def seed_sensors(session: AsyncSession) -> None: + result = await session.execute( + text("SELECT COUNT(*) FROM sensors WHERE site_id = :s"), + {"s": SITE_ID}, + ) + if result.scalar() > 0: + return + + for s in SENSOR_SEED_DATA: + await session.execute(text(""" + INSERT INTO sensors + (site_id, device_id, name, device_type, room_id, rack_id, protocol, protocol_config, enabled) + VALUES + (:site_id, :device_id, :name, :device_type, :room_id, :rack_id, :protocol, :protocol_config, true) + ON CONFLICT (site_id, device_id) DO NOTHING + """), { + "site_id": SITE_ID, + "device_id": s["device_id"], + "name": s["name"], + "device_type": s["device_type"], + "room_id": s.get("room_id"), + "rack_id": s.get("rack_id"), + "protocol": s["protocol"], + "protocol_config": json.dumps(s["protocol_config"]), + }) + await session.commit() + logger.info(f"Seeded {len(SENSOR_SEED_DATA)} sensor devices") + + +async def seed_settings(session: AsyncSession) -> None: + for category, defaults in DEFAULT_SETTINGS.items(): + result = await session.execute(text(""" + SELECT COUNT(*) FROM site_settings + WHERE site_id = :s AND category = :cat AND key = 'config' + """), {"s": SITE_ID, "cat": category}) + if result.scalar() > 0: + continue + await session.execute(text(""" + INSERT INTO 
site_settings (site_id, category, key, value) + VALUES (:site_id, :category, 'config', :value) + ON CONFLICT (site_id, category, key) DO NOTHING + """), {"site_id": SITE_ID, "category": category, "value": json.dumps(defaults)}) + await session.commit() + logger.info("Seeded site settings defaults") + + +async def run_all_seeds(session: AsyncSession) -> None: + await seed_thresholds(session) + await seed_sensors(session) + await seed_settings(session) diff --git a/backend/services/ws_manager.py b/backend/services/ws_manager.py new file mode 100644 index 0000000..cd3cace --- /dev/null +++ b/backend/services/ws_manager.py @@ -0,0 +1,35 @@ +import json +import logging +from fastapi import WebSocket + +logger = logging.getLogger(__name__) + + +class ConnectionManager: + def __init__(self) -> None: + self._connections: set[WebSocket] = set() + + async def connect(self, ws: WebSocket) -> None: + await ws.accept() + self._connections.add(ws) + logger.info(f"WS client connected. Total: {len(self._connections)}") + + def disconnect(self, ws: WebSocket) -> None: + self._connections.discard(ws) + logger.info(f"WS client disconnected. 
Total: {len(self._connections)}") + + async def broadcast(self, data: dict) -> None: + if not self._connections: + return + message = json.dumps(data, default=str) + dead: set[WebSocket] = set() + for ws in self._connections: + try: + await ws.send_text(message) + except Exception: + dead.add(ws) + self._connections -= dead + + +# Singleton — imported by both the MQTT subscriber and the WS route +manager = ConnectionManager() diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..db29141 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,107 @@ +services: + + # ── MQTT Broker ────────────────────────────────────────────────── + mqtt: + image: eclipse-mosquitto:2 + container_name: dcim_mqtt + restart: unless-stopped + ports: + - "1883:1883" + volumes: + - ./infra/mosquitto/mosquitto.conf:/mosquitto/config/mosquitto.conf:ro + healthcheck: + test: ["CMD-SHELL", "mosquitto_sub -t '$$SYS/#' -C 1 -i healthcheck -W 3"] + interval: 10s + timeout: 5s + retries: 5 + + # ── PostgreSQL + TimescaleDB ───────────────────────────────────── + db: + image: timescale/timescaledb:latest-pg16 + container_name: dcim_db + restart: unless-stopped + environment: + POSTGRES_USER: dcim + POSTGRES_PASSWORD: dcim_pass + POSTGRES_DB: dcim + ports: + - "5433:5432" # host 5433 → container 5432 (avoids conflict with existing Postgres) + volumes: + - db_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U dcim -d dcim"] + interval: 10s + timeout: 5s + retries: 5 + + # ── FastAPI backend ────────────────────────────────────────────── + backend: + build: + context: ./backend + dockerfile: Dockerfile + container_name: dcim_backend + restart: unless-stopped + env_file: + - ./backend/.env + environment: + DATABASE_URL: postgresql+asyncpg://dcim:dcim_pass@db:5432/dcim + MQTT_HOST: mqtt + MQTT_PORT: "1883" + ports: + - "8000:8000" + depends_on: + db: + condition: service_healthy + mqtt: + condition: service_healthy + volumes: + - 
./backend:/app + command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload + healthcheck: + test: ["CMD-SHELL", "curl -sf http://localhost:8000/api/health || exit 1"] + interval: 10s + timeout: 5s + retries: 10 + start_period: 20s + + # ── Simulator bots (seed first, then run bots) ─────────────────── + simulators: + build: + context: ./simulators + dockerfile: Dockerfile + container_name: dcim_simulators + restart: unless-stopped + environment: + MQTT_HOST: mqtt + MQTT_PORT: "1883" + DATABASE_URL: postgresql://dcim:dcim_pass@db:5432/dcim + SEED_MINUTES: "30" + depends_on: + db: + condition: service_healthy + mqtt: + condition: service_healthy + backend: + condition: service_healthy + volumes: + - ./simulators:/app + + # ── Next.js frontend ───────────────────────────────────────────── + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + container_name: dcim_frontend + restart: unless-stopped + env_file: + - ./frontend/.env.local + ports: + - "5646:5646" + depends_on: + - backend + environment: + PORT: "5646" + BACKEND_INTERNAL_URL: http://backend:8000 + +volumes: + db_data: diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..5ef6a52 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,41 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+ +# dependencies +/node_modules +/.pnp +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/versions + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# env files (can opt-in for committing if needed) +.env* + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..69c252c --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,36 @@ +FROM node:22-alpine AS base + +# Install dependencies only when needed +FROM base AS deps +RUN apk add --no-cache libc6-compat +WORKDIR /app +COPY package.json pnpm-lock.yaml* ./ +RUN corepack enable pnpm && pnpm install --frozen-lockfile + +# Build the app +FROM base AS builder +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY . . +ARG BACKEND_INTERNAL_URL=http://backend:8000 +ENV BACKEND_INTERNAL_URL=$BACKEND_INTERNAL_URL +RUN corepack enable pnpm && pnpm build + +# Production runner +FROM base AS runner +WORKDIR /app +ENV NODE_ENV=production + +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs + +COPY --from=builder /app/public ./public +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static + +USER nextjs +EXPOSE 5646 +ENV PORT=5646 +ENV HOSTNAME="0.0.0.0" + +CMD ["node", "server.js"] diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000..e215bc4 --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,36 @@ +This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app). 
+ +## Getting Started + +First, run the development server: + +```bash +npm run dev +# or +yarn dev +# or +pnpm dev +# or +bun dev +``` + +Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. + +You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. + +This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel. + +## Learn More + +To learn more about Next.js, take a look at the following resources: + +- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. +- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. + +You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome! + +## Deploy on Vercel + +The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. + +Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details. 
diff --git a/frontend/app/(dashboard)/alarms/page.tsx b/frontend/app/(dashboard)/alarms/page.tsx new file mode 100644 index 0000000..82180bd --- /dev/null +++ b/frontend/app/(dashboard)/alarms/page.tsx @@ -0,0 +1,753 @@ +"use client"; + +import { useEffect, useState, useCallback, useMemo, useRef } from "react"; +import { useRouter } from "next/navigation"; +import { toast } from "sonner"; +import { + fetchAlarms, fetchAlarmStats, acknowledgeAlarm, resolveAlarm, + type Alarm, type AlarmStats, +} from "@/lib/api"; +import { RackDetailSheet } from "@/components/dashboard/rack-detail-sheet"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"; +import { + AlertTriangle, CheckCircle2, Clock, XCircle, Bell, + ChevronsUpDown, ChevronUp, ChevronDown, Activity, +} from "lucide-react"; +import { + BarChart, Bar, XAxis, Tooltip, ResponsiveContainer, Cell, +} from "recharts"; +import { cn } from "@/lib/utils"; + +const SITE_ID = "sg-01"; +const PAGE_SIZE = 25; + +type StateFilter = "active" | "acknowledged" | "resolved" | "all"; +type SeverityFilter = "all" | "critical" | "warning" | "info"; +type SortKey = "severity" | "triggered_at" | "state"; +type SortDir = "asc" | "desc"; + +function timeAgo(iso: string): string { + const diff = Date.now() - new Date(iso).getTime(); + const m = Math.floor(diff / 60000); + if (m < 1) return "just now"; + if (m < 60) return `${m}m`; + const h = Math.floor(m / 60); + if (h < 24) return `${h}h`; + return `${Math.floor(h / 24)}d`; +} + +function useNow(intervalMs = 30_000): number { + const [now, setNow] = useState(() => Date.now()); + useEffect(() => { + const id = setInterval(() => setNow(Date.now()), intervalMs); + return () => clearInterval(id); + }, [intervalMs]); + return now; +} + +function 
escalationMinutes(triggeredAt: string, now: number): number { + return Math.floor((now - new Date(triggeredAt).getTime()) / 60_000); +} + +function EscalationTimer({ triggeredAt, now }: { triggeredAt: string; now: number }) { + const mins = escalationMinutes(triggeredAt, now); + const h = Math.floor(mins / 60); + const m = mins % 60; + const label = h > 0 ? `${h}h ${m}m` : `${m}m`; + + const colorClass = + mins >= 60 ? "text-destructive" : + mins >= 15 ? "text-amber-400" : + mins >= 5 ? "text-amber-300" : + "text-muted-foreground"; + + const pulse = mins >= 60; + + return ( + + + {label} + + ); +} + +function alarmCategory(sensorId: string | null | undefined): { label: string; className: string } { + if (!sensorId) return { label: "System", className: "bg-muted/50 text-muted-foreground" }; + const s = sensorId.toLowerCase(); + if (s.includes("cooling") || s.includes("crac") || s.includes("refrigerant") || s.includes("cop")) + return { label: "Refrigerant", className: "bg-cyan-500/10 text-cyan-400" }; + if (s.includes("temp") || s.includes("thermal") || s.includes("humidity") || s.includes("hum")) + return { label: "Thermal", className: "bg-orange-500/10 text-orange-400" }; + if (s.includes("power") || s.includes("ups") || s.includes("pdu") || s.includes("kw") || s.includes("watt")) + return { label: "Power", className: "bg-yellow-500/10 text-yellow-400" }; + if (s.includes("leak") || s.includes("water") || s.includes("flood")) + return { label: "Leak", className: "bg-blue-500/10 text-blue-400" }; + return { label: "System", className: "bg-muted/50 text-muted-foreground" }; +} + +const severityConfig: Record = { + critical: { label: "Critical", bg: "bg-destructive/15 text-destructive border-destructive/30", dot: "bg-destructive" }, + warning: { label: "Warning", bg: "bg-amber-500/15 text-amber-400 border-amber-500/30", dot: "bg-amber-500" }, + info: { label: "Info", bg: "bg-blue-500/15 text-blue-400 border-blue-500/30", dot: "bg-blue-500" }, +}; + +const 
stateConfig: Record = { + active: { label: "Active", className: "bg-destructive/10 text-destructive" }, + acknowledged: { label: "Acknowledged", className: "bg-amber-500/10 text-amber-400" }, + resolved: { label: "Resolved", className: "bg-green-500/10 text-green-400" }, +}; + +function SeverityBadge({ severity }: { severity: string }) { + const c = severityConfig[severity] ?? severityConfig.info; + return ( + + + {c.label} + + ); +} + +function StatCard({ label, value, icon: Icon, highlight }: { label: string; value: number; icon: React.ElementType; highlight?: boolean }) { + return ( + + +
0 ? "bg-destructive/10" : "bg-muted")}> + 0 ? "text-destructive" : "text-muted-foreground")} /> +
+
+

0 ? "text-destructive" : "")}>{value}

+

{label}

+
+
+
+ ); +} + +function AvgAgeCard({ alarms }: { alarms: Alarm[] }) { + const activeAlarms = alarms.filter(a => a.state === "active"); + const avgMins = useMemo(() => { + if (activeAlarms.length === 0) return 0; + const now = Date.now(); + const totalMins = activeAlarms.reduce((sum, a) => { + return sum + Math.floor((now - new Date(a.triggered_at).getTime()) / 60_000); + }, 0); + return Math.round(totalMins / activeAlarms.length); + }, [activeAlarms]); + + const label = avgMins >= 60 + ? `${Math.floor(avgMins / 60)}h ${avgMins % 60}m` + : `${avgMins}m`; + + const colorClass = avgMins > 60 ? "text-destructive" + : avgMins > 15 ? "text-amber-400" + : "text-green-400"; + + const iconColor = avgMins > 60 ? "text-destructive" + : avgMins > 15 ? "text-amber-400" + : "text-muted-foreground"; + + const bgColor = avgMins > 60 ? "bg-destructive/10" + : avgMins > 15 ? "bg-amber-500/10" + : "bg-muted"; + + return ( + + +
+ +
+
+

0 ? colorClass : "")}>{activeAlarms.length > 0 ? label : "—"}

+

Avg Age

+
+
+
+ ); +} + +type Correlation = { + id: string + title: string + severity: "critical" | "warning" + description: string + alarmIds: number[] +} + +function correlateAlarms(alarms: Alarm[]): Correlation[] { + const active = alarms.filter(a => a.state === "active"); + const results: Correlation[] = []; + + // Rule 1: ≥2 thermal alarms in the same room → probable CRAC issue + const thermalByRoom = new Map(); + for (const a of active) { + const isThermal = a.sensor_id + ? /temp|thermal|humidity|hum/i.test(a.sensor_id) + : /temp|thermal|hot|cool/i.test(a.message); + const room = a.room_id; + if (isThermal && room) { + if (!thermalByRoom.has(room)) thermalByRoom.set(room, []); + thermalByRoom.get(room)!.push(a); + } + } + for (const [room, roomAlarms] of thermalByRoom.entries()) { + if (roomAlarms.length >= 2) { + results.push({ + id: `thermal-${room}`, + title: `Thermal event — ${room.replace("hall-", "Hall ")}`, + severity: roomAlarms.some(a => a.severity === "critical") ? "critical" : "warning", + description: `${roomAlarms.length} thermal alarms in the same room. Probable cause: CRAC cooling degradation or containment breach.`, + alarmIds: roomAlarms.map(a => a.id), + }); + } + } + + // Rule 2: ≥3 power alarms across different racks → PDU or UPS path issue + const powerAlarms = active.filter(a => + a.sensor_id ? /power|pdu|ups|kw|watt/i.test(a.sensor_id) : /power|overload|circuit/i.test(a.message) + ); + const powerRacks = new Set(powerAlarms.map(a => a.rack_id).filter(Boolean)); + if (powerRacks.size >= 2) { + results.push({ + id: "power-multi-rack", + title: "Multi-rack power event", + severity: powerAlarms.some(a => a.severity === "critical") ? "critical" : "warning", + description: `Power alarms on ${powerRacks.size} racks simultaneously. 
Probable cause: upstream PDU, busway tap, or UPS transfer.`, + alarmIds: powerAlarms.map(a => a.id), + }); + } + + // Rule 3: Generator + ATS alarms together → power path / utility failure + const genAlarm = active.find(a => a.sensor_id ? /gen/i.test(a.sensor_id) : /generator/i.test(a.message)); + const atsAlarm = active.find(a => a.sensor_id ? /ats/i.test(a.sensor_id) : /transfer|utility/i.test(a.message)); + if (genAlarm && atsAlarm) { + results.push({ + id: "gen-ats-event", + title: "Power path event — generator + ATS", + severity: "critical", + description: "Generator and ATS alarms are co-active. Possible utility failure with generator transfer in progress.", + alarmIds: [genAlarm.id, atsAlarm.id], + }); + } + + // Rule 4: ≥2 leak alarms → site-wide leak / pipe burst + const leakAlarms = active.filter(a => + a.sensor_id ? /leak|water|flood/i.test(a.sensor_id) : /leak|water/i.test(a.message) + ); + if (leakAlarms.length >= 2) { + results.push({ + id: "multi-leak", + title: "Multiple leak sensors triggered", + severity: "critical", + description: `${leakAlarms.length} leak sensors active. Probable cause: pipe burst, chilled water leak, or CRAC drain overflow.`, + alarmIds: leakAlarms.map(a => a.id), + }); + } + + // Rule 5: VESDA + high temp in same room → fire / smoke event + const vesdaAlarm = active.find(a => a.sensor_id ? /vesda|fire/i.test(a.sensor_id) : /fire|smoke|vesda/i.test(a.message)); + const hotRooms = new Set(active.filter(a => a.severity === "critical" && a.room_id && /temp/i.test(a.message + (a.sensor_id ?? ""))).map(a => a.room_id)); + if (vesdaAlarm && hotRooms.size > 0) { + results.push({ + id: "fire-temp-event", + title: "Fire / smoke event suspected", + severity: "critical", + description: "VESDA alarm co-active with critical temperature alarms. 
Possible fire or smoke event — check fire safety systems immediately.", + alarmIds: active.filter(a => hotRooms.has(a.room_id)).map(a => a.id).concat(vesdaAlarm.id), + }); + } + + return results; +} + +function RootCausePanel({ alarms }: { alarms: Alarm[] }) { + const correlations = correlateAlarms(alarms); + if (correlations.length === 0) return null; + + return ( + + + + + Root Cause Analysis + + {correlations.length} pattern{correlations.length > 1 ? "s" : ""} detected + + + + + {correlations.map(c => ( +
+ +
+

+ {c.title} + + ({c.alarmIds.length} alarm{c.alarmIds.length !== 1 ? "s" : ""}) + +

+

{c.description}

+
+
+ ))} +
+
+ ); +} + +export default function AlarmsPage() { + const router = useRouter(); + const now = useNow(30_000); + const [alarms, setAlarms] = useState([]); + const [allAlarms, setAllAlarms] = useState([]); + const [stats, setStats] = useState(null); + const [stateFilter, setStateFilter] = useState("active"); + const [sevFilter, setSevFilter] = useState("all"); + const [sortKey, setSortKey] = useState("triggered_at"); + const [sortDir, setSortDir] = useState("desc"); + const [loading, setLoading] = useState(true); + const [acting, setActing] = useState(null); + const [selected, setSelected] = useState>(new Set()); + const [bulkActing, setBulkActing] = useState(false); + const [selectedRack, setSelectedRack] = useState(null); + const [assignments, setAssignments] = useState>({}); + const [page, setPage] = useState(1); + + useEffect(() => { + try { + setAssignments(JSON.parse(localStorage.getItem("alarm-assignments") ?? "{}")); + } catch {} + }, []); + + function setAssignment(id: number, assignee: string) { + const next = { ...assignments, [id]: assignee }; + setAssignments(next); + localStorage.setItem("alarm-assignments", JSON.stringify(next)); + } + + const load = useCallback(async () => { + try { + const [a, s, all] = await Promise.all([ + fetchAlarms(SITE_ID, stateFilter), + fetchAlarmStats(SITE_ID), + fetchAlarms(SITE_ID, "all", 200), + ]); + setAlarms(a); + setStats(s); + setAllAlarms(all); + } catch { + toast.error("Failed to load alarms"); + } finally { + setLoading(false); + } + }, [stateFilter]); + + useEffect(() => { + setLoading(true); + load(); + const id = setInterval(load, 15_000); + return () => clearInterval(id); + }, [load]); + + // Reset page when filters change + useEffect(() => { + setPage(1); + }, [stateFilter, sevFilter]); + + async function handleAcknowledge(id: number) { + setActing(id); + try { await acknowledgeAlarm(id); toast.success("Alarm acknowledged"); await load(); } finally { setActing(null); } + } + + async function handleResolve(id: 
number) { + setActing(id); + try { await resolveAlarm(id); toast.success("Alarm resolved"); await load(); } finally { setActing(null); } + } + + async function handleBulkResolve() { + setBulkActing(true); + const count = selected.size; + try { + await Promise.all(Array.from(selected).map((id) => resolveAlarm(id))); + toast.success(`${count} alarm${count !== 1 ? "s" : ""} resolved`); + setSelected(new Set()); + await load(); + } finally { setBulkActing(false); } + } + + function toggleSelect(id: number) { + setSelected((prev) => { + const next = new Set(prev); + next.has(id) ? next.delete(id) : next.add(id); + return next; + }); + } + + function toggleSelectAll() { + const resolvable = visible.filter((a) => a.state !== "resolved").map((a) => a.id); + if (resolvable.every((id) => selected.has(id))) { + setSelected(new Set()); + } else { + setSelected(new Set(resolvable)); + } + } + + const sevOrder: Record = { critical: 0, warning: 1, info: 2 }; + const stateOrder: Record = { active: 0, acknowledged: 1, resolved: 2 }; + + function toggleSort(key: SortKey) { + if (sortKey === key) setSortDir((d) => d === "asc" ? "desc" : "asc"); + else { setSortKey(key); setSortDir("desc"); } + } + + function SortIcon({ col }: { col: SortKey }) { + if (sortKey !== col) return ; + return sortDir === "asc" ? : ; + } + + const visible = (sevFilter === "all" ? alarms : alarms.filter((a) => a.severity === sevFilter)) + .slice() + .sort((a, b) => { + let cmp = 0; + if (sortKey === "severity") cmp = (sevOrder[a.severity] ?? 9) - (sevOrder[b.severity] ?? 9); + if (sortKey === "triggered_at") cmp = new Date(a.triggered_at).getTime() - new Date(b.triggered_at).getTime(); + if (sortKey === "state") cmp = (stateOrder[a.state] ?? 9) - (stateOrder[b.state] ?? 9); + return sortDir === "asc" ? cmp : -cmp; + }); + + const pageCount = Math.ceil(visible.length / PAGE_SIZE); + const paginated = visible.slice((page - 1) * PAGE_SIZE, page * PAGE_SIZE); + + return ( +
+ setSelectedRack(null)} /> + +
+

Alarms & Events

+

Singapore DC01 — refreshes every 15s

+
+ + {/* Escalation banner — longest unacknowledged critical */} + {(() => { + const critActive = alarms.filter(a => a.severity === "critical" && a.state === "active"); + if (critActive.length === 0) return null; + const oldest = critActive.reduce((a, b) => + new Date(a.triggered_at) < new Date(b.triggered_at) ? a : b + ); + const mins = escalationMinutes(oldest.triggered_at, now); + const urgency = mins >= 60 ? "bg-destructive/10 border-destructive/30 text-destructive" + : mins >= 15 ? "bg-amber-500/10 border-amber-500/30 text-amber-400" + : "bg-amber-500/5 border-amber-500/20 text-amber-300"; + return ( +
+ + + {critActive.length} critical alarm{critActive.length > 1 ? "s" : ""} unacknowledged + {" — "}longest open for + +
+ ); + })()} + + {/* Root cause correlation panel */} + {!loading && } + + {/* Stat cards */} +
+ {stats ? ( + <> + + + + + + ) : ( + Array.from({ length: 4 }).map((_, i) => ) + )} +
+ + {/* Sticky filter bar */} +
+
+ setStateFilter(v as StateFilter)}> + + Active + Acknowledged + Resolved + All + + + +
+ {(["all", "critical", "warning", "info"] as SeverityFilter[]).map((s) => ( + + ))} +
+ + {/* Bulk actions inline in filter row */} + {selected.size > 0 && ( +
+ {selected.size} selected + + +
+ )} +
+
+ + {/* Row count */} + {!loading && ( +

+ {visible.length} alarm{visible.length !== 1 ? "s" : ""} matching filter +

+ )} + + {/* Table */} + + + {loading ? ( +
+ {Array.from({ length: 5 }).map((_, i) => )} +
+ ) : visible.length === 0 ? ( +
+ +

No alarms matching this filter

+
+ ) : ( +
+ + + + + + + + + + + + + + + + + + {paginated.map((alarm) => { + const sc = stateConfig[alarm.state] ?? stateConfig.active; + const cat = alarmCategory(alarm.sensor_id); + return ( + + + + + + + + + + + + + + ); + })} + +
+ a.state !== "resolved").every((a) => selected.has(a.id))} + onChange={toggleSelectAll} + /> + + + MessageLocationSensorCategory + + + + EscalationAssignedActions
+ {alarm.state !== "resolved" && ( + toggleSelect(alarm.id)} + /> + )} + + + + {alarm.message} + + {(alarm.room_id || alarm.rack_id) ? ( +
+ {alarm.room_id && ( + + )} + {alarm.room_id && alarm.rack_id && /} + {alarm.rack_id && ( + + )} +
+ ) : } +
+ {alarm.sensor_id ? ( + + {alarm.sensor_id.split("/").slice(-1)[0]} + + ) : } + + + {cat.label} + + + + {sc.label} + + + {timeAgo(alarm.triggered_at)} + + {alarm.state !== "resolved" && alarm.severity === "critical" ? ( + + ) : ( + + )} + + + +
+ {alarm.state === "active" && ( + + )} + {(alarm.state === "active" || alarm.state === "acknowledged") && ( + + )} +
+
+
+ )} +
+
+ + {/* Pagination bar */} + {!loading && visible.length > PAGE_SIZE && ( +
+ + Showing {(page - 1) * PAGE_SIZE + 1}–{Math.min(page * PAGE_SIZE, visible.length)} of {visible.length} alarms + +
+ + {page} / {pageCount} + +
+
+ )} +
+ ); +} diff --git a/frontend/app/(dashboard)/assets/page.tsx b/frontend/app/(dashboard)/assets/page.tsx new file mode 100644 index 0000000..fa3bdb5 --- /dev/null +++ b/frontend/app/(dashboard)/assets/page.tsx @@ -0,0 +1,703 @@ +"use client"; + +import { useEffect, useState, useMemo } from "react"; +import { toast } from "sonner"; +import { + fetchAssets, fetchAllDevices, fetchPduReadings, + type AssetsData, type RackAsset, type CracAsset, type UpsAsset, type Device, type PduReading, +} from "@/lib/api"; +import { RackDetailSheet } from "@/components/dashboard/rack-detail-sheet"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; +import { + Thermometer, Zap, Wind, Battery, AlertTriangle, + CheckCircle2, HelpCircle, LayoutGrid, List, Download, +} from "lucide-react"; +import { cn } from "@/lib/utils"; + +const SITE_ID = "sg-01"; +const roomLabels: Record = { "hall-a": "Hall A", "hall-b": "Hall B" }; + +// ── Status helpers ──────────────────────────────────────────────────────────── + +const statusStyles: Record = { + ok: { dot: "bg-green-500", border: "border-green-500/20" }, + warning: { dot: "bg-amber-500", border: "border-amber-500/30" }, + critical: { dot: "bg-destructive", border: "border-destructive/30" }, + unknown: { dot: "bg-muted", border: "border-border" }, +}; + +const TYPE_STYLES: Record = { + server: { dot: "bg-blue-400", label: "Server" }, + switch: { dot: "bg-green-400", label: "Switch" }, + patch_panel: { dot: "bg-slate-400", label: "Patch Panel" }, + pdu: { dot: "bg-amber-400", label: "PDU" }, + storage: { dot: "bg-purple-400", label: "Storage" }, + firewall: { dot: "bg-red-400", label: "Firewall" }, + kvm: { dot: "bg-teal-400", label: "KVM" }, +}; + +// ── Compact CRAC row ────────────────────────────────────────────────────────── + +function CracRow({ crac }: { crac: CracAsset }) 
{ + const online = crac.state === "online"; + const fault = crac.state === "fault"; + return ( +
+ + {crac.crac_id.toUpperCase()} + + {fault ? : + online ? : + } + {fault ? "Fault" : online ? "Online" : "Unk"} + + Supply: {crac.supply_temp !== null ? `${crac.supply_temp}°C` : "—"} + Return: {crac.return_temp !== null ? `${crac.return_temp}°C` : "—"} + Fan: {crac.fan_pct !== null ? `${crac.fan_pct}%` : "—"} +
+ ); +} + +// ── Compact UPS row ─────────────────────────────────────────────────────────── + +function UpsRow({ ups }: { ups: UpsAsset }) { + const onBattery = ups.state === "battery"; + return ( +
+ + {ups.ups_id.toUpperCase()} + + {onBattery ? : } + {onBattery ? "Battery" : ups.state === "online" ? "Mains" : "Unk"} + + Charge: {ups.charge_pct !== null ? `${ups.charge_pct}%` : "—"} + Load: {ups.load_pct !== null ? `${ups.load_pct}%` : "—"} +
+ ); +} + +// ── Rack sortable table ─────────────────────────────────────────────────────── + +type RackSortCol = "rack_id" | "temp" | "power_kw" | "power_pct" | "alarm_count" | "status"; +type SortDir = "asc" | "desc"; + +function RackTable({ + racks, roomId, statusFilter, onRackClick, +}: { + racks: RackAsset[]; + roomId: string; + statusFilter: "all" | "warning" | "critical"; + onRackClick: (id: string) => void; +}) { + const [sortCol, setSortCol] = useState("rack_id"); + const [sortDir, setSortDir] = useState("asc"); + + function toggleSort(col: RackSortCol) { + if (sortCol === col) setSortDir(d => d === "asc" ? "desc" : "asc"); + else { setSortCol(col); setSortDir("asc"); } + } + + function SortIcon({ col }: { col: RackSortCol }) { + if (sortCol !== col) return ; + return {sortDir === "asc" ? "↑" : "↓"}; + } + + const filtered = useMemo(() => { + const base = statusFilter === "all" ? racks : racks.filter(r => r.status === statusFilter); + return [...base].sort((a, b) => { + let cmp = 0; + if (sortCol === "temp" || sortCol === "power_kw" || sortCol === "alarm_count") { + cmp = ((a[sortCol] ?? 0) as number) - ((b[sortCol] ?? 0) as number); + } else if (sortCol === "power_pct") { + const aP = a.power_kw !== null ? a.power_kw / 10 * 100 : 0; + const bP = b.power_kw !== null ? b.power_kw / 10 * 100 : 0; + cmp = aP - bP; + } else { + cmp = String(a[sortCol]).localeCompare(String(b[sortCol])); + } + return sortDir === "asc" ? cmp : -cmp; + }); + }, [racks, statusFilter, sortCol, sortDir]); + + type ColDef = { col: RackSortCol; label: string }; + const cols: ColDef[] = [ + { col: "rack_id", label: "Rack ID" }, + { col: "temp", label: "Temp (°C)" }, + { col: "power_kw", label: "Power (kW)" }, + { col: "power_pct", label: "Power%" }, + { col: "alarm_count", label: "Alarms" }, + { col: "status", label: "Status" }, + ]; + + return ( +
+ + + + {cols.map(({ col, label }) => ( + + ))} + {/* Room column header */} + + + + + {filtered.length === 0 ? ( + + + + ) : ( + filtered.map(rack => { + const powerPct = rack.power_kw !== null ? (rack.power_kw / 10) * 100 : null; + const tempCls = rack.temp !== null + ? rack.temp >= 30 ? "text-destructive" : rack.temp >= 28 ? "text-amber-400" : "" + : ""; + const pctCls = powerPct !== null + ? powerPct >= 85 ? "text-destructive" : powerPct >= 75 ? "text-amber-400" : "" + : ""; + const s = statusStyles[rack.status] ?? statusStyles.unknown; + return ( + onRackClick(rack.rack_id)} + className="border-b border-border/40 last:border-0 hover:bg-muted/20 transition-colors cursor-pointer" + > + + + + + + + + + ); + }) + )} + +
+ + Room
No racks matching this filter
{rack.rack_id.toUpperCase()} + {rack.temp !== null ? rack.temp : "—"} + + {rack.power_kw !== null ? rack.power_kw : "—"} + + {powerPct !== null ? `${powerPct.toFixed(0)}%` : "—"} + + {rack.alarm_count > 0 + ? {rack.alarm_count} + : 0} + +
+ + {rack.status} +
+
{roomLabels[roomId] ?? roomId}
+
+ ); +} + +// ── Inventory table ─────────────────────────────────────────────────────────── + +type SortCol = "name" | "type" | "rack_id" | "room_id" | "u_start" | "power_draw_w"; + +function InventoryTable({ siteId, onRackClick }: { siteId: string; onRackClick: (rackId: string) => void }) { + const [devices, setDevices] = useState([]); + const [loading, setLoading] = useState(true); + const [search, setSearch] = useState(""); + const [typeFilter, setTypeFilter] = useState("all"); + const [roomFilter, setRoomFilter] = useState("all"); + const [sortCol, setSortCol] = useState("name"); + const [sortDir, setSortDir] = useState("asc"); + + useEffect(() => { + fetchAllDevices(siteId) + .then(setDevices) + .catch(() => {}) + .finally(() => setLoading(false)); + }, [siteId]); + + function toggleSort(col: SortCol) { + if (sortCol === col) setSortDir(d => d === "asc" ? "desc" : "asc"); + else { setSortCol(col); setSortDir("asc"); } + } + + function SortIcon({ col }: { col: SortCol }) { + if (sortCol !== col) return ; + return {sortDir === "asc" ? "↑" : "↓"}; + } + + const filtered = useMemo(() => { + const q = search.toLowerCase(); + const base = devices.filter(d => { + if (typeFilter !== "all" && d.type !== typeFilter) return false; + if (roomFilter !== "all" && d.room_id !== roomFilter) return false; + if (q && !d.name.toLowerCase().includes(q) && !d.rack_id.includes(q) && !d.ip.includes(q) && !d.serial.toLowerCase().includes(q)) return false; + return true; + }); + return [...base].sort((a, b) => { + let cmp = 0; + if (sortCol === "power_draw_w" || sortCol === "u_start") { + cmp = (a[sortCol] ?? 0) - (b[sortCol] ?? 0); + } else { + cmp = String(a[sortCol]).localeCompare(String(b[sortCol])); + } + return sortDir === "asc" ? 
cmp : -cmp; + }); + }, [devices, search, typeFilter, roomFilter, sortCol, sortDir]); + + const totalPower = filtered.reduce((s, d) => s + d.power_draw_w, 0); + const types = Array.from(new Set(devices.map(d => d.type))).sort(); + + function downloadCsv() { + const headers = ["Device", "Type", "Rack", "Room", "U Start", "U Height", "IP", "Serial", "Power (W)", "Status"]; + const rows = filtered.map((d) => [ + d.name, TYPE_STYLES[d.type]?.label ?? d.type, d.rack_id.toUpperCase(), + roomLabels[d.room_id] ?? d.room_id, d.u_start, d.u_height, + d.ip !== "-" ? d.ip : "", d.serial, d.power_draw_w, d.status, + ]); + const csv = [headers, ...rows] + .map((r) => r.map((v) => `"${String(v ?? "").replace(/"/g, '""')}"`).join(",")) + .join("\n"); + const blob = new Blob([csv], { type: "text/csv;charset=utf-8;" }); + const url = URL.createObjectURL(blob); + const a = Object.assign(document.createElement("a"), { + href: url, download: `bms-inventory-${new Date().toISOString().slice(0, 10)}.csv`, + }); + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + URL.revokeObjectURL(url); + toast.success("Export downloaded"); + } + + if (loading) { + return ( +
+ {Array.from({ length: 8 }).map((_, i) => )} +
+ ); + } + + return ( +
+ {/* Device type legend */} +
+ {Object.entries(TYPE_STYLES).map(([key, { dot, label }]) => ( +
+ + {label} +
+ ))} +
+ + {/* Filters */} +
+ setSearch(e.target.value)} + className="flex-1 min-w-48 h-8 rounded-md border border-border bg-muted/30 px-3 text-xs focus:outline-none focus:ring-1 focus:ring-primary" + /> + + + + {filtered.length} devices · {(totalPower / 1000).toFixed(1)} kW + + +
+ + {/* Table */} +
+ + + + {([ + { col: "name" as SortCol, label: "Device", cls: "text-left px-3 py-2" }, + { col: "type" as SortCol, label: "Type", cls: "text-left px-3 py-2" }, + { col: "rack_id" as SortCol, label: "Rack", cls: "text-left px-3 py-2" }, + { col: "room_id" as SortCol, label: "Room", cls: "text-left px-3 py-2 hidden sm:table-cell" }, + { col: "u_start" as SortCol, label: "U", cls: "text-left px-3 py-2 hidden md:table-cell" }, + ]).map(({ col, label, cls }) => ( + + ))} + + + + + + + + {filtered.length === 0 ? ( + + + + ) : ( + filtered.map(d => { + const ts = TYPE_STYLES[d.type]; + return ( + onRackClick(d.rack_id)}> + + + + + + + + + + + ); + }) + )} + +
+ + IP + + StatusLifecycle
+ No devices match your filters. +
+
{d.name}
+
{d.serial}
+
+
+ + {ts?.label ?? d.type} +
+
{d.rack_id.toUpperCase()} + {roomLabels[d.room_id] ?? d.room_id} + + U{d.u_start}{d.u_height > 1 ? `–U${d.u_start + d.u_height - 1}` : ""} + + {d.ip !== "-" ? d.ip : "—"} + {d.power_draw_w} W + + ● online + + + + {d.status === "online" ? "Active" : d.status === "offline" ? "Offline" : "Unknown"} + +
+
+
+ ); +} + +// ── PDU Monitoring ──────────────────────────────────────────────────────────── + +function PduMonitoringSection({ siteId }: { siteId: string }) { + const [pdus, setPdus] = useState([]); + const [loading, setLoading] = useState(true); + + useEffect(() => { + fetchPduReadings(siteId) + .then(setPdus) + .catch(() => {}) + .finally(() => setLoading(false)); + const id = setInterval(() => fetchPduReadings(siteId).then(setPdus).catch(() => {}), 30_000); + return () => clearInterval(id); + }, [siteId]); + + const critical = pdus.filter(p => p.status === "critical").length; + const warning = pdus.filter(p => p.status === "warning").length; + + return ( + + +
+ + PDU Phase Monitoring + +
+ {critical > 0 && {critical} critical} + {warning > 0 && {warning} warning} + {critical === 0 && warning === 0 && !loading && ( + All balanced + )} +
+
+
+ + {loading ? ( +
+ {Array.from({ length: 4 }).map((_, i) => )} +
+ ) : ( +
+ + + + + + + + + + + + + + + + + {pdus.map(p => ( + + + + + + + + + + + + + ))} + +
RackRoomTotal kWPh-A kWPh-B kWPh-C kWPh-A APh-B APh-C AImbalance
{p.rack_id.toUpperCase().replace("RACK-", "")} + {roomLabels[p.room_id] ?? p.room_id} + + {p.total_kw !== null ? p.total_kw.toFixed(2) : "—"} + + {p.phase_a_kw !== null ? p.phase_a_kw.toFixed(2) : "—"} + + {p.phase_b_kw !== null ? p.phase_b_kw.toFixed(2) : "—"} + + {p.phase_c_kw !== null ? p.phase_c_kw.toFixed(2) : "—"} + + {p.phase_a_a !== null ? p.phase_a_a.toFixed(1) : "—"} + + {p.phase_b_a !== null ? p.phase_b_a.toFixed(1) : "—"} + + {p.phase_c_a !== null ? p.phase_c_a.toFixed(1) : "—"} + + {p.imbalance_pct !== null ? ( + + {p.imbalance_pct.toFixed(1)}% + + ) : "—"} +
+
+ )} +
+
+ ); +} + +// ── Page ────────────────────────────────────────────────────────────────────── + +export default function AssetsPage() { + const [data, setData] = useState(null); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(false); + const [selectedRack, setSelectedRack] = useState(null); + const [statusFilter, setStatusFilter] = useState<"all" | "warning" | "critical">("all"); + const [view, setView] = useState<"grid" | "inventory">("grid"); + + async function load() { + try { const d = await fetchAssets(SITE_ID); setData(d); setError(false); } + catch { setError(true); toast.error("Failed to load asset data"); } + finally { setLoading(false); } + } + + useEffect(() => { + load(); + const id = setInterval(load, 30_000); + return () => clearInterval(id); + }, []); + + if (loading) { + return ( +
+ +
+ {Array.from({ length: 10 }).map((_, i) => )} +
+
+ ); + } + + if (error || !data) { + return ( +
+ Unable to load asset data. +
+ ); + } + + const defaultTab = data.rooms[0]?.room_id ?? ""; + const totalRacks = data.rooms.reduce((s, r) => s + r.racks.length, 0); + const critCount = data.rooms.flatMap(r => r.racks).filter(r => r.status === "critical").length; + const warnCount = data.rooms.flatMap(r => r.racks).filter(r => r.status === "warning").length; + + return ( +
+ {/* Header */} +
+
+

Asset Registry

+

+ Singapore DC01 · {totalRacks} racks + {critCount > 0 && · {critCount} critical} + {warnCount > 0 && · {warnCount} warning} +

+
+ + {/* View toggle */} +
+ + +
+
+ + setSelectedRack(null)} /> + + {view === "inventory" ? ( + + ) : ( + <> + {/* Compact UPS + CRAC rows */} +
+ {data.ups_units.map(ups => )} + {data.rooms.map(room => )} +
+ + {/* PDU phase monitoring */} + + + {/* Per-room rack table */} + + + {data.rooms.map(room => ( + + {roomLabels[room.room_id] ?? room.room_id} + + ))} + + + {data.rooms.map(room => { + const rWarn = room.racks.filter(r => r.status === "warning").length; + const rCrit = room.racks.filter(r => r.status === "critical").length; + + return ( + +
+
+

+ Racks — {roomLabels[room.room_id] ?? room.room_id} +

+
+ {(["all", "warning", "critical"] as const).map(f => ( + + ))} +
+
+ + +
+
+ ); + })} +
+ + )} +
+ ); +} diff --git a/frontend/app/(dashboard)/capacity/page.tsx b/frontend/app/(dashboard)/capacity/page.tsx new file mode 100644 index 0000000..7e89bf7 --- /dev/null +++ b/frontend/app/(dashboard)/capacity/page.tsx @@ -0,0 +1,596 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { toast } from "sonner"; +import { fetchCapacitySummary, type CapacitySummary, type RoomCapacity, type RackCapacity } from "@/lib/api"; +import { PageShell } from "@/components/layout/page-shell"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"; +import { BarChart, Bar, XAxis, YAxis, CartesianGrid, Tooltip, ReferenceLine, ResponsiveContainer, Cell } from "recharts"; +import { Zap, Wind, Server, RefreshCw, AlertTriangle, TrendingDown, TrendingUp, Clock } from "lucide-react"; +import { cn } from "@/lib/utils"; + +const SITE_ID = "sg-01"; +const ROOM_LABELS: Record = { "hall-a": "Hall A", "hall-b": "Hall B" }; + +// ── Radial gauge ────────────────────────────────────────────────────────────── + +function RadialGauge({ pct, warn, crit, headroom, unit }: { pct: number; warn: number; crit: number; headroom?: number; unit?: string }) { + const r = 36; + const circumference = 2 * Math.PI * r; + const arc = circumference * 0.75; // 270° sweep + const filled = Math.min(pct / 100, 1) * arc; + + const color = + pct >= crit ? "#ef4444" : + pct >= warn ? "#f59e0b" : + "#22c55e"; + const textColor = + pct >= crit ? "text-destructive" : + pct >= warn ? "text-amber-400" : + "text-green-400"; + + return ( +
+ + {/* Track */} + + {/* Fill */} + + +
+ + {pct.toFixed(1)} + + % + {headroom !== undefined && unit !== undefined && ( +

+ {headroom.toFixed(1)} {unit} +

+ )} +
+
+ ); +} + +// ── Capacity gauge card ──────────────────────────────────────────────────────── + +function CapacityGauge({ + label, used, capacity, unit, pct, headroom, icon: Icon, warn = 70, crit = 85, +}: { + label: string; used: number; capacity: number; unit: string; pct: number; + headroom: number; icon: React.ElementType; warn?: number; crit?: number; +}) { + const textColor = pct >= crit ? "text-destructive" : pct >= warn ? "text-amber-400" : "text-green-400"; + const status = pct >= crit ? "Critical" : pct >= warn ? "Warning" : "OK"; + + return ( +
+
+
+ + {label} +
+ = crit ? "bg-destructive/10" : pct >= warn ? "bg-amber-500/10" : "bg-green-500/10" + )}> + {status} + +
+ + + +
+ {used.toFixed(1)} {unit} used + {capacity.toFixed(0)} {unit} rated +
+ +
= crit ? "bg-destructive/10 text-destructive" : + pct >= warn ? "bg-amber-500/10 text-amber-400" : + "bg-green-500/10 text-green-400" + )}> + {headroom.toFixed(1)} {unit} headroom remaining +
+
+ ); +} + +// ── Capacity runway component ────────────────────────────────────── +// Assumes ~0.5 kW/week average growth rate to forecast when limits are hit + +const GROWTH_KW_WEEK = 0.5; +const WARN_PCT = 85; + +function RunwayCard({ rooms }: { rooms: RoomCapacity[] }) { + return ( + + + + + Capacity Runway + + (assuming {GROWTH_KW_WEEK} kW/week growth) + + + + +
+ {rooms.map((room) => { + const powerHeadroomToWarn = Math.max(0, room.power.capacity_kw * (WARN_PCT / 100) - room.power.used_kw); + const coolHeadroomToWarn = Math.max(0, room.cooling.capacity_kw * (WARN_PCT / 100) - room.cooling.load_kw); + const powerRunwayWeeks = Math.round(powerHeadroomToWarn / GROWTH_KW_WEEK); + const coolRunwayWeeks = Math.round(coolHeadroomToWarn / GROWTH_KW_WEEK); + const constrainedBy = powerRunwayWeeks <= coolRunwayWeeks ? "power" : "cooling"; + const minRunway = Math.min(powerRunwayWeeks, coolRunwayWeeks); + + const runwayColor = + minRunway < 4 ? "text-destructive" : + minRunway < 12 ? "text-amber-400" : + "text-green-400"; + + // N+1 cooling: at 1 CRAC per room, losing it means all load hits chillers/other rooms + const n1Margin = room.cooling.capacity_kw - room.cooling.load_kw; + const n1Ok = n1Margin > room.cooling.capacity_kw * 0.2; // 20% spare = N+1 safe + + return ( +
+
+ {ROOM_LABELS[room.room_id] ?? room.room_id} +
+ + {n1Ok ? "N+1 OK" : "N+1 marginal"} + +
+
+ +
+ +
+

+ {minRunway}w +

+

+ ≈{(minRunway / 4.33).toFixed(1)}mo +

+

+ until {WARN_PCT}% {constrainedBy} limit +

+
+
+ +
+
+

Power runway

+

+ {powerRunwayWeeks}w / ≈{(powerRunwayWeeks / 4.33).toFixed(1)}mo +

+

{powerHeadroomToWarn.toFixed(1)} kW free

+
+
+

Cooling runway

+

+ {coolRunwayWeeks}w / ≈{(coolRunwayWeeks / 4.33).toFixed(1)}mo +

+

{coolHeadroomToWarn.toFixed(1)} kW free

+
+
+
+ ); + })} +
+
+
+ ); +} + +// ── Room summary strip ──────────────────────────────────────────────────────── + +function RoomSummaryStrip({ rooms }: { rooms: RoomCapacity[] }) { + return ( +
+ {rooms.map((room) => { + const powerPct = room.power.pct; + const coolPct = room.cooling.pct; + const worstPct = Math.max(powerPct, coolPct); + const worstColor = + worstPct >= 85 ? "border-destructive/40 bg-destructive/5" : + worstPct >= 70 ? "border-amber-500/40 bg-amber-500/5" : + "border-border bg-muted/10"; + + return ( +
+
+ {ROOM_LABELS[room.room_id] ?? room.room_id} + = 85 ? "bg-destructive/10 text-destructive" : + worstPct >= 70 ? "bg-amber-500/10 text-amber-400" : + "bg-green-500/10 text-green-400" + )}> + {worstPct >= 85 ? "Critical" : worstPct >= 70 ? "Warning" : "OK"} + +
+
+
+

Power

+

= 85 ? "text-destructive" : powerPct >= 70 ? "text-amber-400" : "text-green-400")}> + {powerPct.toFixed(1)}% +

+

{room.power.used_kw.toFixed(1)} / {room.power.capacity_kw} kW

+
+
+

Cooling

+

= 80 ? "text-destructive" : coolPct >= 65 ? "text-amber-400" : "text-green-400")}> + {coolPct.toFixed(1)}% +

+

{room.cooling.load_kw.toFixed(1)} / {room.cooling.capacity_kw} kW

+
+
+

Space

+

{room.space.racks_populated} / {room.space.racks_total}

+

{room.space.racks_total - room.space.racks_populated} slots free

+
+
+
+ ); + })} +
+ ); +} + +// ── Room capacity section ───────────────────────────────────────────────────── + +function RoomCapacityPanel({ room, racks, config }: { + room: RoomCapacity; + racks: RackCapacity[]; + config: CapacitySummary["config"]; +}) { + const roomRacks = racks.filter((r) => r.room_id === room.room_id); + + const chartData = roomRacks + .map((r) => ({ + rack: r.rack_id.replace("rack-", "").toUpperCase(), + rack_id: r.rack_id, + pct: r.power_pct ?? 0, + kw: r.power_kw ?? 0, + temp: r.temp, + })) + .sort((a, b) => b.pct - a.pct); + + const forecastPct = Math.min(100, (chartData.reduce((s, d) => s + d.pct, 0) / Math.max(1, chartData.length)) + (GROWTH_KW_WEEK * 13 / config.rack_power_kw * 100)); + + const highLoad = roomRacks.filter((r) => (r.power_pct ?? 0) >= 75); + const stranded = roomRacks.filter((r) => r.power_kw !== null && (r.power_pct ?? 0) < 20); + const strandedKw = stranded.reduce((s, r) => s + ((config.rack_power_kw - (r.power_kw ?? 0))), 0); + + return ( +
+
+ + +
+
+ Space +
+
+
+ + + + +
+ + {room.space.racks_populated} + + /{room.space.racks_total} +
+
+
+
+ {room.space.racks_populated} active + {room.space.racks_total - room.space.racks_populated} free +
+
+ Each rack rated {config.rack_u_total}U / {config.rack_power_kw} kW max +
+
+
+ + + + + Per-rack Power Utilisation + + + + {chartData.length === 0 ? ( +
+ No rack data available +
+ ) : ( + + + + + `${v}%`} + /> + [ + `${Number(v).toFixed(1)}% (${props.payload.kw.toFixed(2)} kW)`, "Power load" + ]} + /> + + + + + {chartData.map((d) => ( + = 90 ? "oklch(0.55 0.22 25)" : + d.pct >= 75 ? "oklch(0.65 0.20 45)" : + d.pct >= 50 ? "oklch(0.68 0.14 162)" : + "oklch(0.62 0.17 212)" + } + /> + ))} + + + + )} +
+
+ +
+ + + + High Load Racks + + + + {highLoad.length === 0 ? ( +

All racks within normal limits

+ ) : ( +
+ {highLoad.sort((a, b) => (b.power_pct ?? 0) - (a.power_pct ?? 0)).map((r) => ( +
+ {r.rack_id.toUpperCase()} +
+ {r.power_kw?.toFixed(1)} kW + = 90 ? "text-destructive" : "text-amber-400" + )}>{r.power_pct?.toFixed(1)}% +
+
+ ))} +
+ )} +
+
+ + + +
+ + Stranded Capacity + + {stranded.length > 0 && ( + + {strandedKw.toFixed(1)} kW recoverable + + )} +
+
+ + {stranded.length === 0 ? ( +

No underutilised racks detected

+ ) : ( +
+ {stranded.sort((a, b) => (a.power_pct ?? 0) - (b.power_pct ?? 0)).map((r) => ( +
+ {r.rack_id.toUpperCase()} +
+ {r.power_kw?.toFixed(1)} kW + {r.power_pct?.toFixed(1)}% utilised +
+
+ ))} +

+ {stranded.length} rack{stranded.length > 1 ? "s" : ""} below 20% — consider consolidation +

+
+ )} +
+
+
+
+ ); +} + +// ── Page ────────────────────────────────────────────────────────────────────── + +export default function CapacityPage() { + const [data, setData] = useState(null); + const [loading, setLoading] = useState(true); + const [activeRoom, setActiveRoom] = useState("hall-a"); + + const load = useCallback(async () => { + try { setData(await fetchCapacitySummary(SITE_ID)); } + catch { toast.error("Failed to load capacity data"); } + finally { setLoading(false); } + }, []); + + useEffect(() => { + load(); + const id = setInterval(load, 30_000); + return () => clearInterval(id); + }, [load]); + + const sitePower = data?.rooms.reduce((s, r) => s + r.power.used_kw, 0) ?? 0; + const siteCapacity = data?.rooms.reduce((s, r) => s + r.power.capacity_kw, 0) ?? 0; + + return ( + +
+
+

Capacity Planning

+

Singapore DC01 — power, cooling & space headroom

+
+ +
+ + {loading ? ( +
+ +
+ {Array.from({ length: 3 }).map((_, i) => )} +
+ +
+ ) : !data ? ( +
+ Unable to load capacity data. +
+ ) : ( + <> + {/* Site summary banner */} +
+
+ Site IT load + {" "} + {sitePower.toFixed(1)} kW + / {siteCapacity.toFixed(0)} kW rated +
+
+ Site load + {" "} + = 85 ? "text-destructive" : + (sitePower / siteCapacity * 100) >= 70 ? "text-amber-400" : "text-green-400" + )}> + {(sitePower / siteCapacity * 100).toFixed(1)}% + +
+
+ Capacity config: {data.config.rack_power_kw} kW/rack ·{" "} + {data.config.crac_cooling_kw} kW CRAC ·{" "} + {data.config.rack_u_total}U/rack +
+
+ + {/* Room comparison strip */} + + + {/* Capacity runway + N+1 */} + + + {/* Per-room detail tabs */} +
+ + + {data.rooms.map((r) => ( + + {ROOM_LABELS[r.room_id] ?? r.room_id} + {r.power.pct >= 85 && ( + + )} + + ))} + + + +
+ {data.rooms + .filter((r) => r.room_id === activeRoom) + .map((room) => ( + + ))} +
+
+ + )} +
+ ); +} diff --git a/frontend/app/(dashboard)/cooling/page.tsx b/frontend/app/(dashboard)/cooling/page.tsx new file mode 100644 index 0000000..aafecb8 --- /dev/null +++ b/frontend/app/(dashboard)/cooling/page.tsx @@ -0,0 +1,610 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { toast } from "sonner"; +import { fetchCracStatus, fetchChillerStatus, type CracStatus, type ChillerStatus } from "@/lib/api"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { CracDetailSheet } from "@/components/dashboard/crac-detail-sheet"; +import { + Wind, AlertTriangle, CheckCircle2, Zap, ChevronRight, ArrowRight, Waves, Filter, + ChevronUp, ChevronDown, +} from "lucide-react"; +import { cn } from "@/lib/utils"; + +const SITE_ID = "sg-01"; +const roomLabels: Record = { "hall-a": "Hall A", "hall-b": "Hall B" }; + +function fmt(v: number | null | undefined, dec = 1, unit = "") { + if (v == null) return "—"; + return `${v.toFixed(dec)}${unit}`; +} + +function FillBar({ + value, max, color, warn, crit, height = "h-2", +}: { + value: number | null; max: number; color: string; + warn?: number; crit?: number; height?: string; +}) { + const pct = value != null ? Math.min(100, (value / max) * 100) : 0; + const barColor = + crit && value != null && value >= crit ? "#ef4444" : + warn && value != null && value >= warn ? "#f59e0b" : + color; + return ( +
+
+
+ ); +} + +function KpiTile({ label, value, sub, warn }: { + label: string; value: string; sub?: string; warn?: boolean; +}) { + return ( +
+

{label}

+

{value}

+ {sub &&

{sub}

} +
+ ); +} + +function CracCard({ crac, onOpen }: { crac: CracStatus; onOpen: () => void }) { + const [showCompressor, setShowCompressor] = useState(false); + const online = crac.state === "online"; + + const deltaWarn = (crac.delta ?? 0) > 11; + const deltaCrit = (crac.delta ?? 0) > 14; + const capWarn = (crac.cooling_capacity_pct ?? 0) > 75; + const capCrit = (crac.cooling_capacity_pct ?? 0) > 90; + const copWarn = (crac.cop ?? 99) < 1.5; + const filterWarn = (crac.filter_dp_pa ?? 0) > 80; + const filterCrit = (crac.filter_dp_pa ?? 0) > 120; + const compWarn = (crac.compressor_load_pct ?? 0) > 95; + const hiPWarn = (crac.high_pressure_bar ?? 0) > 22; + const loPWarn = (crac.low_pressure_bar ?? 99) < 3; + + return ( + + {/* ── Header ───────────────────────────────────────────────── */} + +
+
+ +
+ + {crac.crac_id.toUpperCase()} + + {crac.room_id && ( +

+ {roomLabels[crac.room_id] ?? crac.room_id} +

+ )} +
+
+
+ {online && ( + + {deltaCrit || capCrit ? "Critical" : deltaWarn || capWarn || filterWarn || copWarn ? "Warning" : "Normal"} + + )} + + {online + ? <> Online + : <> Fault} + + +
+
+
+ + + {!online ? ( +
+ Unit offline — cooling capacity in this room is degraded. +
+ ) : ( + <> + {/* ── Thermal hero ─────────────────────────────────────── */} +
+
+
+

Supply

+

+ {fmt(crac.supply_temp, 1)}°C +

+
+
+

+ ΔT {fmt(crac.delta, 1)}°C +

+
+
+ +
+
+
+
+

Return

+

+ {fmt(crac.return_temp, 1)}°C +

+
+
+
+ + {/* ── Cooling capacity ─────────────────────────────────── */} +
+
+ Cooling Capacity + + + {fmt(crac.cooling_capacity_kw, 1)} / {crac.rated_capacity_kw} kW + + · + + COP {fmt(crac.cop, 2)} + + +
+ +

+ {fmt(crac.cooling_capacity_pct, 1)}% utilised +

+
+ + {/* ── Fan + Filter ─────────────────────────────────────── */} +
+
+
+ Fan + + {fmt(crac.fan_pct, 1)}% + {crac.fan_rpm != null ? ` · ${Math.round(crac.fan_rpm).toLocaleString()} rpm` : ""} + +
+ +
+
+
+ Filter ΔP + + {fmt(crac.filter_dp_pa, 0)} Pa + {!filterWarn && } + +
+ +
+
+ + {/* ── Compressor (collapsible) ─────────────────────────── */} +
+ + {showCompressor && ( +
+ +
+ + Hi {fmt(crac.high_pressure_bar, 1)} bar + + + Lo {fmt(crac.low_pressure_bar, 2)} bar + + SH {fmt(crac.discharge_superheat_c, 1)}°C + SC {fmt(crac.liquid_subcooling_c, 1)}°C +
+
+ )} +
+ + {/* ── Electrical (one line) ────────────────────────────── */} +
+ + {fmt(crac.total_unit_power_kw, 2)} kW + · + {fmt(crac.input_voltage_v, 0)} V + · + {fmt(crac.input_current_a, 1)} A + · + PF {fmt(crac.power_factor, 3)} +
+ + {/* ── Status banner ────────────────────────────────────── */} +
+ {deltaCrit || capCrit + ? "Heat load is high — check airflow or redistribute rack density." + : deltaWarn || capWarn + ? "Heat load is elevated — monitor for further rises." + : filterWarn + ? "Filter requires attention — airflow may be restricted." + : copWarn + ? "Running inefficiently — check refrigerant charge." + : "Operating efficiently within normal parameters."} +
+ + )} + + + ); +} + +// ── Filter replacement estimate ──────────────────────────────────── +// Assumes ~1.2 Pa/day rate of rise — replace at 120 Pa threshold + +const FILTER_REPLACE_PA = 120; +const FILTER_RATE_PA_DAY = 1.2; + +function FilterEstimate({ cracs }: { cracs: CracStatus[] }) { + const units = cracs + .filter((c) => c.state === "online" && c.filter_dp_pa != null) + .map((c) => { + const dp = c.filter_dp_pa!; + const days = Math.max(0, Math.round((FILTER_REPLACE_PA - dp) / FILTER_RATE_PA_DAY)); + const urgent = dp >= 120; + const warn = dp >= 80; + return { crac_id: c.crac_id, dp, days, urgent, warn }; + }) + .sort((a, b) => a.days - b.days); + + if (units.length === 0) return null; + + const anyUrgent = units.some((u) => u.urgent); + const anyWarn = units.some((u) => u.warn); + + return ( + + +
+ + + Predictive Filter Replacement + + + {anyUrgent ? "Overdue" : anyWarn ? "Attention needed" : "All filters OK"} + +
+
+ +
+ {units.map((u) => ( +
+
+ {u.crac_id.toUpperCase()} +
+ + {u.dp} Pa + + + {u.urgent ? "Replace now" : `~${u.days}d`} + +
+
+
+
+
+
+ ))} +

+ Estimated at {FILTER_RATE_PA_DAY} Pa/day increase · replace at {FILTER_REPLACE_PA} Pa threshold +

+
+ + + ); +} + +// ── Chiller card ────────────────────────────────────────────────── + +function ChillerCard({ chiller }: { chiller: ChillerStatus }) { + const online = chiller.state === "online"; + const loadWarn = (chiller.cooling_load_pct ?? 0) > 80; + + return ( + + +
+ + + {chiller.chiller_id.toUpperCase()} — Chiller Plant + + + {online ? <> Online : <> Fault} + +
+
+ + {!online ? ( +
+ Chiller fault — CHW supply lost. CRAC/CRAH units relying on local refrigerant circuits only. +
+ ) : ( + <> + {/* CHW temps */} +
+
+
+

CHW Supply

+

{fmt(chiller.chw_supply_c, 1)}°C

+
+
+

ΔT {fmt(chiller.chw_delta_c, 1)}°C

+
+
+ +
+
+
+
+

CHW Return

+

{fmt(chiller.chw_return_c, 1)}°C

+
+
+
+ {/* Load */} +
+
+ Cooling Load + + {fmt(chiller.cooling_load_kw, 1)} kW + · + COP {fmt(chiller.cop, 2)} + +
+ +

{fmt(chiller.cooling_load_pct, 1)}% load

+
+ {/* Details */} +
+
Flow rate{fmt(chiller.flow_gpm, 0)} GPM
+
Comp load{fmt(chiller.compressor_load_pct, 1)}%
+
Cond press{fmt(chiller.condenser_pressure_bar, 2)} bar
+
Evap press{fmt(chiller.evaporator_pressure_bar, 2)} bar
+
CW supply{fmt(chiller.cw_supply_c, 1)}°C
+
CW return{fmt(chiller.cw_return_c, 1)}°C
+
+
+ Run hours: {chiller.run_hours != null ? chiller.run_hours.toFixed(0) : "—"} h +
+ + )} + + + ); +} + +// ── Page ────────────────────────────────────────────────────────────────────── + +export default function CoolingPage() { + const [cracs, setCracs] = useState([]); + const [chillers, setChillers] = useState([]); + const [loading, setLoading] = useState(true); + const [selectedCrac, setSelected] = useState(null); + + const load = useCallback(async () => { + try { + const [c, ch] = await Promise.all([ + fetchCracStatus(SITE_ID), + fetchChillerStatus(SITE_ID).catch(() => []), + ]); + setCracs(c); + setChillers(ch); + } + catch { toast.error("Failed to load cooling data"); } + finally { setLoading(false); } + }, []); + + useEffect(() => { + load(); + const id = setInterval(load, 30_000); + return () => clearInterval(id); + }, [load]); + + const online = cracs.filter(c => c.state === "online"); + const anyFaulted = cracs.some(c => c.state === "fault"); + const totalCoolingKw = online.reduce((s, c) => s + (c.cooling_capacity_kw ?? 0), 0); + const totalRatedKw = cracs.reduce((s, c) => s + (c.rated_capacity_kw ?? 0), 0); + const copUnits = online.filter(c => c.cop != null); + const avgCop = copUnits.length > 0 + ? copUnits.reduce((s, c) => s + (c.cop ?? 0), 0) / copUnits.length + : null; + const totalUnitPower = online.reduce((s, c) => s + (c.total_unit_power_kw ?? 0), 0); + const totalAirflowCfm = online.reduce((s, c) => s + (c.airflow_cfm ?? 0), 0); + + return ( +
+ {/* ── Page header ───────────────────────────────────────────── */} +
+
+

Cooling Systems

+

+ Singapore DC01 · click a unit to drill down · refreshes every 30s +

+
+ {!loading && ( + + {anyFaulted + ? <> Cooling fault detected + : <> All {cracs.length} units operational} + + )} +
+ + {/* ── Filter alert banner ───────────────────────────────────── */} + {!loading && (() => { + const urgent = cracs + .filter(c => c.state === "online" && c.filter_dp_pa != null) + .map(c => ({ id: c.crac_id, days: Math.max(0, Math.round((120 - c.filter_dp_pa!) / 1.2)) })) + .filter(c => c.days < 14) + .sort((a, b) => a.days - b.days); + if (urgent.length === 0) return null; + return ( +
+ + + Filter replacement due:{" "} + {urgent.map(u => `${u.id.toUpperCase()} in ${u.days === 0 ? "now" : `~${u.days}d`}`).join(", ")} + +
+ ); + })()} + + {/* ── Fleet summary KPI cards ───────────────────────────────── */} + {loading && ( +
+ {Array.from({ length: 5 }).map((_, i) => )} +
+ )} + {!loading && cracs.length > 0 && ( +
+ + +

Cooling Load

+

{totalCoolingKw.toFixed(1)} kW

+

of {totalRatedKw} kW rated

+
+
+ + +

Avg COP

+

+ {avgCop != null ? avgCop.toFixed(2) : "—"} +

+
+
+ + +

Unit Power Draw

+

{totalUnitPower.toFixed(1)} kW

+

total electrical input

+
+
+ + +

Units Online

+

+ {online.length} / {cracs.length} +

+
+
+ {totalAirflowCfm > 0 && ( + + +

Total Airflow

+

{Math.round(totalAirflowCfm).toLocaleString()}

+

CFM combined output

+
+
+ )} +
+ )} + + {/* ── Chiller plant ─────────────────────────────────────────── */} + {(loading || chillers.length > 0) && ( + <> +

Chiller Plant

+ {loading ? ( + + ) : ( +
+ {chillers.map(ch => )} +
+ )} + + )} + + {/* ── Filter health (moved before CRAC cards) ───────────────── */} + {!loading && } + + {/* ── CRAC cards ────────────────────────────────────────────── */} +

CRAC / CRAH Units

+ {loading ? ( +
+ + +
+ ) : ( +
+ {cracs.map(crac => ( + setSelected(crac.crac_id)} /> + ))} +
+ )} + + setSelected(null)} + /> +
+ ); +} diff --git a/frontend/app/(dashboard)/dashboard/page.tsx b/frontend/app/(dashboard)/dashboard/page.tsx new file mode 100644 index 0000000..cdb1b6d --- /dev/null +++ b/frontend/app/(dashboard)/dashboard/page.tsx @@ -0,0 +1,246 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { useRouter } from "next/navigation"; +import { Zap, Thermometer, Wind, AlertTriangle, Wifi, WifiOff, Fuel, Droplets } from "lucide-react"; +import { KpiCard } from "@/components/dashboard/kpi-card"; +import { PowerTrendChart } from "@/components/dashboard/power-trend-chart"; +import { TemperatureTrendChart } from "@/components/dashboard/temperature-trend-chart"; +import { AlarmFeed } from "@/components/dashboard/alarm-feed"; +import { MiniFloorMap } from "@/components/dashboard/mini-floor-map"; +import { RackDetailSheet } from "@/components/dashboard/rack-detail-sheet"; +import { + fetchKpis, fetchPowerHistory, fetchTempHistory, + fetchAlarms, fetchGeneratorStatus, fetchLeakStatus, + fetchCapacitySummary, fetchFloorLayout, + type KpiData, type PowerBucket, type TempBucket, + type Alarm, type GeneratorStatus, type LeakSensorStatus, + type RackCapacity, +} from "@/lib/api"; +import { TimeRangePicker } from "@/components/ui/time-range-picker"; +import Link from "next/link"; +import { PageShell } from "@/components/layout/page-shell"; + +const SITE_ID = "sg-01"; +const KPI_INTERVAL = 15_000; +const CHART_INTERVAL = 30_000; + +// Fallback static data shown when the API is unreachable +const FALLBACK_KPIS: KpiData = { + total_power_kw: 0, pue: 0, avg_temperature: 0, active_alarms: 0, +}; + +export default function DashboardPage() { + const router = useRouter(); + const [kpis, setKpis] = useState(FALLBACK_KPIS); + const [prevKpis, setPrevKpis] = useState(null); + const [powerHistory, setPowerHistory] = useState([]); + const [tempHistory, setTempHistory] = useState([]); + const [alarms, setAlarms] = useState([]); + const [generators, setGenerators] = 
useState([]); + const [leakSensors, setLeakSensors] = useState([]); + const [mapRacks, setMapRacks] = useState([]); + const [mapLayout, setMapLayout] = useState | null>(null); + const [chartHours, setChartHours] = useState(1); + const [loading, setLoading] = useState(true); + const [liveError, setLiveError] = useState(false); + const [lastUpdated, setLastUpdated] = useState(null); + const [selectedRack, setSelectedRack] = useState(null); + + const refreshKpis = useCallback(async () => { + try { + const [k, a, g, l, cap] = await Promise.all([ + fetchKpis(SITE_ID), + fetchAlarms(SITE_ID), + fetchGeneratorStatus(SITE_ID).catch(() => []), + fetchLeakStatus(SITE_ID).catch(() => []), + fetchCapacitySummary(SITE_ID).catch(() => null), + ]); + setKpis((current) => { + if (current !== FALLBACK_KPIS) setPrevKpis(current); + return k; + }); + setAlarms(a); + setGenerators(g); + setLeakSensors(l); + if (cap) setMapRacks(cap.racks); + setLiveError(false); + setLastUpdated(new Date()); + } catch { + setLiveError(true); + } + }, []); + + const refreshCharts = useCallback(async () => { + try { + const [p, t] = await Promise.all([ + fetchPowerHistory(SITE_ID, chartHours), + fetchTempHistory(SITE_ID, chartHours), + ]); + setPowerHistory(p); + setTempHistory(t); + } catch { + // keep previous chart data on failure + } + }, []); + + // Initial load + useEffect(() => { + Promise.all([refreshKpis(), refreshCharts()]).finally(() => setLoading(false)); + fetchFloorLayout(SITE_ID) + .then(l => setMapLayout(l as typeof mapLayout)) + .catch(() => {}); + }, [refreshKpis, refreshCharts]); + + // Re-fetch charts when time range changes + useEffect(() => { refreshCharts(); }, [chartHours, refreshCharts]); + + // Polling + useEffect(() => { + const kpiTimer = setInterval(refreshKpis, KPI_INTERVAL); + const chartTimer = setInterval(refreshCharts, CHART_INTERVAL); + return () => { clearInterval(kpiTimer); clearInterval(chartTimer); }; + }, [refreshKpis, refreshCharts]); + + function 
handleAlarmClick(alarm: Alarm) { + if (alarm.rack_id) { + setSelectedRack(alarm.rack_id); + } else if (alarm.room_id) { + router.push("/environmental"); + } else { + router.push("/alarms"); + } + } + + // Derived KPI display values + const alarmStatus = kpis.active_alarms === 0 ? "ok" + : kpis.active_alarms <= 2 ? "warning" : "critical"; + + const tempStatus = kpis.avg_temperature === 0 ? "ok" + : kpis.avg_temperature >= 28 ? "critical" + : kpis.avg_temperature >= 25 ? "warning" : "ok"; + + // Trends vs previous poll + const powerTrend = prevKpis ? Math.round((kpis.total_power_kw - prevKpis.total_power_kw) * 10) / 10 : null; + const tempTrend = prevKpis ? Math.round((kpis.avg_temperature - prevKpis.avg_temperature) * 10) / 10 : null; + const alarmTrend = prevKpis ? kpis.active_alarms - prevKpis.active_alarms : null; + + // Generator derived + const gen = generators[0] ?? null; + const genFuel = gen?.fuel_pct ?? null; + const genState = gen?.state ?? "unknown"; + const genStatus: "ok" | "warning" | "critical" = + genState === "fault" ? "critical" : + genState === "running" ? "warning" : + genFuel !== null && genFuel < 25 ? "warning" : "ok"; + + // Leak derived + const activeLeaks = leakSensors.filter(s => s.state === "detected").length; + const leakStatus: "ok" | "warning" | "critical" = activeLeaks > 0 ? "critical" : "ok"; + + return ( + + setSelectedRack(null)} /> + {/* Live status bar */} +
+
+ {liveError ? ( + <> Live data unavailable + ) : ( + <> Live · updates every 15s + )} +
+ {lastUpdated && ( + + Last updated {lastUpdated.toLocaleTimeString()} + + )} +
+ + {/* Unified KPI grid — 3×2 on desktop */} +
+ 0 ? "+" : ""}${powerTrend} kW` : undefined} + href="/power" + /> + + 0 ? "+" : ""}${tempTrend}°C` : undefined} + trendInvert + href="/environmental" + /> + 0 ? "+" : ""}${alarmTrend}` : undefined} + trendInvert + href="/alarms" + /> + + 0 ? `${activeLeaks} active` : "All clear"} + hint={activeLeaks > 0 ? "Water detected — investigate immediately" : `${leakSensors.length} sensors monitoring`} + icon={Droplets} + iconColor={leakStatus === "critical" ? "text-destructive" : "text-blue-400"} + status={loading ? "ok" : leakStatus} + loading={loading} + href="/environmental" + /> +
+ + {/* Charts row */} +
+

Trends

+ +
+
+ + +
+ + {/* Bottom row — 50/50 */} +
+ + +
+
+ ); +} diff --git a/frontend/app/(dashboard)/energy/page.tsx b/frontend/app/(dashboard)/energy/page.tsx new file mode 100644 index 0000000..297f0e3 --- /dev/null +++ b/frontend/app/(dashboard)/energy/page.tsx @@ -0,0 +1,330 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { toast } from "sonner"; +import { + fetchEnergyReport, fetchUtilityPower, + type EnergyReport, type UtilityPower, +} from "@/lib/api"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + AreaChart, Area, LineChart, Line, + XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer, ReferenceLine, +} from "recharts"; +import { Zap, Leaf, RefreshCw, TrendingDown, DollarSign, Activity } from "lucide-react"; +import { cn } from "@/lib/utils"; + +const SITE_ID = "sg-01"; + +// Singapore grid emission factor (kgCO2e/kWh) — Energy Market Authority 2023 +const GRID_EF_KG_CO2_KWH = 0.4168; +// Approximate WUE for air-cooled DC in Singapore climate +const WUE_EST = 1.4; + +function KpiTile({ + label, value, sub, icon: Icon, iconClass, warn, +}: { + label: string; value: string; sub?: string; + icon?: React.ElementType; iconClass?: string; warn?: boolean; +}) { + return ( +
+
+ {Icon && } +

{label}

+
+

{value}

+ {sub &&

{sub}

} +
+ ); +} + +function SectionHeader({ children }: { children: React.ReactNode }) { + return ( +

{children}

+ ); +} + +export default function EnergyPage() { + const [energy, setEnergy] = useState(null); + const [utility, setUtility] = useState(null); + const [loading, setLoading] = useState(true); + + const load = useCallback(async () => { + try { + const [e, u] = await Promise.all([ + fetchEnergyReport(SITE_ID, 30), + fetchUtilityPower(SITE_ID).catch(() => null), + ]); + setEnergy(e); + setUtility(u); + } catch { toast.error("Failed to load energy data"); } + finally { setLoading(false); } + }, []); + + useEffect(() => { + load(); + const id = setInterval(load, 60_000); + return () => clearInterval(id); + }, [load]); + + const co2e_kg = energy ? Math.round(energy.kwh_total * GRID_EF_KG_CO2_KWH) : null; + const co2e_t = co2e_kg ? (co2e_kg / 1000).toFixed(2) : null; + const wue_water = energy ? (energy.kwh_total * (WUE_EST - 1)).toFixed(0) : null; + + const itKwChart = (energy?.pue_trend ?? []).map((d) => ({ + day: new Date(d.day).toLocaleDateString("en-GB", { month: "short", day: "numeric" }), + kw: d.avg_it_kw, + pue: d.pue_est, + })); + + const avgPue30 = energy?.pue_estimated ?? null; + const pueWarn = avgPue30 != null && avgPue30 > 1.5; + + return ( +
+ {/* Header */} +
+
+

Energy & Sustainability

+

Singapore DC01 — 30-day energy analysis · refreshes every 60s

+
+
+ {!loading && ( +
+ {co2e_t ? `${co2e_t} tCO₂e this month` : "—"} +
+ )} + +
+
+ + {/* Site energy banner */} + {!loading && utility && ( +
+
+ Current IT load: + {utility.total_kw.toFixed(1)} kW +
+
+ Tariff: + SGD {utility.tariff_sgd_kwh.toFixed(3)}/kWh +
+
+ Month-to-date: + {utility.kwh_month_to_date.toFixed(0)} kWh + + (SGD {utility.cost_sgd_mtd.toFixed(0)}) + +
+
+ Singapore · SP Group grid +
+
+ )} + + {/* 30-day KPIs */} + 30-Day Energy Summary + {loading ? ( +
+ {Array.from({ length: 4 }).map((_, i) => )} +
+ ) : ( +
+ + + + +
+ )} + + {/* IT Load trend */} + {(loading || itKwChart.length > 0) && ( + + + + + Daily IT Load — 30 Days + + + + {loading ? ( + + ) : ( + + + + + + + + + + + `${v} kW`} + domain={["auto", "auto"]} + /> + [`${Number(v).toFixed(1)} kW`, "IT Load"]} + /> + + + + )} + + + )} + + {/* PUE trend */} + {(loading || itKwChart.length > 0) && ( + + + + + PUE Trend — 30 Days + (target: < 1.4) + + + + {loading ? ( + + ) : ( + + + + + + [Number(v).toFixed(3), "PUE"]} + /> + + + + + + )} + + + )} + + {/* Sustainability */} + Sustainability Metrics + {loading ? ( +
+ {Array.from({ length: 3 }).map((_, i) => )} +
+ ) : ( +
+
+
+ +

Carbon Footprint

+
+

{co2e_t ?? "—"} tCO₂e

+

+ 30-day estimate · {energy?.kwh_total.toFixed(0) ?? "—"} kWh × {GRID_EF_KG_CO2_KWH} kgCO₂e/kWh +

+

+ Singapore grid emission factor (EMA 2023) +

+
+ +
+
+ +

Water Usage (WUE)

+
+

{WUE_EST.toFixed(1)}

+

+ Estimated WUE (L/kWh) · air-cooled DC +

+

+ Est. {wue_water ? `${Number(wue_water).toLocaleString()} L` : "—"} consumed (30d) +

+
+ +
+
+ +

Efficiency

+
+

+ {avgPue30?.toFixed(3) ?? "—"} +

+

+ Avg PUE · {avgPue30 != null && avgPue30 < 1.4 ? "Excellent — Tier IV class" : + avgPue30 != null && avgPue30 < 1.6 ? "Good — industry average" : + "Above average — optimise cooling"} +

+

+ IT energy efficiency: {avgPue30 != null ? `${(1 / avgPue30 * 100).toFixed(1)}%` : "—"} of total power to IT +

+
+
+ )} + + {/* Reference info */} +
+

Singapore Energy Context

+

Grid emission factor: {GRID_EF_KG_CO2_KWH} kgCO₂e/kWh (EMA 2023, predominantly natural gas + growing solar)

+

Electricity tariff: SGD {utility?.tariff_sgd_kwh.toFixed(3) ?? "0.298"}/kWh (SP Group commercial rate)

+

BCA Green Mark: Targeting GoldPLUS certification · PUE target < 1.4

+

+ CO₂e and WUE estimates are indicative. Actual values depend on metered chilled water and cooling tower data. +

+
+
+ ); +} diff --git a/frontend/app/(dashboard)/environmental/page.tsx b/frontend/app/(dashboard)/environmental/page.tsx new file mode 100644 index 0000000..7270e64 --- /dev/null +++ b/frontend/app/(dashboard)/environmental/page.tsx @@ -0,0 +1,846 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { toast } from "sonner"; +import { + fetchRackEnvReadings, fetchHumidityHistory, fetchTempHistory as fetchRoomTempHistory, + fetchCracStatus, fetchLeakStatus, fetchFireStatus, fetchParticleStatus, + type RoomEnvReadings, type HumidityBucket, type TempBucket, type CracStatus, + type LeakSensorStatus, type FireZoneStatus, type ParticleStatus, +} from "@/lib/api"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"; +import { useThresholds } from "@/lib/threshold-context"; +import { TimeRangePicker } from "@/components/ui/time-range-picker"; +import { + ComposedChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, + ResponsiveContainer, ReferenceLine, ReferenceArea, +} from "recharts"; +import { Thermometer, Droplets, WifiOff, CheckCircle2, AlertTriangle, Flame, Wind } from "lucide-react"; +import { RackDetailSheet } from "@/components/dashboard/rack-detail-sheet"; +import { cn } from "@/lib/utils"; +import Link from "next/link"; + +const SITE_ID = "sg-01"; +const ROOM_COLORS: Record = { + "hall-a": { temp: "oklch(0.62 0.17 212)", hum: "oklch(0.55 0.18 270)" }, + "hall-b": { temp: "oklch(0.7 0.15 162)", hum: "oklch(0.60 0.15 145)" }, +}; +const roomLabels: Record = { "hall-a": "Hall A", "hall-b": "Hall B" }; + +function formatTime(iso: string) { + return new Date(iso).toLocaleTimeString([], { hour: "2-digit", minute: "2-digit" }); +} + +// ── Utility functions ───────────────────────────────────────────────────────── + +/** Magnus formula dew point (°C) from temperature (°C) and relative 
humidity (%) */ +function dewPoint(temp: number, rh: number): number { + const gamma = Math.log(rh / 100) + (17.625 * temp) / (243.04 + temp); + return Math.round((243.04 * gamma / (17.625 - gamma)) * 10) / 10; +} + +function humidityColor(hum: number | null): string { + if (hum === null) return "oklch(0.25 0.02 265)"; + if (hum > 80) return "oklch(0.55 0.22 25)"; // critical high + if (hum > 65) return "oklch(0.65 0.20 45)"; // warning high + if (hum > 50) return "oklch(0.72 0.18 84)"; // elevated + if (hum >= 30) return "oklch(0.68 0.14 162)"; // optimal + return "oklch(0.62 0.17 212)"; // low (static risk) +} + +// ── Temperature heatmap ─────────────────────────────────────────────────────── + +function tempColor(temp: number | null, warn = 26, crit = 28): string { + if (temp === null) return "oklch(0.25 0.02 265)"; + if (temp >= crit + 4) return "oklch(0.55 0.22 25)"; + if (temp >= crit) return "oklch(0.65 0.20 45)"; + if (temp >= warn) return "oklch(0.72 0.18 84)"; + if (temp >= warn - 2) return "oklch(0.78 0.14 140)"; + if (temp >= warn - 4) return "oklch(0.68 0.14 162)"; + return "oklch(0.60 0.15 212)"; +} + +type HeatmapOverlay = "temp" | "humidity"; + +function TempHeatmap({ + rooms, onRackClick, activeRoom, tempWarn = 26, tempCrit = 28, humWarn = 65, humCrit = 80, +}: { + rooms: RoomEnvReadings[]; + onRackClick: (rackId: string) => void; + activeRoom: string; + tempWarn?: number; + tempCrit?: number; + humWarn?: number; + humCrit?: number; +}) { + const [overlay, setOverlay] = useState("temp"); + const room = rooms.find((r) => r.room_id === activeRoom); + + return ( + + +
+ + {overlay === "temp" + ? + : } + {overlay === "temp" ? "Temperature" : "Humidity"} Heatmap + +
+ {/* Overlay toggle */} +
+ + +
+
+
+
+ + {/* Callout — hottest or most humid */} + {(() => { + if (overlay === "temp") { + const hottest = room?.racks.reduce((a, b) => + (a.temperature ?? 0) > (b.temperature ?? 0) ? a : b + ); + if (!hottest || hottest.temperature === null) return null; + const isHot = hottest.temperature >= tempWarn; + return ( +
= tempCrit ? "bg-destructive/10 text-destructive" : + isHot ? "bg-amber-500/10 text-amber-400" : "bg-muted/40 text-muted-foreground" + )}> + + + Hottest: {hottest.rack_id.toUpperCase()} at {hottest.temperature}°C + {hottest.temperature >= tempCrit ? " — above critical threshold" : isHot ? " — above warning threshold" : " — within normal range"} + +
+ ); + } else { + const humid = room?.racks.reduce((a, b) => + (a.humidity ?? 0) > (b.humidity ?? 0) ? a : b + ); + if (!humid || humid.humidity === null) return null; + const isHigh = humid.humidity > humWarn; + return ( +
humCrit ? "bg-destructive/10 text-destructive" : + isHigh ? "bg-amber-500/10 text-amber-400" : "bg-muted/40 text-muted-foreground" + )}> + + + Highest humidity: {humid.rack_id.toUpperCase()} at {humid.humidity}% + {humid.humidity > humCrit ? " — above critical threshold" : isHigh ? " — above warning threshold" : " — within normal range"} + +
+ ); + } + })()} + + {/* Rack grid */} +
+ {room?.racks.map((rack) => { + const offline = rack.temperature === null && rack.humidity === null; + const bg = overlay === "temp" ? tempColor(rack.temperature, tempWarn, tempCrit) : humidityColor(rack.humidity); + const mainVal = overlay === "temp" + ? (rack.temperature !== null ? `${rack.temperature}°` : null) + : (rack.humidity !== null ? `${rack.humidity}%` : null); + const subVal = overlay === "temp" + ? (rack.humidity !== null && rack.temperature !== null + ? `DP ${dewPoint(rack.temperature, rack.humidity)}°` : null) + : (rack.temperature !== null ? `${rack.temperature}°C` : null); + + return ( +
onRackClick(rack.rack_id)} + className={cn( + "relative rounded-lg p-3 flex flex-col items-center justify-center gap-0.5 min-h-[72px] transition-all cursor-pointer hover:ring-2 hover:ring-white/20", + offline ? "hover:opacity-70" : "hover:opacity-80" + )} + style={{ + backgroundColor: offline ? "oklch(0.22 0.02 265)" : bg, + backgroundImage: offline + ? "repeating-linear-gradient(45deg, transparent, transparent 4px, oklch(1 0 0 / 4%) 4px, oklch(1 0 0 / 4%) 8px)" + : undefined, + }} + > + + {rack.rack_id.replace("rack-", "").toUpperCase()} + + {offline ? ( + + ) : ( + {mainVal ?? "—"} + )} + {subVal && ( + {subVal} + )} +
+ ); + })} +
+ + {/* Legend */} +
+ {overlay === "temp" ? ( + <> + Cool + {(["oklch(0.60 0.15 212)", "oklch(0.68 0.14 162)", "oklch(0.78 0.14 140)", "oklch(0.72 0.18 84)", "oklch(0.65 0.20 45)", "oklch(0.55 0.22 25)"] as string[]).map((c, i) => ( + + ))} + Hot + Warn: {tempWarn}°C  |  Crit: {tempCrit}°C · Tiles show dew point (DP) + + ) : ( + <> + Dry + {(["oklch(0.62 0.17 212)", "oklch(0.68 0.14 162)", "oklch(0.72 0.18 84)", "oklch(0.65 0.20 45)", "oklch(0.55 0.22 25)"] as string[]).map((c, i) => ( + + ))} + Humid + Optimal: 30–65%  |  ASHRAE A1 max: 80% + + )} +
+
+
+ ); +} + +// ── Dual-axis trend chart ───────────────────────────────────────────────────── + +function EnvTrendChart({ + tempData, humData, hours, activeRoom, tempWarn = 26, tempCrit = 28, humWarn = 65, +}: { + tempData: TempBucket[]; + humData: HumidityBucket[]; + hours: number; + activeRoom: string; + tempWarn?: number; + tempCrit?: number; + humWarn?: number; +}) { + const roomIds = [...new Set(tempData.map((d) => d.room_id))].sort(); + + // Build combined rows for the active room + type ComboRow = { time: string; temp: number | null; hum: number | null }; + const buckets = new Map(); + + for (const d of tempData.filter((d) => d.room_id === activeRoom)) { + const time = formatTime(d.bucket); + if (!buckets.has(time)) buckets.set(time, { time, temp: null, hum: null }); + buckets.get(time)!.temp = d.avg_temp; + } + for (const d of humData.filter((d) => d.room_id === activeRoom)) { + const time = formatTime(d.bucket); + if (!buckets.has(time)) buckets.set(time, { time, temp: null, hum: null }); + buckets.get(time)!.hum = d.avg_humidity; + } + const chartData = Array.from(buckets.values()); + const colors = ROOM_COLORS[activeRoom] ?? ROOM_COLORS["hall-a"]; + + const labelSuffix = hours <= 1 ? "1h" : hours <= 6 ? "6h" : hours <= 24 ? "24h" : "7d"; + + return ( + + +
+ + + + Temp & Humidity — last {labelSuffix} + +
+ {/* Legend */} +
+ + + Temp (°C, left axis) + + + + Humidity (%, right axis) + + + + ASHRAE A1 safe zone + +
+
+ + {chartData.length === 0 ? ( +
+ Waiting for data... +
+ ) : ( + + + {/* ASHRAE A1 safe zones */} + + + + + + {/* Left axis — temperature */} + `${v}°`} + /> + {/* Right axis — humidity */} + `${v}%`} + /> + + name === "temp" ? [`${Number(v).toFixed(1)}°C`, "Temperature"] : + [`${Number(v).toFixed(0)}%`, "Humidity"] + } + /> + + {/* Temp reference lines */} + + + + {/* Humidity reference line */} + + + {/* Lines */} + + + + + )} +

+ Green shaded band = ASHRAE A1 thermal envelope (18–27°C / 20–80% RH) +

+
+
+ ); +} + +// ── ASHRAE A1 Compliance Table ──────────────────────────────────────────────── + +// ASHRAE A1: 15–32°C, 20–80% RH +function AshraeTable({ rooms }: { rooms: RoomEnvReadings[] }) { + const allRacks = rooms.flatMap(r => + r.racks.map(rack => ({ ...rack, room_id: r.room_id })) + ).filter(r => r.temperature !== null || r.humidity !== null); + + type Issue = { type: string; detail: string }; + const rows = allRacks.map(rack => { + const issues: Issue[] = []; + if (rack.temperature !== null) { + if (rack.temperature < 15) issues.push({ type: "Temp", detail: `${rack.temperature}°C — below 15°C min` }); + if (rack.temperature > 32) issues.push({ type: "Temp", detail: `${rack.temperature}°C — above 32°C max` }); + } + if (rack.humidity !== null) { + if (rack.humidity < 20) issues.push({ type: "RH", detail: `${rack.humidity}% — below 20% min` }); + if (rack.humidity > 80) issues.push({ type: "RH", detail: `${rack.humidity}% — above 80% max` }); + } + const dp = rack.temperature !== null && rack.humidity !== null + ? dewPoint(rack.temperature, rack.humidity) : null; + return { rack, issues, dp }; + }); + + const violations = rows.filter(r => r.issues.length > 0); + const compliant = rows.filter(r => r.issues.length === 0); + + return ( + + +
+ + ASHRAE A1 Compliance + + 0 ? "bg-destructive/10 text-destructive" : "bg-green-500/10 text-green-400" + )}> + {violations.length === 0 ? `All ${compliant.length} racks compliant` : `${violations.length} violation${violations.length > 1 ? "s" : ""}`} + +
+
+ + {violations.length === 0 ? ( +

+ + All racks within ASHRAE A1 envelope (15–32°C, 20–80% RH) +

+ ) : ( +
+ {violations.map(({ rack, issues, dp }) => ( +
+ +
+ {rack.rack_id.toUpperCase()} + {roomLabels[rack.room_id] ?? rack.room_id} +
+ {issues.map((iss, i) => {iss.type}: {iss.detail})} + {dp !== null && DP: {dp}°C} +
+
+
+ ))} +

+ ASHRAE A1 envelope: 15–32°C dry bulb, 20–80% relative humidity +

+
+ )} +
+
+ ); +} + +// ── Dew Point Panel ─────────────────────────────────────────────────────────── + +function DewPointPanel({ + rooms, cracs, activeRoom, +}: { + rooms: RoomEnvReadings[]; + cracs: CracStatus[]; + activeRoom: string; +}) { + const room = rooms.find(r => r.room_id === activeRoom); + const crac = cracs.find(c => c.room_id === activeRoom); + const supplyTemp = crac?.supply_temp ?? null; + + const rackDps = (room?.racks ?? []) + .filter(r => r.temperature !== null && r.humidity !== null) + .map(r => ({ + rack_id: r.rack_id, + dp: dewPoint(r.temperature!, r.humidity!), + temp: r.temperature!, + hum: r.humidity!, + })) + .sort((a, b) => b.dp - a.dp); + + return ( + + +
+ + Dew Point by Rack + + {supplyTemp !== null && ( + + CRAC supply: {supplyTemp}°C + {rackDps.some(r => r.dp >= supplyTemp - 1) && ( + — condensation risk! + )} + + )} +
+
+ + {rackDps.length === 0 ? ( +

No data available

+ ) : ( +
+ {rackDps.map(({ rack_id, dp, temp, hum }) => { + const nearCondensation = supplyTemp !== null && dp >= supplyTemp - 1; + const dpColor = nearCondensation ? "text-destructive" + : dp > 15 ? "text-amber-400" : "text-foreground"; + return ( +
+ + {rack_id.replace("rack-", "").toUpperCase()} + +
+
15 ? "bg-amber-500" : "bg-blue-500")} + style={{ width: `${Math.min(100, Math.max(0, (dp / 30) * 100))}%` }} + /> +
+ + {dp}°C DP + + + {temp}° / {hum}% + +
+ ); + })} +

+ Dew point approaching CRAC supply temp = condensation risk on cold surfaces +

+
+ )} + + + ); +} + +// ── Leak sensor panel ───────────────────────────────────────────────────────── + +function LeakPanel({ sensors }: { sensors: LeakSensorStatus[] }) { + const detected = sensors.filter(s => s.state === "detected"); + const anyDetected = detected.length > 0; + + return ( + + +
+ + Water / Leak Detection + +
+ View full page → + + {anyDetected ? `${detected.length} leak detected` : "All clear"} + +
+
+
+ + {sensors.map(s => { + const detected = s.state === "detected"; + return ( +
+
+

+ {detected ? : } + {s.sensor_id} +

+

+ Zone: {s.floor_zone} + {s.under_floor ? " · under-floor" : ""} + {s.near_crac ? " · near CRAC" : ""} + {s.room_id ? ` · ${s.room_id}` : ""} +

+
+ + {s.state === "detected" ? "DETECTED" : s.state === "clear" ? "Clear" : "Unknown"} + +
+ ); + })} + {sensors.length === 0 && ( +

No sensors configured

+ )} +
+
+ ); +} + +// ── VESDA / Fire panel ──────────────────────────────────────────────────────── + +const VESDA_LEVEL_CONFIG: Record = { + normal: { label: "Normal", color: "text-green-400", bg: "bg-green-500/10" }, + alert: { label: "Alert", color: "text-amber-400", bg: "bg-amber-500/10" }, + action: { label: "Action", color: "text-orange-400", bg: "bg-orange-500/10" }, + fire: { label: "FIRE", color: "text-destructive", bg: "bg-destructive/10" }, +}; + +function FirePanel({ zones }: { zones: FireZoneStatus[] }) { + const elevated = zones.filter(z => z.level !== "normal"); + + return ( + 0 && elevated.some(z => z.level === "fire") && "border-destructive/50")}> + +
+ + VESDA / Smoke Detection + +
+ View full page → + z.level === "fire") ? "bg-destructive/10 text-destructive animate-pulse" : + "bg-amber-500/10 text-amber-400", + )}> + {elevated.length === 0 ? "All normal" : `${elevated.length} zone${elevated.length !== 1 ? "s" : ""} elevated`} + +
+
+
+ + {zones.map(zone => { + const cfg = VESDA_LEVEL_CONFIG[zone.level] ?? VESDA_LEVEL_CONFIG.normal; + return ( +
+
+
+

{zone.zone_id}

+ {zone.room_id &&

{zone.room_id}

} +
+ {cfg.label} +
+
+ + Obscuration: {zone.obscuration_pct_m != null ? `${zone.obscuration_pct_m.toFixed(3)} %/m` : "—"} + +
+ {!zone.detector_1_ok && Det1 fault} + {!zone.detector_2_ok && Det2 fault} + {!zone.power_ok && Power fault} + {!zone.flow_ok && Flow fault} + {zone.detector_1_ok && zone.detector_2_ok && zone.power_ok && zone.flow_ok && ( + Systems OK + )} +
+
+
+ ); + })} + {zones.length === 0 && ( +

No VESDA zones configured

+ )} +
+
+ ); +} + +// ── Particle count panel (ISO 14644) ────────────────────────────────────────── + +const ISO_LABELS: Record = { + 5: { label: "ISO 5", color: "text-green-400" }, + 6: { label: "ISO 6", color: "text-green-400" }, + 7: { label: "ISO 7", color: "text-green-400" }, + 8: { label: "ISO 8", color: "text-amber-400" }, + 9: { label: "ISO 9", color: "text-destructive" }, +}; + +const ISO8_0_5UM = 3_520_000; +const ISO8_5UM = 29_300; + +function ParticlePanel({ rooms }: { rooms: ParticleStatus[] }) { + if (rooms.length === 0) return null; + return ( + + + + + Air Quality — ISO 14644 + + + + {rooms.map(r => { + const cls = r.iso_class ? ISO_LABELS[r.iso_class] : null; + const p05pct = r.particles_0_5um !== null ? Math.min(100, (r.particles_0_5um / ISO8_0_5UM) * 100) : null; + const p5pct = r.particles_5um !== null ? Math.min(100, (r.particles_5um / ISO8_5UM) * 100) : null; + return ( +
+
+ {r.room_id === "hall-a" ? "Hall A" : r.room_id === "hall-b" ? "Hall B" : r.room_id} + {cls ? ( + + {cls.label} + + ) : ( + No data + )} +
+
+
+ ≥0.5 µm +
+ {p05pct !== null && ( +
= 100 ? "bg-destructive" : p05pct >= 70 ? "bg-amber-500" : "bg-green-500")} + style={{ width: `${p05pct}%` }} + /> + )} +
+ + {r.particles_0_5um !== null ? r.particles_0_5um.toLocaleString() : "—"} /m³ + +
+
+ ≥5 µm +
+ {p5pct !== null && ( +
= 100 ? "bg-destructive" : p5pct >= 70 ? "bg-amber-500" : "bg-green-500")} + style={{ width: `${p5pct}%` }} + /> + )} +
+ + {r.particles_5um !== null ? r.particles_5um.toLocaleString() : "—"} /m³ + +
+
+
+ ); + })} +

+ DC target: ISO 8 (≤3,520,000 particles ≥0.5 µm/m³ · ≤29,300 ≥5 µm/m³) +

+ + + ); +} + +// ── Page ────────────────────────────────────────────────────────────────────── + +export default function EnvironmentalPage() { + const { thresholds } = useThresholds(); + const [rooms, setRooms] = useState([]); + const [tempHist, setTempHist] = useState([]); + const [humHist, setHumHist] = useState([]); + const [cracs, setCracs] = useState([]); + const [leakSensors, setLeak] = useState([]); + const [fireZones, setFire] = useState([]); + const [particles, setParticles] = useState([]); + const [hours, setHours] = useState(6); + const [loading, setLoading] = useState(true); + const [selectedRack, setSelectedRack] = useState(null); + const [activeRoom, setActiveRoom] = useState("hall-a"); + + const load = useCallback(async () => { + try { + const [r, t, h, c, l, f, p] = await Promise.all([ + fetchRackEnvReadings(SITE_ID), + fetchRoomTempHistory(SITE_ID, hours), + fetchHumidityHistory(SITE_ID, hours), + fetchCracStatus(SITE_ID), + fetchLeakStatus(SITE_ID).catch(() => []), + fetchFireStatus(SITE_ID).catch(() => []), + fetchParticleStatus(SITE_ID).catch(() => []), + ]); + setRooms(r); + setTempHist(t); + setHumHist(h); + setCracs(c); + setLeak(l); + setFire(f); + setParticles(p); + } catch { + toast.error("Failed to load environmental data"); + } finally { + setLoading(false); + } + }, [hours]); + + useEffect(() => { + load(); + const id = setInterval(load, 30_000); + return () => clearInterval(id); + }, [load]); + + return ( +
+
+
+

Environmental Monitoring

+

Singapore DC01 — refreshes every 30s

+
+ +
+ + {loading ? ( +
+ + +
+ ) : ( + <> + setSelectedRack(null)} /> + + {/* Page-level room tab selector */} + {rooms.length > 0 && ( + + + {rooms.map(r => ( + + {roomLabels[r.room_id] ?? r.room_id} + + ))} + + + )} + + {rooms.length > 0 && ( + + )} +
+ {rooms.length > 0 && } + {rooms.length > 0 && ( + + )} +
+ {/* Leak + VESDA panels */} +
+ + +
+ + + + )} +
+ ); +} diff --git a/frontend/app/(dashboard)/fire/page.tsx b/frontend/app/(dashboard)/fire/page.tsx new file mode 100644 index 0000000..04a64ad --- /dev/null +++ b/frontend/app/(dashboard)/fire/page.tsx @@ -0,0 +1,285 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { toast } from "sonner"; +import { fetchFireStatus, type FireZoneStatus } from "@/lib/api"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Flame, RefreshCw, CheckCircle2, AlertTriangle, Zap, Wind, Activity } from "lucide-react"; +import { cn } from "@/lib/utils"; + +const SITE_ID = "sg-01"; + +const LEVEL_CONFIG: Record = { + normal: { + label: "Normal", + bg: "bg-green-500/10", border: "border-green-500/20", text: "text-green-400", + icon: CheckCircle2, pulsing: false, + }, + alert: { + label: "Alert", + bg: "bg-amber-500/10", border: "border-amber-500/40", text: "text-amber-400", + icon: AlertTriangle, pulsing: false, + }, + action: { + label: "Action", + bg: "bg-orange-500/10", border: "border-orange-500/40", text: "text-orange-400", + icon: AlertTriangle, pulsing: true, + }, + fire: { + label: "FIRE", + bg: "bg-destructive/10", border: "border-destructive/60", text: "text-destructive", + icon: Flame, pulsing: true, + }, +}; + +function ObscurationBar({ value }: { value: number | null }) { + if (value == null) return null; + const pct = Math.min(100, value * 20); // 0–5 %/m mapped to 0–100% + const color = value > 3 ? "#ef4444" : value > 1.5 ? "#f59e0b" : "#94a3b8"; + return ( +
+
+ Obscuration + {value.toFixed(2)} %/m +
+
+
+
+
+ ); +} + +function StatusIndicator({ label, ok, icon: Icon }: { + label: string; ok: boolean; icon: React.ElementType; +}) { + return ( +
+ +
+

{label}

+

+ {ok ? "OK" : "Fault"} +

+
+
+ ); +} + +function VesdaCard({ zone }: { zone: FireZoneStatus }) { + const level = zone.level; + const cfg = LEVEL_CONFIG[level] ?? LEVEL_CONFIG.normal; + const Icon = cfg.icon; + const isAlarm = level !== "normal"; + + return ( + + +
+ + + {zone.zone_id.toUpperCase()} + + + + {cfg.label} + +
+
+ + + {level === "fire" && ( +
+ FIRE ALARM — Initiate evacuation and contact emergency services immediately +
+ )} + {level === "action" && ( +
+ Action threshold reached — investigate smoke source immediately +
+ )} + {level === "alert" && ( +
+ Alert level — elevated smoke particles detected, monitor closely +
+ )} + + + + {/* Detector status */} +
+ {[ + { label: "Detector 1", ok: zone.detector_1_ok }, + { label: "Detector 2", ok: zone.detector_2_ok }, + ].map(({ label, ok }) => ( +
+ + {ok ? : } + {label} + + + {ok ? "Online" : "Fault"} + +
+ ))} +
+ + {/* System status */} +
+ + +
+
+
+ ); +} + +export default function FireSafetyPage() { + const [zones, setZones] = useState([]); + const [loading, setLoading] = useState(true); + + const load = useCallback(async () => { + try { + setZones(await fetchFireStatus(SITE_ID)); + } catch { toast.error("Failed to load fire safety data"); } + finally { setLoading(false); } + }, []); + + useEffect(() => { + load(); + const id = setInterval(load, 10_000); + return () => clearInterval(id); + }, [load]); + + const fireZones = zones.filter((z) => z.level === "fire"); + const actionZones = zones.filter((z) => z.level === "action"); + const alertZones = zones.filter((z) => z.level === "alert"); + const normalZones = zones.filter((z) => z.level === "normal"); + const anyAlarm = fireZones.length + actionZones.length + alertZones.length > 0; + + const worstLevel = + fireZones.length > 0 ? "fire" : + actionZones.length > 0 ? "action" : + alertZones.length > 0 ? "alert" : "normal"; + const worstCfg = LEVEL_CONFIG[worstLevel]; + const WIcon = worstCfg.icon; + + return ( +
+ {/* Header */} +
+
+

Fire & Life Safety

+

Singapore DC01 — VESDA aspirating detector network · refreshes every 10s

+
+
+ {!loading && ( + + + {anyAlarm + ? `${fireZones.length + actionZones.length + alertZones.length} zone${fireZones.length + actionZones.length + alertZones.length > 1 ? "s" : ""} in alarm` + : `All ${zones.length} zones normal`} + + )} + +
+
+ + {/* System summary bar */} + {!loading && ( +
+
+ + VESDA zones monitored: + {zones.length} +
+ {[ + { label: "Fire", count: fireZones.length, cls: "text-destructive" }, + { label: "Action", count: actionZones.length, cls: "text-orange-400" }, + { label: "Alert", count: alertZones.length, cls: "text-amber-400" }, + { label: "Normal", count: normalZones.length, cls: "text-green-400" }, + ].map(({ label, count, cls }) => ( +
+ {label}: + {count} +
+ ))} +
+ All detectors use VESDA aspirating smoke detection technology +
+
+ )} + + {/* Fire alarm banner */} + {!loading && fireZones.length > 0 && ( +
+
+ +

+ FIRE ALARM ACTIVE — {fireZones.length} zone{fireZones.length > 1 ? "s" : ""} +

+
+

+ Initiate building evacuation. Contact SCDF (995). Do not re-enter until cleared by fire services. +

+
+ )} + + {/* Zone cards — alarms first */} + {loading ? ( +
+ {Array.from({ length: 4 }).map((_, i) => )} +
+ ) : zones.length === 0 ? ( +
No VESDA zone data available
+ ) : ( +
+ {[...fireZones, ...actionZones, ...alertZones, ...normalZones].map((zone) => ( + + ))} +
+ )} + + {/* Legend */} +
+

VESDA Alert Levels

+
+ {Object.entries(LEVEL_CONFIG).map(([key, cfg]) => { + const Icon = cfg.icon; + return ( +
+
+ + {cfg.label} +
+

+ {key === "normal" ? "No smoke detected, system clear" : + key === "alert" ? "Trace smoke particles, monitor" : + key === "action" ? "Significant smoke, investigate now" : + "Confirmed fire, evacuate immediately"} +

+
+ ); + })} +
+
+
+ ); +} diff --git a/frontend/app/(dashboard)/floor-map/page.tsx b/frontend/app/(dashboard)/floor-map/page.tsx new file mode 100644 index 0000000..ed53de8 --- /dev/null +++ b/frontend/app/(dashboard)/floor-map/page.tsx @@ -0,0 +1,963 @@ +"use client"; + +import { useEffect, useState, useCallback, useRef } from "react"; +import { TransformWrapper, TransformComponent } from "react-zoom-pan-pinch"; +import { + fetchCapacitySummary, fetchCracStatus, fetchAlarms, fetchLeakStatus, + fetchFloorLayout, saveFloorLayout, + type CapacitySummary, type CracStatus, type Alarm, type LeakSensorStatus, +} from "@/lib/api"; +import { RackDetailSheet } from "@/components/dashboard/rack-detail-sheet"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Sheet, SheetContent, SheetTrigger } from "@/components/ui/sheet"; +import { Button } from "@/components/ui/button"; +import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"; +import { + Wind, WifiOff, Thermometer, Zap, CheckCircle2, AlertTriangle, + Droplets, Cable, Flame, Snowflake, Settings2, Plus, Trash2, + GripVertical, ChevronDown, ChevronUp, +} from "lucide-react"; +import { cn } from "@/lib/utils"; +import { useThresholds } from "@/lib/threshold-context"; + +const SITE_ID = "sg-01"; + +// ── Layout type ─────────────────────────────────────────────────────────────── + +type RowLayout = { label: string; racks: string[] }; +type RoomLayout = { label: string; crac_id: string; rows: RowLayout[] }; +type FloorLayout = Record; + +const DEFAULT_LAYOUT: FloorLayout = { + "hall-a": { + label: "Hall A", + crac_id: "crac-01", + rows: [ + { label: "Row 1", racks: Array.from({ length: 20 }, (_, i) => `SG1A01.${String(i + 1).padStart(2, "0")}`) }, + { label: "Row 2", racks: Array.from({ length: 20 }, (_, i) => `SG1A02.${String(i + 1).padStart(2, "0")}`) }, + ], + }, + "hall-b": { + label: "Hall B", + crac_id: "crac-02", + rows: [ + { label: 
"Row 1", racks: Array.from({ length: 20 }, (_, i) => `SG1B01.${String(i + 1).padStart(2, "0")}`) }, + { label: "Row 2", racks: Array.from({ length: 20 }, (_, i) => `SG1B02.${String(i + 1).padStart(2, "0")}`) }, + ], + }, +}; + +// Derive feed from row index: even index → "A", odd → "B" +function getFeed(layout: FloorLayout, rackId: string): "A" | "B" | undefined { + for (const room of Object.values(layout)) { + for (let i = 0; i < room.rows.length; i++) { + if (room.rows[i].racks.includes(rackId)) return i % 2 === 0 ? "A" : "B"; + } + } + return undefined; +} + +// ── Colour helpers ──────────────────────────────────────────────────────────── + +function tempBg(temp: number | null, warn = 26, crit = 28) { + if (temp === null) return "oklch(0.22 0.02 265)"; + if (temp >= crit + 4) return "oklch(0.55 0.22 25)"; + if (temp >= crit) return "oklch(0.65 0.20 45)"; + if (temp >= warn) return "oklch(0.72 0.18 84)"; + if (temp >= warn - 2) return "oklch(0.78 0.14 140)"; + if (temp >= warn - 4) return "oklch(0.68 0.14 162)"; + return "oklch(0.60 0.15 212)"; +} + +function powerBg(pct: number | null) { + if (pct === null) return "oklch(0.22 0.02 265)"; + if (pct >= 90) return "oklch(0.55 0.22 25)"; + if (pct >= 75) return "oklch(0.65 0.20 45)"; + if (pct >= 55) return "oklch(0.72 0.18 84)"; + if (pct >= 35) return "oklch(0.68 0.14 162)"; + return "oklch(0.60 0.15 212)"; +} + +type Overlay = "temp" | "power" | "alarms" | "feed" | "crac"; + +function alarmBg(count: number): string { + if (count === 0) return "oklch(0.22 0.02 265)"; + if (count >= 3) return "oklch(0.55 0.22 25)"; + if (count >= 1) return "oklch(0.65 0.20 45)"; + return "oklch(0.68 0.14 162)"; +} + +function feedBg(feed: "A" | "B" | undefined): string { + if (feed === "A") return "oklch(0.55 0.18 255)"; + if (feed === "B") return "oklch(0.60 0.18 40)"; + return "oklch(0.22 0.02 265)"; +} + +const CRAC_ZONE_COLORS = [ + "oklch(0.55 0.18 255)", // blue — zone 1 + "oklch(0.60 0.18 40)", // amber — zone 2 + 
"oklch(0.60 0.16 145)", // teal — zone 3 + "oklch(0.58 0.18 310)", // purple — zone 4 +]; + +// ── Rack tile ───────────────────────────────────────────────────────────────── + +function RackTile({ + rackId, temp, powerPct, alarmCount, overlay, feed, cracColor, onClick, tempWarn = 26, tempCrit = 28, +}: { + rackId: string; temp: number | null; powerPct: number | null; + alarmCount: number; overlay: Overlay; feed?: "A" | "B"; cracColor?: string; onClick: () => void; + tempWarn?: number; tempCrit?: number; +}) { + const offline = temp === null && powerPct === null; + const bg = offline ? "oklch(0.22 0.02 265)" + : overlay === "temp" ? tempBg(temp, tempWarn, tempCrit) + : overlay === "power" ? powerBg(powerPct) + : overlay === "feed" ? feedBg(feed) + : overlay === "crac" ? (cracColor ?? "oklch(0.22 0.02 265)") + : alarmBg(alarmCount); + + const shortId = rackId.replace("rack-", "").toUpperCase(); + const mainVal = overlay === "temp" ? (temp !== null ? `${temp}°` : null) + : overlay === "power" ? (powerPct !== null ? `${Math.round(powerPct)}%` : null) + : overlay === "feed" ? (feed ?? null) + : (alarmCount > 0 ? String(alarmCount) : null); + const subVal = overlay === "temp" ? (powerPct !== null ? `${Math.round(powerPct)}%` : null) + : overlay === "power" ? (temp !== null ? `${temp}°C` : null) + : overlay === "feed" ? (temp !== null ? `${temp}°C` : null) + : (temp !== null ? `${temp}°C` : null); + + return ( + + ); +} + +// ── CRAC strip ──────────────────────────────────────────────────────────────── + +function CracStrip({ crac }: { crac: CracStatus | undefined }) { + const online = crac?.state === "online"; + return ( +
+ +
+ {crac?.crac_id.toUpperCase() ?? "CRAC"} + + {online ? : } + {online ? "Online" : "Fault"} + +
+ {crac && online && ( +
+ + + Supply {crac.supply_temp ?? "—"}°C + + + + Return {crac.return_temp ?? "—"}°C + + {crac.delta !== null && ( + ΔT 14 ? "text-destructive" : crac.delta > 11 ? "text-amber-400" : "text-green-400" + )}>+{crac.delta}°C + )} + {crac.fan_pct !== null && ( + Fan {crac.fan_pct}% + )} + {crac.cooling_capacity_pct !== null && ( + Cap = 90 ? "text-destructive" : + (crac.cooling_capacity_pct ?? 0) >= 75 ? "text-amber-400" : "text-foreground" + )}>{crac.cooling_capacity_pct?.toFixed(0)}% + )} +
+ )} +
+ ); +} + +// ── Room plan ───────────────────────────────────────────────────────────────── + +function RoomPlan({ + roomId, layout, data, cracs, overlay, alarmsByRack, onRackClick, tempWarn = 26, tempCrit = 28, +}: { + roomId: string; + layout: FloorLayout; + data: CapacitySummary; + cracs: CracStatus[]; + overlay: Overlay; + alarmsByRack: Map; + onRackClick: (id: string) => void; + tempWarn?: number; + tempCrit?: number; +}) { + const roomLayout = layout[roomId]; + if (!roomLayout) return null; + + const rackMap = new Map(data.racks.map((r) => [r.rack_id, r])); + const crac = cracs.find((c) => c.crac_id === roomLayout.crac_id); + const roomRacks = data.racks.filter((r) => r.room_id === roomId); + const offlineCount = roomRacks.filter((r) => r.temp === null && r.power_kw === null).length; + + const avgTemp = (() => { + const temps = roomRacks.map((r) => r.temp).filter((t): t is number => t !== null); + return temps.length ? Math.round((temps.reduce((a, b) => a + b, 0) / temps.length) * 10) / 10 : null; + })(); + const totalPower = (() => { + const powers = roomRacks.map((r) => r.power_kw).filter((p): p is number => p !== null); + return powers.length ? Math.round(powers.reduce((a, b) => a + b, 0) * 10) / 10 : null; + })(); + + return ( +
+
+ {roomRacks.length} racks + {avgTemp !== null && ( + + + Avg {avgTemp}°C + + )} + {totalPower !== null && ( + + + {totalPower} kW IT load + + )} + {offlineCount > 0 && ( + + + {offlineCount} offline + + )} +
+ + + {({ zoomIn, zoomOut, resetTransform }) => ( + <> +
+ + + + Drag to pan +
+ +
+ + +
+
+ + HOT AISLE + +
+
+ + {roomLayout.rows.map((row, rowIdx) => { + const rowCracColor = CRAC_ZONE_COLORS[rowIdx % CRAC_ZONE_COLORS.length]; + return ( +
+
+ + {row.label} + +
+ {row.racks.map((rackId) => { + const rack = rackMap.get(rackId); + return ( + onRackClick(rackId)} + tempWarn={tempWarn} + tempCrit={tempCrit} + /> + ); + })} +
+
+ {rowIdx < roomLayout.rows.length - 1 && ( +
+
+ + COLD AISLE + +
+
+ )} +
+ ); + })} + +
+
+ + HOT AISLE + +
+
+
+ + + )} + +
+ ); +} + +// ── Leak sensor panel ───────────────────────────────────────────────────────── + +function LeakSensorPanel({ sensors }: { sensors: LeakSensorStatus[] }) { + if (sensors.length === 0) return null; + const active = sensors.filter((s) => s.state === "detected"); + const byZone = sensors.reduce>((acc, s) => { + const zone = s.floor_zone ?? "unknown"; + (acc[zone] ??= []).push(s); + return acc; + }, {}); + + return ( + 0 && "border-destructive/50")}> + +
+ + 0 ? "text-destructive" : "text-blue-400")} /> + Leak Sensor Status + + 0 ? "bg-destructive/10 text-destructive" : "bg-green-500/10 text-green-400", + )}> + {active.length > 0 ? `${active.length} leak${active.length > 1 ? "s" : ""} detected` : "All clear"} + +
+
+ +
+ {Object.entries(byZone).map(([zone, zoneSensors]) => ( +
+

{zone}

+ {zoneSensors.map((s) => { + const detected = s.state === "detected"; + return ( +
+
+
+
+ {s.sensor_id} +
+

+ {[ + s.under_floor ? "Under floor" : "Surface mount", + s.near_crac ? "near CRAC" : null, + s.room_id ?? null, + ].filter(Boolean).join(" · ")} +

+
+ + {detected ? "LEAK" : s.state === "unknown" ? "unknown" : "clear"} + +
+ ); + })} +
+ ))} +
+ + + ); +} + +// ── Layout editor ───────────────────────────────────────────────────────────── + +function LayoutEditor({ + layout, onSave, saving, +}: { + layout: FloorLayout; + onSave: (l: FloorLayout) => void; + saving?: boolean; +}) { + const [draft, setDraft] = useState(() => JSON.parse(JSON.stringify(layout))); + const [newRoomId, setNewRoomId] = useState(""); + const [newRoomLabel, setNewRoomLabel] = useState(""); + const [newRoomCrac, setNewRoomCrac] = useState(""); + const [expandedRoom, setExpandedRoom] = useState(Object.keys(draft)[0] ?? null); + + function updateRoom(roomId: string, patch: Partial) { + setDraft(d => ({ ...d, [roomId]: { ...d[roomId], ...patch } })); + } + + function deleteRoom(roomId: string) { + setDraft(d => { const n = { ...d }; delete n[roomId]; return n; }); + if (expandedRoom === roomId) setExpandedRoom(null); + } + + function addRoom() { + const id = newRoomId.trim().toLowerCase().replace(/\s+/g, "-"); + if (!id || !newRoomLabel.trim() || draft[id]) return; + setDraft(d => ({ + ...d, + [id]: { label: newRoomLabel.trim(), crac_id: newRoomCrac.trim(), rows: [] }, + })); + setNewRoomId(""); setNewRoomLabel(""); setNewRoomCrac(""); + setExpandedRoom(id); + } + + function addRow(roomId: string) { + const room = draft[roomId]; + const label = `Row ${room.rows.length + 1}`; + updateRoom(roomId, { rows: [...room.rows, { label, racks: [] }] }); + } + + function deleteRow(roomId: string, rowIdx: number) { + const rows = draft[roomId].rows.filter((_, i) => i !== rowIdx); + updateRoom(roomId, { rows }); + } + + function updateRowLabel(roomId: string, rowIdx: number, label: string) { + const rows = draft[roomId].rows.map((r, i) => i === rowIdx ? { ...r, label } : r); + updateRoom(roomId, { rows }); + } + + function addRack(roomId: string, rowIdx: number, rackId: string) { + const id = rackId.trim(); + if (!id) return; + const rows = draft[roomId].rows.map((r, i) => + i === rowIdx ? 
{ ...r, racks: [...r.racks, id] } : r + ); + updateRoom(roomId, { rows }); + } + + function removeRack(roomId: string, rowIdx: number, rackIdx: number) { + const rows = draft[roomId].rows.map((r, i) => + i === rowIdx ? { ...r, racks: r.racks.filter((_, j) => j !== rackIdx) } : r + ); + updateRoom(roomId, { rows }); + } + + function moveRow(roomId: string, rowIdx: number, dir: -1 | 1) { + const rows = [...draft[roomId].rows]; + const target = rowIdx + dir; + if (target < 0 || target >= rows.length) return; + [rows[rowIdx], rows[target]] = [rows[target], rows[rowIdx]]; + updateRoom(roomId, { rows }); + } + + return ( +
+
+

+ Floor Layout Editor +

+

+ Configure rooms, rows, and rack positions. Changes are saved for all users. +

+
+ +
+ {Object.entries(draft).map(([roomId, room]) => ( +
+ {/* Room header */} +
+ + +
+ + {expandedRoom === roomId && ( +
+ {/* Room fields */} +
+
+ + updateRoom(roomId, { label: e.target.value })} + className="w-full h-7 rounded border border-border bg-background px-2 text-xs focus:outline-none focus:ring-1 focus:ring-primary" + /> +
+
+ + updateRoom(roomId, { crac_id: e.target.value })} + placeholder="e.g. crac-01" + className="w-full h-7 rounded border border-border bg-background px-2 text-xs focus:outline-none focus:ring-1 focus:ring-primary" + /> +
+
+ + {/* Rows */} +
+
+

Rows

+ +
+ + {room.rows.length === 0 && ( +

No rows — click Add Row

+ )} + + {room.rows.map((row, rowIdx) => ( +
+
+ + updateRowLabel(roomId, rowIdx, e.target.value)} + className="flex-1 h-6 rounded border border-border bg-background px-2 text-xs focus:outline-none focus:ring-1 focus:ring-primary" + /> + + + +
+ + {/* Racks in this row */} +
+ {row.racks.map((rackId, rackIdx) => ( + + {rackId} + + + ))} + addRack(roomId, rowIdx, id)} /> +
+

+ Feed: {rowIdx % 2 === 0 ? "A (even rows)" : "B (odd rows)"} · {row.racks.length} rack{row.racks.length !== 1 ? "s" : ""} +

+
+ ))} +
+
+ )} +
+ ))} + + {/* Add new room */} +
+

Add New Room

+
+ setNewRoomId(e.target.value)} + placeholder="room-id (e.g. hall-c)" + className="h-7 rounded border border-border bg-background px-2 text-xs focus:outline-none focus:ring-1 focus:ring-primary" + /> + setNewRoomLabel(e.target.value)} + placeholder="Label (e.g. Hall C)" + className="h-7 rounded border border-border bg-background px-2 text-xs focus:outline-none focus:ring-1 focus:ring-primary" + /> + setNewRoomCrac(e.target.value)} + placeholder="CRAC ID (e.g. crac-03)" + className="h-7 rounded border border-border bg-background px-2 text-xs focus:outline-none focus:ring-1 focus:ring-primary" + /> +
+ +
+
+ + {/* Footer actions */} +
+ + +
+
+ ); +} + +// Small inline input to add a rack ID to a row +function RackAdder({ onAdd }: { onAdd: (id: string) => void }) { + const [val, setVal] = useState(""); + function submit() { + if (val.trim()) { onAdd(val.trim()); setVal(""); } + } + return ( + + setVal(e.target.value)} + onKeyDown={e => e.key === "Enter" && submit()} + placeholder="rack-id" + className="h-5 w-20 rounded border border-dashed border-border bg-background px-1.5 text-[10px] font-mono focus:outline-none focus:ring-1 focus:ring-primary" + /> + + + ); +} + +// ── Page ────────────────────────────────────────────────────────────────────── + +export default function FloorMapPage() { + const { thresholds } = useThresholds(); + const [layout, setLayout] = useState(DEFAULT_LAYOUT); + const [data, setData] = useState(null); + const [cracs, setCracs] = useState([]); + const [alarms, setAlarms] = useState([]); + const [leakSensors, setLeakSensors] = useState([]); + const [loading, setLoading] = useState(true); + const [overlay, setOverlay] = useState("temp"); + const [activeRoom, setActiveRoom] = useState(""); + const [selectedRack, setSelectedRack] = useState(null); + const [editorOpen, setEditorOpen] = useState(false); + const [layoutSaving, setLayoutSaving] = useState(false); + + // Load layout from backend on mount + useEffect(() => { + fetchFloorLayout(SITE_ID) + .then((remote) => { + const parsed = remote as FloorLayout; + setLayout(parsed); + setActiveRoom(Object.keys(parsed)[0] ?? ""); + }) + .catch(() => { + // No saved layout yet — use default + setActiveRoom(Object.keys(DEFAULT_LAYOUT)[0] ?? ""); + }); + }, []); + + const alarmsByRack = new Map(); + for (const a of alarms) { + if (a.rack_id) alarmsByRack.set(a.rack_id, (alarmsByRack.get(a.rack_id) ?? 
0) + 1); + } + + const load = useCallback(async () => { + try { + const [d, c, a, ls] = await Promise.all([ + fetchCapacitySummary(SITE_ID), + fetchCracStatus(SITE_ID), + fetchAlarms(SITE_ID, "active", 200), + fetchLeakStatus(SITE_ID).catch(() => [] as LeakSensorStatus[]), + ]); + setData(d); + setCracs(c); + setAlarms(a); + setLeakSensors(ls); + } catch { /* keep stale */ } + finally { setLoading(false); } + }, []); + + useEffect(() => { + load(); + const id = setInterval(load, 30_000); + return () => clearInterval(id); + }, [load]); + + async function handleSaveLayout(newLayout: FloorLayout) { + setLayoutSaving(true); + try { + await saveFloorLayout(SITE_ID, newLayout as unknown as Record); + setLayout(newLayout); + if (!newLayout[activeRoom]) setActiveRoom(Object.keys(newLayout)[0] ?? ""); + setEditorOpen(false); + } catch { + // save failed — keep editor open so user can retry + } finally { + setLayoutSaving(false); + } + } + + const roomIds = Object.keys(layout); + + return ( +
+
+
+

Floor Map

+

Singapore DC01 — live rack layout · refreshes every 30s

+
+
+ {/* Overlay selector */} +
+ {([ + { val: "temp" as Overlay, icon: Thermometer, label: "Temperature" }, + { val: "power" as Overlay, icon: Zap, label: "Power %" }, + { val: "alarms" as Overlay, icon: AlertTriangle, label: "Alarms" }, + { val: "feed" as Overlay, icon: Cable, label: "Power Feed" }, + { val: "crac" as Overlay, icon: Wind, label: "CRAC Coverage" }, + ]).map(({ val, icon: Icon, label }) => ( + + ))} +
+ + {/* Layout editor trigger */} + + + + + + + + +
+
+ + {loading ? ( + + ) : !data ? ( +
+ Unable to load floor map data. +
+ ) : ( + <> + setSelectedRack(null)} /> + + + +
+ Room View + {roomIds.length > 0 && ( + + + {roomIds.map((id) => ( + + {layout[id].label} + + ))} + + + )} +
+
+ + {activeRoom && layout[activeRoom] ? ( + + ) : ( +
+ +

No rooms configured

+

Use Edit Layout to add rooms and racks

+
+ )} +
+
+ + + + {/* Legend */} +
+ {overlay === "alarms" ? ( + <> + Alarm count: +
+ {([ + { c: "oklch(0.22 0.02 265)", l: "0" }, + { c: "oklch(0.65 0.20 45)", l: "1–2" }, + { c: "oklch(0.55 0.22 25)", l: "3+" }, + ]).map(({ c, l }) => ( + + + {l} + + ))} +
+ + ) : overlay === "feed" ? ( + <> + Power feed: +
+ + + Feed A (even rows) + + + + Feed B (odd rows) + +
+ + ) : overlay === "crac" ? ( + <> + CRAC thermal zones: +
+ {CRAC_ZONE_COLORS.slice(0, layout[activeRoom]?.rows.length ?? 2).map((c, i) => ( + + + Zone {i + 1} + + ))} +
+ + ) : ( + <> + {overlay === "temp" ? "Temperature:" : "Power utilisation:"} +
+ {overlay === "temp" + ? (["oklch(0.60 0.15 212)","oklch(0.68 0.14 162)","oklch(0.78 0.14 140)","oklch(0.72 0.18 84)","oklch(0.65 0.20 45)","oklch(0.55 0.22 25)"] as string[]).map((c, i) => ( + + )) + : (["oklch(0.60 0.15 212)","oklch(0.68 0.14 162)","oklch(0.72 0.18 84)","oklch(0.65 0.20 45)","oklch(0.55 0.22 25)"] as string[]).map((c, i) => ( + + ))} + {overlay === "temp" ? "Cool → Hot" : "Low → High"} +
+ {overlay === "temp" && Warn: {thresholds.temp.warn}°C  |  Critical: {thresholds.temp.critical}°C} + {overlay === "power" && Warn: 75%  |  Critical: 90%} + + )} + Click any rack to drill down +
+ + )} +
+ ); +} diff --git a/frontend/app/(dashboard)/generator/page.tsx b/frontend/app/(dashboard)/generator/page.tsx new file mode 100644 index 0000000..cb413f1 --- /dev/null +++ b/frontend/app/(dashboard)/generator/page.tsx @@ -0,0 +1,412 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { toast } from "sonner"; +import { + fetchGeneratorStatus, fetchAtsStatus, fetchPhaseBreakdown, + type GeneratorStatus, type AtsStatus, type RoomPhase, +} from "@/lib/api"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + Fuel, Zap, Activity, RefreshCw, CheckCircle2, AlertTriangle, + ArrowLeftRight, Gauge, Thermometer, Battery, +} from "lucide-react"; +import { cn } from "@/lib/utils"; +import { GeneratorDetailSheet } from "@/components/dashboard/generator-detail-sheet"; + +const SITE_ID = "sg-01"; + +const STATE_COLOR: Record = { + running: "bg-green-500/10 text-green-400", + standby: "bg-blue-500/10 text-blue-400", + test: "bg-amber-500/10 text-amber-400", + fault: "bg-destructive/10 text-destructive", + unknown: "bg-muted/30 text-muted-foreground", +}; + +const ATS_FEED_COLOR: Record = { + "utility-a": "bg-blue-500/10 text-blue-400", + "utility-b": "bg-sky-500/10 text-sky-400", + "generator": "bg-amber-500/10 text-amber-400", +}; + +function FillBar({ + value, max, color = "#22c55e", warn, crit, +}: { + value: number | null; max: number; color?: string; warn?: number; crit?: number; +}) { + const pct = value != null ? Math.min(100, (value / max) * 100) : 0; + const bg = crit && value != null && value >= crit ? "#ef4444" + : warn && value != null && value >= warn ? "#f59e0b" + : color; + return ( +
+
+
+ ); +} + +function StatRow({ label, value, warn }: { label: string; value: string; warn?: boolean }) { + return ( +
+ {label} + {value} +
+ ); +} + +function GeneratorCard({ gen, onClick }: { gen: GeneratorStatus; onClick: () => void }) { + const fuelLow = (gen.fuel_pct ?? 100) < 25; + const fuelCrit = (gen.fuel_pct ?? 100) < 10; + const isFault = gen.state === "fault"; + const isRun = gen.state === "running" || gen.state === "test"; + + return ( + + +
+ + + {gen.gen_id.toUpperCase()} + +
+ + {gen.state} + +
+
+
+ + + {/* Fuel level */} +
+
+ + Fuel Level + + + {gen.fuel_pct != null ? `${gen.fuel_pct.toFixed(1)}%` : "—"} + +
+ + {gen.fuel_litres != null && ( +

+ {gen.fuel_litres.toFixed(0)} L remaining +

+ )} + {gen.fuel_litres != null && gen.load_kw != null && gen.load_kw > 0 && (() => { + const runtimeH = gen.fuel_litres / (gen.load_kw * 0.27); + const hours = Math.floor(runtimeH); + const mins = Math.round((runtimeH - hours) * 60); + const cls = runtimeH < 4 ? "text-destructive" : runtimeH < 12 ? "text-amber-400" : "text-green-400"; + return ( +

+ Est. runtime: {hours}h {mins}m +

+ ); + })()} +
+ + {/* Load */} + {gen.load_kw != null && ( +
+
+ + Load + + + {gen.load_kw.toFixed(1)} kW + {gen.load_pct != null && ( + ({gen.load_pct.toFixed(0)}%) + )} + +
+ +
+ )} + + {/* Engine stats */} +
+

Engine

+ {gen.voltage_v != null && } + {gen.frequency_hz != null && 0.5} />} + {gen.run_hours != null && } + {gen.oil_pressure_bar != null && } + {gen.coolant_temp_c != null && ( +
+ + Coolant temp + + 95 ? "text-destructive" : gen.coolant_temp_c > 85 ? "text-amber-400" : "")}> + {gen.coolant_temp_c.toFixed(1)}°C + +
+ )} + {gen.battery_v != null && ( +
+ + Battery + + + {gen.battery_v.toFixed(1)} V + +
+ )} +
+
+
+ ); +} + +function AtsCard({ ats }: { ats: AtsStatus }) { + const feedColor = ATS_FEED_COLOR[ats.active_feed] ?? "bg-muted/30 text-muted-foreground"; + const isGen = ats.active_feed === "generator"; + + return ( + + + + + {ats.ats_id.toUpperCase()} — ATS Transfer Switch + + + +
+ Active feed + + {ats.active_feed} + + {isGen && Running on generator power} +
+ +
+ {[ + { label: "Utility A", v: ats.utility_a_v }, + { label: "Utility B", v: ats.utility_b_v }, + { label: "Generator", v: ats.generator_v }, + ].map(({ label, v }) => ( +
+

{label}

+

{v != null ? `${v.toFixed(0)} V` : "—"}

+
+ ))} +
+ +
+ {ats.transfer_count != null && ( +
+

Transfers (total)

+

{ats.transfer_count}

+
+ )} + {ats.last_transfer_ms != null && ( +
+

Last transfer time

+

{ats.last_transfer_ms} ms

+
+ )} +
+
+
+ ); +} + +function PhaseImbalancePanel({ rooms }: { rooms: RoomPhase[] }) { + const allRacks = rooms.flatMap((r) => r.racks); + const flagged = allRacks + .filter((r) => (r.imbalance_pct ?? 0) >= 5) + .sort((a, b) => (b.imbalance_pct ?? 0) - (a.imbalance_pct ?? 0)); + + if (flagged.length === 0) return ( +
+ + No PDU phase imbalance detected across all racks +
+ ); + + return ( + + + + + PDU Phase Imbalance + + + +
+ {flagged.map((rack) => { + const crit = (rack.imbalance_pct ?? 0) >= 15; + return ( +
+ {rack.rack_id.toUpperCase()} + + {rack.imbalance_pct?.toFixed(1)}% imbalance + + A: {rack.phase_a_kw?.toFixed(2) ?? "—"} kW + B: {rack.phase_b_kw?.toFixed(2) ?? "—"} kW + C: {rack.phase_c_kw?.toFixed(2) ?? "—"} kW +
+ ); + })} +
+
+
+ ); +} + +export default function GeneratorPage() { + const [generators, setGenerators] = useState([]); + const [atsUnits, setAtsUnits] = useState([]); + const [phases, setPhases] = useState([]); + const [loading, setLoading] = useState(true); + const [selectedGen, setSelectedGen] = useState(null); + + const load = useCallback(async () => { + try { + const [g, a, p] = await Promise.all([ + fetchGeneratorStatus(SITE_ID), + fetchAtsStatus(SITE_ID).catch(() => [] as AtsStatus[]), + fetchPhaseBreakdown(SITE_ID).catch(() => [] as RoomPhase[]), + ]); + setGenerators(g); + setAtsUnits(a); + setPhases(p); + } catch { toast.error("Failed to load generator data"); } + finally { setLoading(false); } + }, []); + + useEffect(() => { + load(); + const id = setInterval(load, 15_000); + return () => clearInterval(id); + }, [load]); + + const anyFault = generators.some((g) => g.state === "fault"); + const anyRun = generators.some((g) => g.state === "running" || g.state === "test"); + const onGen = atsUnits.some((a) => a.active_feed === "generator"); + + return ( +
+ {/* Header */} +
+
+

Generator & Power Path

+

Singapore DC01 — backup power systems · refreshes every 15s

+
+
+ {!loading && ( + + {anyFault ? <> Generator fault : + onGen ? <> Running on generator : + anyRun ? <> Generator running (test) : + <> Utility power — all standby} + + )} + +
+
+ + {/* Site power status bar */} + {!loading && atsUnits.length > 0 && ( +
+ +
+ Power path: + {onGen ? "Generator (utility lost)" : "Utility mains"} +
+
+ Generators: + {generators.length} total + + ({generators.filter((g) => g.state === "standby").length} standby,{" "} + {generators.filter((g) => g.state === "running").length} running,{" "} + {generators.filter((g) => g.state === "fault").length} fault) + +
+
+ )} + + {/* Generators */} +

+ Diesel Generators +

+ {loading ? ( +
+ + +
+ ) : generators.length === 0 ? ( +
No generator data available
+ ) : ( +
+ {generators.map((g) => ( + setSelectedGen(g.gen_id)} /> + ))} +
+ )} + + {/* ATS */} +

+ Automatic Transfer Switches +

+ {loading ? ( + + ) : atsUnits.length === 0 ? ( +
No ATS data available
+ ) : ( +
+ {atsUnits.map((a) => )} +
+ )} + + setSelectedGen(null)} + /> + + {/* Phase imbalance */} +

+ PDU Phase Balance +

+ {loading ? ( + + ) : ( + + )} +
+ ); +} diff --git a/frontend/app/(dashboard)/layout.tsx b/frontend/app/(dashboard)/layout.tsx new file mode 100644 index 0000000..8900523 --- /dev/null +++ b/frontend/app/(dashboard)/layout.tsx @@ -0,0 +1,33 @@ +import { Sidebar } from "@/components/layout/sidebar"; +import { Topbar } from "@/components/layout/topbar"; +import { AlarmProvider } from "@/lib/alarm-context"; +import { ThresholdProvider } from "@/lib/threshold-context"; +import { ErrorBoundary } from "@/components/error-boundary"; +import { Toaster } from "sonner"; + +export default function DashboardLayout({ + children, +}: { + children: React.ReactNode; +}) { + return ( + + +
+
+ +
+
+ +
+ + {children} + +
+
+ +
+
+
+ ); +} diff --git a/frontend/app/(dashboard)/leak/page.tsx b/frontend/app/(dashboard)/leak/page.tsx new file mode 100644 index 0000000..2686eba --- /dev/null +++ b/frontend/app/(dashboard)/leak/page.tsx @@ -0,0 +1,244 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { toast } from "sonner"; +import { fetchLeakStatus, type LeakSensorStatus } from "@/lib/api"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Droplets, RefreshCw, CheckCircle2, AlertTriangle, MapPin, Wind, Clock } from "lucide-react"; +import { cn } from "@/lib/utils"; + +const SITE_ID = "sg-01"; + +function SensorBadge({ state }: { state: string }) { + const cfg = { + detected: { cls: "bg-destructive/10 text-destructive border-destructive/30", label: "LEAK DETECTED" }, + clear: { cls: "bg-green-500/10 text-green-400 border-green-500/20", label: "Clear" }, + unknown: { cls: "bg-muted/30 text-muted-foreground border-border", label: "Unknown" }, + }[state] ?? { cls: "bg-muted/30 text-muted-foreground border-border", label: state }; + return ( + + {cfg.label} + + ); +} + +function SensorCard({ sensor }: { sensor: LeakSensorStatus }) { + const detected = sensor.state === "detected"; + const sensorAny = sensor as LeakSensorStatus & { last_triggered_at?: string | null; trigger_count_30d?: number }; + const triggerCount30d = sensorAny.trigger_count_30d ?? 0; + const lastTriggeredAt = sensorAny.last_triggered_at ?? null; + + let lastTriggeredText: string; + if (lastTriggeredAt) { + const daysAgo = Math.floor((Date.now() - new Date(lastTriggeredAt).getTime()) / (1000 * 60 * 60 * 24)); + lastTriggeredText = daysAgo === 0 ? "Today" : `${daysAgo}d ago`; + } else if (detected) { + lastTriggeredText = "Currently active"; + } else { + lastTriggeredText = "No recent events"; + } + + return ( +
+
+
+
+
+

{sensor.sensor_id}

+ {sensor.floor_zone && ( +

+ {sensor.floor_zone} +

+ )} +
+
+
+ {triggerCount30d} events (30d) + +
+
+ +
+ {sensor.room_id && ( +
+ Room: + {sensor.room_id} +
+ )} + {sensor.near_crac && ( +
+ + Near CRAC +
+ )} +
+ {sensor.under_floor ? "Under raised floor" : "Above floor level"} +
+
+ + {lastTriggeredText} +
+
+ + {detected && ( +
+ Water detected — inspect immediately and isolate if necessary +
+ )} +
+ ); +} + +export default function LeakDetectionPage() { + const [sensors, setSensors] = useState([]); + const [loading, setLoading] = useState(true); + + const load = useCallback(async () => { + try { + setSensors(await fetchLeakStatus(SITE_ID)); + } catch { toast.error("Failed to load leak sensor data"); } + finally { setLoading(false); } + }, []); + + useEffect(() => { + load(); + const id = setInterval(load, 15_000); + return () => clearInterval(id); + }, [load]); + + const active = sensors.filter((s) => s.state === "detected"); + const offline = sensors.filter((s) => s.state === "unknown"); + const dry = sensors.filter((s) => s.state === "clear"); + + // Group by floor_zone + const byZone = sensors.reduce>((acc, s) => { + const zone = s.floor_zone ?? "Unassigned"; + (acc[zone] ??= []).push(s); + return acc; + }, {}); + + const zoneEntries = Object.entries(byZone).sort(([a], [b]) => a.localeCompare(b)); + + return ( +
+ {/* Header */} +
+
+

Leak Detection

+

Singapore DC01 — water sensor site map · refreshes every 15s

+
+
+ {!loading && ( + 0 + ? "bg-destructive/10 text-destructive" + : "bg-green-500/10 text-green-400", + )}> + {active.length > 0 + ? <> {active.length} leak{active.length > 1 ? "s" : ""} detected + : <> No leaks detected} + + )} + +
+
+ + {/* KPI bar */} + {!loading && ( +
+ {[ + { + label: "Active Leaks", + value: active.length, + sub: "require immediate action", + cls: active.length > 0 ? "border-destructive/40 bg-destructive/5 text-destructive" : "border-border bg-muted/10 text-green-400", + }, + { + label: "Sensors Clear", + value: dry.length, + sub: `of ${sensors.length} total sensors`, + cls: "border-border bg-muted/10 text-foreground", + }, + { + label: "Offline", + value: offline.length, + sub: "no signal", + cls: offline.length > 0 ? "border-amber-500/30 bg-amber-500/5 text-amber-400" : "border-border bg-muted/10 text-muted-foreground", + }, + ].map(({ label, value, sub, cls }) => ( +
+

{label}

+

{value}

+

{sub}

+
+ ))} +
+ )} + + {/* Active leak alert */} + {!loading && active.length > 0 && ( +
+
+ +

+ {active.length} water leak{active.length > 1 ? "s" : ""} detected — immediate action required +

+
+
+ {active.map((s) => ( +

+ • {s.sensor_id} + {s.floor_zone ? ` — ${s.floor_zone}` : ""} + {s.near_crac ? ` (near ${s.near_crac})` : ""} + {s.under_floor ? " — under raised floor" : ""} +

+ ))} +
+
+ )} + + {/* Zone panels */} + {loading ? ( +
+ {Array.from({ length: 4 }).map((_, i) => )} +
+ ) : ( + zoneEntries.map(([zone, zoneSensors]) => { + const zoneActive = zoneSensors.filter((s) => s.state === "detected"); + return ( +
+
+

{zone}

+ {zoneActive.length > 0 && ( + + {zoneActive.length} LEAK + + )} +
+
+ {zoneSensors.map((s) => )} +
+
+ ); + }) + )} +
+ ); +} diff --git a/frontend/app/(dashboard)/maintenance/page.tsx b/frontend/app/(dashboard)/maintenance/page.tsx new file mode 100644 index 0000000..4259857 --- /dev/null +++ b/frontend/app/(dashboard)/maintenance/page.tsx @@ -0,0 +1,440 @@ +"use client"; + +import { useEffect, useState, useCallback } from "react"; +import { toast } from "sonner"; +import { + fetchMaintenanceWindows, createMaintenanceWindow, deleteMaintenanceWindow, + type MaintenanceWindow, +} from "@/lib/api"; +import { PageShell } from "@/components/layout/page-shell"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Skeleton } from "@/components/ui/skeleton"; +import { Button } from "@/components/ui/button"; +import { + CalendarClock, Plus, Trash2, CheckCircle2, Clock, AlertTriangle, + BellOff, RefreshCw, X, +} from "lucide-react"; +import { cn } from "@/lib/utils"; + +const SITE_ID = "sg-01"; + +const TARGET_GROUPS = [ + { + label: "Site", + targets: [{ value: "all", label: "Entire Site" }], + }, + { + label: "Halls", + targets: [ + { value: "hall-a", label: "Hall A" }, + { value: "hall-b", label: "Hall B" }, + ], + }, + { + label: "Racks — Hall A", + targets: [ + { value: "rack-A01", label: "Rack A01" }, + { value: "rack-A02", label: "Rack A02" }, + { value: "rack-A03", label: "Rack A03" }, + { value: "rack-A04", label: "Rack A04" }, + { value: "rack-A05", label: "Rack A05" }, + ], + }, + { + label: "Racks — Hall B", + targets: [ + { value: "rack-B01", label: "Rack B01" }, + { value: "rack-B02", label: "Rack B02" }, + { value: "rack-B03", label: "Rack B03" }, + { value: "rack-B04", label: "Rack B04" }, + { value: "rack-B05", label: "Rack B05" }, + ], + }, + { + label: "CRAC Units", + targets: [ + { value: "crac-01", label: "CRAC-01" }, + { value: "crac-02", label: "CRAC-02" }, + ], + }, + { + label: "UPS", + targets: [ + { value: "ups-01", label: "UPS-01" }, + { value: "ups-02", label: "UPS-02" }, + ], + }, + { + label: "Generator", + targets: [ + 
{ value: "gen-01", label: "Generator GEN-01" }, + ], + }, +]; + +// Flat list for looking up labels +const TARGETS_FLAT = TARGET_GROUPS.flatMap(g => g.targets); + +const statusCfg = { + active: { label: "Active", cls: "bg-green-500/10 text-green-400 border-green-500/20", icon: CheckCircle2 }, + scheduled: { label: "Scheduled", cls: "bg-blue-500/10 text-blue-400 border-blue-500/20", icon: Clock }, + expired: { label: "Expired", cls: "bg-muted/50 text-muted-foreground border-border", icon: AlertTriangle }, +}; + +function StatusChip({ status }: { status: MaintenanceWindow["status"] }) { + const cfg = statusCfg[status]; + const Icon = cfg.icon; + return ( + + {cfg.label} + + ); +} + +function formatDt(iso: string): string { + return new Date(iso).toLocaleString([], { dateStyle: "short", timeStyle: "short" }); +} + +// 7-day timeline strip +const DAY_LABELS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; + +function TimelineStrip({ windows }: { windows: MaintenanceWindow[] }) { + const relevant = windows.filter(w => w.status === "active" || w.status === "scheduled"); + if (relevant.length === 0) return null; + + const today = new Date(); + today.setHours(0, 0, 0, 0); + const totalMs = 7 * 24 * 3600_000; + + // Day labels + const days = Array.from({ length: 7 }, (_, i) => { + const d = new Date(today.getTime() + i * 24 * 3600_000); + return DAY_LABELS[d.getDay()]; + }); + + return ( +
+

+ 7-Day Maintenance Timeline +

+ + {/* Day column labels */} +
+
+ {days.map((day, i) => ( +
{day}
+ ))} +
+ + {/* Grid lines */} +
+
+ {Array.from({ length: 8 }, (_, i) => ( +
+ ))} +
+ + {/* Window bars */} +
+ {relevant.map(w => { + const startMs = Math.max(0, new Date(w.start_dt).getTime() - today.getTime()); + const endMs = Math.min(totalMs, new Date(w.end_dt).getTime() - today.getTime()); + if (endMs <= 0 || startMs >= totalMs) return null; + + const leftPct = (startMs / totalMs) * 100; + const widthPct = ((endMs - startMs) / totalMs) * 100; + + const barCls = w.status === "active" + ? "bg-green-500/30 border-green-500/50 text-green-300" + : "bg-blue-500/20 border-blue-500/40 text-blue-300"; + + return ( +
+
+ {w.title} +
+
+ ); + })} +
+
+
+
+ ); +} + +// Defaults for new window form: now → +2h +function defaultStart() { + const d = new Date(); + d.setSeconds(0, 0); + return d.toISOString().slice(0, 16); +} +function defaultEnd() { + const d = new Date(Date.now() + 2 * 3600_000); + d.setSeconds(0, 0); + return d.toISOString().slice(0, 16); +} + +export default function MaintenancePage() { + const [windows, setWindows] = useState([]); + const [loading, setLoading] = useState(true); + const [showForm, setShowForm] = useState(false); + const [submitting, setSubmitting] = useState(false); + const [deleting, setDeleting] = useState(null); + + // Form state + const [title, setTitle] = useState(""); + const [target, setTarget] = useState("all"); + const [startDt, setStartDt] = useState(defaultStart); + const [endDt, setEndDt] = useState(defaultEnd); + const [suppress, setSuppress] = useState(true); + const [notes, setNotes] = useState(""); + + const load = useCallback(async () => { + try { + const data = await fetchMaintenanceWindows(SITE_ID); + setWindows(data); + } catch { toast.error("Failed to load maintenance windows"); } + finally { setLoading(false); } + }, []); + + useEffect(() => { load(); }, [load]); + + async function handleCreate(e: React.FormEvent) { + e.preventDefault(); + if (!title.trim()) return; + setSubmitting(true); + try { + const targetLabel = TARGETS_FLAT.find(t => t.value === target)?.label ?? 
target; + await createMaintenanceWindow({ + site_id: SITE_ID, + title: title.trim(), + target, + target_label: targetLabel, + start_dt: new Date(startDt).toISOString(), + end_dt: new Date(endDt).toISOString(), + suppress_alarms: suppress, + notes: notes.trim(), + }); + await load(); + toast.success("Maintenance window created"); + setShowForm(false); + setTitle(""); setNotes(""); setStartDt(defaultStart()); setEndDt(defaultEnd()); + } catch { toast.error("Failed to create maintenance window"); } + finally { setSubmitting(false); } + } + + async function handleDelete(id: string) { + setDeleting(id); + try { await deleteMaintenanceWindow(id); toast.success("Maintenance window deleted"); await load(); } + catch { toast.error("Failed to delete maintenance window"); } + finally { setDeleting(null); } + } + + const active = windows.filter(w => w.status === "active").length; + const scheduled = windows.filter(w => w.status === "scheduled").length; + + return ( + +
+
+

Maintenance Windows

+

Singapore DC01 — planned outages & alarm suppression

+
+
+ + +
+
+ + {/* Summary */} + {!loading && ( +
+ {active > 0 && ( + + + {active} + active + + )} + {scheduled > 0 && ( + + + {scheduled} + scheduled + + )} + {active === 0 && scheduled === 0 && ( + No active or scheduled maintenance + )} +
+ )} + + {/* Create form */} + {showForm && ( + + +
+ + New Maintenance Window + + +
+
+ +
+
+
+ + setTitle(e.target.value)} + placeholder="e.g. UPS-01 firmware update" + className="w-full h-9 rounded-md border border-border bg-muted/30 px-3 text-sm focus:outline-none focus:ring-1 focus:ring-primary" + /> +
+
+ + +
+
+ +
+
+ + setStartDt(e.target.value)} + className="w-full h-9 rounded-md border border-border bg-muted/30 px-3 text-sm focus:outline-none focus:ring-1 focus:ring-primary" + /> +
+
+ + setEndDt(e.target.value)} + className="w-full h-9 rounded-md border border-border bg-muted/30 px-3 text-sm focus:outline-none focus:ring-1 focus:ring-primary" + /> +
+
+ +