first commit

This commit is contained in:
mega 2026-03-19 11:32:17 +00:00
commit 4b98219bf7
144 changed files with 31561 additions and 0 deletions

17
backend/Dockerfile Normal file
View file

@@ -0,0 +1,17 @@
# Backend API container — Python 3.12 slim base, FastAPI served by uvicorn.
FROM python:3.12-slim
WORKDIR /app
# curl needed for Docker healthcheck
RUN apt-get update && apt-get install -y --no-install-recommends curl && rm -rf /var/lib/apt/lists/*
# Install Python dependencies before copying the app so this layer is
# cached until requirements.txt itself changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy app code
COPY . .
EXPOSE 8000
# Bind 0.0.0.0 so the port is reachable through the container mapping.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]

0
backend/__init__.py Normal file
View file

0
backend/api/__init__.py Normal file
View file

View file

View file

@@ -0,0 +1,82 @@
from fastapi import APIRouter, Depends, Query, HTTPException
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session

# Alarm lifecycle API: list alarms, acknowledge/resolve them, and per-site stats.
router = APIRouter()
@router.get("")
async def list_alarms(
    site_id: str = Query(...),
    state: str = Query("active", description="active | resolved | acknowledged | all"),
    limit: int = Query(50, ge=1, le=200),
    session: AsyncSession = Depends(get_session),
):
    """Most recent alarms for a site, newest first, optionally filtered by state."""
    clauses = ["site_id = :site_id"]
    if state != "all":
        clauses.append("state = :state")
    sql = f"""
        SELECT id, sensor_id, site_id, room_id, rack_id,
               severity, message, state, triggered_at,
               acknowledged_at, resolved_at
        FROM alarms
        WHERE {" AND ".join(clauses)}
        ORDER BY triggered_at DESC
        LIMIT :limit
    """
    rows = await session.execute(
        text(sql), {"site_id": site_id, "state": state, "limit": limit}
    )
    return [dict(rec) for rec in rows.mappings().all()]
@router.post("/{alarm_id}/acknowledge")
async def acknowledge_alarm(
    alarm_id: int,
    session: AsyncSession = Depends(get_session),
):
    """Move an active alarm to 'acknowledged'; 404 if it is missing or not active."""
    stmt = text("""
        UPDATE alarms
        SET state = 'acknowledged', acknowledged_at = NOW()
        WHERE id = :id AND state = 'active'
        RETURNING id
    """)
    outcome = await session.execute(stmt, {"id": alarm_id})
    await session.commit()
    updated = outcome.fetchone()
    if updated is None:
        raise HTTPException(status_code=404, detail="Alarm not found or not active")
    return {"id": alarm_id, "state": "acknowledged"}
@router.post("/{alarm_id}/resolve")
async def resolve_alarm(
    alarm_id: int,
    session: AsyncSession = Depends(get_session),
):
    """Resolve an active or acknowledged alarm; 404 if none matches."""
    stmt = text("""
        UPDATE alarms
        SET state = 'resolved', resolved_at = NOW()
        WHERE id = :id AND state IN ('active', 'acknowledged')
        RETURNING id
    """)
    outcome = await session.execute(stmt, {"id": alarm_id})
    await session.commit()
    if outcome.fetchone() is None:
        raise HTTPException(status_code=404, detail="Alarm not found or already resolved")
    return {"id": alarm_id, "state": "resolved"}
@router.get("/stats")
async def alarm_stats(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Counts of alarms by state, plus active critical/warning breakdowns."""
    res = await session.execute(text("""
        SELECT
            COUNT(*) FILTER (WHERE state = 'active') AS active,
            COUNT(*) FILTER (WHERE state = 'acknowledged') AS acknowledged,
            COUNT(*) FILTER (WHERE state = 'resolved') AS resolved,
            COUNT(*) FILTER (WHERE state = 'active' AND severity = 'critical') AS critical,
            COUNT(*) FILTER (WHERE state = 'active' AND severity = 'warning') AS warning
        FROM alarms
        WHERE site_id = :site_id
    """), {"site_id": site_id})
    stats = {}
    for key, val in res.mappings().one().items():
        stats[key] = int(val)
    return stats

View file

@@ -0,0 +1,344 @@
import hashlib
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session

router = APIRouter()

# Mirrors the simulator topology — single source of truth for site layout
# NOTE(review): the same layout is re-declared in other route modules
# (capacity, env); presumably a shared module would be preferable — confirm.
TOPOLOGY = {
    "sg-01": {
        "rooms": [
            {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"},
            {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"},
        ],
        "ups_units": ["ups-01", "ups-02"],
        "leak_sensors": ["leak-01"],
    }
}

# ── Device catalog ────────────────────────────────────────────────────────────
# Each tuple: (name, u_height, power_draw_w)
_SERVERS = [
    ("Dell PowerEdge R750", 2, 420),
    ("HPE ProLiant DL380 Gen10 Plus", 2, 380),
    ("Supermicro SuperServer 2029P", 2, 350),
    ("Dell PowerEdge R650xs", 1, 280),
    ("HPE ProLiant DL360 Gen10 Plus", 1, 260),
]
_SWITCHES = [
    ("Cisco Catalyst C9300-48P", 1, 60),
    ("Arista 7050CX3-32S", 1, 180),
    ("Juniper EX4300-48T", 1, 75),
]
_PATCHES = [
    ("Leviton 24-Port Cat6A Patch Panel", 1, 5),
    ("Panduit 48-Port Cat6A Patch Panel", 1, 5),
]
_PDUS = [
    ("APC AP8888 Metered Rack PDU", 1, 10),
    ("Raritan PX3-5190R Metered PDU", 1, 10),
]
_STORAGE = [
    ("Dell EMC PowerVault ME5024", 2, 280),
    ("NetApp AFF C190", 2, 200),
]
_FIREWALL = [
    ("Palo Alto PA-5220", 2, 150),
    ("Fortinet FortiGate 3000F", 2, 180),
]
_KVM = [("Raritan KX III-464", 1, 15)]
def _serial(rack_id: str, u: int) -> str:
return hashlib.md5(f"{rack_id}-u{u}".encode()).hexdigest()[:10].upper()
def _rack_seq(rack_id: str) -> int:
"""SG1A01.05 → 5, SG1A02.05 → 25, SG1B01.05 → 5"""
# Format: SG1A01.05 — row at [4:6], rack num after dot
row = int(rack_id[4:6]) # "01" or "02"
num = int(rack_id[7:]) # "01" to "20"
return (row - 1) * 20 + num
def _generate_devices(site_id: str, room_id: str, rack_id: str) -> list[dict]:
    """Deterministically synthesize the device inventory for one rack."""
    seq = _rack_seq(rack_id)
    subnet = "1" if room_id == "hall-a" else "2"
    inventory: list[dict] = []
    next_u = 1

    def place(name: str, dtype: str, u_start: int, u_height: int, power_w: int, ip: str = "-"):
        # Append one device record; serial is derived from rack + U position.
        inventory.append({
            "device_id": f"{rack_id}-u{u_start:02d}",
            "name": name,
            "type": dtype,
            "rack_id": rack_id,
            "room_id": room_id,
            "site_id": site_id,
            "u_start": u_start,
            "u_height": u_height,
            "ip": ip,
            "serial": _serial(rack_id, u_start),
            "model": name,
            "status": "online",
            "power_draw_w": power_w,
        })

    # U1: patch panel
    panel = _PATCHES[seq % len(_PATCHES)]
    place(panel[0], "patch_panel", next_u, panel[1], panel[2])
    next_u += panel[1]
    # U2: top-of-rack switch
    tor = _SWITCHES[seq % len(_SWITCHES)]
    place(tor[0], "switch", next_u, tor[1], tor[2], f"10.10.{subnet}.{seq}")
    next_u += tor[1]
    # KVM only in racks 5 and 15
    if seq in (5, 15):
        kvm = _KVM[0]
        place(kvm[0], "kvm", next_u, kvm[1], kvm[2], f"10.10.{subnet}.{seq + 100}")
        next_u += kvm[1]
    # Firewall lives in the first rack of each room
    if rack_id in ("SG1A01.01", "SG1B01.01"):
        fw = _FIREWALL[seq % len(_FIREWALL)]
        place(fw[0], "firewall", next_u, fw[1], fw[2], f"10.10.{subnet}.254")
        next_u += fw[1]
    # Storage arrays in racks 3 and 13
    if seq in (3, 13):
        shelf = _STORAGE[seq % len(_STORAGE)]
        place(shelf[0], "storage", next_u, shelf[1], shelf[2], f"10.10.{subnet}.{seq + 50}")
        next_u += shelf[1]
    # Fill the remaining space up to U41 with servers
    host_ip = (seq - 1) * 15 + 10
    for idx, (name, height, watts) in enumerate(_SERVERS * 3):
        if next_u + height > 41:
            break
        # Occasional empty slot for realism
        if idx > 0 and (seq + idx) % 8 == 0 and next_u + height + 1 <= 41:
            next_u += 1
            if next_u + height > 41:
                break
        place(name, "server", next_u, height, watts, f"10.10.{subnet}.{host_ip}")
        next_u += height
        host_ip += 1
    # U42: metered PDU
    pdu = _PDUS[seq % len(_PDUS)]
    place(pdu[0], "pdu", 42, pdu[1], pdu[2])
    return inventory
# ── Endpoints ─────────────────────────────────────────────────────────────────
@router.get("")
async def get_assets(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Site asset tree: per-room CRAC + rack summaries, plus UPS unit status.

    Combines the static TOPOLOGY layout with the latest sensor readings
    (last 10 minutes) and per-rack active-alarm counts.
    """
    site = TOPOLOGY.get(site_id)
    if not site:
        # Unknown site: return an empty but well-formed payload.
        return {"site_id": site_id, "rooms": [], "ups_units": []}
    # Latest reading per sensor within the last 10 minutes.
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, room_id, rack_id, value
        FROM readings
        WHERE site_id = :site_id
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    readings = result.mappings().all()
    # Active alarm count and "worst" severity per rack.
    # NOTE(review): MAX(severity) compares strings, so 'warning' sorts above
    # 'critical' alphabetically — confirm the intended severity ordering.
    alarm_result = await session.execute(text("""
        SELECT rack_id, COUNT(*) AS cnt, MAX(severity) AS worst
        FROM alarms
        WHERE site_id = :site_id AND state = 'active' AND rack_id IS NOT NULL
        GROUP BY rack_id
    """), {"site_id": site_id})
    alarm_map: dict[str, tuple[int, str]] = {
        r["rack_id"]: (int(r["cnt"]), r["worst"])
        for r in alarm_result.mappings().all()
    }
    # sensor_id → latest value, for O(1) lookups below.
    by_sensor: dict[str, float] = {r["sensor_id"]: float(r["value"]) for r in readings}

    # Helpers that reconstruct the sensor_id path for each equipment class.
    def rack_reading(site: str, room: str, rack: str, suffix: str) -> float | None:
        return by_sensor.get(f"{site}/{room}/{rack}/{suffix}")

    def cooling_reading(site: str, crac: str, suffix: str) -> float | None:
        return by_sensor.get(f"{site}/cooling/{crac}/{suffix}")

    def ups_reading(site: str, ups: str, suffix: str) -> float | None:
        return by_sensor.get(f"{site}/power/{ups}/{suffix}")

    rooms = []
    for room in site["rooms"]:
        room_id = room["room_id"]
        crac_id = room["crac_id"]
        supply = cooling_reading(site_id, crac_id, "supply_temp")
        return_t = cooling_reading(site_id, crac_id, "return_temp")
        fan = cooling_reading(site_id, crac_id, "fan_pct")
        # Distinguish "CRAC reporting but missing supply_temp" (fault) from
        # "no CRAC data at all" (unknown).
        crac_has_data = any(sid.startswith(f"{site_id}/cooling/{crac_id}") for sid in by_sensor)
        if supply is not None:
            crac_state = "online"
        elif crac_has_data:
            crac_state = "fault"
        else:
            crac_state = "unknown"
        racks = []
        for rack_id in room["racks"]:
            temp = rack_reading(site_id, room_id, rack_id, "temperature")
            power = rack_reading(site_id, room_id, rack_id, "power_kw")
            alarm_cnt, worst_sev = alarm_map.get(rack_id, (0, None))
            # Status: alarms or temperature thresholds (>=30 critical,
            # >=26 warning); "unknown" only when no data at all.
            status = "ok"
            if worst_sev == "critical" or (temp is not None and temp >= 30):
                status = "critical"
            elif worst_sev == "warning" or (temp is not None and temp >= 26):
                status = "warning"
            elif temp is None and power is None:
                status = "unknown"
            racks.append({
                "rack_id": rack_id,
                "temp": round(temp, 1) if temp is not None else None,
                "power_kw": round(power, 2) if power is not None else None,
                "status": status,
                "alarm_count": alarm_cnt,
            })
        rooms.append({
            "room_id": room_id,
            "crac": {
                "crac_id": crac_id,
                "state": crac_state,
                "supply_temp": round(supply, 1) if supply is not None else None,
                "return_temp": round(return_t, 1) if return_t is not None else None,
                "fan_pct": round(fan, 1) if fan is not None else None,
            },
            "racks": racks,
        })
    ups_units = []
    for ups_id in site["ups_units"]:
        charge = ups_reading(site_id, ups_id, "charge_pct")
        load = ups_reading(site_id, ups_id, "load_pct")
        runtime = ups_reading(site_id, ups_id, "runtime_min")
        state_raw = ups_reading(site_id, ups_id, "state")
        # Prefer the explicit state sensor (1.0 = on battery); fall back to
        # inferring "battery" from a low charge reading.
        if state_raw is not None:
            state = "battery" if state_raw == 1.0 else "online"
        elif charge is not None:
            state = "battery" if charge < 20.0 else "online"
        else:
            state = "unknown"
        ups_units.append({
            "ups_id": ups_id,
            "state": state,
            "charge_pct": round(charge, 1) if charge is not None else None,
            "load_pct": round(load, 1) if load is not None else None,
            "runtime_min": round(runtime, 0) if runtime is not None else None,
        })
    return {"site_id": site_id, "rooms": rooms, "ups_units": ups_units}
@router.get("/devices")
async def get_all_devices(site_id: str = Query(...)):
    """All devices across all racks for the site."""
    site = TOPOLOGY.get(site_id)
    if not site:
        return []
    return [
        dev
        for room in site["rooms"]
        for rack in room["racks"]
        for dev in _generate_devices(site_id, room["room_id"], rack)
    ]
@router.get("/rack-devices")
async def get_rack_devices(site_id: str = Query(...), rack_id: str = Query(...)):
    """Devices in a specific rack."""
    site = TOPOLOGY.get(site_id)
    if site is not None:
        for room in site["rooms"]:
            if rack_id in room["racks"]:
                return _generate_devices(site_id, room["room_id"], rack_id)
    # Unknown site or rack: empty inventory.
    return []
# sensor_type → response field name for PDU readings.  Built once at import
# time; the original rebuilt this dict literal on every row of the loop below.
_PDU_FIELD_MAP = {
    "power_kw": "total_kw",
    "pdu_phase_a_kw": "phase_a_kw",
    "pdu_phase_b_kw": "phase_b_kw",
    "pdu_phase_c_kw": "phase_c_kw",
    "pdu_phase_a_a": "phase_a_a",
    "pdu_phase_b_a": "phase_b_a",
    "pdu_phase_c_a": "phase_c_a",
    "pdu_imbalance": "imbalance_pct",
}


@router.get("/pdus")
async def get_pdus(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Per-rack PDU live phase data.

    Emits one row per rack in topology order; racks with no recent readings
    keep None fields.  Status derives from phase imbalance: >=10% critical,
    >=5% warning, else ok.
    """
    site = TOPOLOGY.get(site_id)
    if not site:
        return []
    # Latest reading per PDU sensor within the last 5 minutes.
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, room_id, rack_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN (
              'power_kw', 'pdu_phase_a_kw', 'pdu_phase_b_kw', 'pdu_phase_c_kw',
              'pdu_phase_a_a', 'pdu_phase_b_a', 'pdu_phase_c_a', 'pdu_imbalance'
          )
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # Build per-rack dict keyed by rack_id
    rack_data: dict[str, dict] = {}
    for row in result.mappings().all():
        rack_id = row["rack_id"]
        if not rack_id:
            continue
        if rack_id not in rack_data:
            rack_data[rack_id] = {"rack_id": rack_id, "room_id": row["room_id"]}
        field = _PDU_FIELD_MAP.get(row["sensor_type"])
        if field:
            rack_data[rack_id][field] = round(float(row["value"]), 2)
    # Emit rows for every rack in topology order, filling in None for missing data
    out = []
    for room in site["rooms"]:
        for rack_id in room["racks"]:
            d = rack_data.get(rack_id, {"rack_id": rack_id, "room_id": room["room_id"]})
            imb = d.get("imbalance_pct")
            status = (
                "critical" if imb is not None and imb >= 10
                else "warning" if imb is not None and imb >= 5
                else "ok"
            )
            out.append({
                "rack_id": rack_id,
                "room_id": d.get("room_id", room["room_id"]),
                "total_kw": d.get("total_kw"),
                "phase_a_kw": d.get("phase_a_kw"),
                "phase_b_kw": d.get("phase_b_kw"),
                "phase_c_kw": d.get("phase_c_kw"),
                "phase_a_a": d.get("phase_a_a"),
                "phase_b_a": d.get("phase_b_a"),
                "phase_c_a": d.get("phase_c_a"),
                "imbalance_pct": imb,
                "status": status,
            })
    return out

View file

@@ -0,0 +1,110 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session

# Capacity planning API: power/cooling/space utilisation per rack and room.
router = APIRouter()

# Static room/rack layout for each site.
# NOTE(review): duplicated from the assets module's TOPOLOGY — confirm
# before consolidating into a shared module.
ROOMS = {
    "sg-01": [
        {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"},
        {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"},
    ]
}

# Rated capacity config — would be per-asset configurable in production
RACK_POWER_CAPACITY_KW = 10.0    # max kW per rack
ROOM_POWER_CAPACITY_KW = 400.0   # 40 racks × 10 kW
CRAC_COOLING_CAPACITY_KW = 160.0 # rated cooling per CRAC
RACK_U_TOTAL = 42
@router.get("/summary")
async def capacity_summary(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Per-rack and per-room capacity: power used vs rated, cooling load vs rated, rack space."""
    # Latest power/temperature reading per rack sensor (last 10 minutes).
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('power_kw', 'temperature')
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    rows = result.mappings().all()
    # Index: rack_id → {power_kw, temperature, room_id}
    rack_idx: dict[str, dict] = {}
    for row in rows:
        rid = row["rack_id"]
        if rid not in rack_idx:
            rack_idx[rid] = {"room_id": row["room_id"]}
        if row["sensor_type"] == "power_kw":
            rack_idx[rid]["power_kw"] = round(float(row["value"]), 2)
        elif row["sensor_type"] == "temperature":
            rack_idx[rid]["temperature"] = round(float(row["value"]), 1)
    rooms_out = []
    racks_out = []
    for room in ROOMS.get(site_id, []):
        room_id = room["room_id"]
        room_power = 0.0
        populated = 0  # racks counted as populated = racks with a power reading
        for rack_id in room["racks"]:
            d = rack_idx.get(rack_id, {})
            power = d.get("power_kw")
            temp = d.get("temperature")
            if power is not None:
                room_power += power
                populated += 1
            power_pct = round((power / RACK_POWER_CAPACITY_KW) * 100, 1) if power is not None else None
            racks_out.append({
                "rack_id": rack_id,
                "room_id": room_id,
                "power_kw": power,
                "power_capacity_kw": RACK_POWER_CAPACITY_KW,
                "power_pct": power_pct,
                "temp": temp,
            })
        room_power = round(room_power, 2)
        rooms_out.append({
            "room_id": room_id,
            "power": {
                "used_kw": room_power,
                "capacity_kw": ROOM_POWER_CAPACITY_KW,
                "pct": round((room_power / ROOM_POWER_CAPACITY_KW) * 100, 1),
                "headroom_kw": round(ROOM_POWER_CAPACITY_KW - room_power, 2),
            },
            "cooling": {
                "load_kw": room_power,  # IT power ≈ heat generated
                "capacity_kw": CRAC_COOLING_CAPACITY_KW,
                # Clamp display pct at 100 even if the room is over capacity.
                "pct": round(min(100.0, (room_power / CRAC_COOLING_CAPACITY_KW) * 100), 1),
                "headroom_kw": round(max(0.0, CRAC_COOLING_CAPACITY_KW - room_power), 2),
            },
            "space": {
                "racks_total": len(room["racks"]),
                "racks_populated": populated,
                "pct": round((populated / len(room["racks"])) * 100, 1),
            },
        })
    return {
        "site_id": site_id,
        "config": {
            "rack_power_kw": RACK_POWER_CAPACITY_KW,
            "room_power_kw": ROOM_POWER_CAPACITY_KW,
            "crac_cooling_kw": CRAC_COOLING_CAPACITY_KW,
            "rack_u_total": RACK_U_TOTAL,
        },
        "rooms": rooms_out,
        "racks": racks_out,
    }

View file

@@ -0,0 +1,131 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session

# Chiller plant API: latest status and bucketed history per chiller.
router = APIRouter()

# Chiller units per site.
CHILLERS = {"sg-01": ["chiller-01"]}

# sensor_type → response field name for chiller readings.
CHILLER_FIELD_MAP = {
    "chiller_chw_supply": "chw_supply_c",
    "chiller_chw_return": "chw_return_c",
    "chiller_chw_delta": "chw_delta_c",
    "chiller_flow_gpm": "flow_gpm",
    "chiller_load_kw": "cooling_load_kw",
    "chiller_load_pct": "cooling_load_pct",
    "chiller_cop": "cop",
    "chiller_comp_load": "compressor_load_pct",
    "chiller_cond_press": "condenser_pressure_bar",
    "chiller_evap_press": "evaporator_pressure_bar",
    "chiller_cw_supply": "cw_supply_c",
    "chiller_cw_return": "cw_return_c",
    "chiller_run_hours": "run_hours",
}
@router.get("/status")
async def chiller_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest chiller plant readings."""
    wanted = list(CHILLER_FIELD_MAP.keys()) + ["chiller_state"]
    in_list = ", ".join(f"'{t}'" for t in wanted)
    res = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({in_list})
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    per_chiller: dict[str, dict] = {}
    for rec in res.mappings().all():
        # sensor_id layout: {site}/cooling/chiller/{chiller_id}/{key}
        segments = rec["sensor_id"].split("/")
        if len(segments) < 4:
            continue
        unit = segments[3]
        entry = per_chiller.setdefault(unit, {"chiller_id": unit})
        mapped = CHILLER_FIELD_MAP.get(rec["sensor_type"])
        if mapped:
            entry[mapped] = round(float(rec["value"]), 2)
        elif rec["sensor_type"] == "chiller_state":
            entry["state"] = "online" if float(rec["value"]) > 0.5 else "fault"
    out = []
    for unit in CHILLERS.get(site_id, []):
        entry = per_chiller.get(unit, {"chiller_id": unit, "state": "unknown"})
        # Units that reported data but no explicit state sensor count as online.
        entry.setdefault("state", "online")
        out.append(entry)
    return out
@router.get("/history")
async def chiller_history(
    site_id: str = Query(...),
    chiller_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Time-series COP, load kW, and CHW temps for a chiller.

    Prefers TimescaleDB's time_bucket() for 5-minute buckets; falls back to
    per-minute date_trunc() when the extension is unavailable.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    METRICS = ("chiller_cop", "chiller_load_kw", "chiller_load_pct",
               "chiller_chw_supply", "chiller_chw_return", "chiller_comp_load")
    types_sql = ", ".join(f"'{t}'" for t in METRICS)
    params = {"site_id": site_id,
              "pattern": f"{site_id}/cooling/chiller/{chiller_id}/%",
              "from_time": from_time}
    try:
        result = await session.execute(text(f"""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 3) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    except Exception:
        # The failed statement leaves the transaction aborted; roll back
        # before retrying or the fallback execute raises PendingRollbackError.
        await session.rollback()
        result = await session.execute(text(f"""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 3) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    # Pivot rows into one point per time bucket.
    bucket_map: dict[str, dict] = {}
    for row in result.mappings().all():
        b = str(row["bucket"])
        if b not in bucket_map:
            bucket_map[b] = {"bucket": b}
        bucket_map[b][row["sensor_type"]] = float(row["avg_val"])
    points = []
    for b, vals in sorted(bucket_map.items()):
        points.append({
            "bucket": b,
            "cop": vals.get("chiller_cop"),
            "load_kw": vals.get("chiller_load_kw"),
            "load_pct": vals.get("chiller_load_pct"),
            "chw_supply_c": vals.get("chiller_chw_supply"),
            "chw_return_c": vals.get("chiller_chw_return"),
            "comp_load": vals.get("chiller_comp_load"),
        })
    return points

440
backend/api/routes/env.py Normal file
View file

@@ -0,0 +1,440 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session

# Environmental monitoring API: rack temp/humidity, CRAC status + history,
# and room particle counts.
router = APIRouter()

# Static room/rack layout per site.
# NOTE(review): duplicated from other route modules — confirm before
# consolidating into a shared topology module.
ROOMS = {
    "sg-01": [
        {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"},
        {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"},
    ]
}
@router.get("/rack-readings")
async def rack_env_readings(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest temperature and humidity per rack, grouped by room."""
    res = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('temperature', 'humidity')
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # (rack_id, sensor_type) → latest value
    latest: dict[tuple, float] = {}
    for rec in res.mappings().all():
        latest[(rec["rack_id"], rec["sensor_type"])] = float(rec["value"])

    def _rounded(rack: str, kind: str) -> float | None:
        val = latest.get((rack, kind))
        return round(val, 1) if val is not None else None

    out = []
    for room in ROOMS.get(site_id, []):
        rack_rows = [
            {
                "rack_id": rid,
                "temperature": _rounded(rid, "temperature"),
                "humidity": _rounded(rid, "humidity"),
            }
            for rid in room["racks"]
        ]
        out.append({"room_id": room["room_id"], "racks": rack_rows})
    return out
@router.get("/humidity-history")
async def humidity_history(
    site_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Average humidity per room bucketed by 5 minutes.

    Averages each sensor within the bucket first, then averages sensors
    across the room, so sensors that report more often are not over-weighted.
    Uses TimescaleDB time_bucket() with a plain date_trunc() fallback.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    params = {"site_id": site_id, "from_time": from_time}
    try:
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 1) AS avg_humidity
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'humidity'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), params)
    except Exception:
        # The failed statement leaves the transaction aborted; roll back
        # before retrying or the fallback execute raises PendingRollbackError.
        await session.rollback()
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 1) AS avg_humidity
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'humidity'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), params)
    return [dict(r) for r in result.mappings().all()]
# All CRAC sensor types stored in the readings table
CRAC_SENSOR_TYPES = (
    "cooling_supply", "cooling_return", "cooling_fan",
    "cooling_supply_hum", "cooling_return_hum", "cooling_airflow", "cooling_filter_dp",
    "cooling_cap_kw", "cooling_cap_pct", "cooling_cop", "cooling_shr",
    "cooling_comp_state", "cooling_comp_load", "cooling_comp_power", "cooling_comp_hours",
    "cooling_high_press", "cooling_low_press", "cooling_superheat", "cooling_subcooling",
    "cooling_fan_rpm", "cooling_fan_power", "cooling_fan_hours",
    "cooling_unit_power", "cooling_voltage", "cooling_current", "cooling_pf",
)
# sensor_type → response field name
CRAC_FIELD_MAP = {
    "cooling_supply": "supply_temp",
    "cooling_return": "return_temp",
    "cooling_fan": "fan_pct",
    "cooling_supply_hum": "supply_humidity",
    "cooling_return_hum": "return_humidity",
    "cooling_airflow": "airflow_cfm",
    "cooling_filter_dp": "filter_dp_pa",
    "cooling_cap_kw": "cooling_capacity_kw",
    "cooling_cap_pct": "cooling_capacity_pct",
    "cooling_cop": "cop",
    "cooling_shr": "sensible_heat_ratio",
    "cooling_comp_state": "compressor_state",
    "cooling_comp_load": "compressor_load_pct",
    "cooling_comp_power": "compressor_power_kw",
    "cooling_comp_hours": "compressor_run_hours",
    "cooling_high_press": "high_pressure_bar",
    "cooling_low_press": "low_pressure_bar",
    "cooling_superheat": "discharge_superheat_c",
    "cooling_subcooling": "liquid_subcooling_c",
    "cooling_fan_rpm": "fan_rpm",
    "cooling_fan_power": "fan_power_kw",
    "cooling_fan_hours": "fan_run_hours",
    "cooling_unit_power": "total_unit_power_kw",
    "cooling_voltage": "input_voltage_v",
    "cooling_current": "input_current_a",
    "cooling_pf": "power_factor",
}
# Rated cooling capacity reported for every CRAC (kW).
RATED_CAPACITY_KW = 80.0
@router.get("/crac-status")
async def crac_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest CRAC readings — full sensor set."""
    in_list = ", ".join(f"'{t}'" for t in CRAC_SENSOR_TYPES)
    res = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({in_list})
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    per_crac: dict[str, dict] = {}
    for rec in res.mappings().all():
        segments = rec["sensor_id"].split("/")
        if len(segments) < 3:
            continue
        unit = segments[2]
        bucket = per_crac.setdefault(unit, {"crac_id": unit})
        mapped = CRAC_FIELD_MAP.get(rec["sensor_type"])
        if mapped:
            bucket[mapped] = round(float(rec["value"]), 3)
    crac_to_room = {room["crac_id"]: room["room_id"] for room in ROOMS.get(site_id, [])}
    out = []
    for unit, data in sorted(per_crac.items()):
        sup = data.get("supply_temp")
        ret = data.get("return_temp")
        out.append({
            "crac_id": unit,
            "room_id": crac_to_room.get(unit),
            "state": "online" if sup is not None else "fault",
            "delta": round(ret - sup, 1) if (ret is not None and sup is not None) else None,
            "rated_capacity_kw": RATED_CAPACITY_KW,
            **{k: round(v, 2) if isinstance(v, float) else v for k, v in data.items() if k != "crac_id"},
        })
    # Surface CRACs with no recent readings as faulted
    for room in ROOMS.get(site_id, []):
        if room["crac_id"] not in per_crac:
            out.append({
                "crac_id": room["crac_id"],
                "room_id": room["room_id"],
                "state": "fault",
                "delta": None,
                "rated_capacity_kw": RATED_CAPACITY_KW,
            })
    return sorted(out, key=lambda entry: entry["crac_id"])
@router.get("/crac-history")
async def crac_history(
    site_id: str = Query(...),
    crac_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Time-series history for a single CRAC unit — capacity, COP, compressor load, filter ΔP, temps.

    Prefers TimescaleDB time_bucket() (5-minute buckets); falls back to
    per-minute date_trunc() when the extension is unavailable.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    METRICS = (
        "cooling_supply", "cooling_return", "cooling_cap_kw",
        "cooling_cap_pct", "cooling_cop", "cooling_comp_load",
        "cooling_filter_dp", "cooling_fan",
    )
    types_sql = ", ".join(f"'{t}'" for t in METRICS)
    params = {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time}
    try:
        result = await session.execute(text(f"""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 3) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    except Exception:
        # The failed statement leaves the transaction aborted; roll back
        # before retrying or the fallback execute raises PendingRollbackError.
        await session.rollback()
        result = await session.execute(text(f"""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 3) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    # Pivot rows into one point per time bucket.
    bucket_map: dict[str, dict] = {}
    for row in result.mappings().all():
        b = str(row["bucket"])
        if b not in bucket_map:
            bucket_map[b] = {"bucket": b}
        bucket_map[b][row["sensor_type"]] = float(row["avg_val"])
    points = []
    for b, vals in sorted(bucket_map.items()):
        supply = vals.get("cooling_supply")
        ret = vals.get("cooling_return")
        points.append({
            "bucket": b,
            "supply_temp": round(supply, 1) if supply is not None else None,
            "return_temp": round(ret, 1) if ret is not None else None,
            "delta_t": round(ret - supply, 1) if (supply is not None and ret is not None) else None,
            "capacity_kw": vals.get("cooling_cap_kw"),
            "capacity_pct": vals.get("cooling_cap_pct"),
            "cop": vals.get("cooling_cop"),
            "comp_load": vals.get("cooling_comp_load"),
            "filter_dp": vals.get("cooling_filter_dp"),
            "fan_pct": vals.get("cooling_fan"),
        })
    return points
@router.get("/crac-delta-history")
async def crac_delta_history(
    site_id: str = Query(...),
    crac_id: str = Query(...),
    hours: int = Query(1, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """ΔT (return - supply) over time for a single CRAC unit.

    Prefers TimescaleDB time_bucket(); falls back to date_trunc().
    Only buckets where both supply and return are present are emitted.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    params = {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time}
    try:
        result = await session.execute(text("""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                AVG(value) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ('cooling_supply', 'cooling_return')
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    except Exception:
        # The failed statement leaves the transaction aborted; roll back
        # before retrying or the fallback execute raises PendingRollbackError.
        await session.rollback()
        result = await session.execute(text("""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                AVG(value) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ('cooling_supply', 'cooling_return')
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    rows = result.mappings().all()
    # Pivot supply/return into per-bucket pairs.
    bucket_map: dict[str, dict] = {}
    for row in rows:
        b = str(row["bucket"])
        if b not in bucket_map:
            bucket_map[b] = {"bucket": b}
        bucket_map[b][row["sensor_type"]] = float(row["avg_val"])
    points = []
    for b, vals in bucket_map.items():
        supply = vals.get("cooling_supply")
        ret = vals.get("cooling_return")
        if supply is not None and ret is not None:
            points.append({"bucket": b, "delta": round(ret - supply, 2)})
    return sorted(points, key=lambda x: x["bucket"])
@router.get("/rack-history")
async def rack_history(
    site_id: str = Query(...),
    rack_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Temperature and power history for a single rack, plus its latest alarms.

    Prefers TimescaleDB time_bucket() (5-minute buckets); falls back to
    per-minute date_trunc() when the extension is unavailable.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    params = {"site_id": site_id, "rack_id": rack_id, "from_time": from_time}
    try:
        result = await session.execute(text("""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_value
            FROM readings
            WHERE site_id = :site_id
              AND rack_id = :rack_id
              AND sensor_type IN ('temperature', 'humidity', 'power_kw')
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    except Exception:
        # The failed statement leaves the transaction aborted; roll back
        # before retrying or the fallback execute raises PendingRollbackError.
        await session.rollback()
        result = await session.execute(text("""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_value
            FROM readings
            WHERE site_id = :site_id
              AND rack_id = :rack_id
              AND sensor_type IN ('temperature', 'humidity', 'power_kw')
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    rows = result.mappings().all()
    # Pivot into {bucket, temperature, humidity, power_kw}
    bucket_map: dict[str, dict] = {}
    for row in rows:
        b = str(row["bucket"])
        if b not in bucket_map:
            bucket_map[b] = {"bucket": b}
        bucket_map[b][row["sensor_type"]] = float(row["avg_value"])
    # Ten most recent alarms for this rack (any state).
    alarms = await session.execute(text("""
        SELECT id, severity, message, state, triggered_at
        FROM alarms
        WHERE site_id = :site_id AND rack_id = :rack_id
        ORDER BY triggered_at DESC
        LIMIT 10
    """), {"site_id": site_id, "rack_id": rack_id})
    return {
        "rack_id": rack_id,
        "site_id": site_id,
        "history": list(bucket_map.values()),
        "alarms": [dict(r) for r in alarms.mappings().all()],
    }
@router.get("/particles")
async def particle_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest particle counts per room, plus a derived ISO 14644-1 class."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            room_id, sensor_type, value, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('particles_0_5um', 'particles_5um')
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # Latest count per room per particle size: {room_id: {sensor_type: count}}
    room_data: dict[str, dict] = {}
    for row in result.mappings().all():
        rid = row["room_id"]
        if rid not in room_data:
            room_data[rid] = {}
        room_data[rid][row["sensor_type"]] = round(float(row["value"]))
    rooms_cfg = ROOMS.get(site_id, [])
    out = []
    for room in rooms_cfg:
        rid = room["room_id"]
        d = room_data.get(rid, {})
        # Missing readings propagate as None (room had no data in 10 min).
        p05 = d.get("particles_0_5um")
        p5 = d.get("particles_5um")
        # Derive ISO 14644-1 class (simplified: class 8 = 3.52M @ 0.5µm)
        # These thresholds are a coarse demo approximation, not the standard.
        iso_class = None
        if p05 is not None:
            if p05 <= 10_000: iso_class = 5
            elif p05 <= 100_000: iso_class = 6
            elif p05 <= 1_000_000: iso_class = 7
            elif p05 <= 3_520_000: iso_class = 8
            else: iso_class = 9
        out.append({
            "room_id": rid,
            "particles_0_5um": p05,
            "particles_5um": p5,
            "iso_class": iso_class,
        })
    return out

View file

@ -0,0 +1,75 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Static VESDA zone topology per site — mirrors the simulator config.
VESDA_ZONES = {
    "sg-01": [
        {"zone_id": "vesda-hall-a", "room_id": "hall-a"},
        {"zone_id": "vesda-hall-b", "room_id": "hall-b"},
    ]
}
# Rounded vesda_level reading -> display name.
LEVEL_MAP = {0: "normal", 1: "alert", 2: "action", 3: "fire"}
# Every sensor_type a VESDA unit emits (used to build the SQL IN clause).
VESDA_TYPES = ("vesda_level", "vesda_obscuration", "vesda_det1", "vesda_det2",
               "vesda_power", "vesda_flow")
@router.get("/status")
async def fire_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest VESDA readings per fire zone.

    Returns one dict per configured zone; zones with no recent readings fall
    back to healthy defaults (level "normal", detectors/power/flow OK).
    """
    # Safe to inline into the SQL: VESDA_TYPES is a module constant, not input.
    types_sql = ", ".join(f"'{t}'" for t in VESDA_TYPES)
    result = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({types_sql})
          AND recorded_at > NOW() - INTERVAL '2 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # sensor_id is "/"-separated; the zone id is the third segment.
    zone_data: dict[str, dict] = {}
    for row in result.mappings().all():
        parts = row["sensor_id"].split("/")
        if len(parts) < 3:
            continue
        zone_id = parts[2]
        if zone_id not in zone_data:
            zone_data[zone_id] = {"zone_id": zone_id}
        v = float(row["value"])
        s_type = row["sensor_type"]
        if s_type == "vesda_level":
            zone_data[zone_id]["level"] = LEVEL_MAP.get(round(v), "normal")
        elif s_type == "vesda_obscuration":
            zone_data[zone_id]["obscuration_pct_m"] = round(v, 3)
        elif s_type == "vesda_det1":
            # Boolean channels are stored as 0/1 floats; > 0.5 means "true".
            zone_data[zone_id]["detector_1_ok"] = v > 0.5
        elif s_type == "vesda_det2":
            zone_data[zone_id]["detector_2_ok"] = v > 0.5
        elif s_type == "vesda_power":
            zone_data[zone_id]["power_ok"] = v > 0.5
        elif s_type == "vesda_flow":
            zone_data[zone_id]["flow_ok"] = v > 0.5
    zone_room_map = {z["zone_id"]: z["room_id"] for z in VESDA_ZONES.get(site_id, [])}
    out = []
    for zone_cfg in VESDA_ZONES.get(site_id, []):
        zone_id = zone_cfg["zone_id"]
        d = zone_data.get(zone_id, {"zone_id": zone_id})
        # Fill healthy defaults for any channel that did not report.
        d.setdefault("level", "normal")
        d.setdefault("obscuration_pct_m", None)
        d.setdefault("detector_1_ok", True)
        d.setdefault("detector_2_ok", True)
        d.setdefault("power_ok", True)
        d.setdefault("flow_ok", True)
        d["room_id"] = zone_room_map.get(zone_id)
        out.append(d)
    return out

View file

@ -0,0 +1,33 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import text
from core.database import get_session
router = APIRouter()
@router.get("")
async def get_floor_layout(site_id: str, db: AsyncSession = Depends(get_session)):
    """Return the saved floor layout JSON for a site, or 404 if none exists."""
    query = text("SELECT value FROM site_config WHERE site_id = :site_id AND key = 'floor_layout'")
    cursor = await db.execute(query, {"site_id": site_id})
    record = cursor.fetchone()
    if record is not None:
        return record[0]
    raise HTTPException(status_code=404, detail="No floor layout saved for this site")
@router.put("")
async def save_floor_layout(site_id: str, layout: dict, db: AsyncSession = Depends(get_session)):
    """Upsert the floor layout JSON for a site into site_config.

    The layout dict is serialized to JSON and stored under the
    (site_id, 'floor_layout') key; updated_at is refreshed on conflict.
    """
    import json  # local import replaces the original __import__("json") hack

    await db.execute(
        text("""
            INSERT INTO site_config (site_id, key, value, updated_at)
            VALUES (:site_id, 'floor_layout', CAST(:value AS jsonb), NOW())
            ON CONFLICT (site_id, key)
            DO UPDATE SET value = EXCLUDED.value, updated_at = NOW()
        """),
        {"site_id": site_id, "value": json.dumps(layout)},
    )
    await db.commit()
    return {"ok": True}

View file

@ -0,0 +1,138 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Generators per site — mirrors the simulator config.
GENERATORS = {"sg-01": ["gen-01"]}
# Raw sensor_type -> public API field name.
GEN_FIELD_MAP = {
    "gen_fuel_pct": "fuel_pct",
    "gen_fuel_l": "fuel_litres",
    "gen_fuel_rate": "fuel_rate_lph",
    "gen_load_kw": "load_kw",
    "gen_load_pct": "load_pct",
    "gen_run_hours": "run_hours",
    "gen_voltage_v": "voltage_v",
    "gen_freq_hz": "frequency_hz",
    "gen_rpm": "engine_rpm",
    "gen_oil_press": "oil_pressure_bar",
    "gen_coolant_c": "coolant_temp_c",
    "gen_exhaust_c": "exhaust_temp_c",
    "gen_alt_temp_c": "alternator_temp_c",
    "gen_pf": "power_factor",
    "gen_batt_v": "battery_v",
}
# Rounded gen_state reading -> display name.
STATE_MAP = {-1.0: "fault", 0.0: "standby", 1.0: "running", 2.0: "test"}
@router.get("/status")
async def generator_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest reading for each generator.

    Pivots the newest per-sensor rows into one dict per generator; generators
    with some data but no state reading default to "standby", and generators
    with no data at all report "unknown".
    """
    # GEN_FIELD_MAP keys are module constants, so inlining into SQL is safe.
    types_sql = ", ".join(f"'{t}'" for t in [*GEN_FIELD_MAP.keys(), "gen_state"])
    result = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({types_sql})
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # sensor_id is "/"-separated; the generator id is the third segment.
    gen_data: dict[str, dict] = {}
    for row in result.mappings().all():
        parts = row["sensor_id"].split("/")
        if len(parts) < 3:
            continue
        gen_id = parts[2]
        if gen_id not in gen_data:
            gen_data[gen_id] = {"gen_id": gen_id}
        field = GEN_FIELD_MAP.get(row["sensor_type"])
        if field:
            gen_data[gen_id][field] = round(float(row["value"]), 2)
        elif row["sensor_type"] == "gen_state":
            v = round(float(row["value"]))
            gen_data[gen_id]["state"] = STATE_MAP.get(v, "standby")
    out = []
    for gen_id in GENERATORS.get(site_id, []):
        d = gen_data.get(gen_id, {"gen_id": gen_id, "state": "unknown"})
        if "state" not in d:
            d["state"] = "standby"
        out.append(d)
    return out
# Metrics exposed by the /history time-series endpoint.
HISTORY_METRICS = (
    "gen_load_pct", "gen_fuel_pct", "gen_coolant_c",
    "gen_exhaust_c", "gen_freq_hz", "gen_alt_temp_c",
)
@router.get("/history")
async def generator_history(
    site_id: str = Query(...),
    gen_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """5-minute bucketed time-series for a single generator.

    Pivots per-metric rows into one dict per time bucket, keyed by the
    public field names (load_pct, fuel_pct, ...).
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    # HISTORY_METRICS is a module constant, so inlining into SQL is safe.
    types_sql = ", ".join(f"'{t}'" for t in HISTORY_METRICS)
    try:
        # Preferred path: TimescaleDB time_bucket().
        result = await session.execute(text(f"""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), {"site_id": site_id,
               "pattern": f"{site_id}/generator/{gen_id}/%",
               "from_time": from_time})
    except Exception:
        # Fallback for plain Postgres without the Timescale extension.
        result = await session.execute(text(f"""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), {"site_id": site_id,
               "pattern": f"{site_id}/generator/{gen_id}/%",
               "from_time": from_time})
    # sensor_type -> public field name. Hoisted out of the row loop — the
    # original rebuilt this dict on every iteration (loop-invariant work).
    key_map = {
        "gen_load_pct": "load_pct",
        "gen_fuel_pct": "fuel_pct",
        "gen_coolant_c": "coolant_temp_c",
        "gen_exhaust_c": "exhaust_temp_c",
        "gen_freq_hz": "frequency_hz",
        "gen_alt_temp_c": "alternator_temp_c",
    }
    # Pivot: bucket → {metric: value}
    buckets: dict[str, dict] = {}
    for row in result.mappings().all():
        b = row["bucket"].isoformat()
        buckets.setdefault(b, {"bucket": b})
        field = key_map.get(row["sensor_type"])
        if field:
            buckets[b][field] = float(row["avg_val"])
    return list(buckets.values())

View file

@ -0,0 +1,13 @@
from fastapi import APIRouter
from datetime import datetime, timezone
router = APIRouter()
@router.get("/health")
async def health_check():
    """Liveness probe: service name, status flag, and current UTC timestamp."""
    now_utc = datetime.now(timezone.utc)
    payload = {
        "status": "ok",
        "service": "DemoBMS API",
        "timestamp": now_utc.isoformat(),
    }
    return payload

View file

@ -0,0 +1,57 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Static topology metadata — mirrors simulator config
LEAK_SENSORS = {
"sg-01": [
{"sensor_id": "leak-01", "floor_zone": "crac-zone-a", "under_floor": True, "near_crac": True, "room_id": "hall-a"},
{"sensor_id": "leak-02", "floor_zone": "server-row-b1", "under_floor": True, "near_crac": False, "room_id": "hall-b"},
{"sensor_id": "leak-03", "floor_zone": "ups-room", "under_floor": False, "near_crac": False, "room_id": None},
]
}
@router.get("/status")
async def leak_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest state for all leak sensors, enriched with location metadata."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'leak'
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # sensor_id format: {site_id}/leak/{sensor_id} — keep the last segment.
    latest: dict[str, dict] = {}
    for row in result.mappings().all():
        segments = row["sensor_id"].split("/")
        if len(segments) < 3:
            continue
        tripped = float(row["value"]) > 0.5
        latest[segments[2]] = {
            "state": "detected" if tripped else "clear",
            "recorded_at": str(row["recorded_at"]),
        }
    # Merge DB state onto the static sensor inventory; sensors with no recent
    # reading are reported as "unknown".
    enriched = []
    for cfg in LEAK_SENSORS.get(site_id, []):
        reading = latest.get(cfg["sensor_id"])
        if reading is None:
            reading = {"state": "unknown", "recorded_at": None}
        enriched.append({**cfg, **reading})
    return enriched

View file

@ -0,0 +1,77 @@
import uuid
from datetime import datetime, timezone
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
router = APIRouter()
# In-memory store (demo — resets on restart)
_windows: list[dict] = []
class WindowCreate(BaseModel):
    """Request body for creating a maintenance window."""

    site_id: str
    title: str
    target: str  # "all", a room_id like "hall-a", or a rack_id like "rack-A01"
    target_label: str  # human-readable label
    start_dt: str  # ISO 8601
    end_dt: str  # ISO 8601
    suppress_alarms: bool = True  # active windows with this set suppress alarms
    notes: str = ""
def _window_status(w: dict) -> str:
now = datetime.now(timezone.utc).isoformat()
if w["end_dt"] < now:
return "expired"
if w["start_dt"] <= now:
return "active"
return "scheduled"
@router.get("")
async def list_windows(site_id: str = "sg-01"):
    """All maintenance windows for a site, each annotated with its status."""
    matching = (w for w in _windows if w["site_id"] == site_id)
    return [dict(w, status=_window_status(w)) for w in matching]
@router.post("", status_code=201)
async def create_window(body: WindowCreate):
    """Create a maintenance window in the volatile in-memory store."""
    short_id = str(uuid.uuid4())[:8]
    created_iso = datetime.now(timezone.utc).isoformat()
    record = {
        "id": short_id,
        "site_id": body.site_id,
        "title": body.title,
        "target": body.target,
        "target_label": body.target_label,
        "start_dt": body.start_dt,
        "end_dt": body.end_dt,
        "suppress_alarms": body.suppress_alarms,
        "notes": body.notes,
        "created_at": created_iso,
    }
    _windows.append(record)
    return dict(record, status=_window_status(record))
@router.delete("/{window_id}", status_code=204)
async def delete_window(window_id: str):
    """Remove a window by id; 404 when no such window exists."""
    global _windows
    kept = [w for w in _windows if w["id"] != window_id]
    if len(kept) == len(_windows):
        raise HTTPException(status_code=404, detail="Window not found")
    _windows = kept
@router.get("/active")
async def active_windows(site_id: str = "sg-01"):
    """Currently active suppression windows — used by alarm page for suppression check.

    Fix: compares parsed, timezone-aware datetimes instead of raw ISO strings;
    string comparison is wrong when stored timestamps mix "Z" / "+00:00" /
    naive forms. Naive timestamps are assumed to be UTC.
    """
    def _parse(value: str) -> datetime:
        # fromisoformat() rejects a "Z" suffix before Python 3.11.
        dt = datetime.fromisoformat(value.replace("Z", "+00:00"))
        return dt if dt.tzinfo is not None else dt.replace(tzinfo=timezone.utc)

    now = datetime.now(timezone.utc)
    return [
        w for w in _windows
        if w["site_id"] == site_id
        and w["suppress_alarms"]
        and _parse(w["start_dt"]) <= now <= _parse(w["end_dt"])
    ]

View file

@ -0,0 +1,69 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Static switch inventory per site — mirrors the simulator config.
SWITCHES = {
    "sg-01": [
        {"switch_id": "sw-core-01", "name": "Core Switch — Hall A", "model": "Cisco Catalyst C9300-48P", "room_id": "hall-a", "rack_id": "SG1A01.01", "port_count": 48, "role": "core"},
        {"switch_id": "sw-core-02", "name": "Core Switch — Hall B", "model": "Arista 7050CX3-32S", "room_id": "hall-b", "rack_id": "SG1B01.01", "port_count": 32, "role": "core"},
        {"switch_id": "sw-edge-01", "name": "Edge / Uplink Switch", "model": "Juniper EX4300-48T", "room_id": "hall-a", "rack_id": "SG1A01.05", "port_count": 48, "role": "edge"},
    ]
}
# Raw sensor_type -> public API field name.
NET_FIELD_MAP = {
    "net_uptime_s": "uptime_s",
    "net_active_ports": "active_ports",
    "net_bw_in_mbps": "bandwidth_in_mbps",
    "net_bw_out_mbps": "bandwidth_out_mbps",
    "net_cpu_pct": "cpu_pct",
    "net_mem_pct": "mem_pct",
    "net_temp_c": "temperature_c",
    "net_pkt_loss_pct": "packet_loss_pct",
}
# Rounded net_state reading -> display name.
STATE_MAP = {0.0: "up", 1.0: "degraded", 2.0: "down"}
@router.get("/status")
async def network_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest reading for each network switch.

    Merges static SWITCHES metadata with the newest per-sensor values;
    switches with no recent net_state reading report state "unknown".
    """
    # NET_FIELD_MAP keys are module constants, so inlining into SQL is safe.
    types_sql = ", ".join(f"'{t}'" for t in [*NET_FIELD_MAP.keys(), "net_state"])
    result = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({types_sql})
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # sensor_id is "/"-separated; the switch id is the third segment.
    sw_data: dict[str, dict] = {}
    for row in result.mappings().all():
        parts = row["sensor_id"].split("/")
        if len(parts) < 3:
            continue
        sw_id = parts[2]
        if sw_id not in sw_data:
            sw_data[sw_id] = {}
        field = NET_FIELD_MAP.get(row["sensor_type"])
        if field:
            sw_data[sw_id][field] = round(float(row["value"]), 2)
        elif row["sensor_type"] == "net_state":
            v = round(float(row["value"]))
            sw_data[sw_id]["state"] = STATE_MAP.get(v, "unknown")
    out = []
    for sw_cfg in SWITCHES.get(site_id, []):
        sw_id = sw_cfg["switch_id"]
        # Static config first so live readings override nothing structural.
        d = {**sw_cfg, **sw_data.get(sw_id, {})}
        if "state" not in d:
            d["state"] = "unknown"
        out.append(d)
    return out

460
backend/api/routes/power.py Normal file
View file

@ -0,0 +1,460 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Topology — mirrors simulator config
ROOMS = {
"sg-01": [
{"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)]},
{"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)]},
]
}
ATS_UNITS = {"sg-01": ["ats-01"]}
GENERATORS = {"sg-01": ["gen-01"]}
ACTIVE_FEED_MAP = {0.0: "utility-a", 1.0: "utility-b", 2.0: "generator"}
# Singapore commercial electricity tariff (SGD / kWh, approximate)
TARIFF_SGD_KWH = 0.298
@router.get("/rack-breakdown")
async def rack_power_breakdown(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest kW reading per rack, grouped by room."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, value AS power_kw
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'power_kw'
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    latest_by_rack = {row["rack_id"]: dict(row) for row in result.mappings().all()}

    def _rack_entry(rack_id: str) -> dict:
        # Racks without a recent reading report power_kw as None.
        row = latest_by_rack.get(rack_id)
        kw = round(float(row["power_kw"]), 2) if row else None
        return {"rack_id": rack_id, "power_kw": kw}

    return [
        {"room_id": room["room_id"], "racks": [_rack_entry(r) for r in room["racks"]]}
        for room in ROOMS.get(site_id, [])
    ]
@router.get("/room-history")
async def room_power_history(
    site_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Total power per room bucketed by 5 minutes — for a multi-line trend chart.

    The inner query averages each rack sensor within the bucket; the outer
    query sums those per-rack averages per room.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    # Prefer TimescaleDB time_bucket(); fall back to per-minute date_trunc
    # buckets when the extension is unavailable.
    try:
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(SUM(avg_per_rack)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(SUM(avg_per_rack)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    return [dict(r) for r in result.mappings().all()]
@router.get("/ups")
async def ups_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest UPS readings, one summary dict per UPS unit.

    State comes from the stored ups_state channel when present; otherwise a
    charge-based heuristic (< 20% => "battery") is used.
    """
    # Lookup tables hoisted out of the row loop — the original rebuilt
    # key_map on every iteration (loop-invariant work).
    key_map = {
        "ups_charge": "charge_pct",
        "ups_load": "load_pct",
        "ups_runtime": "runtime_min",
        "ups_state": "_state_raw",
        "ups_voltage": "voltage_v",
    }
    state_map = {0.0: "online", 1.0: "battery", 2.0: "overload"}
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('ups_charge', 'ups_load', 'ups_runtime', 'ups_state', 'ups_voltage')
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    rows = result.mappings().all()
    # sensor_id format: sg-01/power/ups-01/charge_pct
    ups_data: dict[str, dict] = {}
    for row in rows:
        parts = row["sensor_id"].split("/")
        if len(parts) < 3:
            continue
        ups_id = parts[2]
        if ups_id not in ups_data:
            ups_data[ups_id] = {"ups_id": ups_id}
        field = key_map.get(row["sensor_type"])
        if field:
            ups_data[ups_id][field] = round(float(row["value"]), 1)
    result_list = []
    for ups_id, d in sorted(ups_data.items()):
        # Use stored state if available; fall back to charge heuristic only if state never arrived
        state_raw = d.get("_state_raw")
        if state_raw is not None:
            state = state_map.get(round(state_raw), "online")
        else:
            charge = d.get("charge_pct")
            state = "battery" if (charge is not None and charge < 20.0) else "online"
        result_list.append({
            "ups_id": ups_id,
            "state": state,
            "charge_pct": d.get("charge_pct"),
            "load_pct": d.get("load_pct"),
            "runtime_min": d.get("runtime_min"),
            "voltage_v": d.get("voltage_v"),
        })
    return result_list
@router.get("/ups/history")
async def ups_history(
    site_id: str = Query(...),
    ups_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """5-minute bucketed trend for a single UPS: charge, load, runtime, voltage."""
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    # Fixed literal list, not user input — safe to inline into the SQL.
    types_sql = "'ups_charge', 'ups_load', 'ups_runtime', 'ups_voltage'"
    # Prefer TimescaleDB time_bucket(); fall back to per-minute date_trunc
    # buckets when the extension is unavailable.
    try:
        result = await session.execute(text(f"""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), {"site_id": site_id,
               "pattern": f"{site_id}/power/{ups_id}/%",
               "from_time": from_time})
    except Exception:
        result = await session.execute(text(f"""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), {"site_id": site_id,
               "pattern": f"{site_id}/power/{ups_id}/%",
               "from_time": from_time})
    # sensor_type -> public field name for the pivoted output.
    KEY_MAP = {
        "ups_charge": "charge_pct",
        "ups_load": "load_pct",
        "ups_runtime": "runtime_min",
        "ups_voltage": "voltage_v",
    }
    # Pivot rows into one dict per time bucket.
    buckets: dict[str, dict] = {}
    for row in result.mappings().all():
        b = row["bucket"].isoformat()
        buckets.setdefault(b, {"bucket": b})
        field = KEY_MAP.get(row["sensor_type"])
        if field:
            buckets[b][field] = float(row["avg_val"])
    return list(buckets.values())
@router.get("/ats")
async def ats_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest ATS transfer switch readings.

    Pivots per-channel rows into one dict per ATS; channels that did not
    report fall back to stable / utility-a defaults.
    """
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('ats_active', 'ats_state', 'ats_xfer_count',
                              'ats_xfer_ms', 'ats_ua_v', 'ats_ub_v', 'ats_gen_v')
          AND recorded_at > NOW() - INTERVAL '2 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    ats_data: dict[str, dict] = {}
    for row in result.mappings().all():
        parts = row["sensor_id"].split("/")
        # sensor_id: {site}/power/ats/{ats_id}/{key} → parts[3]
        if len(parts) < 4:
            continue
        ats_id = parts[3]
        if ats_id not in ats_data:
            ats_data[ats_id] = {"ats_id": ats_id}
        v = float(row["value"])
        s_type = row["sensor_type"]
        if s_type == "ats_active":
            ats_data[ats_id]["active_feed"] = ACTIVE_FEED_MAP.get(round(v), "utility-a")
        elif s_type == "ats_state":
            # 0/1 flag: above 0.5 means a transfer is in progress.
            ats_data[ats_id]["state"] = "transferring" if v > 0.5 else "stable"
        elif s_type == "ats_xfer_count":
            ats_data[ats_id]["transfer_count"] = int(v)
        elif s_type == "ats_xfer_ms":
            # Non-positive values are reported as None.
            ats_data[ats_id]["last_transfer_ms"] = round(v, 0) if v > 0 else None
        elif s_type == "ats_ua_v":
            ats_data[ats_id]["utility_a_v"] = round(v, 1)
        elif s_type == "ats_ub_v":
            ats_data[ats_id]["utility_b_v"] = round(v, 1)
        elif s_type == "ats_gen_v":
            ats_data[ats_id]["generator_v"] = round(v, 1)
    out = []
    for ats_id in ATS_UNITS.get(site_id, []):
        d = ats_data.get(ats_id, {"ats_id": ats_id})
        d.setdefault("state", "stable")
        d.setdefault("active_feed", "utility-a")
        d.setdefault("transfer_count", 0)
        d.setdefault("last_transfer_ms", None)
        out.append(d)
    return out
@router.get("/phase")
async def pdu_phase_breakdown(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Per-phase kW, amps, and imbalance % for every rack PDU.

    Racks with no recent readings report None for every phase field.
    (The original also accumulated a rack_rooms dict that was never read;
    that dead code has been removed.)
    """
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('pdu_phase_a_kw', 'pdu_phase_b_kw', 'pdu_phase_c_kw',
                              'pdu_phase_a_a', 'pdu_phase_b_a', 'pdu_phase_c_a',
                              'pdu_imbalance')
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # sensor_type -> public field name.
    FIELD_MAP = {
        "pdu_phase_a_kw": "phase_a_kw",
        "pdu_phase_b_kw": "phase_b_kw",
        "pdu_phase_c_kw": "phase_c_kw",
        "pdu_phase_a_a": "phase_a_a",
        "pdu_phase_b_a": "phase_b_a",
        "pdu_phase_c_a": "phase_c_a",
        "pdu_imbalance": "imbalance_pct",
    }
    # (rack_id, sensor_type) -> latest rounded value.
    rack_map: dict[tuple[str, str], float] = {}
    for row in result.mappings().all():
        rack_id = row["rack_id"]
        if rack_id:
            rack_map[(rack_id, row["sensor_type"])] = round(float(row["value"]), 2)
    rooms = []
    for room in ROOMS.get(site_id, []):
        racks = []
        for rack_id in room["racks"]:
            entry: dict = {"rack_id": rack_id, "room_id": room["room_id"]}
            for s_type, field in FIELD_MAP.items():
                entry[field] = rack_map.get((rack_id, s_type))
            racks.append(entry)
        rooms.append({"room_id": room["room_id"], "racks": racks})
    return rooms
@router.get("/redundancy")
async def power_redundancy(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Compute power redundancy level: 2N, N+1, or N.

    Heuristic: counts UPS units with > 10% charge, checks that a generator
    reports a non-negative (non-fault) state, and reports the current ATS feed.
    """
    # Count UPS units online (charge above a 10% usable floor)
    ups_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'ups_charge'
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    ups_rows = ups_result.mappings().all()
    # sum(generator) avoids materialising a throwaway list (was len([...])).
    ups_online = sum(1 for r in ups_rows if float(r["value"]) > 10.0)
    ups_total = len(ups_rows)
    # ATS active feed
    ats_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'ats_active'
          AND recorded_at > NOW() - INTERVAL '2 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    ats_rows = ats_result.mappings().all()
    ats_active_feed = None
    if ats_rows:
        ats_active_feed = ACTIVE_FEED_MAP.get(round(float(ats_rows[0]["value"])), "utility-a")
    # Generator available (not fault)
    gen_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'gen_state'
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    gen_rows = gen_result.mappings().all()
    # any() with a generator replaces the original len([...]) > 0 idiom.
    gen_available = any(float(r["value"]) >= 0.0 for r in gen_rows)
    # Derive level
    if ups_total >= 2 and ups_online >= 2 and gen_available:
        level = "2N"
    elif ups_online >= 1 and gen_available:
        level = "N+1"
    else:
        level = "N"
    return {
        "site_id": site_id,
        "level": level,
        "ups_total": ups_total,
        "ups_online": ups_online,
        "generator_ok": gen_available,
        "ats_active_feed": ats_active_feed,
        "notes": (
            "Dual UPS + generator = 2N" if level == "2N" else
            "Single path active — reduced redundancy" if level == "N" else
            "N+1 — one redundant path available"
        ),
    }
@router.get("/utility")
async def utility_power(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Current total IT load and estimated month-to-date / annual energy cost."""
    # Latest total IT load: sum of the newest reading per power sensor.
    kw_result = await session.execute(text("""
        SELECT ROUND(SUM(value)::numeric, 2) AS total_kw
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), {"site_id": site_id})
    kw_row = kw_result.mappings().first()
    total_kw = float(kw_row["total_kw"] or 0) if kw_row else 0.0
    # Estimated month-to-date kWh from readings since start of month.
    # Each retained per-minute sample is weighted as 5/60 h — presumably
    # matching the effective sampling cadence; TODO confirm vs the simulator.
    from_month = datetime.now(timezone.utc).replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    kwh_result = await session.execute(text("""
        SELECT ROUND((SUM(value) * 5.0 / 60.0)::numeric, 1) AS kwh_mtd
        FROM (
            SELECT DISTINCT ON (sensor_id, date_trunc('minute', recorded_at))
                sensor_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > :from_month
            ORDER BY sensor_id, date_trunc('minute', recorded_at), recorded_at DESC
        ) bucketed
    """), {"site_id": site_id, "from_month": from_month})
    kwh_row = kwh_result.mappings().first()
    kwh_mtd = float(kwh_row["kwh_mtd"] or 0) if kwh_row else 0.0
    cost_mtd = round(kwh_mtd * TARIFF_SGD_KWH, 2)
    # Annualise from month-to-date pace. datetime.day is always >= 1, so the
    # original dead "day == 0" branch (and an unused days_in_month local)
    # have been removed.
    day_of_month = datetime.now(timezone.utc).day
    kwh_annual_est = round(kwh_mtd / day_of_month * 365, 0)
    cost_annual_est = round(kwh_annual_est * TARIFF_SGD_KWH, 2)
    return {
        "site_id": site_id,
        "total_kw": total_kw,
        "tariff_sgd_kwh": TARIFF_SGD_KWH,
        "kwh_month_to_date": kwh_mtd,
        "cost_sgd_mtd": cost_mtd,
        "kwh_annual_est": kwh_annual_est,
        "cost_sgd_annual_est": cost_annual_est,
        "currency": "SGD",
    }

View file

@ -0,0 +1,229 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
@router.get("/latest")
async def get_latest_readings(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Most recent reading per sensor for a site (last 10 minutes)."""
    rows = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, site_id, room_id, rack_id, value, unit, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    return [dict(mapping) for mapping in rows.mappings().all()]
@router.get("/kpis")
async def get_site_kpis(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Aggregate KPIs for the overview dashboard."""
    params = {"site_id": site_id}
    power = await session.execute(text("""
        SELECT COALESCE(SUM(value), 0) AS total_power_kw
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, value
            FROM readings
            WHERE site_id = :site_id AND sensor_type = 'power_kw'
              AND recorded_at > NOW() - INTERVAL '5 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), params)
    temp = await session.execute(text("""
        SELECT COALESCE(AVG(value), 0) AS avg_temp
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, value
            FROM readings
            WHERE site_id = :site_id AND sensor_type = 'temperature'
              AND recorded_at > NOW() - INTERVAL '5 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), params)
    alarms = await session.execute(text("""
        SELECT COUNT(*) AS alarm_count
        FROM alarms
        WHERE site_id = :site_id AND state = 'active'
    """), params)
    total_power = float(power.mappings().one()["total_power_kw"])
    mean_temp = float(temp.mappings().one()["avg_temp"])
    active_count = int(alarms.mappings().one()["alarm_count"])
    # With facility power modelled as IT * 0.87, this ratio is a constant
    # 1/0.87 ≈ 1.15 whenever there is load — a demo placeholder, not real PUE.
    pue = round(total_power / (total_power * 0.87), 2) if total_power > 0 else 0.0
    return {
        "total_power_kw": round(total_power, 1),
        "pue": pue,
        "avg_temperature": round(mean_temp, 1),
        "active_alarms": active_count,
    }
@router.get("/site-power-history")
async def get_site_power_history(
    site_id: str = Query(...),
    hours: int = Query(1, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Total power (kW) bucketed by 5 minutes — for the power trend chart.

    The inner query averages each sensor within the bucket; the outer query
    sums the per-sensor averages into a site total.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    # Prefer TimescaleDB time_bucket(); fall back to per-minute date_trunc
    # buckets when the extension is unavailable.
    try:
        result = await session.execute(text("""
            SELECT bucket, ROUND(SUM(avg_per_sensor)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    sensor_id,
                    AVG(value) AS avg_per_sensor
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id
            ) per_sensor
            GROUP BY bucket
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        result = await session.execute(text("""
            SELECT bucket, ROUND(SUM(avg_per_sensor)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    sensor_id,
                    AVG(value) AS avg_per_sensor
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id
            ) per_sensor
            GROUP BY bucket
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    return [dict(r) for r in result.mappings().all()]
@router.get("/room-temp-history")
async def get_room_temp_history(
    site_id: str = Query(...),
    hours: int = Query(1, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Average temperature per room bucketed by 5 minutes — for the temp trend chart.

    The inner query averages each sensor within the bucket; the outer query
    averages those per-rack values per room.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    # Prefer TimescaleDB time_bucket(); fall back to per-minute date_trunc
    # buckets when the extension is unavailable.
    try:
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 2) AS avg_temp
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'temperature'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 2) AS avg_temp
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'temperature'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    return [dict(r) for r in result.mappings().all()]
@router.get("/room-status")
async def get_room_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Current per-room summary: avg temp, total power, alarm count, status."""
    temp = await session.execute(text("""
        SELECT room_id, ROUND(AVG(value)::numeric, 1) AS avg_temp
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, room_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'temperature'
              AND room_id IS NOT NULL
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
        GROUP BY room_id
    """), {"site_id": site_id})
    power = await session.execute(text("""
        SELECT room_id, ROUND(SUM(value)::numeric, 1) AS total_kw
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, room_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND room_id IS NOT NULL
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
        GROUP BY room_id
    """), {"site_id": site_id})
    # MIN, not MAX: 'critical' < 'warning' lexicographically, so MIN picks the
    # worst severity.  (MAX would report 'warning' even when criticals exist.)
    alarm_counts = await session.execute(text("""
        SELECT room_id, COUNT(*) AS alarm_count, MIN(severity) AS worst_severity
        FROM alarms
        WHERE site_id = :site_id AND state = 'active' AND room_id IS NOT NULL
        GROUP BY room_id
    """), {"site_id": site_id})
    temp_map = {r["room_id"]: float(r["avg_temp"]) for r in temp.mappings().all()}
    power_map = {r["room_id"]: float(r["total_kw"]) for r in power.mappings().all()}
    alarm_map = {r["room_id"]: (int(r["alarm_count"]), r["worst_severity"])
                 for r in alarm_counts.mappings().all()}
    # Union of every room that has recent readings OR active alarms, so a room
    # whose sensors went dark but is still alarming is not silently dropped.
    rooms = sorted(set(temp_map) | set(power_map) | set(alarm_map))
    result = []
    for room_id in rooms:
        avg_temp = temp_map.get(room_id, 0.0)
        alarm_cnt, ws = alarm_map.get(room_id, (0, None))
        status = "ok"
        if ws == "critical" or avg_temp >= 30:
            status = "critical"
        elif ws == "warning" or avg_temp >= 26:
            status = "warning"
        result.append({
            "room_id": room_id,
            "avg_temp": avg_temp,
            "total_kw": power_map.get(room_id, 0.0),
            "alarm_count": alarm_cnt,
            "status": status,
        })
    return result

View file

@ -0,0 +1,356 @@
import csv
import io
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from fastapi.responses import StreamingResponse
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Electricity tariff used for cost estimates (SGD per kWh).
TARIFF_SGD_KWH = 0.298
# Static site topology: each room's rack ids and the CRAC unit serving it.
# NOTE(review): presumably mirrors the simulator layout — keep in sync.
ROOMS = {
    "sg-01": [
        {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"},
        {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"},
    ]
}
# UPS unit ids per site — used for the uptime section of /summary.
UPS_IDS = {"sg-01": ["ups-01", "ups-02"]}
@router.get("/energy")
async def energy_report(
    site_id: str = Query(...),
    days: int = Query(30, ge=1, le=90),
    session: AsyncSession = Depends(get_session),
):
    """kWh consumption, cost, and 30-day PUE trend.

    kWh is computed by averaging each power sensor within a time bucket,
    summing sensors to get the site kW per bucket, then integrating over
    time (5-minute buckets contribute kW/12 kWh each; the 1-minute
    fallback contributes kW/60 each).
    """
    from_time = datetime.now(timezone.utc) - timedelta(days=days)
    # Total kWh over period.  The inner GROUP BY must include sensor_id:
    # averaging all sensors together would yield the *mean* sensor kW per
    # bucket instead of the site total, undercounting by ~sensor count.
    try:
        kwh_result = await session.execute(text("""
            SELECT ROUND((SUM(bucket_kw) / 12.0)::numeric, 1) AS kwh_total
            FROM (
                SELECT bucket, SUM(avg_kw) AS bucket_kw
                FROM (
                    SELECT
                        time_bucket('5 minutes', recorded_at) AS bucket,
                        sensor_id,
                        AVG(value) AS avg_kw
                    FROM readings
                    WHERE site_id = :site_id
                      AND sensor_type = 'power_kw'
                      AND recorded_at > :from_time
                    GROUP BY bucket, sensor_id
                ) per_sensor
                GROUP BY bucket
            ) bucketed
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        # time_bucket unavailable (no TimescaleDB): roll back the aborted
        # transaction, then use 1-minute buckets — divide by 60, not 12.
        await session.rollback()
        kwh_result = await session.execute(text("""
            SELECT ROUND((SUM(bucket_kw) / 60.0)::numeric, 1) AS kwh_total
            FROM (
                SELECT bucket, SUM(avg_kw) AS bucket_kw
                FROM (
                    SELECT
                        date_trunc('minute', recorded_at) AS bucket,
                        sensor_id,
                        AVG(value) AS avg_kw
                    FROM readings
                    WHERE site_id = :site_id
                      AND sensor_type = 'power_kw'
                      AND recorded_at > :from_time
                    GROUP BY bucket, sensor_id
                ) per_sensor
                GROUP BY bucket
            ) bucketed
        """), {"site_id": site_id, "from_time": from_time})
    kwh_row = kwh_result.mappings().first()
    kwh_total = float(kwh_row["kwh_total"] or 0) if kwh_row else 0.0
    cost_sgd = round(kwh_total * TARIFF_SGD_KWH, 2)
    # PUE daily average.  Only IT load is metered, so a true PUE
    # (facility / IT, typically ~1.4–1.6) is not measurable here — report a
    # day-by-day IT-load trend with a fixed overhead-based PUE estimate.
    try:
        pue_result = await session.execute(text("""
            SELECT
                time_bucket('1 day', recorded_at) AS day,
                ROUND(AVG(value)::numeric, 2) AS avg_it_kw
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > :from_time
            GROUP BY day
            ORDER BY day ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        # Same aborted-transaction recovery as above before the fallback.
        await session.rollback()
        pue_result = await session.execute(text("""
            SELECT
                date_trunc('day', recorded_at) AS day,
                ROUND(AVG(value)::numeric, 2) AS avg_it_kw
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > :from_time
            GROUP BY day
            ORDER BY day ASC
        """), {"site_id": site_id, "from_time": from_time})
    # Estimated PUE: assume ~40% overhead (cooling + lighting + UPS losses).
    OVERHEAD_FACTOR = 1.40
    pue_trend = [
        {
            "day": str(r["day"]),
            "avg_it_kw": float(r["avg_it_kw"]),
            "pue_est": round(OVERHEAD_FACTOR, 2),
        }
        for r in pue_result.mappings().all()
    ]
    return {
        "site_id": site_id,
        "period_days": days,
        "from_date": from_time.date().isoformat(),
        "to_date": datetime.now(timezone.utc).date().isoformat(),
        "kwh_total": kwh_total,
        "cost_sgd": cost_sgd,
        "tariff_sgd_kwh": TARIFF_SGD_KWH,
        "currency": "SGD",
        "pue_estimated": OVERHEAD_FACTOR,
        "pue_trend": pue_trend,
    }
async def _uptime_pct(
    session: AsyncSession,
    site_id: str,
    pattern: str,
    sensor_type: str,
    from_time: datetime,
) -> float:
    """Percent of the last 24 h with at least one matching reading.

    Counts distinct 5-minute buckets containing data (288 possible).  When
    TimescaleDB's ``time_bucket`` is unavailable the first statement fails,
    so we roll the aborted transaction back and count 1-minute buckets —
    normalised against 1440 windows so the fallback does not over-report.
    """
    try:
        r = await session.execute(text("""
            SELECT COUNT(DISTINCT time_bucket('5 minutes', recorded_at)) AS buckets
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type = :sensor_type
              AND recorded_at > :from_time
        """), {"site_id": site_id, "pattern": pattern,
               "sensor_type": sensor_type, "from_time": from_time})
        total_buckets = 24 * 12
    except Exception:
        await session.rollback()
        r = await session.execute(text("""
            SELECT COUNT(DISTINCT date_trunc('minute', recorded_at)) AS buckets
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type = :sensor_type
              AND recorded_at > :from_time
        """), {"site_id": site_id, "pattern": pattern,
               "sensor_type": sensor_type, "from_time": from_time})
        total_buckets = 24 * 60
    row = r.mappings().first()
    buckets = int(row["buckets"]) if row and row["buckets"] else 0
    return round(min(100.0, buckets / total_buckets * 100), 1)
@router.get("/summary")
async def site_summary(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Site-level summary: KPIs, alarm stats, CRAC uptime%, UPS uptime%."""
    # KPIs — most recent reading per sensor within the last 10 minutes.
    kpi_res = await session.execute(text("""
        SELECT
            ROUND(SUM(CASE WHEN sensor_type = 'power_kw' THEN value END)::numeric, 2) AS total_power_kw,
            ROUND(AVG(CASE WHEN sensor_type = 'temperature' THEN value END)::numeric, 1) AS avg_temperature
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, sensor_type, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type IN ('power_kw', 'temperature')
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), {"site_id": site_id})
    kpi_row = kpi_res.mappings().first() or {}
    # Alarm stats (all-time by state/severity)
    alarm_res = await session.execute(text("""
        SELECT state, severity, COUNT(*) AS cnt
        FROM alarms
        WHERE site_id = :site_id
        GROUP BY state, severity
    """), {"site_id": site_id})
    alarm_stats: dict = {"active": 0, "acknowledged": 0, "resolved": 0, "critical": 0, "warning": 0}
    for row in alarm_res.mappings().all():
        if row["state"] in alarm_stats:
            alarm_stats[row["state"]] += int(row["cnt"])
        if row["severity"] in ("critical", "warning"):
            alarm_stats[row["severity"]] += int(row["cnt"])
    # CRAC / UPS uptime % over the last 24 h via the shared helper.
    from_24h = datetime.now(timezone.utc) - timedelta(hours=24)
    cracs = [
        {
            "crac_id": room["crac_id"],
            "room_id": room["room_id"],
            "uptime_pct": await _uptime_pct(
                session, site_id, f"{site_id}/cooling/{room['crac_id']}/%",
                "cooling_supply", from_24h),
        }
        for room in ROOMS.get(site_id, [])
    ]
    ups_units = [
        {
            "ups_id": ups_id,
            "uptime_pct": await _uptime_pct(
                session, site_id, f"{site_id}/ups/{ups_id}/%",
                "ups_charge", from_24h),
        }
        for ups_id in UPS_IDS.get(site_id, [])
    ]
    return {
        "site_id": site_id,
        "generated_at": datetime.now(timezone.utc).isoformat(),
        "kpis": {
            "total_power_kw": float(kpi_row.get("total_power_kw") or 0),
            "avg_temperature": float(kpi_row.get("avg_temperature") or 0),
        },
        "alarm_stats": alarm_stats,
        "crac_uptime": cracs,
        "ups_uptime": ups_units,
    }
@router.get("/export/power")
async def export_power(
    site_id: str = Query(...),
    hours: int = Query(24, ge=1, le=168),
    session: AsyncSession = Depends(get_session),
):
    """Download power history as CSV (one row per 5-min bucket per room)."""
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    try:
        result = await session.execute(text("""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                room_id,
                ROUND(SUM(value)::numeric, 2) AS total_kw
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND room_id IS NOT NULL
              AND recorded_at > :from_time
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        # The failed time_bucket statement aborts the transaction; roll back
        # before the plain-PostgreSQL fallback or it fails too.
        await session.rollback()
        result = await session.execute(text("""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                room_id,
                ROUND(SUM(value)::numeric, 2) AS total_kw
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND room_id IS NOT NULL
              AND recorded_at > :from_time
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    output = io.StringIO()
    writer = csv.writer(output)
    writer.writerow(["timestamp", "room_id", "total_kw"])
    for row in result.mappings().all():
        writer.writerow([row["bucket"], row["room_id"], row["total_kw"]])
    output.seek(0)
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename=power_{site_id}_{hours}h.csv"},
    )
@router.get("/export/temperature")
async def export_temperature(
    site_id: str = Query(...),
    hours: int = Query(24, ge=1, le=168),
    session: AsyncSession = Depends(get_session),
):
    """Download temperature history per rack as CSV."""
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    try:
        result = await session.execute(text("""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                rack_id, room_id,
                ROUND(AVG(value)::numeric, 1) AS avg_temp
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'temperature'
              AND rack_id IS NOT NULL
              AND recorded_at > :from_time
            GROUP BY bucket, rack_id, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        # Roll back the aborted transaction before retrying with date_trunc,
        # otherwise the fallback raises InFailedSQLTransaction.
        await session.rollback()
        result = await session.execute(text("""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                rack_id, room_id,
                ROUND(AVG(value)::numeric, 1) AS avg_temp
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'temperature'
              AND rack_id IS NOT NULL
              AND recorded_at > :from_time
            GROUP BY bucket, rack_id, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    output = io.StringIO()
    writer = csv.writer(output)
    writer.writerow(["timestamp", "room_id", "rack_id", "avg_temp_c"])
    for row in result.mappings().all():
        writer.writerow([row["bucket"], row["room_id"], row["rack_id"], row["avg_temp"]])
    output.seek(0)
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename=temperature_{site_id}_{hours}h.csv"},
    )
@router.get("/export/alarms")
async def export_alarms(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Download full alarm log as CSV."""
    # Single column list drives both the header row and the value extraction.
    columns = ["id", "severity", "message", "state", "room_id", "rack_id", "triggered_at"]
    result = await session.execute(text("""
        SELECT id, severity, message, state, room_id, rack_id, triggered_at
        FROM alarms
        WHERE site_id = :site_id
        ORDER BY triggered_at DESC
    """), {"site_id": site_id})
    buf = io.StringIO()
    writer = csv.writer(buf)
    writer.writerow(columns)
    writer.writerows(
        [record[col] for col in columns]
        for record in result.mappings().all()
    )
    buf.seek(0)
    return StreamingResponse(
        iter([buf.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename=alarms_{site_id}.csv"},
    )

View file

@ -0,0 +1,248 @@
"""
Scenario control API: proxies trigger/reset commands to the MQTT broker
so the frontend can fire simulator scenarios over HTTP.
"""
import json
import asyncio
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import Optional
import aiomqtt
from core.config import settings
router = APIRouter()
# ── Scenario catalogue ───────────────────────────────────────────────────────
# Mirrors the definitions in simulators/scenarios/runner.py and compound.py.
# Kept here so the frontend has a single typed source of truth.
# Each entry: machine name, UI label/description, rough duration, whether it
# is a compound (multi-bot, time-sequenced) scenario, and the valid targets.
SCENARIOS = [
    # ── Compound (multi-bot, time-sequenced) ─────────────────────────────────
    {
        "name": "HOT_NIGHT",
        "label": "Hot Night",
        "description": "CRAC-01 compressor trips silently. The backup unit overworks itself. Rack temps climb, power draw rises, VESDA alert fires.",
        "duration": "~10 min",
        "compound": True,
        "default_target": None,
        "targets": [],
    },
    {
        "name": "GENERATOR_TEST_GONE_WRONG",
        "label": "Generator Test Gone Wrong",
        "description": "Planned ATS transfer to generator. Generator was low on fuel and faults after 15 min. UPS must carry full site load alone.",
        "duration": "~16 min",
        "compound": True,
        "default_target": None,
        "targets": [],
    },
    {
        "name": "SLOW_BURN",
        "label": "Slow Burn",
        "description": "A dirty filter nobody noticed. Airflow degrades for 30 min. Temps creep, humidity climbs, VESDA alerts, then CRAC trips on thermal protection.",
        "duration": "~30 min",
        "compound": True,
        "default_target": None,
        "targets": [],
    },
    {
        "name": "LAST_RESORT",
        "label": "Last Resort",
        "description": "Utility fails. Generator starts then faults after 2 minutes. UPS absorbs the full load, overheats, and VESDA escalates to fire.",
        "duration": "~9 min",
        "compound": True,
        "default_target": None,
        "targets": [],
    },
    # ── Cooling ──────────────────────────────────────────────────────────────
    {
        "name": "COOLING_FAILURE",
        "label": "Cooling Failure",
        "description": "CRAC unit goes offline — rack temperatures rise rapidly.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "crac-01",
        "targets": ["crac-01", "crac-02"],
    },
    {
        "name": "FAN_DEGRADATION",
        "label": "Fan Degradation",
        "description": "CRAC fan bearing wear — fan speed drops, ΔT rises over ~25 min.",
        "duration": "~25 min",
        "compound": False,
        "default_target": "crac-01",
        "targets": ["crac-01", "crac-02"],
    },
    {
        "name": "COMPRESSOR_FAULT",
        "label": "Compressor Fault",
        "description": "Compressor trips — unit drops to fan-only, cooling capacity collapses to ~8%.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "crac-01",
        "targets": ["crac-01", "crac-02"],
    },
    {
        "name": "DIRTY_FILTER",
        "label": "Dirty Filter",
        "description": "Filter fouling — ΔP rises, airflow and capacity degrade over time.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "crac-01",
        "targets": ["crac-01", "crac-02"],
    },
    {
        "name": "HIGH_TEMPERATURE",
        "label": "High Temperature",
        "description": "Gradual ambient heat rise — slower than a full cooling failure.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "hall-a",
        "targets": ["hall-a", "hall-b"],
    },
    {
        "name": "HUMIDITY_SPIKE",
        "label": "Humidity Spike",
        "description": "Humidity climbs — condensation / humidifier fault risk.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "hall-a",
        "targets": ["hall-a", "hall-b"],
    },
    {
        "name": "CHILLER_FAULT",
        "label": "Chiller Fault",
        "description": "Chiller plant trips — chilled water supply lost.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "chiller-01",
        "targets": ["chiller-01"],
    },
    # ── Power ────────────────────────────────────────────────────────────────
    {
        "name": "UPS_MAINS_FAILURE",
        "label": "UPS Mains Failure",
        "description": "Mains power lost — UPS switches to battery and drains.",
        "duration": "~60 min",
        "compound": False,
        "default_target": "ups-01",
        "targets": ["ups-01", "ups-02"],
    },
    {
        "name": "POWER_SPIKE",
        "label": "Power Spike",
        "description": "PDU load surges across a room by up to 50%.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "hall-a",
        "targets": ["hall-a", "hall-b"],
    },
    {
        "name": "RACK_OVERLOAD",
        "label": "Rack Overload",
        # Fixed mangled en-dash: "~8595%" was a garbled "~85–95%".
        "description": "Single rack redlines at ~85–95% of rated 10 kW capacity.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "SG1A01.10",
        "targets": ["SG1A01.10", "SG1B01.10"],
    },
    {
        "name": "PHASE_IMBALANCE",
        "label": "Phase Imbalance",
        "description": "PDU phase A overloads, phase C drops — imbalance flag triggers.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "SG1A01.10/pdu",
        "targets": ["SG1A01.10/pdu", "SG1B01.10/pdu"],
    },
    {
        "name": "ATS_TRANSFER",
        "label": "ATS Transfer",
        "description": "Utility feed lost — ATS transfers load to generator.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "ats-01",
        "targets": ["ats-01"],
    },
    {
        "name": "GENERATOR_FAILURE",
        "label": "Generator Running",
        "description": "Generator starts and runs under load following a utility failure.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "gen-01",
        "targets": ["gen-01"],
    },
    {
        "name": "GENERATOR_LOW_FUEL",
        "label": "Generator Low Fuel",
        "description": "Generator fuel level drains to critical low.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "gen-01",
        "targets": ["gen-01"],
    },
    {
        "name": "GENERATOR_FAULT",
        "label": "Generator Fault",
        "description": "Generator fails — fault state, no output.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "gen-01",
        "targets": ["gen-01"],
    },
    # ── Environmental / Life Safety ──────────────────────────────────────────
    {
        "name": "LEAK_DETECTED",
        "label": "Leak Detected",
        "description": "Water leak sensor triggers a critical alarm.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "leak-01",
        "targets": ["leak-01", "leak-02", "leak-03"],
    },
    {
        "name": "VESDA_ALERT",
        "label": "VESDA Alert",
        "description": "Smoke obscuration rises into the Alert/Action band.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "vesda-hall-a",
        "targets": ["vesda-hall-a", "vesda-hall-b"],
    },
    {
        "name": "VESDA_FIRE",
        "label": "VESDA Fire",
        "description": "Smoke obscuration escalates to Fire level.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "vesda-hall-a",
        "targets": ["vesda-hall-a", "vesda-hall-b"],
    },
]
# ── Request / Response models ────────────────────────────────────────────────
class TriggerRequest(BaseModel):
    """Body for POST /trigger — scenario name plus optional target device."""
    scenario: str
    # Target device/room id; None lets the simulator fall back to the
    # scenario's default target.
    target: Optional[str] = None
# ── Endpoints ────────────────────────────────────────────────────────────────
@router.get("")
async def list_scenarios():
    """Return the static scenario catalogue for the frontend picker."""
    return SCENARIOS
@router.post("/trigger")
async def trigger_scenario(body: TriggerRequest):
    """Publish a scenario trigger to the simulator over MQTT.

    Returns 503 only when the broker is unreachable, so programming errors
    still surface as 500s instead of being masked as connectivity problems.
    """
    payload = json.dumps({"scenario": body.scenario, "target": body.target})
    try:
        async with aiomqtt.Client(settings.MQTT_HOST, port=settings.MQTT_PORT) as client:
            # qos=1 (at-least-once): a dropped trigger would silently break demos.
            await client.publish("bms/control/scenario", payload, qos=1)
    except (aiomqtt.MqttError, OSError) as e:
        # Narrowed from bare Exception; chain the cause for debugging.
        raise HTTPException(status_code=503, detail=f"MQTT unavailable: {e}") from e
    return {"ok": True, "scenario": body.scenario, "target": body.target}

View file

@ -0,0 +1,465 @@
import json
import logging
from typing import Any
from fastapi import APIRouter, Depends, HTTPException, Query
from pydantic import BaseModel
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
from services.alarm_engine import invalidate_threshold_cache
from services.seed import THRESHOLD_SEED_DATA, DEFAULT_SETTINGS, SITE_ID as DEFAULT_SITE
router = APIRouter()
logger = logging.getLogger(__name__)
# ── Pydantic models ────────────────────────────────────────────────────────────
class SensorCreate(BaseModel):
    """Payload for registering a new sensor device."""
    device_id: str
    name: str
    device_type: str
    room_id: str | None = None
    rack_id: str | None = None
    # Ingestion protocol identifier; defaults to MQTT.
    protocol: str = "mqtt"
    # Protocol-specific connection settings; serialised to JSON on insert.
    protocol_config: dict[str, Any] = {}
    enabled: bool = True
class SensorUpdate(BaseModel):
    """Partial update for a sensor device; None fields are left unchanged."""
    name: str | None = None
    device_type: str | None = None
    room_id: str | None = None
    rack_id: str | None = None
    protocol: str | None = None
    protocol_config: dict[str, Any] | None = None
    enabled: bool | None = None
class ThresholdUpdate(BaseModel):
    """Partial update for a threshold rule; None fields are left unchanged."""
    threshold_value: float | None = None
    # Validated by the endpoint: must be 'warning' or 'critical' when set.
    severity: str | None = None
    enabled: bool | None = None
class ThresholdCreate(BaseModel):
    """Payload for a new custom (non-locked) threshold rule."""
    sensor_type: str
    threshold_value: float
    # Validated by the endpoint: 'above' or 'below'.
    direction: str
    # Validated by the endpoint: 'warning' or 'critical'.
    severity: str
    message_template: str
class SettingsUpdate(BaseModel):
    """Settings-category config blob; merged over the stored values on write."""
    value: dict[str, Any]
# ── Sensors ────────────────────────────────────────────────────────────────────
@router.get("/sensors")
async def list_sensors(
    site_id: str = Query(DEFAULT_SITE),
    device_type: str | None = Query(None),
    room_id: str | None = Query(None),
    protocol: str | None = Query(None),
    session: AsyncSession = Depends(get_session),
):
    """List all sensor devices, with optional filters."""
    # Assemble WHERE clauses from whichever optional filters were supplied.
    # Column names are hard-coded pairs, never user input, so the f-string
    # below only interpolates a fixed whitelist.
    conditions = ["site_id = :site_id"]
    params: dict = {"site_id": site_id}
    for column, value in (
        ("device_type", device_type),
        ("room_id", room_id),
        ("protocol", protocol),
    ):
        if value:
            conditions.append(f"{column} = :{column}")
            params[column] = value
    where = " AND ".join(conditions)
    result = await session.execute(text(f"""
        SELECT id, site_id, device_id, name, device_type, room_id, rack_id,
               protocol, protocol_config, enabled, created_at, updated_at
        FROM sensors
        WHERE {where}
        ORDER BY device_type, room_id NULLS LAST, device_id
    """), params)
    return [dict(r) for r in result.mappings().all()]
@router.post("/sensors", status_code=201)
async def create_sensor(
    body: SensorCreate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Register a new sensor device."""
    # Bind parameters up front; protocol_config is serialised explicitly
    # because the column stores JSON text.
    params = {
        "site_id": site_id,
        "device_id": body.device_id,
        "name": body.name,
        "device_type": body.device_type,
        "room_id": body.room_id,
        "rack_id": body.rack_id,
        "protocol": body.protocol,
        "protocol_config": json.dumps(body.protocol_config),
        "enabled": body.enabled,
    }
    inserted = await session.execute(text("""
        INSERT INTO sensors
            (site_id, device_id, name, device_type, room_id, rack_id,
             protocol, protocol_config, enabled)
        VALUES
            (:site_id, :device_id, :name, :device_type, :room_id, :rack_id,
             :protocol, :protocol_config, :enabled)
        RETURNING id, site_id, device_id, name, device_type, room_id, rack_id,
                  protocol, protocol_config, enabled, created_at, updated_at
    """), params)
    await session.commit()
    return dict(inserted.mappings().first())
@router.get("/sensors/{sensor_id}")
async def get_sensor(
    sensor_id: int,
    session: AsyncSession = Depends(get_session),
):
    """Get a single sensor device plus its most recent readings."""
    found = await session.execute(text("""
        SELECT id, site_id, device_id, name, device_type, room_id, rack_id,
               protocol, protocol_config, enabled, created_at, updated_at
        FROM sensors WHERE id = :id
    """), {"id": sensor_id})
    record = found.mappings().first()
    if record is None:
        raise HTTPException(status_code=404, detail="Sensor not found")
    sensor = dict(record)
    # Latest reading of each sensor_type for this device within 10 minutes.
    # NOTE(review): the LIKE pattern matches any reading id containing the
    # device_id substring — confirm ids can't collide (e.g. "crac-1" vs
    # "crac-10").
    pattern = f"{sensor['site_id']}%{sensor['device_id']}%"
    recent = await session.execute(text("""
        SELECT DISTINCT ON (sensor_type)
            sensor_type, value, unit, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND sensor_id LIKE :pattern
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_type, recorded_at DESC
    """), {"site_id": sensor["site_id"], "pattern": pattern})
    sensor["recent_readings"] = [dict(r) for r in recent.mappings().all()]
    return sensor
@router.put("/sensors/{sensor_id}")
async def update_sensor(
    sensor_id: int,
    body: SensorUpdate,
    session: AsyncSession = Depends(get_session),
):
    """Update a sensor device's config or toggle enabled."""
    # Data-driven SET clause: each (column, value) pair is included only when
    # the caller provided it; protocol_config is pre-serialised to JSON.
    candidates = (
        ("name", body.name),
        ("device_type", body.device_type),
        ("room_id", body.room_id),
        ("rack_id", body.rack_id),
        ("protocol", body.protocol),
        ("protocol_config",
         json.dumps(body.protocol_config) if body.protocol_config is not None else None),
        ("enabled", body.enabled),
    )
    updates = []
    params: dict = {"id": sensor_id}
    for column, value in candidates:
        if value is None:
            continue
        updates.append(f"{column} = :{column}")
        params[column] = value
    if not updates:
        raise HTTPException(status_code=400, detail="No fields to update")
    updates.append("updated_at = NOW()")
    set_clause = ", ".join(updates)
    result = await session.execute(text(f"""
        UPDATE sensors SET {set_clause}
        WHERE id = :id
        RETURNING id, site_id, device_id, name, device_type, room_id, rack_id,
                  protocol, protocol_config, enabled, created_at, updated_at
    """), params)
    updated = result.mappings().first()
    if updated is None:
        raise HTTPException(status_code=404, detail="Sensor not found")
    await session.commit()
    return dict(updated)
@router.delete("/sensors/{sensor_id}", status_code=204)
async def delete_sensor(
    sensor_id: int,
    session: AsyncSession = Depends(get_session),
):
    """Remove a sensor device from the registry."""
    # RETURNING id lets us distinguish "deleted" from "never existed".
    deleted = await session.execute(
        text("DELETE FROM sensors WHERE id = :id RETURNING id"),
        {"id": sensor_id},
    )
    if deleted.fetchone() is None:
        raise HTTPException(status_code=404, detail="Sensor not found")
    await session.commit()
# ── Alarm thresholds ───────────────────────────────────────────────────────────
@router.get("/thresholds")
async def list_thresholds(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return all user-editable threshold rules (locked=false)."""
    # Locked (system) rules are intentionally hidden from this listing.
    rows = await session.execute(text("""
        SELECT id, site_id, sensor_type, threshold_value, direction,
               severity, message_template, enabled, locked, created_at, updated_at
        FROM alarm_thresholds
        WHERE site_id = :site_id AND locked = false
        ORDER BY id
    """), {"site_id": site_id})
    return [dict(rule) for rule in rows.mappings().all()]
@router.put("/thresholds/{threshold_id}")
async def update_threshold(
    threshold_id: int,
    body: ThresholdUpdate,
    session: AsyncSession = Depends(get_session),
):
    """Update a threshold value, severity, or enabled state."""
    # Guard clauses: rule must exist (404) and be user-editable (403).
    existing = await session.execute(
        text("SELECT locked, site_id FROM alarm_thresholds WHERE id = :id"),
        {"id": threshold_id},
    )
    meta = existing.mappings().first()
    if meta is None:
        raise HTTPException(status_code=404, detail="Threshold not found")
    if meta["locked"]:
        raise HTTPException(status_code=403, detail="Cannot modify locked threshold")
    if body.severity is not None and body.severity not in ("warning", "critical"):
        raise HTTPException(status_code=400, detail="severity must be warning or critical")
    assignments = []
    params: dict = {"id": threshold_id}
    for column, value in (
        ("threshold_value", body.threshold_value),
        ("severity", body.severity),
        ("enabled", body.enabled),
    ):
        if value is not None:
            assignments.append(f"{column} = :{column}")
            params[column] = value
    if not assignments:
        raise HTTPException(status_code=400, detail="No fields to update")
    assignments.append("updated_at = NOW()")
    set_clause = ", ".join(assignments)
    result = await session.execute(text(f"""
        UPDATE alarm_thresholds SET {set_clause}
        WHERE id = :id
        RETURNING id, site_id, sensor_type, threshold_value, direction,
                  severity, message_template, enabled, locked, updated_at
    """), params)
    await session.commit()
    # The alarm engine caches rules per site — drop the stale copy.
    invalidate_threshold_cache(meta["site_id"])
    return dict(result.mappings().first())
@router.post("/thresholds", status_code=201)
async def create_threshold(
    body: ThresholdCreate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Add a custom threshold rule."""
    # Validate the two closed vocabularies up front.
    if body.direction not in ("above", "below"):
        raise HTTPException(status_code=400, detail="direction must be above or below")
    if body.severity not in ("warning", "critical"):
        raise HTTPException(status_code=400, detail="severity must be warning or critical")
    params = {
        "site_id": site_id,
        "sensor_type": body.sensor_type,
        "threshold_value": body.threshold_value,
        "direction": body.direction,
        "severity": body.severity,
        "message_template": body.message_template,
    }
    created = await session.execute(text("""
        INSERT INTO alarm_thresholds
            (site_id, sensor_type, threshold_value, direction, severity, message_template, enabled, locked)
        VALUES
            (:site_id, :sensor_type, :threshold_value, :direction, :severity, :message_template, true, false)
        RETURNING id, site_id, sensor_type, threshold_value, direction,
                  severity, message_template, enabled, locked, created_at, updated_at
    """), params)
    await session.commit()
    # New rule must be visible to the alarm engine immediately.
    invalidate_threshold_cache(site_id)
    return dict(created.mappings().first())
@router.delete("/thresholds/{threshold_id}", status_code=204)
async def delete_threshold(
    threshold_id: int,
    session: AsyncSession = Depends(get_session),
):
    """Delete a custom (non-locked) threshold rule."""
    found = await session.execute(
        text("SELECT locked, site_id FROM alarm_thresholds WHERE id = :id"),
        {"id": threshold_id},
    )
    meta = found.mappings().first()
    if meta is None:
        raise HTTPException(status_code=404, detail="Threshold not found")
    if meta["locked"]:
        raise HTTPException(status_code=403, detail="Cannot delete locked threshold")
    await session.execute(
        text("DELETE FROM alarm_thresholds WHERE id = :id"),
        {"id": threshold_id},
    )
    await session.commit()
    # Evict the cached rule set so the alarm engine stops applying this rule.
    invalidate_threshold_cache(meta["site_id"])
@router.post("/thresholds/reset")
async def reset_thresholds(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Delete all thresholds for a site and re-seed from defaults.

    Returns the number of seeded rules.
    """
    await session.execute(
        text("DELETE FROM alarm_thresholds WHERE site_id = :site_id"),
        {"site_id": site_id},
    )
    # One executemany round-trip instead of a per-rule INSERT loop.
    seed_rows = [
        {
            "site_id": site_id, "sensor_type": st, "threshold_value": tv,
            "direction": direction, "severity": severity,
            "message_template": msg, "locked": locked,
        }
        for st, tv, direction, severity, msg, locked in THRESHOLD_SEED_DATA
    ]
    if seed_rows:
        await session.execute(text("""
            INSERT INTO alarm_thresholds
                (site_id, sensor_type, threshold_value, direction, severity, message_template, enabled, locked)
            VALUES
                (:site_id, :sensor_type, :threshold_value, :direction, :severity, :message_template, true, :locked)
        """), seed_rows)
    await session.commit()
    invalidate_threshold_cache(site_id)
    # Lazy %-args: the message is only formatted when INFO is enabled.
    logger.info("Alarm thresholds reset to defaults for %s", site_id)
    return {"ok": True, "count": len(THRESHOLD_SEED_DATA)}
# ── Generic settings (site / notifications / integrations / page_prefs) ────────
async def _get_settings(session: AsyncSession, site_id: str, category: str) -> dict:
    """Fetch the stored 'config' blob for a settings category, or its default."""
    result = await session.execute(text("""
        SELECT value FROM site_settings
        WHERE site_id = :site_id AND category = :category AND key = 'config'
    """), {"site_id": site_id, "category": category})
    row = result.mappings().first()
    if row is None:
        # Nothing saved yet — fall back to the built-in defaults for this category.
        return DEFAULT_SETTINGS.get(category, {})
    stored = row["value"]
    # JSONB usually comes back already decoded; tolerate a raw JSON string too.
    return stored if isinstance(stored, dict) else json.loads(stored)
async def _put_settings(
    session: AsyncSession, site_id: str, category: str, updates: dict
) -> dict:
    """Shallow-merge *updates* over the stored config, upsert, and return it."""
    existing = await _get_settings(session, site_id, category)
    merged = {**existing, **updates}
    # Single-row upsert keyed on (site_id, category, 'config').
    await session.execute(text("""
        INSERT INTO site_settings (site_id, category, key, value, updated_at)
        VALUES (:site_id, :category, 'config', :value, NOW())
        ON CONFLICT (site_id, category, key)
        DO UPDATE SET value = :value, updated_at = NOW()
    """), {"site_id": site_id, "category": category, "value": json.dumps(merged)})
    await session.commit()
    return merged
@router.get("/site")
async def get_site_settings(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return the site-profile settings (defaults if never saved)."""
    return await _get_settings(session, site_id, "site")
@router.put("/site")
async def update_site_settings(
    body: SettingsUpdate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Shallow-merge the supplied fields into the site-profile settings."""
    return await _put_settings(session, site_id, "site", body.value)
@router.get("/notifications")
async def get_notifications(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return the notification settings (defaults if never saved)."""
    return await _get_settings(session, site_id, "notifications")
@router.put("/notifications")
async def update_notifications(
    body: SettingsUpdate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Shallow-merge the supplied fields into the notification settings."""
    return await _put_settings(session, site_id, "notifications", body.value)
@router.get("/integrations")
async def get_integrations(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return the third-party integration settings (defaults if never saved)."""
    return await _get_settings(session, site_id, "integrations")
@router.put("/integrations")
async def update_integrations(
    body: SettingsUpdate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Shallow-merge the supplied fields into the integration settings."""
    return await _put_settings(session, site_id, "integrations", body.value)
@router.get("/page-prefs")
async def get_page_prefs(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return per-page UI preferences (defaults if never saved)."""
    return await _get_settings(session, site_id, "page_prefs")
@router.put("/page-prefs")
async def update_page_prefs(
    body: SettingsUpdate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Shallow-merge the supplied fields into the per-page UI preferences."""
    return await _put_settings(session, site_id, "page_prefs", body.value)

View file

@ -0,0 +1,36 @@
from fastapi import APIRouter
from pydantic import BaseModel
router = APIRouter()
class Site(BaseModel):
    """Summary record for a single data-centre site."""
    id: str          # short slug, e.g. "sg-01"
    name: str
    location: str
    status: str      # values seen in the stub data below: "ok", "warning"
    rack_count: int
    total_power_kw: float
    pue: float       # presumably power usage effectiveness — TODO confirm
# Static stub data — will be replaced by DB queries in Phase 2.
# NOTE(review): "sg-01" appears to be the id other modules default to
# (e.g. SITE_ID in services/seed.py) — keep them in sync.
SITES: list[Site] = [
    Site(id="sg-01", name="Singapore DC01", location="Singapore", status="ok", rack_count=128, total_power_kw=847.0, pue=1.42),
    Site(id="sg-02", name="Singapore DC02", location="Singapore", status="warning", rack_count=64, total_power_kw=412.0, pue=1.51),
    Site(id="lon-01", name="London DC01", location="London", status="ok", rack_count=96, total_power_kw=631.0, pue=1.38),
]
@router.get("", response_model=list[Site])
async def list_sites():
    """Return every known site (static stub data for now)."""
    return SITES
@router.get("/{site_id}", response_model=Site)
async def get_site(site_id: str):
    """Return a single site by id, or 404 if unknown."""
    # Import hoisted to the top of the function: the original buried it after
    # the loop, which works but hides the dependency on the failure path.
    from fastapi import HTTPException

    site = next((s for s in SITES if s.id == site_id), None)
    if site is None:
        raise HTTPException(status_code=404, detail="Site not found")
    return site

16
backend/api/routes/ws.py Normal file
View file

@ -0,0 +1,16 @@
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from services.ws_manager import manager
router = APIRouter()
@router.websocket("/ws")
async def websocket_endpoint(ws: WebSocket):
    """Accept a WebSocket client and hold the connection open for pushes.

    The connection is registered with the shared broadcast manager; data flows
    server → client only. On disconnect the client is deregistered.
    """
    await manager.connect(ws)
    try:
        while True:
            # We only push from server → client.
            # receive_text() keeps the connection alive.
            await ws.receive_text()
    except WebSocketDisconnect:
        manager.disconnect(ws)

0
backend/core/__init__.py Normal file
View file

27
backend/core/config.py Normal file
View file

@ -0,0 +1,27 @@
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Application configuration, overridable via environment variables or .env."""
    model_config = SettingsConfigDict(env_file=".env", extra="ignore")
    # App
    APP_NAME: str = "DemoBMS API"
    DEBUG: bool = False
    # Database — asyncpg driver URL; default targets the docker-compose service "db"
    DATABASE_URL: str = "postgresql+asyncpg://dcim:dcim_pass@db:5432/dcim"
    # MQTT broker
    MQTT_HOST: str = "localhost"
    MQTT_PORT: int = 1883
    # CORS — empty list means no cross-origin requests are allowed
    CORS_ORIGINS: list[str] = []
    # Clerk (auth provider) credentials — blank by default
    CLERK_PUBLISHABLE_KEY: str = ""
    CLERK_SECRET_KEY: str = ""
    CLERK_JWKS_URL: str = ""
# Shared singleton read throughout the app.
settings = Settings()

130
backend/core/database.py Normal file
View file

@ -0,0 +1,130 @@
import logging
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy import text
from core.config import settings
logger = logging.getLogger(__name__)
# Async engine: 10 pooled connections plus up to 20 overflow.
engine = create_async_engine(settings.DATABASE_URL, echo=False, pool_size=10, max_overflow=20)
# expire_on_commit=False keeps fetched rows/objects usable after commit
# (handlers here commit and then serialize the result).
AsyncSessionLocal = async_sessionmaker(engine, expire_on_commit=False)
async def init_db() -> None:
    """Create the TimescaleDB extension and all application tables.

    Idempotent: every DDL statement uses IF NOT EXISTS, so it is safe to run
    on every startup. All statements share one transaction (engine.begin()).
    """
    async with engine.begin() as conn:
        # Enable TimescaleDB
        await conn.execute(text("CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE"))
        # Sensor readings — core time-series table
        await conn.execute(text("""
            CREATE TABLE IF NOT EXISTS readings (
                recorded_at TIMESTAMPTZ NOT NULL,
                sensor_id VARCHAR(120) NOT NULL,
                sensor_type VARCHAR(50) NOT NULL,
                site_id VARCHAR(50) NOT NULL,
                room_id VARCHAR(50),
                rack_id VARCHAR(50),
                value DOUBLE PRECISION NOT NULL,
                unit VARCHAR(20)
            )
        """))
        # Convert to hypertable — no-op if already one.
        # First form is the newer by_range() API; the second is the legacy
        # positional signature for older TimescaleDB versions.
        # NOTE(review): in Postgres a failed statement aborts the enclosing
        # transaction, so the fallback path may not succeed inside this
        # engine.begin() block — confirm against the deployed Timescale version.
        try:
            await conn.execute(text(
                "SELECT create_hypertable('readings', by_range('recorded_at'), if_not_exists => TRUE)"
            ))
        except Exception:
            try:
                await conn.execute(text(
                    "SELECT create_hypertable('readings', 'recorded_at', if_not_exists => TRUE)"
                ))
            except Exception as e:
                logger.warning(f"Hypertable setup skipped (table still works): {e}")
        # Composite index serving the common "latest readings for sensor" query.
        await conn.execute(text("""
            CREATE INDEX IF NOT EXISTS idx_readings_sensor_time
            ON readings (sensor_id, recorded_at DESC)
        """))
        # Alarms table — state is 'active' / 'acknowledged' / 'resolved'
        await conn.execute(text("""
            CREATE TABLE IF NOT EXISTS alarms (
                id BIGSERIAL PRIMARY KEY,
                sensor_id VARCHAR(120),
                site_id VARCHAR(50),
                room_id VARCHAR(50),
                rack_id VARCHAR(50),
                severity VARCHAR(20) NOT NULL,
                message TEXT NOT NULL,
                state VARCHAR(20) NOT NULL DEFAULT 'active',
                triggered_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                acknowledged_at TIMESTAMPTZ,
                resolved_at TIMESTAMPTZ
            )
        """))
        # Site config — generic key/value JSON store (used for floor layout etc.)
        await conn.execute(text("""
            CREATE TABLE IF NOT EXISTS site_config (
                site_id VARCHAR(50) NOT NULL,
                key VARCHAR(100) NOT NULL,
                value JSONB NOT NULL,
                updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                PRIMARY KEY (site_id, key)
            )
        """))
        # Sensor device registry
        await conn.execute(text("""
            CREATE TABLE IF NOT EXISTS sensors (
                id SERIAL PRIMARY KEY,
                site_id VARCHAR(50) NOT NULL,
                device_id VARCHAR(100) NOT NULL,
                name VARCHAR(200) NOT NULL,
                device_type VARCHAR(50) NOT NULL,
                room_id VARCHAR(50),
                rack_id VARCHAR(50),
                protocol VARCHAR(30) NOT NULL DEFAULT 'mqtt',
                protocol_config JSONB NOT NULL DEFAULT '{}',
                enabled BOOLEAN NOT NULL DEFAULT true,
                created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                UNIQUE(site_id, device_id)
            )
        """))
        # Configurable alarm thresholds (replaces hard-coded list at runtime)
        await conn.execute(text("""
            CREATE TABLE IF NOT EXISTS alarm_thresholds (
                id SERIAL PRIMARY KEY,
                site_id VARCHAR(50) NOT NULL,
                sensor_type VARCHAR(50) NOT NULL,
                threshold_value FLOAT NOT NULL,
                direction VARCHAR(10) NOT NULL,
                severity VARCHAR(20) NOT NULL,
                message_template TEXT NOT NULL,
                enabled BOOLEAN NOT NULL DEFAULT true,
                locked BOOLEAN NOT NULL DEFAULT false,
                created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
            )
        """))
        # Site-level settings (profile, notifications, integrations, page prefs)
        await conn.execute(text("""
            CREATE TABLE IF NOT EXISTS site_settings (
                site_id VARCHAR(50) NOT NULL,
                category VARCHAR(50) NOT NULL,
                key VARCHAR(100) NOT NULL,
                value JSONB NOT NULL,
                updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                PRIMARY KEY (site_id, category, key)
            )
        """))
    logger.info("Database initialised")
async def get_session():
    """FastAPI dependency: yield a fresh AsyncSession, closed after the request."""
    async with AsyncSessionLocal() as session:
        yield session

73
backend/main.py Normal file
View file

@ -0,0 +1,73 @@
import asyncio
import logging
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from core.config import settings
from core.database import init_db, AsyncSessionLocal
from services.mqtt_subscriber import run_subscriber
from services.seed import run_all_seeds
from api.routes import (
health, sites, readings, alarms, ws, assets,
power, env, reports, capacity,
generator, fire, cooling, leak, network, maintenance, floor_layout,
scenarios, settings as settings_router,
)
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(name)s %(levelname)s %(message)s")
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan hook: init schema, seed data, run the MQTT ingest task.

    On shutdown the background subscriber task is cancelled and awaited so the
    process exits cleanly.
    """
    logger.info(f"Starting {settings.APP_NAME}")
    await init_db()
    async with AsyncSessionLocal() as session:
        await run_all_seeds(session)
    # Start MQTT subscriber as a background task
    task = asyncio.create_task(run_subscriber())
    yield
    # Shutdown: cancel the ingest loop; CancelledError is the expected outcome.
    task.cancel()
    try:
        await task
    except asyncio.CancelledError:
        pass
    logger.info("Shutdown complete")
app = FastAPI(
    title=settings.APP_NAME,
    version="0.2.0",
    docs_url="/docs",
    redoc_url="/redoc",
    lifespan=lifespan,
)
# CORS origins come from settings (empty by default = no cross-origin access).
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# One router per subsystem, all mounted under /api.
app.include_router(health.router, prefix="/api", tags=["health"])
app.include_router(sites.router, prefix="/api/sites", tags=["sites"])
app.include_router(readings.router, prefix="/api/readings", tags=["readings"])
app.include_router(alarms.router, prefix="/api/alarms", tags=["alarms"])
app.include_router(ws.router, prefix="/api", tags=["websocket"])
app.include_router(assets.router, prefix="/api/assets", tags=["assets"])
app.include_router(power.router, prefix="/api/power", tags=["power"])
app.include_router(env.router, prefix="/api/env", tags=["env"])
app.include_router(reports.router, prefix="/api/reports", tags=["reports"])
app.include_router(capacity.router, prefix="/api/capacity", tags=["capacity"])
app.include_router(generator.router, prefix="/api/generator", tags=["generator"])
app.include_router(fire.router, prefix="/api/fire", tags=["fire"])
app.include_router(cooling.router, prefix="/api/cooling", tags=["cooling"])
app.include_router(leak.router, prefix="/api/leak", tags=["leak"])
app.include_router(network.router, prefix="/api/network", tags=["network"])
app.include_router(maintenance.router, prefix="/api/maintenance", tags=["maintenance"])
app.include_router(floor_layout.router, prefix="/api/floor-layout", tags=["floor-layout"])
app.include_router(scenarios.router, prefix="/api/scenarios", tags=["scenarios"])
app.include_router(settings_router.router, prefix="/api/settings", tags=["settings"])

View file

11
backend/requirements.txt Normal file
View file

@ -0,0 +1,11 @@
fastapi==0.115.6
uvicorn[standard]==0.32.1
pydantic==2.10.4
pydantic-settings==2.7.0
python-dotenv==1.0.1
asyncpg==0.30.0
sqlalchemy[asyncio]==2.0.36
aiomqtt==2.3.0
httpx==0.28.1
python-jose[cryptography]==3.3.0
python-multipart==0.0.20

View file

View file

@ -0,0 +1,152 @@
import logging
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
logger = logging.getLogger(__name__)
# ── In-memory threshold cache ──────────────────────────────────────────────────
# Loaded from DB on first use; invalidated by settings API after updates.
# Falls back to hard-coded defaults if DB has no rows yet (pre-seed).
# _caches maps site_id -> list of rule dicts; _dirty_sites lists sites whose
# cached rules must be reloaded on next use.
_caches: dict[str, list[dict]] = {}
_dirty_sites: set[str] = {"sg-01"}  # start dirty so first request loads from DB
def invalidate_threshold_cache(site_id: str = "sg-01") -> None:
    """Mark a site's cache as stale. Called by settings API after threshold changes."""
    _dirty_sites.add(site_id)
async def _ensure_cache(session: AsyncSession, site_id: str) -> None:
    """(Re)load the site's enabled threshold rules from DB if missing or dirty."""
    if site_id not in _dirty_sites and site_id in _caches:
        return
    result = await session.execute(text("""
        SELECT sensor_type, threshold_value, direction, severity, message_template
        FROM alarm_thresholds
        WHERE site_id = :site_id AND enabled = true
        ORDER BY id
    """), {"site_id": site_id})
    rows = result.mappings().all()
    if rows:
        _caches[site_id] = [dict(r) for r in rows]
    else:
        # DB not yet seeded — fall back to hard-coded defaults.
        # NOTE(review): the fallback list is shared, not copied — mutating a
        # cached rule would alter the module-level defaults for every site.
        _caches[site_id] = _FALLBACK_RULES
    _dirty_sites.discard(site_id)
    logger.info(f"Loaded {len(_caches[site_id])} threshold rules for {site_id}")
async def check_and_update_alarms(
    session: AsyncSession,
    sensor_id: str,
    sensor_type: str,
    site_id: str,
    room_id: str | None,
    rack_id: str | None,
    value: float,
) -> None:
    """Evaluate one reading against the site's threshold rules.

    For each severity level that has at least one rule matching this
    sensor_type:
      - if ANY rule of that severity is breached, ensure an active alarm
        exists for (sensor_id, severity) — raising a new one if needed;
      - otherwise, resolve any active alarm for (sensor_id, severity).

    Grouping per severity fixes a bug in the previous per-rule loop: with two
    rules of the same severity (e.g. vesda_level warnings at 0.5 and 1.5, or
    ups_state criticals at 0.5 and 1.5), a value breaching only the lower rule
    raised an alarm that the higher, un-breached rule then immediately
    resolved within the same pass.
    """
    await _ensure_cache(session, site_id)
    # severity -> message template of the first breached rule (None = no breach)
    outcomes: dict[str, str | None] = {}
    for rule in _caches.get(site_id, []):
        if rule["sensor_type"] != sensor_type:
            continue
        severity = rule["severity"]
        outcomes.setdefault(severity, None)
        breached = (
            (rule["direction"] == "above" and value > rule["threshold_value"]) or
            (rule["direction"] == "below" and value < rule["threshold_value"])
        )
        # Keep the first breached rule's template (rules are ordered by id).
        if breached and outcomes[severity] is None:
            outcomes[severity] = rule["message_template"]
    for severity, msg_tpl in outcomes.items():
        if msg_tpl is not None:
            # Raise only if no alarm is already active for this sensor/severity.
            existing = await session.execute(text("""
                SELECT id FROM alarms
                WHERE sensor_id = :sid AND severity = :sev AND state = 'active'
                LIMIT 1
            """), {"sid": sensor_id, "sev": severity})
            if not existing.fetchone():
                message = msg_tpl.format(value=value, sensor_id=sensor_id)
                await session.execute(text("""
                    INSERT INTO alarms
                        (sensor_id, site_id, room_id, rack_id, severity, message, state, triggered_at)
                    VALUES
                        (:sensor_id, :site_id, :room_id, :rack_id, :severity, :message, 'active', NOW())
                """), {
                    "sensor_id": sensor_id, "site_id": site_id,
                    "room_id": room_id, "rack_id": rack_id,
                    "severity": severity, "message": message,
                })
                logger.info(f"Alarm raised [{severity}]: {message}")
        else:
            # No rule of this severity is breached — clear any active alarm.
            await session.execute(text("""
                UPDATE alarms
                SET state = 'resolved', resolved_at = NOW()
                WHERE sensor_id = :sid AND severity = :sev AND state = 'active'
            """), {"sid": sensor_id, "sev": severity})
# ── Hard-coded fallback (used before DB seed runs) ─────────────────────────────
# Same shape as rows loaded by _ensure_cache; presumably mirrors the
# THRESHOLD_SEED_DATA tuples in services/seed.py (minus the locked flag) —
# keep the two lists in sync.
_FALLBACK_RULES: list[dict] = [
    {"sensor_type": st, "threshold_value": tv, "direction": d, "severity": s, "message_template": m}
    for st, tv, d, s, m in [
        ("temperature", 28.0, "above", "warning", "Temperature elevated at {sensor_id}: {value:.1f}°C"),
        ("temperature", 32.0, "above", "critical", "Temperature critical at {sensor_id}: {value:.1f}°C"),
        ("humidity", 65.0, "above", "warning", "Humidity elevated at {sensor_id}: {value:.0f}%"),
        ("power_kw", 7.5, "above", "warning", "PDU load elevated at {sensor_id}: {value:.1f} kW"),
        ("power_kw", 9.5, "above", "critical", "PDU load critical at {sensor_id}: {value:.1f} kW"),
        ("ups_charge", 80.0, "below", "warning", "UPS battery low at {sensor_id}: {value:.0f}%"),
        ("ups_charge", 50.0, "below", "critical", "UPS battery critical at {sensor_id}: {value:.0f}%"),
        ("ups_state", 0.5, "above", "critical", "UPS switched to battery at {sensor_id} — mains power lost"),
        ("ups_state", 1.5, "above", "critical", "UPS overloaded at {sensor_id} — immediate risk of failure"),
        ("ups_load", 85.0, "above", "warning", "UPS load high at {sensor_id}: {value:.0f}%"),
        ("ups_load", 95.0, "above", "critical", "UPS load critical at {sensor_id}: {value:.0f}% — overload"),
        ("ups_runtime", 15.0, "below", "warning", "UPS runtime low at {sensor_id}: {value:.0f} min remaining"),
        ("ups_runtime", 5.0, "below", "critical", "UPS runtime critical at {sensor_id}: {value:.0f} min — imminent shutdown"),
        ("leak", 0.5, "above", "critical", "Water leak detected at {sensor_id}!"),
        ("cooling_cap_pct", 90.0, "above", "warning", "CRAC near capacity limit at {sensor_id}: {value:.1f}%"),
        ("cooling_cop", 1.5, "below", "warning", "CRAC running inefficiently at {sensor_id}: COP {value:.2f}"),
        ("cooling_comp_load", 95.0, "above", "warning", "CRAC compressor overloaded at {sensor_id}: {value:.1f}%"),
        ("cooling_high_press", 22.0, "above", "critical", "CRAC high refrigerant pressure at {sensor_id}: {value:.1f} bar"),
        ("cooling_low_press", 3.0, "below", "critical", "CRAC low refrigerant pressure at {sensor_id}: {value:.1f} bar — possible leak"),
        ("cooling_superheat", 16.0, "above", "warning", "CRAC discharge superheat high at {sensor_id}: {value:.1f}°C"),
        ("cooling_filter_dp", 80.0, "above", "warning", "CRAC filter requires attention at {sensor_id}: {value:.0f} Pa"),
        ("cooling_filter_dp", 120.0, "above", "critical", "CRAC filter critically blocked at {sensor_id}: {value:.0f} Pa — replace now"),
        ("cooling_return", 36.0, "above", "warning", "CRAC return air temperature high at {sensor_id}: {value:.1f}°C"),
        ("cooling_return", 42.0, "above", "critical", "CRAC return air temperature critical at {sensor_id}: {value:.1f}°C"),
        ("gen_fuel_pct", 25.0, "below", "warning", "Generator fuel low at {sensor_id}: {value:.1f}%"),
        ("gen_fuel_pct", 10.0, "below", "critical", "Generator fuel critical at {sensor_id}: {value:.1f}%"),
        ("gen_state", 0.5, "above", "warning", "Generator running at {sensor_id} — site is on standby power"),
        ("gen_state", -0.5, "below", "critical", "Generator fault at {sensor_id} — no standby power available"),
        ("gen_load_pct", 85.0, "above", "warning", "Generator load high at {sensor_id}: {value:.1f}%"),
        ("gen_load_pct", 95.0, "above", "critical", "Generator overloaded at {sensor_id}: {value:.1f}%"),
        ("gen_coolant_c", 95.0, "above", "warning", "Generator coolant temperature high at {sensor_id}: {value:.1f}°C"),
        ("gen_coolant_c", 105.0, "above", "critical", "Generator coolant critical at {sensor_id}: {value:.1f}°C — risk of shutdown"),
        ("gen_oil_press", 2.0, "below", "critical", "Generator oil pressure low at {sensor_id}: {value:.1f} bar"),
        ("pdu_imbalance", 5.0, "above", "warning", "PDU phase imbalance at {sensor_id}: {value:.1f}%"),
        ("pdu_imbalance", 15.0, "above", "critical", "PDU phase imbalance critical at {sensor_id}: {value:.1f}%"),
        ("ats_active", 1.5, "above", "warning", "ATS transferred to generator at {sensor_id} — utility power lost"),
        ("ats_ua_v", 50.0, "below", "critical", "Utility A power failure at {sensor_id} — supply lost"),
        ("chiller_state", 0.5, "below", "critical", "Chiller fault at {sensor_id} — CHW supply lost"),
        ("chiller_cop", 2.5, "below", "warning", "Chiller running inefficiently at {sensor_id}: COP {value:.2f}"),
        ("vesda_level", 0.5, "above", "warning", "VESDA smoke detected at {sensor_id}: level elevated"),
        ("vesda_level", 1.5, "above", "warning", "VESDA action threshold reached at {sensor_id}"),
        ("vesda_level", 2.5, "above", "critical", "VESDA FIRE ALARM at {sensor_id}!"),
        ("vesda_flow", 0.5, "below", "critical", "VESDA aspirator flow fault at {sensor_id} — detector may be compromised"),
        ("vesda_det1", 0.5, "below", "warning", "VESDA detector 1 fault at {sensor_id}"),
        ("vesda_det2", 0.5, "below", "warning", "VESDA detector 2 fault at {sensor_id}"),
        ("net_state", 0.5, "above", "warning", "Network switch degraded at {sensor_id}"),
        ("net_state", 1.5, "above", "critical", "Network switch down at {sensor_id} — connectivity lost"),
        ("net_pkt_loss_pct", 1.0, "above", "warning", "Packet loss detected at {sensor_id}: {value:.1f}%"),
        ("net_pkt_loss_pct", 5.0, "above", "critical", "High packet loss at {sensor_id}: {value:.1f}%"),
        ("net_temp_c", 65.0, "above", "warning", "Switch temperature high at {sensor_id}: {value:.1f}°C"),
        ("net_temp_c", 75.0, "above", "critical", "Switch temperature critical at {sensor_id}: {value:.1f}°C"),
    ]
]

View file

@ -0,0 +1,328 @@
import asyncio
import json
import logging
from datetime import datetime, timezone
import aiomqtt
from sqlalchemy import text
from core.config import settings
from core.database import AsyncSessionLocal
from services.alarm_engine import check_and_update_alarms
from services.ws_manager import manager as ws_manager
logger = logging.getLogger(__name__)
def parse_topic(topic: str) -> dict | None:
"""
Topic formats:
bms/{site_id}/{room_id}/{rack_id}/env rack environment
bms/{site_id}/{room_id}/{rack_id}/power rack PDU power
bms/{site_id}/cooling/{crac_id} CRAC unit
bms/{site_id}/cooling/chiller/{chiller_id} chiller plant
bms/{site_id}/power/{ups_id} UPS unit
bms/{site_id}/power/ats/{ats_id} ATS transfer switch
bms/{site_id}/generator/{gen_id} diesel generator
bms/{site_id}/fire/{zone_id} VESDA fire zone
bms/{site_id}/leak/{sensor_id} water leak sensor
"""
parts = topic.split("/")
if len(parts) < 4 or parts[0] != "bms":
return None
site_id = parts[1]
# 5-part: rack env/power OR cooling/chiller/{id} OR power/ats/{id}
if len(parts) == 5:
if parts[4] in ("env", "power"):
return {
"site_id": site_id, "room_id": parts[2],
"rack_id": parts[3], "device_id": None, "msg_type": parts[4],
}
if parts[2] == "cooling" and parts[3] == "chiller":
return {
"site_id": site_id, "room_id": None, "rack_id": None,
"device_id": parts[4], "msg_type": "chiller",
}
if parts[2] == "power" and parts[3] == "ats":
return {
"site_id": site_id, "room_id": None, "rack_id": None,
"device_id": parts[4], "msg_type": "ats",
}
# 4-part: bms/{site_id}/{room_id}/particles
if len(parts) == 4 and parts[3] == "particles":
return {
"site_id": site_id, "room_id": parts[2], "rack_id": None,
"device_id": None, "msg_type": "particles",
}
# 4-part: known subsystem topics
if len(parts) == 4 and parts[2] in ("cooling", "power", "leak", "generator", "fire", "network"):
return {
"site_id": site_id, "room_id": None, "rack_id": None,
"device_id": parts[3], "msg_type": parts[2],
}
return None
async def process_message(topic: str, payload: dict) -> None:
    """Translate one MQTT message into readings, persist, alarm-check, broadcast.

    Steps:
      1. Parse the topic; silently drop unrecognised topics.
      2. Map payload fields to (sensor_id, sensor_type, value, unit) tuples,
         branching on the message type derived from the topic.
      3. Insert each reading, run threshold evaluation per reading, commit once.
      4. Broadcast the batch to connected WebSocket clients.

    Enum-like payload states (UPS/generator/ATS/chiller/fire/network) are
    encoded as floats so everything fits the numeric readings table.
    """
    meta = parse_topic(topic)
    if not meta:
        return
    site_id = meta["site_id"]
    room_id = meta["room_id"]
    rack_id = meta["rack_id"]
    device_id = meta["device_id"]
    msg_type = meta["msg_type"]
    now = datetime.now(timezone.utc)
    # Build list of (sensor_id, sensor_type, value, unit) tuples
    readings: list[tuple[str, str, float, str]] = []
    if msg_type == "env" and rack_id:
        base = f"{site_id}/{room_id}/{rack_id}"
        if "temperature" in payload:
            readings.append((f"{base}/temperature", "temperature", float(payload["temperature"]), "°C"))
        if "humidity" in payload:
            readings.append((f"{base}/humidity", "humidity", float(payload["humidity"]), "%"))
    elif msg_type == "power" and rack_id:
        # Rack-level PDU message (distinct from the UPS "power" branch below,
        # which has a device_id instead of a rack_id).
        base = f"{site_id}/{room_id}/{rack_id}"
        if "load_kw" in payload:
            readings.append((f"{base}/power_kw", "power_kw", float(payload["load_kw"]), "kW"))
        # Per-phase PDU data
        for key, s_type, unit in [
            ("phase_a_kw", "pdu_phase_a_kw", "kW"),
            ("phase_b_kw", "pdu_phase_b_kw", "kW"),
            ("phase_c_kw", "pdu_phase_c_kw", "kW"),
            ("phase_a_a", "pdu_phase_a_a", "A"),
            ("phase_b_a", "pdu_phase_b_a", "A"),
            ("phase_c_a", "pdu_phase_c_a", "A"),
            ("imbalance_pct", "pdu_imbalance", "%"),
        ]:
            if payload.get(key) is not None:
                readings.append((f"{base}/{key}", s_type, float(payload[key]), unit))
    elif msg_type == "cooling" and device_id:
        base = f"{site_id}/cooling/{device_id}"
        crac_fields = [
            # (payload_key, sensor_type, unit)
            ("supply_temp", "cooling_supply", "°C"),
            ("return_temp", "cooling_return", "°C"),
            ("fan_pct", "cooling_fan", "%"),
            ("supply_humidity", "cooling_supply_hum", "%"),
            ("return_humidity", "cooling_return_hum", "%"),
            ("airflow_cfm", "cooling_airflow", "CFM"),
            ("filter_dp_pa", "cooling_filter_dp", "Pa"),
            ("cooling_capacity_kw", "cooling_cap_kw", "kW"),
            ("cooling_capacity_pct", "cooling_cap_pct", "%"),
            ("cop", "cooling_cop", ""),
            ("sensible_heat_ratio", "cooling_shr", ""),
            ("compressor_state", "cooling_comp_state", ""),
            ("compressor_load_pct", "cooling_comp_load", "%"),
            ("compressor_power_kw", "cooling_comp_power", "kW"),
            ("compressor_run_hours", "cooling_comp_hours", "h"),
            ("high_pressure_bar", "cooling_high_press", "bar"),
            ("low_pressure_bar", "cooling_low_press", "bar"),
            ("discharge_superheat_c", "cooling_superheat", "°C"),
            ("liquid_subcooling_c", "cooling_subcooling", "°C"),
            ("fan_rpm", "cooling_fan_rpm", "RPM"),
            ("fan_power_kw", "cooling_fan_power", "kW"),
            ("fan_run_hours", "cooling_fan_hours", "h"),
            ("total_unit_power_kw", "cooling_unit_power", "kW"),
            ("input_voltage_v", "cooling_voltage", "V"),
            ("input_current_a", "cooling_current", "A"),
            ("power_factor", "cooling_pf", ""),
        ]
        for key, s_type, unit in crac_fields:
            if payload.get(key) is not None:
                readings.append((f"{base}/{key}", s_type, float(payload[key]), unit))
    elif msg_type == "power" and device_id:
        base = f"{site_id}/power/{device_id}"
        for key, s_type, unit in [
            ("charge_pct", "ups_charge", "%"),
            ("load_pct", "ups_load", "%"),
            ("runtime_min", "ups_runtime", "min"),
            ("voltage", "ups_voltage", "V"),
        ]:
            if key in payload:
                readings.append((f"{base}/{key}", s_type, float(payload[key]), unit))
        # Store state explicitly: 0.0 = online, 1.0 = on_battery, 2.0 = overload
        if "state" in payload:
            state_val = {"online": 0.0, "on_battery": 1.0, "overload": 2.0}.get(payload["state"], 0.0)
            readings.append((f"{base}/state", "ups_state", state_val, ""))
    elif msg_type == "generator" and device_id:
        base = f"{site_id}/generator/{device_id}"
        # fault encodes as -1.0 so "below -0.5" thresholds can catch it
        state_map = {"standby": 0.0, "running": 1.0, "test": 2.0, "fault": -1.0}
        for key, s_type, unit in [
            ("fuel_pct", "gen_fuel_pct", "%"),
            ("fuel_litres", "gen_fuel_l", "L"),
            ("fuel_rate_lph", "gen_fuel_rate", "L/h"),
            ("load_kw", "gen_load_kw", "kW"),
            ("load_pct", "gen_load_pct", "%"),
            ("run_hours", "gen_run_hours", "h"),
            ("voltage_v", "gen_voltage_v", "V"),
            ("frequency_hz", "gen_freq_hz", "Hz"),
            ("engine_rpm", "gen_rpm", "RPM"),
            ("oil_pressure_bar", "gen_oil_press", "bar"),
            ("coolant_temp_c", "gen_coolant_c", "°C"),
            ("exhaust_temp_c", "gen_exhaust_c", "°C"),
            ("alternator_temp_c", "gen_alt_temp_c", "°C"),
            ("power_factor", "gen_pf", ""),
            ("battery_v", "gen_batt_v", "V"),
        ]:
            if payload.get(key) is not None:
                readings.append((f"{base}/{key}", s_type, float(payload[key]), unit))
        if "state" in payload:
            readings.append((f"{base}/state", "gen_state", state_map.get(payload["state"], 0.0), ""))
    elif msg_type == "ats" and device_id:
        base = f"{site_id}/power/ats/{device_id}"
        feed_map = {"utility-a": 0.0, "utility-b": 1.0, "generator": 2.0}
        for key, s_type, unit in [
            ("transfer_count", "ats_xfer_count", ""),
            ("last_transfer_ms", "ats_xfer_ms", "ms"),
            ("utility_a_v", "ats_ua_v", "V"),
            ("utility_b_v", "ats_ub_v", "V"),
            ("generator_v", "ats_gen_v", "V"),
        ]:
            if payload.get(key) is not None:
                readings.append((f"{base}/{key}", s_type, float(payload[key]), unit))
        if "active_feed" in payload:
            readings.append((f"{base}/active_feed", "ats_active",
                             feed_map.get(payload["active_feed"], 0.0), ""))
        if "state" in payload:
            readings.append((f"{base}/state", "ats_state",
                             1.0 if payload["state"] == "transferring" else 0.0, ""))
    elif msg_type == "chiller" and device_id:
        base = f"{site_id}/cooling/chiller/{device_id}"
        for key, s_type, unit in [
            ("chw_supply_c", "chiller_chw_supply", "°C"),
            ("chw_return_c", "chiller_chw_return", "°C"),
            ("chw_delta_c", "chiller_chw_delta", "°C"),
            ("flow_gpm", "chiller_flow_gpm", "GPM"),
            ("cooling_load_kw", "chiller_load_kw", "kW"),
            ("cooling_load_pct", "chiller_load_pct", "%"),
            ("cop", "chiller_cop", ""),
            ("compressor_load_pct", "chiller_comp_load", "%"),
            ("condenser_pressure_bar", "chiller_cond_press", "bar"),
            ("evaporator_pressure_bar", "chiller_evap_press", "bar"),
            ("cw_supply_c", "chiller_cw_supply", "°C"),
            ("cw_return_c", "chiller_cw_return", "°C"),
            ("run_hours", "chiller_run_hours", "h"),
        ]:
            if payload.get(key) is not None:
                readings.append((f"{base}/{key}", s_type, float(payload[key]), unit))
        if "state" in payload:
            readings.append((f"{base}/state", "chiller_state",
                             1.0 if payload["state"] == "online" else 0.0, ""))
    elif msg_type == "fire" and device_id:
        base = f"{site_id}/fire/{device_id}"
        level_map = {"normal": 0.0, "alert": 1.0, "action": 2.0, "fire": 3.0}
        if "level" in payload:
            readings.append((f"{base}/level", "vesda_level",
                             level_map.get(payload["level"], 0.0), ""))
        if "obscuration_pct_m" in payload:
            readings.append((f"{base}/obscuration", "vesda_obscuration",
                             float(payload["obscuration_pct_m"]), "%/m"))
        for key, s_type in [
            ("detector_1_ok", "vesda_det1"),
            ("detector_2_ok", "vesda_det2"),
            ("power_ok", "vesda_power"),
            ("flow_ok", "vesda_flow"),
        ]:
            if key in payload:
                readings.append((f"{base}/{key}", s_type,
                                 1.0 if payload[key] else 0.0, ""))
    elif msg_type == "network" and device_id:
        base = f"{site_id}/network/{device_id}"
        state_map = {"up": 0.0, "degraded": 1.0, "down": 2.0}
        for key, s_type, unit in [
            ("uptime_s", "net_uptime_s", "s"),
            ("active_ports", "net_active_ports", ""),
            ("bandwidth_in_mbps", "net_bw_in_mbps", "Mbps"),
            ("bandwidth_out_mbps","net_bw_out_mbps", "Mbps"),
            ("cpu_pct", "net_cpu_pct", "%"),
            ("mem_pct", "net_mem_pct", "%"),
            ("temperature_c", "net_temp_c", "°C"),
            ("packet_loss_pct", "net_pkt_loss_pct", "%"),
        ]:
            if payload.get(key) is not None:
                readings.append((f"{base}/{key}", s_type, float(payload[key]), unit))
        if "state" in payload:
            readings.append((f"{base}/state", "net_state",
                             state_map.get(payload["state"], 0.0), ""))
    elif msg_type == "leak" and device_id:
        state = payload.get("state", "clear")
        readings.append((
            f"{site_id}/leak/{device_id}", "leak",
            1.0 if state == "detected" else 0.0, "",
        ))
    elif msg_type == "particles":
        base = f"{site_id}/{room_id}/particles"
        if "particles_0_5um" in payload:
            readings.append((f"{base}/0_5um", "particles_0_5um", float(payload["particles_0_5um"]), "/m³"))
        if "particles_5um" in payload:
            readings.append((f"{base}/5um", "particles_5um", float(payload["particles_5um"]), "/m³"))
    if not readings:
        return
    # Persist all readings and evaluate alarms in one session/commit.
    async with AsyncSessionLocal() as session:
        for sensor_id, sensor_type, value, unit in readings:
            await session.execute(text("""
                INSERT INTO readings
                    (recorded_at, sensor_id, sensor_type, site_id, room_id, rack_id, value, unit)
                VALUES
                    (:ts, :sid, :stype, :site, :room, :rack, :val, :unit)
            """), {
                "ts": now, "sid": sensor_id, "stype": sensor_type,
                "site": site_id, "room": room_id, "rack": rack_id,
                "val": value, "unit": unit,
            })
            await check_and_update_alarms(
                session, sensor_id, sensor_type, site_id, room_id, rack_id, value
            )
        await session.commit()
    # Push to any connected WebSocket clients
    await ws_manager.broadcast({
        "topic": topic,
        "site_id": site_id,
        "room_id": room_id,
        "rack_id": rack_id,
        "readings": [
            {"sensor_id": s, "type": t, "value": v, "unit": u}
            for s, t, v, u in readings
        ],
        "timestamp": now.isoformat(),
    })
async def run_subscriber() -> None:
    """Runs forever, reconnecting on any failure.

    Subscribes to the whole "bms/#" tree and feeds each JSON message to
    process_message(). Two levels of error handling: a per-message guard so
    one malformed payload cannot kill the stream, and a connection-level
    guard that backs off 5 s and reconnects.
    """
    while True:
        try:
            logger.info(f"Connecting to MQTT at {settings.MQTT_HOST}:{settings.MQTT_PORT}")
            async with aiomqtt.Client(settings.MQTT_HOST, port=settings.MQTT_PORT) as client:
                logger.info("MQTT connected — subscribing to bms/#")
                await client.subscribe("bms/#")
                async for message in client.messages:
                    try:
                        payload = json.loads(message.payload.decode())
                        await process_message(str(message.topic), payload)
                    except Exception as e:
                        # Keep consuming even if one message is malformed.
                        logger.error(f"Error processing message on {message.topic}: {e}")
        except Exception as e:
            # Connection lost or refused — brief backoff, then retry.
            logger.error(f"MQTT connection failed: {e} — retrying in 5s")
            await asyncio.sleep(5)

234
backend/services/seed.py Normal file
View file

@ -0,0 +1,234 @@
"""
Seed the database with default sensor registry and alarm threshold rules.
Runs on startup if tables are empty; subsequent restarts are no-ops.
"""
import json
import logging
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
logger = logging.getLogger(__name__)
SITE_ID = "sg-01"
# ── Threshold seed data ────────────────────────────────────────────────────────
# Each tuple: (sensor_type, threshold_value, direction, severity, message_template, locked)
#   locked=True  → state-machine encoding (thresholds straddle discrete state codes),
#                  hidden from the UI
#   locked=False → numeric setpoint, user-editable
# message_template uses {sensor_id}/{value} placeholders — presumably rendered via
# str.format when an alarm fires; confirm against the alarm-evaluation code.
THRESHOLD_SEED_DATA: list[tuple] = [
    # Rack environment
    ("temperature", 28.0, "above", "warning", "Temperature elevated at {sensor_id}: {value:.1f}°C", False),
    ("temperature", 32.0, "above", "critical", "Temperature critical at {sensor_id}: {value:.1f}°C", False),
    ("humidity", 65.0, "above", "warning", "Humidity elevated at {sensor_id}: {value:.0f}%", False),
    # PDU / rack power
    ("power_kw", 7.5, "above", "warning", "PDU load elevated at {sensor_id}: {value:.1f} kW", False),
    ("power_kw", 9.5, "above", "critical", "PDU load critical at {sensor_id}: {value:.1f} kW", False),
    # UPS — numeric setpoints
    ("ups_charge", 80.0, "below", "warning", "UPS battery low at {sensor_id}: {value:.0f}%", False),
    ("ups_charge", 50.0, "below", "critical", "UPS battery critical at {sensor_id}: {value:.0f}%", False),
    ("ups_load", 85.0, "above", "warning", "UPS load high at {sensor_id}: {value:.0f}%", False),
    ("ups_load", 95.0, "above", "critical", "UPS load critical at {sensor_id}: {value:.0f}% — overload", False),
    ("ups_runtime", 15.0, "below", "warning", "UPS runtime low at {sensor_id}: {value:.0f} min remaining", False),
    ("ups_runtime", 5.0, "below", "critical", "UPS runtime critical at {sensor_id}: {value:.0f} min — imminent shutdown", False),
    # UPS — state transitions (locked; 0.5/1.5 sit between integer state codes)
    ("ups_state", 0.5, "above", "critical", "UPS switched to battery at {sensor_id} — mains power lost", True),
    ("ups_state", 1.5, "above", "critical", "UPS overloaded at {sensor_id} — immediate risk of failure", True),
    # Leak (locked — binary 0/1 signal)
    ("leak", 0.5, "above", "critical", "Water leak detected at {sensor_id}!", True),
    # CRAC capacity & efficiency
    ("cooling_cap_pct", 90.0, "above", "warning", "CRAC near capacity limit at {sensor_id}: {value:.1f}%", False),
    ("cooling_cop", 1.5, "below", "warning", "CRAC running inefficiently at {sensor_id}: COP {value:.2f}", False),
    # CRAC compressor
    ("cooling_comp_load", 95.0, "above", "warning", "CRAC compressor overloaded at {sensor_id}: {value:.1f}%", False),
    ("cooling_high_press", 22.0, "above", "critical", "CRAC high refrigerant pressure at {sensor_id}: {value:.1f} bar", False),
    ("cooling_low_press", 3.0, "below", "critical", "CRAC low refrigerant pressure at {sensor_id}: {value:.1f} bar — possible leak", False),
    ("cooling_superheat", 16.0, "above", "warning", "CRAC discharge superheat high at {sensor_id}: {value:.1f}°C", False),
    # CRAC filter (differential pressure in Pa)
    ("cooling_filter_dp", 80.0, "above", "warning", "CRAC filter requires attention at {sensor_id}: {value:.0f} Pa", False),
    ("cooling_filter_dp", 120.0, "above", "critical", "CRAC filter critically blocked at {sensor_id}: {value:.0f} Pa — replace now", False),
    # CRAC return air
    ("cooling_return", 36.0, "above", "warning", "CRAC return air temperature high at {sensor_id}: {value:.1f}°C", False),
    ("cooling_return", 42.0, "above", "critical", "CRAC return air temperature critical at {sensor_id}: {value:.1f}°C", False),
    # Generator — numeric setpoints
    ("gen_fuel_pct", 25.0, "below", "warning", "Generator fuel low at {sensor_id}: {value:.1f}%", False),
    ("gen_fuel_pct", 10.0, "below", "critical", "Generator fuel critical at {sensor_id}: {value:.1f}%", False),
    ("gen_load_pct", 85.0, "above", "warning", "Generator load high at {sensor_id}: {value:.1f}%", False),
    ("gen_load_pct", 95.0, "above", "critical", "Generator overloaded at {sensor_id}: {value:.1f}%", False),
    ("gen_coolant_c", 95.0, "above", "warning", "Generator coolant temperature high at {sensor_id}: {value:.1f}°C", False),
    ("gen_coolant_c", 105.0, "above", "critical", "Generator coolant critical at {sensor_id}: {value:.1f}°C — risk of shutdown", False),
    ("gen_oil_press", 2.0, "below", "critical", "Generator oil pressure low at {sensor_id}: {value:.1f} bar", False),
    # Generator — state transitions (locked; negative code appears to mean fault)
    ("gen_state", 0.5, "above", "warning", "Generator running at {sensor_id} — site is on standby power", True),
    ("gen_state", -0.5, "below", "critical", "Generator fault at {sensor_id} — no standby power available", True),
    # PDU phase imbalance
    ("pdu_imbalance", 5.0, "above", "warning", "PDU phase imbalance at {sensor_id}: {value:.1f}%", False),
    ("pdu_imbalance", 15.0, "above", "critical", "PDU phase imbalance critical at {sensor_id}: {value:.1f}%", False),
    # ATS — numeric (utility-A voltage collapse)
    ("ats_ua_v", 50.0, "below", "critical", "Utility A power failure at {sensor_id} — supply lost", False),
    # ATS — state (locked)
    ("ats_active", 1.5, "above", "warning", "ATS transferred to generator at {sensor_id} — utility power lost", True),
    # Chiller — numeric
    ("chiller_cop", 2.5, "below", "warning", "Chiller running inefficiently at {sensor_id}: COP {value:.2f}", False),
    # Chiller — state (locked)
    ("chiller_state", 0.5, "below", "critical", "Chiller fault at {sensor_id} — CHW supply lost", True),
    # VESDA fire — state (all locked; levels escalate 1→3)
    ("vesda_level", 0.5, "above", "warning", "VESDA smoke detected at {sensor_id}: level elevated", True),
    ("vesda_level", 1.5, "above", "warning", "VESDA action threshold reached at {sensor_id}", True),
    ("vesda_level", 2.5, "above", "critical", "VESDA FIRE ALARM at {sensor_id}!", True),
    ("vesda_flow", 0.5, "below", "critical", "VESDA aspirator flow fault at {sensor_id} — detector may be compromised", True),
    ("vesda_det1", 0.5, "below", "warning", "VESDA detector 1 fault at {sensor_id}", True),
    ("vesda_det2", 0.5, "below", "warning", "VESDA detector 2 fault at {sensor_id}", True),
    # Network — numeric
    ("net_pkt_loss_pct", 1.0, "above", "warning", "Packet loss detected at {sensor_id}: {value:.1f}%", False),
    ("net_pkt_loss_pct", 5.0, "above", "critical", "High packet loss at {sensor_id}: {value:.1f}%", False),
    ("net_temp_c", 65.0, "above", "warning", "Switch temperature high at {sensor_id}: {value:.1f}°C", False),
    ("net_temp_c", 75.0, "above", "critical", "Switch temperature critical at {sensor_id}: {value:.1f}°C", False),
    # Network — state (locked)
    ("net_state", 0.5, "above", "warning", "Network switch degraded at {sensor_id}", True),
    ("net_state", 1.5, "above", "critical", "Network switch down at {sensor_id} — connectivity lost", True),
]
# ── Sensor seed data ────────────────────────────────────────────────────────────
def _build_sensor_list() -> list[dict]:
sensors = [
{"device_id": "gen-01", "name": "Diesel Generator 1", "device_type": "generator", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/generator/gen-01"}},
{"device_id": "ups-01", "name": "UPS Unit 1", "device_type": "ups", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/power/ups-01"}},
{"device_id": "ups-02", "name": "UPS Unit 2", "device_type": "ups", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/power/ups-02"}},
{"device_id": "ats-01", "name": "Transfer Switch 1", "device_type": "ats", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/power/ats/ats-01"}},
{"device_id": "chiller-01", "name": "Chiller Plant 1", "device_type": "chiller", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/cooling/chiller/chiller-01"}},
{"device_id": "crac-01", "name": "CRAC Unit — Hall A", "device_type": "crac", "room_id": "hall-a", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/cooling/crac-01"}},
{"device_id": "crac-02", "name": "CRAC Unit — Hall B", "device_type": "crac", "room_id": "hall-b", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/cooling/crac-02"}},
{"device_id": "vesda-hall-a","name": "VESDA Fire Zone — Hall A","device_type": "fire_zone", "room_id": "hall-a", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/fire/vesda-hall-a"}},
{"device_id": "vesda-hall-b","name": "VESDA Fire Zone — Hall B","device_type": "fire_zone", "room_id": "hall-b", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/fire/vesda-hall-b"}},
{"device_id": "leak-01", "name": "Leak Sensor — CRAC Zone A","device_type": "leak", "room_id": "hall-a", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/leak/leak-01"}},
{"device_id": "leak-02", "name": "Leak Sensor — Server Row B1","device_type": "leak", "room_id": "hall-b", "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/leak/leak-02"}},
{"device_id": "leak-03", "name": "Leak Sensor — UPS Room", "device_type": "leak", "room_id": None, "rack_id": None, "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/leak/leak-03"}},
{"device_id": "sw-core-01", "name": "Core Switch — Hall A", "device_type": "network_switch","room_id": "hall-a", "rack_id": "SG1A01.01", "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/network/sw-core-01"}},
{"device_id": "sw-core-02", "name": "Core Switch — Hall B", "device_type": "network_switch","room_id": "hall-b", "rack_id": "SG1B01.01", "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/network/sw-core-02"}},
{"device_id": "sw-edge-01", "name": "Edge / Uplink Switch", "device_type": "network_switch","room_id": "hall-a", "rack_id": "SG1A01.05", "protocol": "mqtt", "protocol_config": {"topic": "bms/sg-01/network/sw-edge-01"}},
]
# Generate racks
for room_id, row_prefix in [("hall-a", "SG1A"), ("hall-b", "SG1B")]:
for row_num in ["01", "02"]:
for rack_num in range(1, 21):
rack_id = f"{row_prefix}{row_num}.{rack_num:02d}"
sensors.append({
"device_id": rack_id,
"name": f"Rack PDU — {rack_id}",
"device_type": "rack",
"room_id": room_id,
"rack_id": rack_id,
"protocol": "mqtt",
"protocol_config": {
"env_topic": f"bms/sg-01/{room_id}/{rack_id}/env",
"pdu_topic": f"bms/sg-01/{room_id}/{rack_id}/power",
},
})
return sensors
SENSOR_SEED_DATA = _build_sensor_list()
# ── Default settings ────────────────────────────────────────────────────────────
# ── Default settings ────────────────────────────────────────────────────────────
# One JSON blob per category; each is stored under key 'config' in the
# site_settings table by seed_settings().
DEFAULT_SETTINGS: dict[str, dict] = {
    # Site identity / display metadata
    "site": {
        "name": "Singapore DC01",
        "timezone": "Asia/Singapore",
        "description": "Production data centre — Singapore",
    },
    # Which alarm/event classes produce outbound notifications, and the
    # delivery targets (empty strings = not configured)
    "notifications": {
        "critical_alarms": True,
        "warning_alarms": True,
        "generator_events": True,
        "maintenance_reminders": True,
        "webhook_url": "",
        "email_recipients": "",
    },
    # MQTT broker defaults — "mqtt" is presumably the compose service name; verify
    "integrations": {
        "mqtt_host": "mqtt",
        "mqtt_port": 1883,
    },
    # UI page preferences
    "page_prefs": {
        "default_time_range_hours": 6,
        "refresh_interval_seconds": 30,
    },
}
# ── Seed functions ──────────────────────────────────────────────────────────────
async def seed_thresholds(session: AsyncSession) -> None:
    """Insert the default alarm-threshold rules for SITE_ID.

    No-op if any threshold rows already exist for the site, so repeated
    startups do not duplicate rules.
    """
    existing = await session.execute(
        text("SELECT COUNT(*) FROM alarm_thresholds WHERE site_id = :s"),
        {"s": SITE_ID},
    )
    if existing.scalar() > 0:
        return
    # Statement is identical for every row — build it once outside the loop.
    insert_stmt = text("""
        INSERT INTO alarm_thresholds
        (site_id, sensor_type, threshold_value, direction, severity, message_template, enabled, locked)
        VALUES
        (:site_id, :sensor_type, :threshold_value, :direction, :severity, :message_template, true, :locked)
    """)
    for sensor_type, threshold, direction, severity, template, locked in THRESHOLD_SEED_DATA:
        await session.execute(insert_stmt, {
            "site_id": SITE_ID,
            "sensor_type": sensor_type,
            "threshold_value": threshold,
            "direction": direction,
            "severity": severity,
            "message_template": template,
            "locked": locked,
        })
    await session.commit()
    logger.info(f"Seeded {len(THRESHOLD_SEED_DATA)} alarm threshold rules")
async def seed_sensors(session: AsyncSession) -> None:
    """Populate the sensor registry for SITE_ID.

    No-op if any sensor rows already exist for the site; individual conflicts
    on (site_id, device_id) are additionally ignored by the INSERT itself.
    """
    existing = await session.execute(
        text("SELECT COUNT(*) FROM sensors WHERE site_id = :s"),
        {"s": SITE_ID},
    )
    if existing.scalar() > 0:
        return
    # Same statement for every device — build it once outside the loop.
    insert_stmt = text("""
        INSERT INTO sensors
        (site_id, device_id, name, device_type, room_id, rack_id, protocol, protocol_config, enabled)
        VALUES
        (:site_id, :device_id, :name, :device_type, :room_id, :rack_id, :protocol, :protocol_config, true)
        ON CONFLICT (site_id, device_id) DO NOTHING
    """)
    for device in SENSOR_SEED_DATA:
        params = {
            "site_id": SITE_ID,
            "device_id": device["device_id"],
            "name": device["name"],
            "device_type": device["device_type"],
            "room_id": device.get("room_id"),
            "rack_id": device.get("rack_id"),
            "protocol": device["protocol"],
            # protocol_config is stored as a JSON string
            "protocol_config": json.dumps(device["protocol_config"]),
        }
        await session.execute(insert_stmt, params)
    await session.commit()
    logger.info(f"Seeded {len(SENSOR_SEED_DATA)} sensor devices")
async def seed_settings(session: AsyncSession) -> None:
    """Insert default site-settings rows, one per category in DEFAULT_SETTINGS.

    Each category gets a single JSON blob stored under key 'config'; categories
    that already have a row are left untouched, so user edits survive restarts.

    Fix vs. previous version: the commit and the "seeded" log line were issued
    unconditionally, so every restart logged a seeding event even when nothing
    was inserted — inconsistent with seed_thresholds/seed_sensors, which only
    log when they actually seed. Now both happen only when rows were written.
    """
    seeded = 0
    for category, defaults in DEFAULT_SETTINGS.items():
        result = await session.execute(text("""
            SELECT COUNT(*) FROM site_settings
            WHERE site_id = :s AND category = :cat AND key = 'config'
        """), {"s": SITE_ID, "cat": category})
        if result.scalar() > 0:
            continue  # category already configured — preserve existing values
        await session.execute(text("""
            INSERT INTO site_settings (site_id, category, key, value)
            VALUES (:site_id, :category, 'config', :value)
            ON CONFLICT (site_id, category, key) DO NOTHING
        """), {"site_id": SITE_ID, "category": category, "value": json.dumps(defaults)})
        seeded += 1
    if seeded:
        await session.commit()
        logger.info(f"Seeded {seeded} site settings categories with defaults")
async def run_all_seeds(session: AsyncSession) -> None:
    """Run every idempotent seed step in order: thresholds, sensors, settings."""
    for seed_step in (seed_thresholds, seed_sensors, seed_settings):
        await seed_step(session)

View file

@ -0,0 +1,35 @@
import json
import logging
from fastapi import WebSocket
logger = logging.getLogger(__name__)
class ConnectionManager:
    """Tracks active WebSocket clients and fans out JSON messages to all of them."""

    def __init__(self) -> None:
        # All currently-accepted WebSocket connections.
        self._connections: set[WebSocket] = set()

    async def connect(self, ws: WebSocket) -> None:
        """Accept the WebSocket handshake and register the client."""
        await ws.accept()
        self._connections.add(ws)
        logger.info(f"WS client connected. Total: {len(self._connections)}")

    def disconnect(self, ws: WebSocket) -> None:
        """Forget a client; safe to call even if it was never registered."""
        self._connections.discard(ws)
        logger.info(f"WS client disconnected. Total: {len(self._connections)}")

    async def broadcast(self, data: dict) -> None:
        """Serialize *data* to JSON and send it to every connected client.

        Clients whose send fails are dropped from the connection set.
        """
        if not self._connections:
            return
        # default=str so non-JSON-native values (e.g. datetimes) serialize.
        message = json.dumps(data, default=str)
        dead: set[WebSocket] = set()
        # Fix: iterate a snapshot — send_text awaits, and a connect/disconnect
        # during that await would mutate self._connections mid-iteration and
        # raise "RuntimeError: Set changed size during iteration".
        for ws in tuple(self._connections):
            try:
                await ws.send_text(message)
            except Exception:
                dead.add(ws)
        self._connections -= dead
# Singleton — imported by both the MQTT subscriber and the WS route
manager = ConnectionManager()