first commit

This commit is contained in:
mega 2026-03-19 11:32:17 +00:00
commit 4b98219bf7
144 changed files with 31561 additions and 0 deletions

View file

View file

@ -0,0 +1,82 @@
from fastapi import APIRouter, Depends, Query, HTTPException
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
# Alarm lifecycle API: list, acknowledge, resolve, and summary statistics.
router = APIRouter()
@router.get("")
async def list_alarms(
site_id: str = Query(...),
state: str = Query("active", description="active | resolved | acknowledged | all"),
limit: int = Query(50, ge=1, le=200),
session: AsyncSession = Depends(get_session),
):
where = "WHERE site_id = :site_id"
if state != "all":
where += " AND state = :state"
result = await session.execute(text(f"""
SELECT id, sensor_id, site_id, room_id, rack_id,
severity, message, state, triggered_at,
acknowledged_at, resolved_at
FROM alarms
{where}
ORDER BY triggered_at DESC
LIMIT :limit
"""), {"site_id": site_id, "state": state, "limit": limit})
return [dict(r) for r in result.mappings().all()]
@router.post("/{alarm_id}/acknowledge")
async def acknowledge_alarm(
alarm_id: int,
session: AsyncSession = Depends(get_session),
):
result = await session.execute(text("""
UPDATE alarms
SET state = 'acknowledged', acknowledged_at = NOW()
WHERE id = :id AND state = 'active'
RETURNING id
"""), {"id": alarm_id})
await session.commit()
if not result.fetchone():
raise HTTPException(status_code=404, detail="Alarm not found or not active")
return {"id": alarm_id, "state": "acknowledged"}
@router.post("/{alarm_id}/resolve")
async def resolve_alarm(
alarm_id: int,
session: AsyncSession = Depends(get_session),
):
result = await session.execute(text("""
UPDATE alarms
SET state = 'resolved', resolved_at = NOW()
WHERE id = :id AND state IN ('active', 'acknowledged')
RETURNING id
"""), {"id": alarm_id})
await session.commit()
if not result.fetchone():
raise HTTPException(status_code=404, detail="Alarm not found or already resolved")
return {"id": alarm_id, "state": "resolved"}
@router.get("/stats")
async def alarm_stats(
site_id: str = Query(...),
session: AsyncSession = Depends(get_session),
):
result = await session.execute(text("""
SELECT
COUNT(*) FILTER (WHERE state = 'active') AS active,
COUNT(*) FILTER (WHERE state = 'acknowledged') AS acknowledged,
COUNT(*) FILTER (WHERE state = 'resolved') AS resolved,
COUNT(*) FILTER (WHERE state = 'active' AND severity = 'critical') AS critical,
COUNT(*) FILTER (WHERE state = 'active' AND severity = 'warning') AS warning
FROM alarms
WHERE site_id = :site_id
"""), {"site_id": site_id})
row = result.mappings().one()
return {k: int(v) for k, v in row.items()}

View file

@ -0,0 +1,344 @@
import hashlib
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Mirrors the simulator topology — single source of truth for site layout.
# Each hall has two rows (01/02) of twenty racks each.
TOPOLOGY = {
    "sg-01": {
        "rooms": [
            {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"},
            {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"},
        ],
        "ups_units": ["ups-01", "ups-02"],
        "leak_sensors": ["leak-01"],
    }
}
# ── Device catalog ────────────────────────────────────────────────────────────
# Each tuple: (name, u_height, power_draw_w)
# Rack-mount servers (1U and 2U models).
_SERVERS = [
    ("Dell PowerEdge R750", 2, 420),
    ("HPE ProLiant DL380 Gen10 Plus", 2, 380),
    ("Supermicro SuperServer 2029P", 2, 350),
    ("Dell PowerEdge R650xs", 1, 280),
    ("HPE ProLiant DL360 Gen10 Plus", 1, 260),
]
# Top-of-rack network switches.
_SWITCHES = [
    ("Cisco Catalyst C9300-48P", 1, 60),
    ("Arista 7050CX3-32S", 1, 180),
    ("Juniper EX4300-48T", 1, 75),
]
# Passive patch panels (nominal 5 W placeholder draw).
_PATCHES = [
    ("Leviton 24-Port Cat6A Patch Panel", 1, 5),
    ("Panduit 48-Port Cat6A Patch Panel", 1, 5),
]
# Metered rack PDUs (one pinned at U42 of every rack).
_PDUS = [
    ("APC AP8888 Metered Rack PDU", 1, 10),
    ("Raritan PX3-5190R Metered PDU", 1, 10),
]
# Storage arrays (placed only in selected racks).
_STORAGE = [
    ("Dell EMC PowerVault ME5024", 2, 280),
    ("NetApp AFF C190", 2, 200),
]
# Firewalls (first rack of each room only).
_FIREWALL = [
    ("Palo Alto PA-5220", 2, 150),
    ("Fortinet FortiGate 3000F",2, 180),
]
# KVM switch (racks 5 and 15 only).
_KVM = [("Raritan KX III-464", 1, 15)]
def _serial(rack_id: str, u: int) -> str:
return hashlib.md5(f"{rack_id}-u{u}".encode()).hexdigest()[:10].upper()
def _rack_seq(rack_id: str) -> int:
"""SG1A01.05 → 5, SG1A02.05 → 25, SG1B01.05 → 5"""
# Format: SG1A01.05 — row at [4:6], rack num after dot
row = int(rack_id[4:6]) # "01" or "02"
num = int(rack_id[7:]) # "01" to "20"
return (row - 1) * 20 + num
def _generate_devices(site_id: str, room_id: str, rack_id: str) -> list[dict]:
    """Deterministically synthesize the device inventory for one rack.

    No persistence involved: the layout is a pure function of the rack's
    sequence number, so repeated calls return identical records.  Layout:
    U1 patch panel, U2 switch, optional KVM (racks 5/15), firewall (first
    rack of each room), storage (racks 3/13), servers filling up to U41,
    and a PDU pinned at U42.
    """
    s = _rack_seq(rack_id)
    # Third IP octet encodes the room: hall-a → 1, any other room → 2.
    room_oct = "1" if room_id == "hall-a" else "2"
    devices: list[dict] = []
    u = 1  # next free U slot, counted from the bottom
    def add(name: str, dtype: str, u_start: int, u_height: int, power_w: int, ip: str = "-"):
        # Append one device record; device_id/serial derive from the slot.
        devices.append({
            "device_id": f"{rack_id}-u{u_start:02d}",
            "name": name,
            "type": dtype,
            "rack_id": rack_id,
            "room_id": room_id,
            "site_id": site_id,
            "u_start": u_start,
            "u_height": u_height,
            "ip": ip,
            "serial": _serial(rack_id, u_start),
            "model": name,
            "status": "online",
            "power_draw_w": power_w,
        })
    # U1: Patch panel
    p = _PATCHES[s % len(_PATCHES)]
    add(p[0], "patch_panel", u, p[1], p[2]); u += p[1]
    # U2: Switch
    sw = _SWITCHES[s % len(_SWITCHES)]
    add(sw[0], "switch", u, sw[1], sw[2], f"10.10.{room_oct}.{s}"); u += sw[1]
    # KVM in rack 5 / 15
    if s in (5, 15):
        kvm = _KVM[0]
        add(kvm[0], "kvm", u, kvm[1], kvm[2], f"10.10.{room_oct}.{s + 100}"); u += kvm[1]
    # Firewall in first rack of each room
    if rack_id in ("SG1A01.01", "SG1B01.01"):
        fw = _FIREWALL[s % len(_FIREWALL)]
        add(fw[0], "firewall", u, fw[1], fw[2], f"10.10.{room_oct}.254"); u += fw[1]
    # Storage in rack 3 and 13
    if s in (3, 13):
        stor = _STORAGE[s % len(_STORAGE)]
        add(stor[0], "storage", u, stor[1], stor[2], f"10.10.{room_oct}.{s + 50}"); u += stor[1]
    # Servers filling U slots up to U41
    srv_pool = (_SERVERS * 3)
    # NOTE(review): for racks with s >= 17 this counter exceeds 255, which
    # yields syntactically invalid dotted-quad IPs — confirm that is
    # acceptable for simulated data.
    ip_counter = (s - 1) * 15 + 10
    for idx, (name, u_h, pwr) in enumerate(srv_pool):
        if u + u_h > 41:
            break
        # Occasional empty gap for realism
        if idx > 0 and (s + idx) % 8 == 0 and u + u_h + 1 <= 41:
            u += 1
            if u + u_h > 41:
                break
        add(name, "server", u, u_h, pwr, f"10.10.{room_oct}.{ip_counter}"); u += u_h
        ip_counter += 1
    # U42: PDU
    pdu = _PDUS[s % len(_PDUS)]
    add(pdu[0], "pdu", 42, pdu[1], pdu[2])
    return devices
# ── Endpoints ─────────────────────────────────────────────────────────────────
@router.get("")
async def get_assets(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Full asset/status tree for a site: rooms, CRACs, racks and UPS units.

    Merges the static TOPOLOGY with the freshest sensor readings (last
    10 minutes) and per-rack active-alarm rollups.  Units with no recent
    data are reported with None values and an "unknown" state.
    """
    site = TOPOLOGY.get(site_id)
    if not site:
        # Unknown site: empty but well-formed payload.
        return {"site_id": site_id, "rooms": [], "ups_units": []}
    # Latest value per sensor inside the freshness window.
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, room_id, rack_id, value
        FROM readings
        WHERE site_id = :site_id
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    readings = result.mappings().all()
    # Per-rack active alarm rollup.  The original used MAX(severity), but
    # severities are text and 'warning' sorts above 'critical', so the worst
    # severity came back as 'warning' even when a critical alarm existed.
    # Count criticals explicitly instead.
    alarm_result = await session.execute(text("""
        SELECT rack_id, COUNT(*) AS cnt,
               COUNT(*) FILTER (WHERE severity = 'critical') AS crit
        FROM alarms
        WHERE site_id = :site_id AND state = 'active' AND rack_id IS NOT NULL
        GROUP BY rack_id
    """), {"site_id": site_id})
    alarm_map: dict[str, tuple[int, str]] = {
        r["rack_id"]: (int(r["cnt"]), "critical" if int(r["crit"]) > 0 else "warning")
        for r in alarm_result.mappings().all()
    }
    by_sensor: dict[str, float] = {r["sensor_id"]: float(r["value"]) for r in readings}
    # Sensor-id lookup helpers (parameters renamed so they no longer shadow
    # the outer `site` dict).
    def rack_reading(sid: str, room: str, rack: str, suffix: str) -> float | None:
        return by_sensor.get(f"{sid}/{room}/{rack}/{suffix}")
    def cooling_reading(sid: str, crac: str, suffix: str) -> float | None:
        return by_sensor.get(f"{sid}/cooling/{crac}/{suffix}")
    def ups_reading(sid: str, ups: str, suffix: str) -> float | None:
        return by_sensor.get(f"{sid}/power/{ups}/{suffix}")
    rooms = []
    for room in site["rooms"]:
        room_id = room["room_id"]
        crac_id = room["crac_id"]
        supply = cooling_reading(site_id, crac_id, "supply_temp")
        return_t = cooling_reading(site_id, crac_id, "return_temp")
        fan = cooling_reading(site_id, crac_id, "fan_pct")
        # A CRAC reporting *something* but no supply temp is faulted; total
        # silence means unknown.
        crac_has_data = any(sid.startswith(f"{site_id}/cooling/{crac_id}") for sid in by_sensor)
        if supply is not None:
            crac_state = "online"
        elif crac_has_data:
            crac_state = "fault"
        else:
            crac_state = "unknown"
        racks = []
        for rack_id in room["racks"]:
            temp = rack_reading(site_id, room_id, rack_id, "temperature")
            power = rack_reading(site_id, room_id, rack_id, "power_kw")
            alarm_cnt, worst_sev = alarm_map.get(rack_id, (0, None))
            # Status precedence: alarms, then temperature thresholds
            # (>= 30 critical, >= 26 warning), then missing data → unknown.
            status = "ok"
            if worst_sev == "critical" or (temp is not None and temp >= 30):
                status = "critical"
            elif worst_sev == "warning" or (temp is not None and temp >= 26):
                status = "warning"
            elif temp is None and power is None:
                status = "unknown"
            racks.append({
                "rack_id": rack_id,
                "temp": round(temp, 1) if temp is not None else None,
                "power_kw": round(power, 2) if power is not None else None,
                "status": status,
                "alarm_count": alarm_cnt,
            })
        rooms.append({
            "room_id": room_id,
            "crac": {
                "crac_id": crac_id,
                "state": crac_state,
                "supply_temp": round(supply, 1) if supply is not None else None,
                "return_temp": round(return_t, 1) if return_t is not None else None,
                "fan_pct": round(fan, 1) if fan is not None else None,
            },
            "racks": racks,
        })
    ups_units = []
    for ups_id in site["ups_units"]:
        charge = ups_reading(site_id, ups_id, "charge_pct")
        load = ups_reading(site_id, ups_id, "load_pct")
        runtime = ups_reading(site_id, ups_id, "runtime_min")
        state_raw = ups_reading(site_id, ups_id, "state")
        # Prefer the explicit state sensor (1.0 = on battery); otherwise
        # infer battery operation from a low charge.
        if state_raw is not None:
            state = "battery" if state_raw == 1.0 else "online"
        elif charge is not None:
            state = "battery" if charge < 20.0 else "online"
        else:
            state = "unknown"
        ups_units.append({
            "ups_id": ups_id,
            "state": state,
            "charge_pct": round(charge, 1) if charge is not None else None,
            "load_pct": round(load, 1) if load is not None else None,
            "runtime_min": round(runtime, 0) if runtime is not None else None,
        })
    return {"site_id": site_id, "rooms": rooms, "ups_units": ups_units}
@router.get("/devices")
async def get_all_devices(site_id: str = Query(...)):
"""All devices across all racks for the site."""
site = TOPOLOGY.get(site_id)
if not site:
return []
devices = []
for room in site["rooms"]:
for rack_id in room["racks"]:
devices.extend(_generate_devices(site_id, room["room_id"], rack_id))
return devices
@router.get("/rack-devices")
async def get_rack_devices(site_id: str = Query(...), rack_id: str = Query(...)):
"""Devices in a specific rack."""
site = TOPOLOGY.get(site_id)
if not site:
return []
for room in site["rooms"]:
if rack_id in room["racks"]:
return _generate_devices(site_id, room["room_id"], rack_id)
return []
@router.get("/pdus")
async def get_pdus(
site_id: str = Query(...),
session: AsyncSession = Depends(get_session),
):
"""Per-rack PDU live phase data."""
site = TOPOLOGY.get(site_id)
if not site:
return []
result = await session.execute(text("""
SELECT DISTINCT ON (sensor_id)
sensor_id, sensor_type, room_id, rack_id, value
FROM readings
WHERE site_id = :site_id
AND sensor_type IN (
'power_kw', 'pdu_phase_a_kw', 'pdu_phase_b_kw', 'pdu_phase_c_kw',
'pdu_phase_a_a', 'pdu_phase_b_a', 'pdu_phase_c_a', 'pdu_imbalance'
)
AND recorded_at > NOW() - INTERVAL '5 minutes'
ORDER BY sensor_id, recorded_at DESC
"""), {"site_id": site_id})
# Build per-rack dict keyed by rack_id
rack_data: dict[str, dict] = {}
for row in result.mappings().all():
rack_id = row["rack_id"]
if not rack_id:
continue
if rack_id not in rack_data:
rack_data[rack_id] = {"rack_id": rack_id, "room_id": row["room_id"]}
field_map = {
"power_kw": "total_kw",
"pdu_phase_a_kw": "phase_a_kw",
"pdu_phase_b_kw": "phase_b_kw",
"pdu_phase_c_kw": "phase_c_kw",
"pdu_phase_a_a": "phase_a_a",
"pdu_phase_b_a": "phase_b_a",
"pdu_phase_c_a": "phase_c_a",
"pdu_imbalance": "imbalance_pct",
}
field = field_map.get(row["sensor_type"])
if field:
rack_data[rack_id][field] = round(float(row["value"]), 2)
# Emit rows for every rack in topology order, filling in None for missing data
out = []
for room in site["rooms"]:
for rack_id in room["racks"]:
d = rack_data.get(rack_id, {"rack_id": rack_id, "room_id": room["room_id"]})
imb = d.get("imbalance_pct")
status = (
"critical" if imb is not None and imb >= 10
else "warning" if imb is not None and imb >= 5
else "ok"
)
out.append({
"rack_id": rack_id,
"room_id": d.get("room_id", room["room_id"]),
"total_kw": d.get("total_kw"),
"phase_a_kw": d.get("phase_a_kw"),
"phase_b_kw": d.get("phase_b_kw"),
"phase_c_kw": d.get("phase_c_kw"),
"phase_a_a": d.get("phase_a_a"),
"phase_b_a": d.get("phase_b_a"),
"phase_c_a": d.get("phase_c_a"),
"imbalance_pct": imb,
"status": status,
})
return out

View file

@ -0,0 +1,110 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Static room/rack topology (duplicates the simulator layout): two rows of
# twenty racks per hall.
ROOMS = {
    "sg-01": [
        {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"},
        {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"},
    ]
}
# Rated capacity config — would be per-asset configurable in production
RACK_POWER_CAPACITY_KW = 10.0  # max kW per rack
ROOM_POWER_CAPACITY_KW = 400.0  # 40 racks × 10 kW
CRAC_COOLING_CAPACITY_KW = 160.0  # rated cooling per CRAC
RACK_U_TOTAL = 42  # usable U slots per rack
@router.get("/summary")
async def capacity_summary(
site_id: str = Query(...),
session: AsyncSession = Depends(get_session),
):
"""Per-rack and per-room capacity: power used vs rated, cooling load vs rated, rack space."""
result = await session.execute(text("""
SELECT DISTINCT ON (sensor_id)
rack_id, room_id, sensor_type, value
FROM readings
WHERE site_id = :site_id
AND sensor_type IN ('power_kw', 'temperature')
AND rack_id IS NOT NULL
AND recorded_at > NOW() - INTERVAL '10 minutes'
ORDER BY sensor_id, recorded_at DESC
"""), {"site_id": site_id})
rows = result.mappings().all()
# Index: rack_id → {power_kw, temperature, room_id}
rack_idx: dict[str, dict] = {}
for row in rows:
rid = row["rack_id"]
if rid not in rack_idx:
rack_idx[rid] = {"room_id": row["room_id"]}
if row["sensor_type"] == "power_kw":
rack_idx[rid]["power_kw"] = round(float(row["value"]), 2)
elif row["sensor_type"] == "temperature":
rack_idx[rid]["temperature"] = round(float(row["value"]), 1)
rooms_out = []
racks_out = []
for room in ROOMS.get(site_id, []):
room_id = room["room_id"]
room_power = 0.0
populated = 0
for rack_id in room["racks"]:
d = rack_idx.get(rack_id, {})
power = d.get("power_kw")
temp = d.get("temperature")
if power is not None:
room_power += power
populated += 1
power_pct = round((power / RACK_POWER_CAPACITY_KW) * 100, 1) if power is not None else None
racks_out.append({
"rack_id": rack_id,
"room_id": room_id,
"power_kw": power,
"power_capacity_kw": RACK_POWER_CAPACITY_KW,
"power_pct": power_pct,
"temp": temp,
})
room_power = round(room_power, 2)
rooms_out.append({
"room_id": room_id,
"power": {
"used_kw": room_power,
"capacity_kw": ROOM_POWER_CAPACITY_KW,
"pct": round((room_power / ROOM_POWER_CAPACITY_KW) * 100, 1),
"headroom_kw": round(ROOM_POWER_CAPACITY_KW - room_power, 2),
},
"cooling": {
"load_kw": room_power, # IT power ≈ heat generated
"capacity_kw": CRAC_COOLING_CAPACITY_KW,
"pct": round(min(100.0, (room_power / CRAC_COOLING_CAPACITY_KW) * 100), 1),
"headroom_kw": round(max(0.0, CRAC_COOLING_CAPACITY_KW - room_power), 2),
},
"space": {
"racks_total": len(room["racks"]),
"racks_populated": populated,
"pct": round((populated / len(room["racks"])) * 100, 1),
},
})
return {
"site_id": site_id,
"config": {
"rack_power_kw": RACK_POWER_CAPACITY_KW,
"room_power_kw": ROOM_POWER_CAPACITY_KW,
"crac_cooling_kw": CRAC_COOLING_CAPACITY_KW,
"rack_u_total": RACK_U_TOTAL,
},
"rooms": rooms_out,
"racks": racks_out,
}

View file

@ -0,0 +1,131 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Chiller inventory per site.
CHILLERS = {"sg-01": ["chiller-01"]}
# sensor_type (as stored in `readings`) → response field name.
CHILLER_FIELD_MAP = {
    "chiller_chw_supply": "chw_supply_c",
    "chiller_chw_return": "chw_return_c",
    "chiller_chw_delta": "chw_delta_c",
    "chiller_flow_gpm": "flow_gpm",
    "chiller_load_kw": "cooling_load_kw",
    "chiller_load_pct": "cooling_load_pct",
    "chiller_cop": "cop",
    "chiller_comp_load": "compressor_load_pct",
    "chiller_cond_press": "condenser_pressure_bar",
    "chiller_evap_press": "evaporator_pressure_bar",
    "chiller_cw_supply": "cw_supply_c",
    "chiller_cw_return": "cw_return_c",
    "chiller_run_hours": "run_hours",
}
@router.get("/status")
async def chiller_status(
site_id: str = Query(...),
session: AsyncSession = Depends(get_session),
):
"""Latest chiller plant readings."""
types_sql = ", ".join(f"'{t}'" for t in [*CHILLER_FIELD_MAP.keys(), "chiller_state"])
result = await session.execute(text(f"""
SELECT DISTINCT ON (sensor_id)
sensor_id, sensor_type, value
FROM readings
WHERE site_id = :site_id
AND sensor_type IN ({types_sql})
AND recorded_at > NOW() - INTERVAL '5 minutes'
ORDER BY sensor_id, recorded_at DESC
"""), {"site_id": site_id})
chiller_data: dict[str, dict] = {}
for row in result.mappings().all():
parts = row["sensor_id"].split("/")
# sensor_id: {site}/{cooling/chiller}/{chiller_id}/{key} → parts[3]
if len(parts) < 4:
continue
chiller_id = parts[3]
if chiller_id not in chiller_data:
chiller_data[chiller_id] = {"chiller_id": chiller_id}
field = CHILLER_FIELD_MAP.get(row["sensor_type"])
if field:
chiller_data[chiller_id][field] = round(float(row["value"]), 2)
elif row["sensor_type"] == "chiller_state":
chiller_data[chiller_id]["state"] = "online" if float(row["value"]) > 0.5 else "fault"
out = []
for chiller_id in CHILLERS.get(site_id, []):
d = chiller_data.get(chiller_id, {"chiller_id": chiller_id, "state": "unknown"})
d.setdefault("state", "online")
out.append(d)
return out
@router.get("/history")
async def chiller_history(
site_id: str = Query(...),
chiller_id: str = Query(...),
hours: int = Query(6, ge=1, le=24),
session: AsyncSession = Depends(get_session),
):
"""Time-series COP, load kW, and CHW temps for a chiller."""
from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
METRICS = ("chiller_cop", "chiller_load_kw", "chiller_load_pct",
"chiller_chw_supply", "chiller_chw_return", "chiller_comp_load")
types_sql = ", ".join(f"'{t}'" for t in METRICS)
try:
result = await session.execute(text(f"""
SELECT
time_bucket('5 minutes', recorded_at) AS bucket,
sensor_type,
ROUND(AVG(value)::numeric, 3) AS avg_val
FROM readings
WHERE site_id = :site_id
AND sensor_id LIKE :pattern
AND sensor_type IN ({types_sql})
AND recorded_at > :from_time
GROUP BY bucket, sensor_type
ORDER BY bucket ASC
"""), {"site_id": site_id,
"pattern": f"{site_id}/cooling/chiller/{chiller_id}/%",
"from_time": from_time})
except Exception:
result = await session.execute(text(f"""
SELECT
date_trunc('minute', recorded_at) AS bucket,
sensor_type,
ROUND(AVG(value)::numeric, 3) AS avg_val
FROM readings
WHERE site_id = :site_id
AND sensor_id LIKE :pattern
AND sensor_type IN ({types_sql})
AND recorded_at > :from_time
GROUP BY bucket, sensor_type
ORDER BY bucket ASC
"""), {"site_id": site_id,
"pattern": f"{site_id}/cooling/chiller/{chiller_id}/%",
"from_time": from_time})
bucket_map: dict[str, dict] = {}
for row in result.mappings().all():
b = str(row["bucket"])
if b not in bucket_map:
bucket_map[b] = {"bucket": b}
bucket_map[b][row["sensor_type"]] = float(row["avg_val"])
points = []
for b, vals in sorted(bucket_map.items()):
points.append({
"bucket": b,
"cop": vals.get("chiller_cop"),
"load_kw": vals.get("chiller_load_kw"),
"load_pct": vals.get("chiller_load_pct"),
"chw_supply_c": vals.get("chiller_chw_supply"),
"chw_return_c": vals.get("chiller_chw_return"),
"comp_load": vals.get("chiller_comp_load"),
})
return points

440
backend/api/routes/env.py Normal file
View file

@ -0,0 +1,440 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Static room/rack topology (duplicates the simulator layout): two rows of
# twenty racks per hall, one CRAC per hall.
ROOMS = {
    "sg-01": [
        {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"},
        {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"},
    ]
}
@router.get("/rack-readings")
async def rack_env_readings(
site_id: str = Query(...),
session: AsyncSession = Depends(get_session),
):
"""Latest temperature and humidity per rack, grouped by room."""
result = await session.execute(text("""
SELECT DISTINCT ON (sensor_id)
rack_id, room_id, sensor_type, value
FROM readings
WHERE site_id = :site_id
AND sensor_type IN ('temperature', 'humidity')
AND rack_id IS NOT NULL
AND recorded_at > NOW() - INTERVAL '10 minutes'
ORDER BY sensor_id, recorded_at DESC
"""), {"site_id": site_id})
rows = result.mappings().all()
# Index by (rack_id, sensor_type)
data: dict[tuple, float] = {(r["rack_id"], r["sensor_type"]): float(r["value"]) for r in rows}
rooms = []
for room in ROOMS.get(site_id, []):
racks = []
for rack_id in room["racks"]:
temp = data.get((rack_id, "temperature"))
hum = data.get((rack_id, "humidity"))
racks.append({
"rack_id": rack_id,
"temperature": round(temp, 1) if temp is not None else None,
"humidity": round(hum, 1) if hum is not None else None,
})
rooms.append({"room_id": room["room_id"], "racks": racks})
return rooms
@router.get("/humidity-history")
async def humidity_history(
site_id: str = Query(...),
hours: int = Query(6, ge=1, le=24),
session: AsyncSession = Depends(get_session),
):
"""Average humidity per room bucketed by 5 minutes."""
from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
try:
result = await session.execute(text("""
SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 1) AS avg_humidity
FROM (
SELECT
time_bucket('5 minutes', recorded_at) AS bucket,
sensor_id, room_id,
AVG(value) AS avg_per_rack
FROM readings
WHERE site_id = :site_id
AND sensor_type = 'humidity'
AND room_id IS NOT NULL
AND recorded_at > :from_time
GROUP BY bucket, sensor_id, room_id
) per_rack
GROUP BY bucket, room_id
ORDER BY bucket ASC
"""), {"site_id": site_id, "from_time": from_time})
except Exception:
result = await session.execute(text("""
SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 1) AS avg_humidity
FROM (
SELECT
date_trunc('minute', recorded_at) AS bucket,
sensor_id, room_id,
AVG(value) AS avg_per_rack
FROM readings
WHERE site_id = :site_id
AND sensor_type = 'humidity'
AND room_id IS NOT NULL
AND recorded_at > :from_time
GROUP BY bucket, sensor_id, room_id
) per_rack
GROUP BY bucket, room_id
ORDER BY bucket ASC
"""), {"site_id": site_id, "from_time": from_time})
return [dict(r) for r in result.mappings().all()]
# All CRAC sensor types stored in the readings table
CRAC_SENSOR_TYPES = (
    "cooling_supply", "cooling_return", "cooling_fan",
    "cooling_supply_hum", "cooling_return_hum", "cooling_airflow", "cooling_filter_dp",
    "cooling_cap_kw", "cooling_cap_pct", "cooling_cop", "cooling_shr",
    "cooling_comp_state", "cooling_comp_load", "cooling_comp_power", "cooling_comp_hours",
    "cooling_high_press", "cooling_low_press", "cooling_superheat", "cooling_subcooling",
    "cooling_fan_rpm", "cooling_fan_power", "cooling_fan_hours",
    "cooling_unit_power", "cooling_voltage", "cooling_current", "cooling_pf",
)
# sensor_type → response field name
CRAC_FIELD_MAP = {
    "cooling_supply": "supply_temp",
    "cooling_return": "return_temp",
    "cooling_fan": "fan_pct",
    "cooling_supply_hum": "supply_humidity",
    "cooling_return_hum": "return_humidity",
    "cooling_airflow": "airflow_cfm",
    "cooling_filter_dp": "filter_dp_pa",
    "cooling_cap_kw": "cooling_capacity_kw",
    "cooling_cap_pct": "cooling_capacity_pct",
    "cooling_cop": "cop",
    "cooling_shr": "sensible_heat_ratio",
    "cooling_comp_state": "compressor_state",
    "cooling_comp_load": "compressor_load_pct",
    "cooling_comp_power": "compressor_power_kw",
    "cooling_comp_hours": "compressor_run_hours",
    "cooling_high_press": "high_pressure_bar",
    "cooling_low_press": "low_pressure_bar",
    "cooling_superheat": "discharge_superheat_c",
    "cooling_subcooling": "liquid_subcooling_c",
    "cooling_fan_rpm": "fan_rpm",
    "cooling_fan_power": "fan_power_kw",
    "cooling_fan_hours": "fan_run_hours",
    "cooling_unit_power": "total_unit_power_kw",
    "cooling_voltage": "input_voltage_v",
    "cooling_current": "input_current_a",
    "cooling_pf": "power_factor",
}
# Rated cooling capacity reported for every CRAC in API responses.
RATED_CAPACITY_KW = 80.0
@router.get("/crac-status")
async def crac_status(
site_id: str = Query(...),
session: AsyncSession = Depends(get_session),
):
"""Latest CRAC readings — full sensor set."""
types_sql = ", ".join(f"'{t}'" for t in CRAC_SENSOR_TYPES)
result = await session.execute(text(f"""
SELECT DISTINCT ON (sensor_id)
sensor_id, sensor_type, value
FROM readings
WHERE site_id = :site_id
AND sensor_type IN ({types_sql})
AND recorded_at > NOW() - INTERVAL '10 minutes'
ORDER BY sensor_id, recorded_at DESC
"""), {"site_id": site_id})
crac_data: dict[str, dict] = {}
for row in result.mappings().all():
parts = row["sensor_id"].split("/")
if len(parts) < 3:
continue
crac_id = parts[2]
if crac_id not in crac_data:
crac_data[crac_id] = {"crac_id": crac_id}
field = CRAC_FIELD_MAP.get(row["sensor_type"])
if field:
crac_data[crac_id][field] = round(float(row["value"]), 3)
room_map = {room["crac_id"]: room["room_id"] for room in ROOMS.get(site_id, [])}
result_list = []
for crac_id, d in sorted(crac_data.items()):
supply = d.get("supply_temp")
ret = d.get("return_temp")
delta = round(ret - supply, 1) if (ret is not None and supply is not None) else None
state = "online" if supply is not None else "fault"
result_list.append({
"crac_id": crac_id,
"room_id": room_map.get(crac_id),
"state": state,
"delta": delta,
"rated_capacity_kw": RATED_CAPACITY_KW,
**{k: round(v, 2) if isinstance(v, float) else v for k, v in d.items() if k != "crac_id"},
})
# Surface CRACs with no recent readings as faulted
known = set(crac_data.keys())
for room in ROOMS.get(site_id, []):
if room["crac_id"] not in known:
result_list.append({
"crac_id": room["crac_id"],
"room_id": room["room_id"],
"state": "fault",
"delta": None,
"rated_capacity_kw": RATED_CAPACITY_KW,
})
return sorted(result_list, key=lambda x: x["crac_id"])
@router.get("/crac-history")
async def crac_history(
site_id: str = Query(...),
crac_id: str = Query(...),
hours: int = Query(6, ge=1, le=24),
session: AsyncSession = Depends(get_session),
):
"""Time-series history for a single CRAC unit — capacity, COP, compressor load, filter ΔP, temps."""
from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
METRICS = (
"cooling_supply", "cooling_return", "cooling_cap_kw",
"cooling_cap_pct", "cooling_cop", "cooling_comp_load",
"cooling_filter_dp", "cooling_fan",
)
types_sql = ", ".join(f"'{t}'" for t in METRICS)
try:
result = await session.execute(text(f"""
SELECT
time_bucket('5 minutes', recorded_at) AS bucket,
sensor_type,
ROUND(AVG(value)::numeric, 3) AS avg_val
FROM readings
WHERE site_id = :site_id
AND sensor_id LIKE :pattern
AND sensor_type IN ({types_sql})
AND recorded_at > :from_time
GROUP BY bucket, sensor_type
ORDER BY bucket ASC
"""), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time})
except Exception:
result = await session.execute(text(f"""
SELECT
date_trunc('minute', recorded_at) AS bucket,
sensor_type,
ROUND(AVG(value)::numeric, 3) AS avg_val
FROM readings
WHERE site_id = :site_id
AND sensor_id LIKE :pattern
AND sensor_type IN ({types_sql})
AND recorded_at > :from_time
GROUP BY bucket, sensor_type
ORDER BY bucket ASC
"""), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time})
bucket_map: dict[str, dict] = {}
for row in result.mappings().all():
b = str(row["bucket"])
if b not in bucket_map:
bucket_map[b] = {"bucket": b}
bucket_map[b][row["sensor_type"]] = float(row["avg_val"])
points = []
for b, vals in sorted(bucket_map.items()):
supply = vals.get("cooling_supply")
ret = vals.get("cooling_return")
points.append({
"bucket": b,
"supply_temp": round(supply, 1) if supply is not None else None,
"return_temp": round(ret, 1) if ret is not None else None,
"delta_t": round(ret - supply, 1) if (supply is not None and ret is not None) else None,
"capacity_kw": vals.get("cooling_cap_kw"),
"capacity_pct": vals.get("cooling_cap_pct"),
"cop": vals.get("cooling_cop"),
"comp_load": vals.get("cooling_comp_load"),
"filter_dp": vals.get("cooling_filter_dp"),
"fan_pct": vals.get("cooling_fan"),
})
return points
@router.get("/crac-delta-history")
async def crac_delta_history(
site_id: str = Query(...),
crac_id: str = Query(...),
hours: int = Query(1, ge=1, le=24),
session: AsyncSession = Depends(get_session),
):
"""ΔT (return - supply) over time for a single CRAC unit."""
from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
try:
result = await session.execute(text("""
SELECT
time_bucket('5 minutes', recorded_at) AS bucket,
sensor_type,
AVG(value) AS avg_val
FROM readings
WHERE site_id = :site_id
AND sensor_id LIKE :pattern
AND sensor_type IN ('cooling_supply', 'cooling_return')
AND recorded_at > :from_time
GROUP BY bucket, sensor_type
ORDER BY bucket ASC
"""), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time})
except Exception:
result = await session.execute(text("""
SELECT
date_trunc('minute', recorded_at) AS bucket,
sensor_type,
AVG(value) AS avg_val
FROM readings
WHERE site_id = :site_id
AND sensor_id LIKE :pattern
AND sensor_type IN ('cooling_supply', 'cooling_return')
AND recorded_at > :from_time
GROUP BY bucket, sensor_type
ORDER BY bucket ASC
"""), {"site_id": site_id, "pattern": f"{site_id}/cooling/{crac_id}/%", "from_time": from_time})
rows = result.mappings().all()
bucket_map: dict[str, dict] = {}
for row in rows:
b = str(row["bucket"])
if b not in bucket_map:
bucket_map[b] = {"bucket": b}
bucket_map[b][row["sensor_type"]] = float(row["avg_val"])
points = []
for b, vals in bucket_map.items():
supply = vals.get("cooling_supply")
ret = vals.get("cooling_return")
if supply is not None and ret is not None:
points.append({"bucket": b, "delta": round(ret - supply, 2)})
return sorted(points, key=lambda x: x["bucket"])
@router.get("/rack-history")
async def rack_history(
site_id: str = Query(...),
rack_id: str = Query(...),
hours: int = Query(6, ge=1, le=24),
session: AsyncSession = Depends(get_session),
):
"""Temperature and power history for a single rack."""
from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
try:
result = await session.execute(text("""
SELECT
time_bucket('5 minutes', recorded_at) AS bucket,
sensor_type,
ROUND(AVG(value)::numeric, 2) AS avg_value
FROM readings
WHERE site_id = :site_id
AND rack_id = :rack_id
AND sensor_type IN ('temperature', 'humidity', 'power_kw')
AND recorded_at > :from_time
GROUP BY bucket, sensor_type
ORDER BY bucket ASC
"""), {"site_id": site_id, "rack_id": rack_id, "from_time": from_time})
except Exception:
result = await session.execute(text("""
SELECT
date_trunc('minute', recorded_at) AS bucket,
sensor_type,
ROUND(AVG(value)::numeric, 2) AS avg_value
FROM readings
WHERE site_id = :site_id
AND rack_id = :rack_id
AND sensor_type IN ('temperature', 'humidity', 'power_kw')
AND recorded_at > :from_time
GROUP BY bucket, sensor_type
ORDER BY bucket ASC
"""), {"site_id": site_id, "rack_id": rack_id, "from_time": from_time})
rows = result.mappings().all()
# Pivot into {bucket, temperature, humidity, power_kw}
bucket_map: dict[str, dict] = {}
for row in rows:
b = str(row["bucket"])
if b not in bucket_map:
bucket_map[b] = {"bucket": b}
bucket_map[b][row["sensor_type"]] = float(row["avg_value"])
# Fetch active alarms for this rack
alarms = await session.execute(text("""
SELECT id, severity, message, state, triggered_at
FROM alarms
WHERE site_id = :site_id AND rack_id = :rack_id
ORDER BY triggered_at DESC
LIMIT 10
"""), {"site_id": site_id, "rack_id": rack_id})
return {
"rack_id": rack_id,
"site_id": site_id,
"history": list(bucket_map.values()),
"alarms": [dict(r) for r in alarms.mappings().all()],
}
@router.get("/particles")
async def particle_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest particle counts per room, with a derived ISO 14644-1 class."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            room_id, sensor_type, value, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('particles_0_5um', 'particles_5um')
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # room_id → {sensor_type: integer count}
    by_room: dict[str, dict] = {}
    for reading in result.mappings().all():
        counts = by_room.setdefault(reading["room_id"], {})
        counts[reading["sensor_type"]] = round(float(reading["value"]))

    report = []
    for room_cfg in ROOMS.get(site_id, []):
        room_id = room_cfg["room_id"]
        counts = by_room.get(room_id, {})
        count_05 = counts.get("particles_0_5um")
        # Simplified ISO 14644-1 classification from the 0.5µm count
        # (class 8 upper bound = 3.52M particles/m³; above that → 9).
        iso_class = None
        if count_05 is not None:
            iso_class = 9
            for limit, klass in ((10_000, 5), (100_000, 6),
                                 (1_000_000, 7), (3_520_000, 8)):
                if count_05 <= limit:
                    iso_class = klass
                    break
        report.append({
            "room_id": room_id,
            "particles_0_5um": count_05,
            "particles_5um": counts.get("particles_5um"),
            "iso_class": iso_class,
        })
    return report

View file

@ -0,0 +1,75 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Static fire-zone topology per site — mirrors the simulator config.
VESDA_ZONES = {
    "sg-01": [
        {"zone_id": "vesda-hall-a", "room_id": "hall-a"},
        {"zone_id": "vesda-hall-b", "room_id": "hall-b"},
    ]
}
# Numeric vesda_level reading → human-readable alarm stage.
LEVEL_MAP = {0: "normal", 1: "alert", 2: "action", 3: "fire"}
# Sensor types emitted by a VESDA unit; used to filter the readings query.
VESDA_TYPES = ("vesda_level", "vesda_obscuration", "vesda_det1", "vesda_det2",
               "vesda_power", "vesda_flow")
@router.get("/status")
async def fire_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest VESDA readings per fire zone, padded with safe defaults."""
    # VESDA_TYPES is a fixed server-side tuple, so inlining it is safe.
    types_sql = ", ".join(f"'{t}'" for t in VESDA_TYPES)
    result = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({types_sql})
          AND recorded_at > NOW() - INTERVAL '2 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # Boolean health channels share one "> 0.5 means OK" convention.
    bool_fields = {
        "vesda_det1": "detector_1_ok",
        "vesda_det2": "detector_2_ok",
        "vesda_power": "power_ok",
        "vesda_flow": "flow_ok",
    }
    zones: dict[str, dict] = {}
    for row in result.mappings().all():
        segments = row["sensor_id"].split("/")
        if len(segments) < 3:
            continue
        zone_id = segments[2]
        entry = zones.setdefault(zone_id, {"zone_id": zone_id})
        value = float(row["value"])
        kind = row["sensor_type"]
        if kind == "vesda_level":
            entry["level"] = LEVEL_MAP.get(round(value), "normal")
        elif kind == "vesda_obscuration":
            entry["obscuration_pct_m"] = round(value, 3)
        elif kind in bool_fields:
            entry[bool_fields[kind]] = value > 0.5

    room_by_zone = {z["zone_id"]: z["room_id"] for z in VESDA_ZONES.get(site_id, [])}
    defaults = {
        "level": "normal",
        "obscuration_pct_m": None,
        "detector_1_ok": True,
        "detector_2_ok": True,
        "power_ok": True,
        "flow_ok": True,
    }
    out = []
    for cfg in VESDA_ZONES.get(site_id, []):
        zone_id = cfg["zone_id"]
        entry = zones.get(zone_id, {"zone_id": zone_id})
        for key, default in defaults.items():
            entry.setdefault(key, default)
        entry["room_id"] = room_by_zone.get(zone_id)
        out.append(entry)
    return out

View file

@ -0,0 +1,33 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import text
from core.database import get_session
router = APIRouter()
@router.get("")
async def get_floor_layout(site_id: str, db: AsyncSession = Depends(get_session)):
    """Return the saved floor layout JSON for a site, or 404 if none exists."""
    query = text(
        "SELECT value FROM site_config WHERE site_id = :site_id AND key = 'floor_layout'"
    )
    rows = await db.execute(query, {"site_id": site_id})
    record = rows.fetchone()
    if record is None:
        raise HTTPException(status_code=404, detail="No floor layout saved for this site")
    return record[0]
@router.put("")
async def save_floor_layout(site_id: str, layout: dict, db: AsyncSession = Depends(get_session)):
    """Upsert the floor layout JSON for a site into site_config."""
    # Proper import instead of the original __import__("json") hack, which
    # hid the dependency and re-resolved the module on every request.
    import json

    await db.execute(
        text("""
            INSERT INTO site_config (site_id, key, value, updated_at)
            VALUES (:site_id, 'floor_layout', CAST(:value AS jsonb), NOW())
            ON CONFLICT (site_id, key)
            DO UPDATE SET value = EXCLUDED.value, updated_at = NOW()
        """),
        {"site_id": site_id, "value": json.dumps(layout)},
    )
    await db.commit()
    return {"ok": True}

View file

@ -0,0 +1,138 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Generator inventory per site — mirrors the simulator config.
GENERATORS = {"sg-01": ["gen-01"]}
# Raw sensor_type → API field name for generator telemetry.
GEN_FIELD_MAP = {
    "gen_fuel_pct": "fuel_pct",
    "gen_fuel_l": "fuel_litres",
    "gen_fuel_rate": "fuel_rate_lph",
    "gen_load_kw": "load_kw",
    "gen_load_pct": "load_pct",
    "gen_run_hours": "run_hours",
    "gen_voltage_v": "voltage_v",
    "gen_freq_hz": "frequency_hz",
    "gen_rpm": "engine_rpm",
    "gen_oil_press": "oil_pressure_bar",
    "gen_coolant_c": "coolant_temp_c",
    "gen_exhaust_c": "exhaust_temp_c",
    "gen_alt_temp_c": "alternator_temp_c",
    "gen_pf": "power_factor",
    "gen_batt_v": "battery_v",
}
# Numeric gen_state reading → lifecycle label.
STATE_MAP = {-1.0: "fault", 0.0: "standby", 1.0: "running", 2.0: "test"}
@router.get("/status")
async def generator_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest reading for each configured generator."""
    # Fixed server-side constants, so inlining into the IN (...) list is safe.
    types_sql = ", ".join(f"'{t}'" for t in [*GEN_FIELD_MAP.keys(), "gen_state"])
    result = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({types_sql})
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    readings: dict[str, dict] = {}
    for row in result.mappings().all():
        segments = row["sensor_id"].split("/")
        if len(segments) < 3:
            continue
        entry = readings.setdefault(segments[2], {"gen_id": segments[2]})
        field = GEN_FIELD_MAP.get(row["sensor_type"])
        if field:
            entry[field] = round(float(row["value"]), 2)
        elif row["sensor_type"] == "gen_state":
            entry["state"] = STATE_MAP.get(round(float(row["value"])), "standby")

    # Every configured generator appears; silent ones report "unknown".
    summary = []
    for gen_id in GENERATORS.get(site_id, []):
        entry = readings.get(gen_id, {"gen_id": gen_id, "state": "unknown"})
        entry.setdefault("state", "standby")
        summary.append(entry)
    return summary
# Sensor types exposed by the /history time-series endpoint.
HISTORY_METRICS = (
    "gen_load_pct", "gen_fuel_pct", "gen_coolant_c",
    "gen_exhaust_c", "gen_freq_hz", "gen_alt_temp_c",
)
@router.get("/history")
async def generator_history(
    site_id: str = Query(...),
    gen_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """5-minute bucketed time-series for a single generator.

    Falls back to 1-minute date_trunc buckets when TimescaleDB's
    time_bucket() is unavailable.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    # HISTORY_METRICS is a fixed constant tuple — safe to inline.
    types_sql = ", ".join(f"'{t}'" for t in HISTORY_METRICS)
    try:
        result = await session.execute(text(f"""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), {"site_id": site_id,
               "pattern": f"{site_id}/generator/{gen_id}/%",
               "from_time": from_time})
    except Exception:
        # time_bucket() is missing without TimescaleDB; the failed statement
        # aborts the transaction, so roll back before running the fallback.
        await session.rollback()
        result = await session.execute(text(f"""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), {"site_id": site_id,
               "pattern": f"{site_id}/generator/{gen_id}/%",
               "from_time": from_time})
    # Raw sensor_type → API field name. Hoisted out of the row loop — the
    # original rebuilt this dict once per row.
    key_map = {
        "gen_load_pct": "load_pct",
        "gen_fuel_pct": "fuel_pct",
        "gen_coolant_c": "coolant_temp_c",
        "gen_exhaust_c": "exhaust_temp_c",
        "gen_freq_hz": "frequency_hz",
        "gen_alt_temp_c": "alternator_temp_c",
    }
    # Pivot: bucket → {metric: value}
    buckets: dict[str, dict] = {}
    for row in result.mappings().all():
        b = row["bucket"].isoformat()
        buckets.setdefault(b, {"bucket": b})
        field = key_map.get(row["sensor_type"])
        if field:
            buckets[b][field] = float(row["avg_val"])
    return list(buckets.values())

View file

@ -0,0 +1,13 @@
from fastapi import APIRouter
from datetime import datetime, timezone
router = APIRouter()
@router.get("/health")
async def health_check():
    """Liveness probe: fixed service identity plus the current UTC time."""
    now = datetime.now(timezone.utc)
    return {
        "status": "ok",
        "service": "DemoBMS API",
        "timestamp": now.isoformat(),
    }

View file

@ -0,0 +1,57 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Static topology metadata — mirrors simulator config
# Static leak-sensor topology metadata — mirrors the simulator config.
# room_id may be None for sensors outside the data halls (e.g. the UPS room).
LEAK_SENSORS = {
    "sg-01": [
        {"sensor_id": "leak-01", "floor_zone": "crac-zone-a", "under_floor": True, "near_crac": True, "room_id": "hall-a"},
        {"sensor_id": "leak-02", "floor_zone": "server-row-b1", "under_floor": True, "near_crac": False, "room_id": "hall-b"},
        {"sensor_id": "leak-03", "floor_zone": "ups-room", "under_floor": False, "near_crac": False, "room_id": None},
    ]
}
@router.get("/status")
async def leak_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest state for all leak sensors, enriched with location metadata."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'leak'
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # sensor_id format: {site_id}/leak/{sensor_id} — short id is the third segment.
    latest: dict[str, dict] = {}
    for row in result.mappings().all():
        segments = row["sensor_id"].split("/")
        if len(segments) < 3:
            continue
        latest[segments[2]] = {
            "state": "detected" if float(row["value"]) > 0.5 else "clear",
            "recorded_at": str(row["recorded_at"]),
        }

    # Every configured sensor appears in the response; sensors without a
    # recent reading are reported as "unknown".
    report = []
    for cfg in LEAK_SENSORS.get(site_id, []):
        reading = latest.get(cfg["sensor_id"], {"state": "unknown", "recorded_at": None})
        report.append({**cfg, **reading})
    return report

View file

@ -0,0 +1,77 @@
import uuid
from datetime import datetime, timezone
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
router = APIRouter()
# In-memory store of maintenance windows (demo only — cleared on restart).
_windows: list[dict] = []
class WindowCreate(BaseModel):
    """Request body for creating a maintenance window."""
    site_id: str
    title: str
    target: str  # "all", a room_id like "hall-a", or a rack_id like "rack-A01"
    target_label: str  # human-readable label for the target
    start_dt: str  # ISO 8601 timestamp
    end_dt: str  # ISO 8601 timestamp
    suppress_alarms: bool = True
    notes: str = ""
def _window_status(w: dict) -> str:
now = datetime.now(timezone.utc).isoformat()
if w["end_dt"] < now:
return "expired"
if w["start_dt"] <= now:
return "active"
return "scheduled"
@router.get("")
async def list_windows(site_id: str = "sg-01"):
    """All maintenance windows for a site, each annotated with its status."""
    matching = (w for w in _windows if w["site_id"] == site_id)
    return [dict(w, status=_window_status(w)) for w in matching]
@router.post("", status_code=201)
async def create_window(body: WindowCreate):
    """Store a new maintenance window and echo it back with its status."""
    record = dict(
        id=str(uuid.uuid4())[:8],  # short random id — enough for a demo store
        site_id=body.site_id,
        title=body.title,
        target=body.target,
        target_label=body.target_label,
        start_dt=body.start_dt,
        end_dt=body.end_dt,
        suppress_alarms=body.suppress_alarms,
        notes=body.notes,
        created_at=datetime.now(timezone.utc).isoformat(),
    )
    _windows.append(record)
    return {**record, "status": _window_status(record)}
@router.delete("/{window_id}", status_code=204)
async def delete_window(window_id: str):
    """Remove a window by id; 404 when no such window exists."""
    global _windows
    remaining = [w for w in _windows if w["id"] != window_id]
    if len(remaining) == len(_windows):
        raise HTTPException(status_code=404, detail="Window not found")
    _windows = remaining
@router.get("/active")
async def active_windows(site_id: str = "sg-01"):
    """Returns only currently active windows — used by alarm page for suppression check.

    Timestamps are parsed to aware datetimes before comparing: plain string
    comparison mis-orders values whose UTC offset is spelled differently
    (e.g. a trailing 'Z' sorts after '+00:00'). Naive values are treated as UTC.
    """
    def _parse(stamp: str) -> datetime:
        # fromisoformat() rejects the 'Z' suffix before Python 3.11.
        dt = datetime.fromisoformat(stamp.replace("Z", "+00:00"))
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=timezone.utc)
        return dt

    now = datetime.now(timezone.utc)
    return [
        w for w in _windows
        if w["site_id"] == site_id
        and w["suppress_alarms"]
        and _parse(w["start_dt"]) <= now <= _parse(w["end_dt"])
    ]

View file

@ -0,0 +1,69 @@
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Static switch inventory per site — mirrors the simulator config.
SWITCHES = {
    "sg-01": [
        {"switch_id": "sw-core-01", "name": "Core Switch — Hall A", "model": "Cisco Catalyst C9300-48P", "room_id": "hall-a", "rack_id": "SG1A01.01", "port_count": 48, "role": "core"},
        {"switch_id": "sw-core-02", "name": "Core Switch — Hall B", "model": "Arista 7050CX3-32S", "room_id": "hall-b", "rack_id": "SG1B01.01", "port_count": 32, "role": "core"},
        {"switch_id": "sw-edge-01", "name": "Edge / Uplink Switch", "model": "Juniper EX4300-48T", "room_id": "hall-a", "rack_id": "SG1A01.05", "port_count": 48, "role": "edge"},
    ]
}
# Raw sensor_type → API field name for switch telemetry.
NET_FIELD_MAP = {
    "net_uptime_s": "uptime_s",
    "net_active_ports": "active_ports",
    "net_bw_in_mbps": "bandwidth_in_mbps",
    "net_bw_out_mbps": "bandwidth_out_mbps",
    "net_cpu_pct": "cpu_pct",
    "net_mem_pct": "mem_pct",
    "net_temp_c": "temperature_c",
    "net_pkt_loss_pct": "packet_loss_pct",
}
# Numeric net_state reading → link status label.
STATE_MAP = {0.0: "up", 1.0: "degraded", 2.0: "down"}
@router.get("/status")
async def network_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest reading for each configured network switch."""
    # Fixed server-side constants, so inlining into the IN (...) list is safe.
    types_sql = ", ".join(f"'{t}'" for t in [*NET_FIELD_MAP.keys(), "net_state"])
    result = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({types_sql})
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    telemetry: dict[str, dict] = {}
    for row in result.mappings().all():
        segments = row["sensor_id"].split("/")
        if len(segments) < 3:
            continue
        entry = telemetry.setdefault(segments[2], {})
        field = NET_FIELD_MAP.get(row["sensor_type"])
        if field:
            entry[field] = round(float(row["value"]), 2)
        elif row["sensor_type"] == "net_state":
            entry["state"] = STATE_MAP.get(round(float(row["value"])), "unknown")

    # Merge static inventory with live telemetry; silent switches → "unknown".
    report = []
    for cfg in SWITCHES.get(site_id, []):
        merged = {**cfg, **telemetry.get(cfg["switch_id"], {})}
        merged.setdefault("state", "unknown")
        report.append(merged)
    return report

460
backend/api/routes/power.py Normal file
View file

@ -0,0 +1,460 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Topology — mirrors simulator config
ROOMS = {
"sg-01": [
{"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)]},
{"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)]},
]
}
ATS_UNITS = {"sg-01": ["ats-01"]}
GENERATORS = {"sg-01": ["gen-01"]}
ACTIVE_FEED_MAP = {0.0: "utility-a", 1.0: "utility-b", 2.0: "generator"}
# Singapore commercial electricity tariff (SGD / kWh, approximate)
TARIFF_SGD_KWH = 0.298
@router.get("/rack-breakdown")
async def rack_power_breakdown(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest kW reading per rack, grouped by room."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, value AS power_kw
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'power_kw'
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    latest_kw = {r["rack_id"]: r["power_kw"] for r in result.mappings().all()}

    # Walk the static topology so racks without a recent reading still
    # appear in the response (with power_kw = None).
    return [
        {
            "room_id": room["room_id"],
            "racks": [
                {
                    "rack_id": rack_id,
                    "power_kw": round(float(latest_kw[rack_id]), 2)
                    if rack_id in latest_kw else None,
                }
                for rack_id in room["racks"]
            ],
        }
        for room in ROOMS.get(site_id, [])
    ]
@router.get("/room-history")
async def room_power_history(
    site_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Total power per room bucketed by 5 minutes — for a multi-line trend chart.

    Averages each sensor within the bucket first, then sums per room, so a
    sensor reporting more often than its peers doesn't skew the total.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    try:
        # Preferred path: TimescaleDB's time_bucket().
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(SUM(avg_per_rack)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        # time_bucket() is missing without TimescaleDB. The failed statement
        # aborts the transaction, so roll back before retrying the fallback,
        # otherwise it fails with InFailedSQLTransaction.
        await session.rollback()
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(SUM(avg_per_rack)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    return [dict(r) for r in result.mappings().all()]
@router.get("/ups")
async def ups_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest UPS readings, one summary dict per UPS.

    State comes from the ups_state sensor when present; otherwise a
    low-charge heuristic (<20%) marks the unit as on battery.
    """
    # Hoisted out of the row loop — the original rebuilt these dicts on
    # every iteration.
    key_map = {
        "ups_charge": "charge_pct",
        "ups_load": "load_pct",
        "ups_runtime": "runtime_min",
        "ups_state": "_state_raw",
        "ups_voltage": "voltage_v",
    }
    state_map = {0.0: "online", 1.0: "battery", 2.0: "overload"}
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('ups_charge', 'ups_load', 'ups_runtime', 'ups_state', 'ups_voltage')
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # sensor_id format: sg-01/power/ups-01/charge_pct → UPS id is segment 3.
    ups_data: dict[str, dict] = {}
    for row in result.mappings().all():
        parts = row["sensor_id"].split("/")
        if len(parts) < 3:
            continue
        entry = ups_data.setdefault(parts[2], {"ups_id": parts[2]})
        field = key_map.get(row["sensor_type"])
        if field:
            entry[field] = round(float(row["value"]), 1)
    result_list = []
    for ups_id, d in sorted(ups_data.items()):
        # Use stored state if available; fall back to charge heuristic only
        # if the state reading never arrived.
        state_raw = d.get("_state_raw")
        if state_raw is not None:
            state = state_map.get(round(state_raw), "online")
        else:
            charge = d.get("charge_pct")
            state = "battery" if (charge is not None and charge < 20.0) else "online"
        result_list.append({
            "ups_id": ups_id,
            "state": state,
            "charge_pct": d.get("charge_pct"),
            "load_pct": d.get("load_pct"),
            "runtime_min": d.get("runtime_min"),
            "voltage_v": d.get("voltage_v"),
        })
    return result_list
@router.get("/ups/history")
async def ups_history(
    site_id: str = Query(...),
    ups_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """5-minute bucketed trend for a single UPS: charge, load, runtime, voltage.

    Falls back to 1-minute date_trunc buckets when TimescaleDB's
    time_bucket() is unavailable.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    types_sql = "'ups_charge', 'ups_load', 'ups_runtime', 'ups_voltage'"
    try:
        result = await session.execute(text(f"""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), {"site_id": site_id,
               "pattern": f"{site_id}/power/{ups_id}/%",
               "from_time": from_time})
    except Exception:
        # time_bucket() is missing without TimescaleDB; the failed statement
        # aborts the transaction, so roll back before running the fallback.
        await session.rollback()
        result = await session.execute(text(f"""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), {"site_id": site_id,
               "pattern": f"{site_id}/power/{ups_id}/%",
               "from_time": from_time})
    # Raw sensor_type → API field name.
    KEY_MAP = {
        "ups_charge": "charge_pct",
        "ups_load": "load_pct",
        "ups_runtime": "runtime_min",
        "ups_voltage": "voltage_v",
    }
    # Pivot: bucket → {metric: value}
    buckets: dict[str, dict] = {}
    for row in result.mappings().all():
        b = row["bucket"].isoformat()
        buckets.setdefault(b, {"bucket": b})
        field = KEY_MAP.get(row["sensor_type"])
        if field:
            buckets[b][field] = float(row["avg_val"])
    return list(buckets.values())
@router.get("/ats")
async def ats_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest ATS transfer-switch readings, one dict per configured unit."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('ats_active', 'ats_state', 'ats_xfer_count',
                              'ats_xfer_ms', 'ats_ua_v', 'ats_ub_v', 'ats_gen_v')
          AND recorded_at > NOW() - INTERVAL '2 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # The three voltage channels map straight onto rounded output fields.
    voltage_fields = {
        "ats_ua_v": "utility_a_v",
        "ats_ub_v": "utility_b_v",
        "ats_gen_v": "generator_v",
    }
    units: dict[str, dict] = {}
    for row in result.mappings().all():
        # sensor_id: {site}/power/ats/{ats_id}/{key} → unit id is segment 4.
        segments = row["sensor_id"].split("/")
        if len(segments) < 4:
            continue
        entry = units.setdefault(segments[3], {"ats_id": segments[3]})
        value = float(row["value"])
        kind = row["sensor_type"]
        if kind == "ats_active":
            entry["active_feed"] = ACTIVE_FEED_MAP.get(round(value), "utility-a")
        elif kind == "ats_state":
            entry["state"] = "transferring" if value > 0.5 else "stable"
        elif kind == "ats_xfer_count":
            entry["transfer_count"] = int(value)
        elif kind == "ats_xfer_ms":
            entry["last_transfer_ms"] = round(value, 0) if value > 0 else None
        elif kind in voltage_fields:
            entry[voltage_fields[kind]] = round(value, 1)

    report = []
    for ats_id in ATS_UNITS.get(site_id, []):
        entry = units.get(ats_id, {"ats_id": ats_id})
        for key, default in (("state", "stable"), ("active_feed", "utility-a"),
                             ("transfer_count", 0), ("last_transfer_ms", None)):
            entry.setdefault(key, default)
        report.append(entry)
    return report
@router.get("/phase")
async def pdu_phase_breakdown(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Per-phase kW, amps, and imbalance % for every rack PDU.

    Racks from the static topology with no recent reading still appear,
    with None for each missing field.
    """
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('pdu_phase_a_kw', 'pdu_phase_b_kw', 'pdu_phase_c_kw',
                              'pdu_phase_a_a', 'pdu_phase_b_a', 'pdu_phase_c_a',
                              'pdu_imbalance')
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    # Raw sensor_type → API field name.
    FIELD_MAP = {
        "pdu_phase_a_kw": "phase_a_kw",
        "pdu_phase_b_kw": "phase_b_kw",
        "pdu_phase_c_kw": "phase_c_kw",
        "pdu_phase_a_a": "phase_a_a",
        "pdu_phase_b_a": "phase_b_a",
        "pdu_phase_c_a": "phase_c_a",
        "pdu_imbalance": "imbalance_pct",
    }
    # (rack_id, sensor_type) → latest rounded value. The original also built
    # an unused rack_id → room_id map; that dead code is removed — room
    # membership comes from the static ROOMS topology below.
    rack_map: dict[tuple, float] = {}
    for row in result.mappings().all():
        if row["rack_id"]:
            rack_map[(row["rack_id"], row["sensor_type"])] = round(float(row["value"]), 2)
    rooms = []
    for room in ROOMS.get(site_id, []):
        racks = []
        for rack_id in room["racks"]:
            entry: dict = {"rack_id": rack_id, "room_id": room["room_id"]}
            for s_type, field in FIELD_MAP.items():
                entry[field] = rack_map.get((rack_id, s_type))
            racks.append(entry)
        rooms.append({"room_id": room["room_id"], "racks": racks})
    return rooms
@router.get("/redundancy")
async def power_redundancy(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Compute power redundancy level: 2N, N+1, or N."""
    # UPS units: a unit counts as online when its latest charge is above 10%.
    ups_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'ups_charge'
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    ups_readings = ups_result.mappings().all()
    ups_total = len(ups_readings)
    ups_online = sum(1 for r in ups_readings if float(r["value"]) > 10.0)

    # ATS: which feed currently carries the load (None when no recent data).
    ats_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'ats_active'
          AND recorded_at > NOW() - INTERVAL '2 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    ats_readings = ats_result.mappings().all()
    active_feed = (
        ACTIVE_FEED_MAP.get(round(float(ats_readings[0]["value"])), "utility-a")
        if ats_readings else None
    )

    # Generator: available when its latest state is not fault (-1).
    gen_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'gen_state'
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    gen_ok = any(float(r["value"]) >= 0.0 for r in gen_result.mappings().all())

    # Derive the redundancy level from the component counts.
    if ups_total >= 2 and ups_online >= 2 and gen_ok:
        level = "2N"
    elif ups_online >= 1 and gen_ok:
        level = "N+1"
    else:
        level = "N"
    notes_by_level = {
        "2N": "Dual UPS + generator = 2N",
        "N+1": "N+1 — one redundant path available",
        "N": "Single path active — reduced redundancy",
    }
    return {
        "site_id": site_id,
        "level": level,
        "ups_total": ups_total,
        "ups_online": ups_online,
        "generator_ok": gen_ok,
        "ats_active_feed": active_feed,
        "notes": notes_by_level[level],
    }
@router.get("/utility")
async def utility_power(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Current total IT load and estimated month-to-date / annual energy cost.

    kWh is estimated by sampling one power_kw reading per sensor per minute
    and treating each sample as a 5-minute slice (SUM * 5/60), matching the
    simulator's reporting cadence.
    """
    # Latest total IT load.
    kw_result = await session.execute(text("""
        SELECT ROUND(SUM(value)::numeric, 2) AS total_kw
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), {"site_id": site_id})
    kw_row = kw_result.mappings().first()
    total_kw = float(kw_row["total_kw"] or 0) if kw_row else 0.0
    # Estimated month-to-date kWh (from readings since start of month).
    from_month = datetime.now(timezone.utc).replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    kwh_result = await session.execute(text("""
        SELECT ROUND((SUM(value) * 5.0 / 60.0)::numeric, 1) AS kwh_mtd
        FROM (
            SELECT DISTINCT ON (sensor_id, date_trunc('minute', recorded_at))
                sensor_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > :from_month
            ORDER BY sensor_id, date_trunc('minute', recorded_at), recorded_at DESC
        ) bucketed
    """), {"site_id": site_id, "from_month": from_month})
    kwh_row = kwh_result.mappings().first()
    kwh_mtd = float(kwh_row["kwh_mtd"] or 0) if kwh_row else 0.0
    cost_mtd = round(kwh_mtd * TARIFF_SGD_KWH, 2)
    # Annualised from month-to-date pace. now.day is always >= 1, so the
    # original's day-zero fallback (and its unused days_in_month constant)
    # was dead code and has been removed.
    now = datetime.now(timezone.utc)
    kwh_annual_est = round(kwh_mtd / now.day * 365, 0)
    cost_annual_est = round(kwh_annual_est * TARIFF_SGD_KWH, 2)
    return {
        "site_id": site_id,
        "total_kw": total_kw,
        "tariff_sgd_kwh": TARIFF_SGD_KWH,
        "kwh_month_to_date": kwh_mtd,
        "cost_sgd_mtd": cost_mtd,
        "kwh_annual_est": kwh_annual_est,
        "cost_sgd_annual_est": cost_annual_est,
        "currency": "SGD",
    }

View file

@ -0,0 +1,229 @@
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
@router.get("/latest")
async def get_latest_readings(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Most recent reading per sensor for a site (last 10 minutes)."""
    rows = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, site_id, room_id, rack_id, value, unit, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    return [dict(record) for record in rows.mappings().all()]
@router.get("/kpis")
async def get_site_kpis(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Aggregate KPIs for the overview dashboard."""
    power = await session.execute(text("""
        SELECT COALESCE(SUM(value), 0) AS total_power_kw
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, value
            FROM readings
            WHERE site_id = :site_id AND sensor_type = 'power_kw'
              AND recorded_at > NOW() - INTERVAL '5 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), {"site_id": site_id})
    temp = await session.execute(text("""
        SELECT COALESCE(AVG(value), 0) AS avg_temp
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, value
            FROM readings
            WHERE site_id = :site_id AND sensor_type = 'temperature'
              AND recorded_at > NOW() - INTERVAL '5 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), {"site_id": site_id})
    alarms = await session.execute(text("""
        SELECT COUNT(*) AS alarm_count
        FROM alarms
        WHERE site_id = :site_id AND state = 'active'
    """), {"site_id": site_id})

    total_kw = float(power.mappings().one()["total_power_kw"])
    avg_temp = float(temp.mappings().one()["avg_temp"])
    active_alarm_count = int(alarms.mappings().one()["alarm_count"])
    # NOTE(review): this expression simplifies to the constant 1/0.87 ≈ 1.15
    # whenever there is load — presumably a fixed demo PUE; confirm intent.
    pue = round(total_kw / (total_kw * 0.87), 2) if total_kw > 0 else 0.0
    return {
        "total_power_kw": round(total_kw, 1),
        "pue": pue,
        "avg_temperature": round(avg_temp, 1),
        "active_alarms": active_alarm_count,
    }
@router.get("/site-power-history")
async def get_site_power_history(
    site_id: str = Query(...),
    hours: int = Query(1, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Total power (kW) bucketed by 5 minutes — for the power trend chart.

    Averages each sensor within the bucket first, then sums, so a sensor
    reporting more often than its peers doesn't skew the total.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    try:
        # Preferred path: TimescaleDB's time_bucket().
        result = await session.execute(text("""
            SELECT bucket, ROUND(SUM(avg_per_sensor)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    sensor_id,
                    AVG(value) AS avg_per_sensor
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id
            ) per_sensor
            GROUP BY bucket
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        # time_bucket() is missing without TimescaleDB. The failed statement
        # aborts the transaction, so roll back before retrying the fallback.
        await session.rollback()
        result = await session.execute(text("""
            SELECT bucket, ROUND(SUM(avg_per_sensor)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    sensor_id,
                    AVG(value) AS avg_per_sensor
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id
            ) per_sensor
            GROUP BY bucket
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    return [dict(r) for r in result.mappings().all()]
@router.get("/room-temp-history")
async def get_room_temp_history(
    site_id: str = Query(...),
    hours: int = Query(1, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Average temperature per room bucketed by 5 minutes — for the temp trend chart.

    Averages each sensor within the bucket first, then averages per room,
    so a sensor reporting more often than its peers doesn't skew the mean.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    try:
        # Preferred path: TimescaleDB's time_bucket().
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 2) AS avg_temp
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'temperature'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        # time_bucket() is missing without TimescaleDB. The failed statement
        # aborts the transaction, so roll back before retrying the fallback.
        await session.rollback()
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 2) AS avg_temp
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'temperature'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    return [dict(r) for r in result.mappings().all()]
@router.get("/room-status")
async def get_room_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Current per-room summary: avg temp, total power, alarm count, status.

    A room is "critical" if it has any critical alarm or avg temp >= 30 °C,
    "warning" on any warning alarm or avg temp >= 26 °C, otherwise "ok".
    """
    # Latest temperature per sensor in the last 10 minutes, averaged per room.
    temp = await session.execute(text("""
        SELECT room_id, ROUND(AVG(value)::numeric, 1) AS avg_temp
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, room_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'temperature'
              AND room_id IS NOT NULL
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
        GROUP BY room_id
    """), {"site_id": site_id})
    # Latest power per sensor in the last 10 minutes, summed per room.
    power = await session.execute(text("""
        SELECT room_id, ROUND(SUM(value)::numeric, 1) AS total_kw
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, room_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND room_id IS NOT NULL
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
        GROUP BY room_id
    """), {"site_id": site_id})
    # Rank severity explicitly: a plain MAX(severity) on text would return
    # 'warning' over 'critical' ('w' > 'c' alphabetically), understating
    # the worst severity in a room that has both.
    alarm_counts = await session.execute(text("""
        SELECT room_id, COUNT(*) AS alarm_count,
               CASE
                   WHEN BOOL_OR(severity = 'critical') THEN 'critical'
                   WHEN BOOL_OR(severity = 'warning') THEN 'warning'
                   ELSE MIN(severity)
               END AS worst_severity
        FROM alarms
        WHERE site_id = :site_id AND state = 'active' AND room_id IS NOT NULL
        GROUP BY room_id
    """), {"site_id": site_id})
    temp_map = {r["room_id"]: float(r["avg_temp"]) for r in temp.mappings().all()}
    power_map = {r["room_id"]: float(r["total_kw"]) for r in power.mappings().all()}
    alarm_map = {r["room_id"]: (int(r["alarm_count"]), r["worst_severity"])
                 for r in alarm_counts.mappings().all()}
    # Union of all three maps, so a room with active alarms but no recent
    # readings still shows up (previously it was silently dropped).
    rooms = sorted(set(temp_map) | set(power_map) | set(alarm_map))
    result = []
    for room_id in rooms:
        avg_temp = temp_map.get(room_id, 0.0)
        alarm_cnt, ws = alarm_map.get(room_id, (0, None))
        status = "ok"
        if ws == "critical" or avg_temp >= 30:
            status = "critical"
        elif ws == "warning" or avg_temp >= 26:
            status = "warning"
        result.append({
            "room_id": room_id,
            "avg_temp": avg_temp,
            "total_kw": power_map.get(room_id, 0.0),
            "alarm_count": alarm_cnt,
            "status": status,
        })
    return result

View file

@ -0,0 +1,356 @@
import csv
import io
from datetime import datetime, timezone, timedelta
from fastapi import APIRouter, Depends, Query
from fastapi.responses import StreamingResponse
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
router = APIRouter()
# Electricity tariff used for cost estimates, in SGD per kWh.
TARIFF_SGD_KWH = 0.298
# Static site topology: rooms, their rack ids, and the CRAC unit serving each
# room, keyed by site id. Rack ids are generated as two rows of 20 per hall.
# NOTE(review): mirrors the simulator layout — keep in sync with it.
ROOMS = {
    "sg-01": [
        {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-01"},
        {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)], "crac_id": "crac-02"},
    ]
}
# UPS unit ids per site, used for the uptime section of the summary report.
UPS_IDS = {"sg-01": ["ups-01", "ups-02"]}
@router.get("/energy")
async def energy_report(
    site_id: str = Query(...),
    days: int = Query(30, ge=1, le=90),
    session: AsyncSession = Depends(get_session),
):
    """kWh consumption, cost, and 30-day PUE trend."""
    from_time = datetime.now(timezone.utc) - timedelta(days=days)
    params = {"site_id": site_id, "from_time": from_time}
    # Total kWh over period (5-min buckets × kW / 12 = kWh per bucket)
    try:
        kwh_result = await session.execute(text("""
            SELECT ROUND((SUM(avg_kw) / 12.0)::numeric, 1) AS kwh_total
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    AVG(value) AS avg_kw
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND recorded_at > :from_time
                GROUP BY bucket
            ) bucketed
        """), params)
    except Exception:
        # The failed statement aborts the Postgres transaction; roll back
        # before retrying with the non-TimescaleDB fallback query.
        await session.rollback()
        kwh_result = await session.execute(text("""
            SELECT ROUND((SUM(avg_kw) / 12.0)::numeric, 1) AS kwh_total
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    AVG(value) AS avg_kw
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND recorded_at > :from_time
                GROUP BY bucket
            ) bucketed
        """), params)
    kwh_row = kwh_result.mappings().first()
    kwh_total = float(kwh_row["kwh_total"] or 0) if kwh_row else 0.0
    cost_sgd = round(kwh_total * TARIFF_SGD_KWH, 2)
    # PUE daily average (IT load / total facility load — approximated as IT load / 0.85 overhead)
    # Since we only have IT load, estimate PUE = total_facility / it_load ≈ 1.4–1.6
    # For a proper PUE we'd need a facility meter — use a day-by-day IT load trend instead
    try:
        pue_result = await session.execute(text("""
            SELECT
                time_bucket('1 day', recorded_at) AS day,
                ROUND(AVG(value)::numeric, 2) AS avg_it_kw
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > :from_time
            GROUP BY day
            ORDER BY day ASC
        """), params)
    except Exception:
        # Same rollback requirement as above before the fallback runs.
        await session.rollback()
        pue_result = await session.execute(text("""
            SELECT
                date_trunc('day', recorded_at) AS day,
                ROUND(AVG(value)::numeric, 2) AS avg_it_kw
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > :from_time
            GROUP BY day
            ORDER BY day ASC
        """), params)
    # Estimated PUE: assume ~40% overhead (cooling + lighting + UPS losses)
    OVERHEAD_FACTOR = 1.40
    pue_trend = [
        {
            "day": str(r["day"]),
            "avg_it_kw": float(r["avg_it_kw"]),
            "pue_est": round(OVERHEAD_FACTOR, 2),
        }
        for r in pue_result.mappings().all()
    ]
    return {
        "site_id": site_id,
        "period_days": days,
        "from_date": from_time.date().isoformat(),
        "to_date": datetime.now(timezone.utc).date().isoformat(),
        "kwh_total": kwh_total,
        "cost_sgd": cost_sgd,
        "tariff_sgd_kwh": TARIFF_SGD_KWH,
        "currency": "SGD",
        "pue_estimated": OVERHEAD_FACTOR,
        "pue_trend": pue_trend,
    }
async def _bucket_uptime_pct(
    session: AsyncSession,
    site_id: str,
    pattern: str,
    sensor_type: str,
    from_time: datetime,
    total_buckets: int,
) -> float:
    """Uptime %% for one device: distinct 5-min reading buckets seen vs expected.

    Tries TimescaleDB time_bucket(); on failure rolls the aborted transaction
    back and falls back to date_trunc('minute').
    NOTE(review): the fallback counts 1-minute buckets but still divides by
    the 5-minute bucket total, so it can overstate uptime (capped at 100).
    """
    try:
        r = await session.execute(text("""
            SELECT COUNT(DISTINCT time_bucket('5 minutes', recorded_at)) AS buckets
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type = :sensor_type
              AND recorded_at > :from_time
        """), {"site_id": site_id, "pattern": pattern,
               "sensor_type": sensor_type, "from_time": from_time})
    except Exception:
        # The failed statement aborts the Postgres transaction; roll back
        # before retrying, or the fallback query would fail too.
        await session.rollback()
        r = await session.execute(text("""
            SELECT COUNT(DISTINCT date_trunc('minute', recorded_at)) AS buckets
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type = :sensor_type
              AND recorded_at > :from_time
        """), {"site_id": site_id, "pattern": pattern,
               "sensor_type": sensor_type, "from_time": from_time})
    row = r.mappings().first()
    buckets = int(row["buckets"]) if row and row["buckets"] else 0
    return round(min(100.0, buckets / total_buckets * 100), 1)


@router.get("/summary")
async def site_summary(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Site-level summary: KPIs, alarm stats, CRAC uptime%, UPS uptime%."""
    # KPIs: latest reading per sensor in the last 10 minutes — power summed,
    # temperature averaged.
    kpi_res = await session.execute(text("""
        SELECT
            ROUND(SUM(CASE WHEN sensor_type = 'power_kw' THEN value END)::numeric, 2) AS total_power_kw,
            ROUND(AVG(CASE WHEN sensor_type = 'temperature' THEN value END)::numeric, 1) AS avg_temperature
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, sensor_type, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type IN ('power_kw', 'temperature')
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), {"site_id": site_id})
    kpi_row = kpi_res.mappings().first() or {}
    # Alarm stats (all-time by state/severity)
    alarm_res = await session.execute(text("""
        SELECT state, severity, COUNT(*) AS cnt
        FROM alarms
        WHERE site_id = :site_id
        GROUP BY state, severity
    """), {"site_id": site_id})
    alarm_stats: dict = {"active": 0, "acknowledged": 0, "resolved": 0, "critical": 0, "warning": 0}
    for row in alarm_res.mappings().all():
        if row["state"] in alarm_stats:
            alarm_stats[row["state"]] += int(row["cnt"])
        if row["severity"] in ("critical", "warning"):
            alarm_stats[row["severity"]] += int(row["cnt"])
    # CRAC / UPS uptime % over last 24h, via the shared helper above.
    from_24h = datetime.now(timezone.utc) - timedelta(hours=24)
    total_buckets = 24 * 12  # one 5-min bucket per 5 minutes
    cracs = []
    for room in ROOMS.get(site_id, []):
        crac_id = room["crac_id"]
        cracs.append({
            "crac_id": crac_id,
            "room_id": room["room_id"],
            "uptime_pct": await _bucket_uptime_pct(
                session, site_id, f"{site_id}/cooling/{crac_id}/%",
                "cooling_supply", from_24h, total_buckets),
        })
    ups_units = []
    for ups_id in UPS_IDS.get(site_id, []):
        ups_units.append({
            "ups_id": ups_id,
            "uptime_pct": await _bucket_uptime_pct(
                session, site_id, f"{site_id}/ups/{ups_id}/%",
                "ups_charge", from_24h, total_buckets),
        })
    return {
        "site_id": site_id,
        "generated_at": datetime.now(timezone.utc).isoformat(),
        "kpis": {
            "total_power_kw": float(kpi_row.get("total_power_kw") or 0),
            "avg_temperature": float(kpi_row.get("avg_temperature") or 0),
        },
        "alarm_stats": alarm_stats,
        "crac_uptime": cracs,
        "ups_uptime": ups_units,
    }
@router.get("/export/power")
async def export_power(
    site_id: str = Query(...),
    hours: int = Query(24, ge=1, le=168),
    session: AsyncSession = Depends(get_session),
):
    """Download power history as CSV (timestamp, room_id, total_kw)."""
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    params = {"site_id": site_id, "from_time": from_time}
    try:
        result = await session.execute(text("""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                room_id,
                ROUND(SUM(value)::numeric, 2) AS total_kw
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND room_id IS NOT NULL
              AND recorded_at > :from_time
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), params)
    except Exception:
        # The failed statement aborts the Postgres transaction; roll back
        # before the non-TimescaleDB fallback query.
        await session.rollback()
        result = await session.execute(text("""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                room_id,
                ROUND(SUM(value)::numeric, 2) AS total_kw
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND room_id IS NOT NULL
              AND recorded_at > :from_time
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), params)
    output = io.StringIO()
    writer = csv.writer(output)
    writer.writerow(["timestamp", "room_id", "total_kw"])
    for row in result.mappings().all():
        writer.writerow([row["bucket"], row["room_id"], row["total_kw"]])
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename=power_{site_id}_{hours}h.csv"},
    )
@router.get("/export/temperature")
async def export_temperature(
    site_id: str = Query(...),
    hours: int = Query(24, ge=1, le=168),
    session: AsyncSession = Depends(get_session),
):
    """Download temperature history per rack as CSV (timestamp, room, rack, avg temp)."""
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    params = {"site_id": site_id, "from_time": from_time}
    try:
        result = await session.execute(text("""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                rack_id, room_id,
                ROUND(AVG(value)::numeric, 1) AS avg_temp
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'temperature'
              AND rack_id IS NOT NULL
              AND recorded_at > :from_time
            GROUP BY bucket, rack_id, room_id
            ORDER BY bucket ASC
        """), params)
    except Exception:
        # The failed statement aborts the Postgres transaction; roll back
        # before the non-TimescaleDB fallback query.
        await session.rollback()
        result = await session.execute(text("""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                rack_id, room_id,
                ROUND(AVG(value)::numeric, 1) AS avg_temp
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'temperature'
              AND rack_id IS NOT NULL
              AND recorded_at > :from_time
            GROUP BY bucket, rack_id, room_id
            ORDER BY bucket ASC
        """), params)
    output = io.StringIO()
    writer = csv.writer(output)
    writer.writerow(["timestamp", "room_id", "rack_id", "avg_temp_c"])
    for row in result.mappings().all():
        writer.writerow([row["bucket"], row["room_id"], row["rack_id"], row["avg_temp"]])
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename=temperature_{site_id}_{hours}h.csv"},
    )
@router.get("/export/alarms")
async def export_alarms(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Download full alarm log as CSV."""
    rows = await session.execute(text("""
        SELECT id, severity, message, state, room_id, rack_id, triggered_at
        FROM alarms
        WHERE site_id = :site_id
        ORDER BY triggered_at DESC
    """), {"site_id": site_id})
    # Single column list drives both the header and the per-row extraction.
    columns = ["id", "severity", "message", "state", "room_id", "rack_id", "triggered_at"]
    buf = io.StringIO()
    csv_out = csv.writer(buf)
    csv_out.writerow(columns)
    csv_out.writerows(
        [record[col] for col in columns]
        for record in rows.mappings().all()
    )
    return StreamingResponse(
        iter([buf.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename=alarms_{site_id}.csv"},
    )

View file

@ -0,0 +1,248 @@
"""
Scenario control API proxies trigger/reset commands to the MQTT broker
so the frontend can fire simulator scenarios over HTTP.
"""
import json
import asyncio
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import Optional
import aiomqtt
from core.config import settings
router = APIRouter()
# ── Scenario catalogue ───────────────────────────────────────────────────────
# Mirrors the definitions in simulators/scenarios/runner.py and compound.py.
# Kept here so the frontend has a single typed source of truth.
# Each entry: name (simulator key), label/description (UI copy), duration,
# compound (multi-bot sequence vs single fault), default_target, targets.
SCENARIOS = [
    # ── Compound (multi-bot, time-sequenced) ─────────────────────────────────
    {
        "name": "HOT_NIGHT",
        "label": "Hot Night",
        "description": "CRAC-01 compressor trips silently. The backup unit overworks itself. Rack temps climb, power draw rises, VESDA alert fires.",
        "duration": "~10 min",
        "compound": True,
        "default_target": None,
        "targets": [],
    },
    {
        "name": "GENERATOR_TEST_GONE_WRONG",
        "label": "Generator Test Gone Wrong",
        "description": "Planned ATS transfer to generator. Generator was low on fuel and faults after 15 min. UPS must carry full site load alone.",
        "duration": "~16 min",
        "compound": True,
        "default_target": None,
        "targets": [],
    },
    {
        "name": "SLOW_BURN",
        "label": "Slow Burn",
        "description": "A dirty filter nobody noticed. Airflow degrades for 30 min. Temps creep, humidity climbs, VESDA alerts, then CRAC trips on thermal protection.",
        "duration": "~30 min",
        "compound": True,
        "default_target": None,
        "targets": [],
    },
    {
        "name": "LAST_RESORT",
        "label": "Last Resort",
        "description": "Utility fails. Generator starts then faults after 2 minutes. UPS absorbs the full load, overheats, and VESDA escalates to fire.",
        "duration": "~9 min",
        "compound": True,
        "default_target": None,
        "targets": [],
    },
    # ── Cooling ──────────────────────────────────────────────────────────────
    {
        "name": "COOLING_FAILURE",
        "label": "Cooling Failure",
        "description": "CRAC unit goes offline — rack temperatures rise rapidly.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "crac-01",
        "targets": ["crac-01", "crac-02"],
    },
    {
        "name": "FAN_DEGRADATION",
        "label": "Fan Degradation",
        "description": "CRAC fan bearing wear — fan speed drops, ΔT rises over ~25 min.",
        "duration": "~25 min",
        "compound": False,
        "default_target": "crac-01",
        "targets": ["crac-01", "crac-02"],
    },
    {
        "name": "COMPRESSOR_FAULT",
        "label": "Compressor Fault",
        "description": "Compressor trips — unit drops to fan-only, cooling capacity collapses to ~8%.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "crac-01",
        "targets": ["crac-01", "crac-02"],
    },
    {
        "name": "DIRTY_FILTER",
        "label": "Dirty Filter",
        "description": "Filter fouling — ΔP rises, airflow and capacity degrade over time.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "crac-01",
        "targets": ["crac-01", "crac-02"],
    },
    {
        "name": "HIGH_TEMPERATURE",
        "label": "High Temperature",
        "description": "Gradual ambient heat rise — slower than a full cooling failure.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "hall-a",
        "targets": ["hall-a", "hall-b"],
    },
    {
        "name": "HUMIDITY_SPIKE",
        "label": "Humidity Spike",
        "description": "Humidity climbs — condensation / humidifier fault risk.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "hall-a",
        "targets": ["hall-a", "hall-b"],
    },
    {
        "name": "CHILLER_FAULT",
        "label": "Chiller Fault",
        "description": "Chiller plant trips — chilled water supply lost.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "chiller-01",
        "targets": ["chiller-01"],
    },
    # ── Power ────────────────────────────────────────────────────────────────
    {
        "name": "UPS_MAINS_FAILURE",
        "label": "UPS Mains Failure",
        "description": "Mains power lost — UPS switches to battery and drains.",
        "duration": "~60 min",
        "compound": False,
        "default_target": "ups-01",
        "targets": ["ups-01", "ups-02"],
    },
    {
        "name": "POWER_SPIKE",
        "label": "Power Spike",
        "description": "PDU load surges across a room by up to 50%.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "hall-a",
        "targets": ["hall-a", "hall-b"],
    },
    {
        "name": "RACK_OVERLOAD",
        "label": "Rack Overload",
        # Fixed garbled "~8595%" (lost en dash) → "~85–95%".
        "description": "Single rack redlines at ~85–95% of rated 10 kW capacity.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "SG1A01.10",
        "targets": ["SG1A01.10", "SG1B01.10"],
    },
    {
        "name": "PHASE_IMBALANCE",
        "label": "Phase Imbalance",
        "description": "PDU phase A overloads, phase C drops — imbalance flag triggers.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "SG1A01.10/pdu",
        "targets": ["SG1A01.10/pdu", "SG1B01.10/pdu"],
    },
    {
        "name": "ATS_TRANSFER",
        "label": "ATS Transfer",
        "description": "Utility feed lost — ATS transfers load to generator.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "ats-01",
        "targets": ["ats-01"],
    },
    {
        # NOTE(review): label/description describe the generator running after a
        # utility failure, while the name says FAILURE — confirm against
        # simulators/scenarios/runner.py that this mapping is intentional.
        "name": "GENERATOR_FAILURE",
        "label": "Generator Running",
        "description": "Generator starts and runs under load following a utility failure.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "gen-01",
        "targets": ["gen-01"],
    },
    {
        "name": "GENERATOR_LOW_FUEL",
        "label": "Generator Low Fuel",
        "description": "Generator fuel level drains to critical low.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "gen-01",
        "targets": ["gen-01"],
    },
    {
        "name": "GENERATOR_FAULT",
        "label": "Generator Fault",
        "description": "Generator fails — fault state, no output.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "gen-01",
        "targets": ["gen-01"],
    },
    # ── Environmental / Life Safety ──────────────────────────────────────────
    {
        "name": "LEAK_DETECTED",
        "label": "Leak Detected",
        "description": "Water leak sensor triggers a critical alarm.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "leak-01",
        "targets": ["leak-01", "leak-02", "leak-03"],
    },
    {
        "name": "VESDA_ALERT",
        "label": "VESDA Alert",
        "description": "Smoke obscuration rises into the Alert/Action band.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "vesda-hall-a",
        "targets": ["vesda-hall-a", "vesda-hall-b"],
    },
    {
        "name": "VESDA_FIRE",
        "label": "VESDA Fire",
        "description": "Smoke obscuration escalates to Fire level.",
        "duration": "ongoing",
        "compound": False,
        "default_target": "vesda-hall-a",
        "targets": ["vesda-hall-a", "vesda-hall-b"],
    },
]
# ── Request / Response models ────────────────────────────────────────────────
class TriggerRequest(BaseModel):
    """Body for POST /trigger.

    scenario: scenario name (not validated against SCENARIOS here).
    target: optional device/room id; None leaves target selection to the
        simulator side.
    """
    scenario: str
    target: Optional[str] = None
# ── Endpoints ────────────────────────────────────────────────────────────────
@router.get("")
async def list_scenarios():
    """Return the static scenario catalogue (SCENARIOS) for the frontend."""
    return SCENARIOS
@router.post("/trigger")
async def trigger_scenario(body: TriggerRequest):
    """Publish a scenario trigger command to the MQTT control topic.

    Returns 503 when the broker cannot be reached; the scenario name is
    forwarded as-is without validation against the catalogue.
    """
    command = json.dumps({"scenario": body.scenario, "target": body.target})
    try:
        async with aiomqtt.Client(settings.MQTT_HOST, port=settings.MQTT_PORT) as mqtt_client:
            # qos=1: at-least-once so the simulator does not miss the command.
            await mqtt_client.publish("bms/control/scenario", command, qos=1)
    except Exception as e:
        raise HTTPException(status_code=503, detail=f"MQTT unavailable: {e}")
    return {"ok": True, "scenario": body.scenario, "target": body.target}

View file

@ -0,0 +1,465 @@
import json
import logging
from typing import Any
from fastapi import APIRouter, Depends, HTTPException, Query
from pydantic import BaseModel
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from core.database import get_session
from services.alarm_engine import invalidate_threshold_cache
from services.seed import THRESHOLD_SEED_DATA, DEFAULT_SETTINGS, SITE_ID as DEFAULT_SITE
router = APIRouter()
logger = logging.getLogger(__name__)
# ── Pydantic models ────────────────────────────────────────────────────────────
class SensorCreate(BaseModel):
    """Payload for registering a new sensor device (POST /sensors)."""
    device_id: str
    name: str
    device_type: str
    # Optional placement — a sensor may belong to a room and/or rack.
    room_id: str | None = None
    rack_id: str | None = None
    protocol: str = "mqtt"
    # Protocol-specific settings blob; pydantic copies mutable defaults
    # per-instance, so the {} default is safe here.
    protocol_config: dict[str, Any] = {}
    enabled: bool = True
class SensorUpdate(BaseModel):
    """Partial update for a sensor (PUT /sensors/{id}).

    None means "leave unchanged"; consequently fields cannot be cleared to
    NULL through this model.
    """
    name: str | None = None
    device_type: str | None = None
    room_id: str | None = None
    rack_id: str | None = None
    protocol: str | None = None
    protocol_config: dict[str, Any] | None = None
    enabled: bool | None = None
class ThresholdUpdate(BaseModel):
    """Partial update for an alarm threshold; None means "leave unchanged"."""
    threshold_value: float | None = None
    # Validated by the endpoint: must be 'warning' or 'critical'.
    severity: str | None = None
    enabled: bool | None = None
class ThresholdCreate(BaseModel):
    """Payload for creating a custom alarm threshold rule."""
    sensor_type: str
    threshold_value: float
    # Validated by the endpoint: 'above' or 'below'.
    direction: str
    # Validated by the endpoint: 'warning' or 'critical'.
    severity: str
    message_template: str
class SettingsUpdate(BaseModel):
    """Arbitrary settings blob merged into a site settings category."""
    value: dict[str, Any]
# ── Sensors ────────────────────────────────────────────────────────────────────
@router.get("/sensors")
async def list_sensors(
    site_id: str = Query(DEFAULT_SITE),
    device_type: str | None = Query(None),
    room_id: str | None = Query(None),
    protocol: str | None = Query(None),
    session: AsyncSession = Depends(get_session),
):
    """List all sensor devices, with optional filters."""
    params: dict = {"site_id": site_id}
    clauses = ["site_id = :site_id"]
    # Each provided filter contributes one equality clause; the column name
    # is fixed here (never user input), so the f-string is injection-safe.
    for column, value in (
        ("device_type", device_type),
        ("room_id", room_id),
        ("protocol", protocol),
    ):
        if value:
            clauses.append(f"{column} = :{column}")
            params[column] = value
    where = " AND ".join(clauses)
    result = await session.execute(text(f"""
        SELECT id, site_id, device_id, name, device_type, room_id, rack_id,
               protocol, protocol_config, enabled, created_at, updated_at
        FROM sensors
        WHERE {where}
        ORDER BY device_type, room_id NULLS LAST, device_id
    """), params)
    return [dict(r) for r in result.mappings().all()]
@router.post("/sensors", status_code=201)
async def create_sensor(
    body: SensorCreate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Register a new sensor device."""
    # Plain fields copied as-is; protocol_config is serialized for the JSON column.
    params = {
        "site_id": site_id,
        "protocol_config": json.dumps(body.protocol_config),
        "enabled": body.enabled,
    }
    for field in ("device_id", "name", "device_type", "room_id", "rack_id", "protocol"):
        params[field] = getattr(body, field)
    inserted = await session.execute(text("""
        INSERT INTO sensors
            (site_id, device_id, name, device_type, room_id, rack_id,
             protocol, protocol_config, enabled)
        VALUES
            (:site_id, :device_id, :name, :device_type, :room_id, :rack_id,
             :protocol, :protocol_config, :enabled)
        RETURNING id, site_id, device_id, name, device_type, room_id, rack_id,
                  protocol, protocol_config, enabled, created_at, updated_at
    """), params)
    await session.commit()
    return dict(inserted.mappings().first())
@router.get("/sensors/{sensor_id}")
async def get_sensor(
    sensor_id: int,
    session: AsyncSession = Depends(get_session),
):
    """Get a single sensor device plus its most recent readings."""
    sensor_result = await session.execute(text("""
        SELECT id, site_id, device_id, name, device_type, room_id, rack_id,
               protocol, protocol_config, enabled, created_at, updated_at
        FROM sensors WHERE id = :id
    """), {"id": sensor_id})
    found = sensor_result.mappings().first()
    if found is None:
        raise HTTPException(status_code=404, detail="Sensor not found")
    sensor = dict(found)
    # Latest reading per sensor_type for this device in the last 10 minutes.
    # NOTE(review): %/_ in device_id are not escaped for LIKE — assumes device
    # ids never contain wildcard characters; confirm with the registry.
    like_pattern = f"{sensor['site_id']}%{sensor['device_id']}%"
    readings_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_type)
               sensor_type, value, unit, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND sensor_id LIKE :pattern
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_type, recorded_at DESC
    """), {"site_id": sensor["site_id"], "pattern": like_pattern})
    sensor["recent_readings"] = [dict(r) for r in readings_result.mappings().all()]
    return sensor
@router.put("/sensors/{sensor_id}")
async def update_sensor(
    sensor_id: int,
    body: SensorUpdate,
    session: AsyncSession = Depends(get_session),
):
    """Update a sensor device's config or toggle enabled.

    Only fields present (non-None) in the body are written; None fields are
    left untouched.
    """
    assignments: list[str] = []
    params: dict = {"id": sensor_id}
    # Column names are a fixed allow-list, so the f-strings below are safe.
    for column in ("name", "device_type", "room_id", "rack_id",
                   "protocol", "protocol_config", "enabled"):
        value = getattr(body, column)
        if value is None:
            continue
        if column == "protocol_config":
            value = json.dumps(value)  # JSON column expects serialized text
        assignments.append(f"{column} = :{column}")
        params[column] = value
    if not assignments:
        raise HTTPException(status_code=400, detail="No fields to update")
    assignments.append("updated_at = NOW()")
    set_clause = ", ".join(assignments)
    updated = await session.execute(text(f"""
        UPDATE sensors SET {set_clause}
        WHERE id = :id
        RETURNING id, site_id, device_id, name, device_type, room_id, rack_id,
                  protocol, protocol_config, enabled, created_at, updated_at
    """), params)
    record = updated.mappings().first()
    if record is None:
        raise HTTPException(status_code=404, detail="Sensor not found")
    await session.commit()
    return dict(record)
@router.delete("/sensors/{sensor_id}", status_code=204)
async def delete_sensor(
    sensor_id: int,
    session: AsyncSession = Depends(get_session),
):
    """Remove a sensor device from the registry."""
    deleted = await session.execute(
        text("DELETE FROM sensors WHERE id = :id RETURNING id"),
        {"id": sensor_id},
    )
    # RETURNING yields a row only when something was actually deleted.
    if deleted.fetchone() is None:
        raise HTTPException(status_code=404, detail="Sensor not found")
    await session.commit()
# ── Alarm thresholds ───────────────────────────────────────────────────────────
@router.get("/thresholds")
async def list_thresholds(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return all user-editable threshold rules (locked=false)."""
    rows = await session.execute(text("""
        SELECT id, site_id, sensor_type, threshold_value, direction,
               severity, message_template, enabled, locked, created_at, updated_at
        FROM alarm_thresholds
        WHERE site_id = :site_id AND locked = false
        ORDER BY id
    """), {"site_id": site_id})
    return [dict(mapping) for mapping in rows.mappings()]
@router.put("/thresholds/{threshold_id}")
async def update_threshold(
    threshold_id: int,
    body: ThresholdUpdate,
    session: AsyncSession = Depends(get_session),
):
    """Update a threshold value, severity, or enabled state."""
    # Locked (system) rules are immutable through the API.
    meta_result = await session.execute(
        text("SELECT locked, site_id FROM alarm_thresholds WHERE id = :id"),
        {"id": threshold_id},
    )
    meta = meta_result.mappings().first()
    if meta is None:
        raise HTTPException(status_code=404, detail="Threshold not found")
    if meta["locked"]:
        raise HTTPException(status_code=403, detail="Cannot modify locked threshold")
    if body.severity is not None and body.severity not in ("warning", "critical"):
        raise HTTPException(status_code=400, detail="severity must be warning or critical")
    # Only fields present (non-None) in the body are written.
    assignments: list[str] = []
    params: dict = {"id": threshold_id}
    for column in ("threshold_value", "severity", "enabled"):
        value = getattr(body, column)
        if value is not None:
            assignments.append(f"{column} = :{column}")
            params[column] = value
    if not assignments:
        raise HTTPException(status_code=400, detail="No fields to update")
    assignments.append("updated_at = NOW()")
    set_clause = ", ".join(assignments)
    updated = await session.execute(text(f"""
        UPDATE alarm_thresholds SET {set_clause}
        WHERE id = :id
        RETURNING id, site_id, sensor_type, threshold_value, direction,
                  severity, message_template, enabled, locked, updated_at
    """), params)
    await session.commit()
    # Alarm engine caches thresholds per site; drop the stale entry.
    invalidate_threshold_cache(meta["site_id"])
    return dict(updated.mappings().first())
@router.post("/thresholds", status_code=201)
async def create_threshold(
    body: ThresholdCreate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Add a custom threshold rule."""
    # Guard clauses: reject invalid enum values before touching the database.
    if body.direction not in ("above", "below"):
        raise HTTPException(status_code=400, detail="direction must be above or below")
    if body.severity not in ("warning", "critical"):
        raise HTTPException(status_code=400, detail="severity must be warning or critical")
    params = {"site_id": site_id}
    for field in ("sensor_type", "threshold_value", "direction", "severity", "message_template"):
        params[field] = getattr(body, field)
    inserted = await session.execute(text("""
        INSERT INTO alarm_thresholds
            (site_id, sensor_type, threshold_value, direction, severity, message_template, enabled, locked)
        VALUES
            (:site_id, :sensor_type, :threshold_value, :direction, :severity, :message_template, true, false)
        RETURNING id, site_id, sensor_type, threshold_value, direction,
                  severity, message_template, enabled, locked, created_at, updated_at
    """), params)
    await session.commit()
    # Alarm engine caches thresholds per site; drop the stale entry.
    invalidate_threshold_cache(site_id)
    return dict(inserted.mappings().first())
@router.delete("/thresholds/{threshold_id}", status_code=204)
async def delete_threshold(
    threshold_id: int,
    session: AsyncSession = Depends(get_session),
):
    """Delete a custom (non-locked) threshold rule."""
    meta_result = await session.execute(
        text("SELECT locked, site_id FROM alarm_thresholds WHERE id = :id"),
        {"id": threshold_id},
    )
    meta = meta_result.mappings().first()
    if meta is None:
        raise HTTPException(status_code=404, detail="Threshold not found")
    # Locked (system) rules are immutable through the API.
    if meta["locked"]:
        raise HTTPException(status_code=403, detail="Cannot delete locked threshold")
    await session.execute(
        text("DELETE FROM alarm_thresholds WHERE id = :id"),
        {"id": threshold_id},
    )
    await session.commit()
    # Alarm engine caches thresholds per site; drop the stale entry.
    invalidate_threshold_cache(meta["site_id"])
@router.post("/thresholds/reset")
async def reset_thresholds(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Delete all thresholds for a site and re-seed from defaults."""
    await session.execute(
        text("DELETE FROM alarm_thresholds WHERE site_id = :site_id"),
        {"site_id": site_id},
    )
    # Build all seed rows up front and insert them in one executemany
    # round-trip instead of one statement per rule.
    seed_rows = [
        {
            "site_id": site_id, "sensor_type": st, "threshold_value": tv,
            "direction": direction, "severity": severity,
            "message_template": msg, "locked": locked,
        }
        for st, tv, direction, severity, msg, locked in THRESHOLD_SEED_DATA
    ]
    if seed_rows:
        await session.execute(text("""
            INSERT INTO alarm_thresholds
                (site_id, sensor_type, threshold_value, direction, severity, message_template, enabled, locked)
            VALUES
                (:site_id, :sensor_type, :threshold_value, :direction, :severity, :message_template, true, :locked)
        """), seed_rows)
    await session.commit()
    # Alarm engine caches thresholds per site; drop the stale entry.
    invalidate_threshold_cache(site_id)
    # Lazy %-style args keep formatting out of the hot path (and off when
    # the level is disabled).
    logger.info("Alarm thresholds reset to defaults for %s", site_id)
    return {"ok": True, "count": len(THRESHOLD_SEED_DATA)}
# ── Generic settings (site / notifications / integrations / page_prefs) ────────
async def _get_settings(session: AsyncSession, site_id: str, category: str) -> dict:
    """Return the stored 'config' blob for a site/category, or its defaults."""
    query = text("""
        SELECT value FROM site_settings
        WHERE site_id = :site_id AND category = :category AND key = 'config'
    """)
    result = await session.execute(query, {"site_id": site_id, "category": category})
    row = result.mappings().first()
    if row is None:
        return DEFAULT_SETTINGS.get(category, {})
    stored = row["value"]
    # The value column may arrive already decoded (JSON type) or as raw text.
    return stored if isinstance(stored, dict) else json.loads(stored)
async def _put_settings(
    session: AsyncSession, site_id: str, category: str, updates: dict
) -> dict:
    """Shallow-merge *updates* over the stored settings, upsert, and return the result."""
    current = await _get_settings(session, site_id, category)
    merged = {**current, **updates}
    upsert = text("""
        INSERT INTO site_settings (site_id, category, key, value, updated_at)
        VALUES (:site_id, :category, 'config', :value, NOW())
        ON CONFLICT (site_id, category, key)
        DO UPDATE SET value = :value, updated_at = NOW()
    """)
    await session.execute(
        upsert,
        {"site_id": site_id, "category": category, "value": json.dumps(merged)},
    )
    await session.commit()
    return merged
@router.get("/site")
async def get_site_settings(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return the 'site' settings blob (stored value or defaults)."""
    settings = await _get_settings(session, site_id, "site")
    return settings
@router.put("/site")
async def update_site_settings(
    body: SettingsUpdate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Shallow-merge ``body.value`` into the stored 'site' settings and return the result."""
    merged = await _put_settings(session, site_id, "site", body.value)
    return merged
@router.get("/notifications")
async def get_notifications(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return the 'notifications' settings blob (stored value or defaults)."""
    settings = await _get_settings(session, site_id, "notifications")
    return settings
@router.put("/notifications")
async def update_notifications(
    body: SettingsUpdate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Shallow-merge ``body.value`` into the stored 'notifications' settings."""
    merged = await _put_settings(session, site_id, "notifications", body.value)
    return merged
@router.get("/integrations")
async def get_integrations(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return the 'integrations' settings blob (stored value or defaults)."""
    settings = await _get_settings(session, site_id, "integrations")
    return settings
@router.put("/integrations")
async def update_integrations(
    body: SettingsUpdate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Shallow-merge ``body.value`` into the stored 'integrations' settings."""
    merged = await _put_settings(session, site_id, "integrations", body.value)
    return merged
@router.get("/page-prefs")
async def get_page_prefs(
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Return the 'page_prefs' settings blob (stored value or defaults)."""
    settings = await _get_settings(session, site_id, "page_prefs")
    return settings
@router.put("/page-prefs")
async def update_page_prefs(
    body: SettingsUpdate,
    site_id: str = Query(DEFAULT_SITE),
    session: AsyncSession = Depends(get_session),
):
    """Shallow-merge ``body.value`` into the stored 'page_prefs' settings."""
    merged = await _put_settings(session, site_id, "page_prefs", body.value)
    return merged

View file

@@ -0,0 +1,36 @@
from fastapi import APIRouter
from pydantic import BaseModel
router = APIRouter()
class Site(BaseModel):
    """API response schema for a data-center site summary."""
    id: str  # short site slug, e.g. "sg-01" (see SITES stub below)
    name: str  # human-readable display name, e.g. "Singapore DC01"
    location: str  # city label, e.g. "Singapore"
    status: str  # health state; stub data uses "ok" / "warning"
    rack_count: int  # number of racks at the site
    total_power_kw: float  # total power in kilowatts
    pue: float  # power usage effectiveness ratio
# Static stub data backing the /sites endpoints — will be replaced by DB queries in Phase 2
SITES: list[Site] = [
    Site(id="sg-01", name="Singapore DC01", location="Singapore", status="ok", rack_count=128, total_power_kw=847.0, pue=1.42),
    Site(id="sg-02", name="Singapore DC02", location="Singapore", status="warning", rack_count=64, total_power_kw=412.0, pue=1.51),
    Site(id="lon-01", name="London DC01", location="London", status="ok", rack_count=96, total_power_kw=631.0, pue=1.38),
]
@router.get("", response_model=list[Site])
async def list_sites():
    """List every known site (static stub data for now)."""
    return SITES
@router.get("/{site_id}", response_model=Site)
async def get_site(site_id: str):
    """Return one site by id.

    Raises:
        HTTPException: 404 when no site with *site_id* exists.
    """
    # Hoisted out of the error branch: the original imported HTTPException at
    # the bottom of the function, only on the 404 path, which hides the
    # dependency. Kept function-local to preserve the module's minimal
    # top-level imports; the import is cached after the first call.
    from fastapi import HTTPException

    match = next((site for site in SITES if site.id == site_id), None)
    if match is None:
        raise HTTPException(status_code=404, detail="Site not found")
    return match

16
backend/api/routes/ws.py Normal file
View file

@@ -0,0 +1,16 @@
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from services.ws_manager import manager
router = APIRouter()
@router.websocket("/ws")
async def websocket_endpoint(ws: WebSocket):
    """Hold open a server→client push channel until the peer disconnects."""
    await manager.connect(ws)
    try:
        # Data flows only server → client; inbound frames are read and
        # discarded purely to keep the connection alive and detect closure.
        while True:
            await ws.receive_text()
    except WebSocketDisconnect:
        manager.disconnect(ws)