from datetime import datetime, timezone, timedelta
|
|
from fastapi import APIRouter, Depends, Query
|
|
from sqlalchemy import text
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from core.database import get_session
|
|
|
|
# FastAPI router for the power-domain endpoints; mounted by the application elsewhere.
router = APIRouter()

# Topology — mirrors simulator config
# Static rack layout per site: each room lists its rack IDs.
# Naming scheme: SG1<hall><row>.<position>, 20 racks per row, 2 rows per hall.
ROOMS = {
    "sg-01": [
        {"room_id": "hall-a", "racks": [f"SG1A01.{i:02d}" for i in range(1, 21)] + [f"SG1A02.{i:02d}" for i in range(1, 21)]},
        {"room_id": "hall-b", "racks": [f"SG1B01.{i:02d}" for i in range(1, 21)] + [f"SG1B02.{i:02d}" for i in range(1, 21)]},
    ]
}
# ATS (automatic transfer switch) and generator unit IDs per site.
ATS_UNITS = {"sg-01": ["ats-01"]}
GENERATORS = {"sg-01": ["gen-01"]}

# Maps the raw 'ats_active' sensor value to the feed currently supplying power.
ACTIVE_FEED_MAP = {0.0: "utility-a", 1.0: "utility-b", 2.0: "generator"}

# Singapore commercial electricity tariff (SGD / kWh, approximate)
TARIFF_SGD_KWH = 0.298
|
|
|
|
|
|
@router.get("/rack-breakdown")
async def rack_power_breakdown(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest kW reading per rack, grouped by room.

    Racks without a reading in the last 10 minutes report ``power_kw: None``.
    """
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, value AS power_kw
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'power_kw'
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # Most recent power value keyed by rack; a later row for the same rack wins.
    latest_kw = {row["rack_id"]: row["power_kw"] for row in result.mappings().all()}

    breakdown = []
    for room in ROOMS.get(site_id, []):
        rack_entries = [
            {
                "rack_id": rid,
                "power_kw": round(float(latest_kw[rid]), 2) if rid in latest_kw else None,
            }
            for rid in room["racks"]
        ]
        breakdown.append({"room_id": room["room_id"], "racks": rack_entries})

    return breakdown
|
|
|
|
|
|
@router.get("/room-history")
async def room_power_history(
    site_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Total power per room bucketed by 5 minutes — for a multi-line trend chart.

    Tries the TimescaleDB ``time_bucket`` aggregate first; when that fails
    (extension not installed) it falls back to plain ``date_trunc('minute', ...)``,
    which yields 1-minute buckets instead of 5-minute ones.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    try:
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(SUM(avg_per_rack)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    time_bucket('5 minutes', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    except Exception:
        # The failed statement leaves the transaction aborted; without a
        # rollback PostgreSQL rejects the fallback query too ("current
        # transaction is aborted, commands ignored until end of block").
        await session.rollback()
        result = await session.execute(text("""
            SELECT bucket, room_id, ROUND(SUM(avg_per_rack)::numeric, 1) AS total_kw
            FROM (
                SELECT
                    date_trunc('minute', recorded_at) AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'power_kw'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """), {"site_id": site_id, "from_time": from_time})
    return [dict(r) for r in result.mappings().all()]
|
|
|
|
|
|
@router.get("/ups")
async def ups_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest UPS readings.

    Groups the most recent reading of each UPS sensor type into one record
    per UPS unit, and derives a textual state from the stored state sensor
    (or a charge heuristic when the state sensor never reported).
    """
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('ups_charge', 'ups_load', 'ups_runtime', 'ups_state', 'ups_voltage')
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    rows = result.mappings().all()

    # sensor_type -> output field. Hoisted out of the loop below — it was
    # previously rebuilt on every iteration.
    KEY_MAP = {
        "ups_charge": "charge_pct",
        "ups_load": "load_pct",
        "ups_runtime": "runtime_min",
        "ups_state": "_state_raw",  # internal only; stripped from the response
        "ups_voltage": "voltage_v",
    }
    STATE_MAP = {0.0: "online", 1.0: "battery", 2.0: "overload"}

    # sensor_id format: sg-01/power/ups-01/charge_pct
    ups_data: dict[str, dict] = {}
    for row in rows:
        parts = row["sensor_id"].split("/")
        if len(parts) < 3:
            continue  # malformed sensor_id — skip
        ups_id = parts[2]
        entry = ups_data.setdefault(ups_id, {"ups_id": ups_id})
        field = KEY_MAP.get(row["sensor_type"])
        if field:
            entry[field] = round(float(row["value"]), 1)

    result_list = []
    for ups_id, d in sorted(ups_data.items()):
        # Use stored state if available; fall back to charge heuristic only
        # if the state sensor never arrived.
        state_raw = d.get("_state_raw")
        if state_raw is not None:
            state = STATE_MAP.get(round(state_raw), "online")
        else:
            charge = d.get("charge_pct")
            state = "battery" if (charge is not None and charge < 20.0) else "online"
        result_list.append({
            "ups_id": ups_id,
            "state": state,
            "charge_pct": d.get("charge_pct"),
            "load_pct": d.get("load_pct"),
            "runtime_min": d.get("runtime_min"),
            "voltage_v": d.get("voltage_v"),
        })
    return result_list
|
|
|
|
|
|
@router.get("/ups/history")
async def ups_history(
    site_id: str = Query(...),
    ups_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """5-minute bucketed trend for a single UPS: charge, load, runtime, voltage.

    Uses TimescaleDB ``time_bucket`` when available; falls back to
    ``date_trunc('minute', ...)`` (1-minute buckets) otherwise.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    # Safe to interpolate into the SQL: a fixed literal, never user input.
    types_sql = "'ups_charge', 'ups_load', 'ups_runtime', 'ups_voltage'"
    params = {
        "site_id": site_id,
        "pattern": f"{site_id}/power/{ups_id}/%",
        "from_time": from_time,
    }
    try:
        result = await session.execute(text(f"""
            SELECT
                time_bucket('5 minutes', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)
    except Exception:
        # The failed statement aborts the transaction; roll back before the
        # fallback query or it fails with "current transaction is aborted".
        await session.rollback()
        result = await session.execute(text(f"""
            SELECT
                date_trunc('minute', recorded_at) AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """), params)

    # Pivot rows (bucket, sensor_type, avg) into one record per bucket.
    KEY_MAP = {
        "ups_charge": "charge_pct",
        "ups_load": "load_pct",
        "ups_runtime": "runtime_min",
        "ups_voltage": "voltage_v",
    }
    buckets: dict[str, dict] = {}
    for row in result.mappings().all():
        b = row["bucket"].isoformat()
        buckets.setdefault(b, {"bucket": b})
        field = KEY_MAP.get(row["sensor_type"])
        if field:
            buckets[b][field] = float(row["avg_val"])

    return list(buckets.values())
|
|
|
|
|
|
@router.get("/ats")
async def ats_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest ATS transfer switch readings."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('ats_active', 'ats_state', 'ats_xfer_count',
                              'ats_xfer_ms', 'ats_ua_v', 'ats_ub_v', 'ats_gen_v')
          AND recorded_at > NOW() - INTERVAL '2 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # Voltage sensors share one treatment: round to a tenth of a volt.
    VOLT_FIELDS = {
        "ats_ua_v": "utility_a_v",
        "ats_ub_v": "utility_b_v",
        "ats_gen_v": "generator_v",
    }

    ats_data: dict[str, dict] = {}
    for row in result.mappings().all():
        # sensor_id: {site}/power/ats/{ats_id}/{key} → parts[3]
        parts = row["sensor_id"].split("/")
        if len(parts) < 4:
            continue
        unit = ats_data.setdefault(parts[3], {"ats_id": parts[3]})
        value = float(row["value"])
        kind = row["sensor_type"]
        if kind in VOLT_FIELDS:
            unit[VOLT_FIELDS[kind]] = round(value, 1)
        elif kind == "ats_active":
            unit["active_feed"] = ACTIVE_FEED_MAP.get(round(value), "utility-a")
        elif kind == "ats_state":
            unit["state"] = "transferring" if value > 0.5 else "stable"
        elif kind == "ats_xfer_count":
            unit["transfer_count"] = int(value)
        elif kind == "ats_xfer_ms":
            unit["last_transfer_ms"] = round(value, 0) if value > 0 else None

    # Every configured unit appears in the response, with safe defaults
    # filled in for fields that produced no recent readings.
    DEFAULTS = {
        "state": "stable",
        "active_feed": "utility-a",
        "transfer_count": 0,
        "last_transfer_ms": None,
    }
    out = []
    for unit_id in ATS_UNITS.get(site_id, []):
        record = ats_data.get(unit_id, {"ats_id": unit_id})
        for key, fallback in DEFAULTS.items():
            record.setdefault(key, fallback)
        out.append(record)
    return out
|
|
|
|
|
|
@router.get("/phase")
async def pdu_phase_breakdown(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Per-phase kW, amps, and imbalance % for every rack PDU.

    Returns the static room/rack topology with the latest value of each
    phase metric attached; metrics with no reading in the last 10 minutes
    are ``None``.
    """
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('pdu_phase_a_kw', 'pdu_phase_b_kw', 'pdu_phase_c_kw',
                              'pdu_phase_a_a', 'pdu_phase_b_a', 'pdu_phase_c_a',
                              'pdu_imbalance')
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # sensor_type -> output field name
    FIELD_MAP = {
        "pdu_phase_a_kw": "phase_a_kw",
        "pdu_phase_b_kw": "phase_b_kw",
        "pdu_phase_c_kw": "phase_c_kw",
        "pdu_phase_a_a": "phase_a_a",
        "pdu_phase_b_a": "phase_b_a",
        "pdu_phase_c_a": "phase_c_a",
        "pdu_imbalance": "imbalance_pct",
    }

    # Latest value per (rack, sensor_type). A rack→room lookup dict was
    # previously built here as well but never read — dead code, removed.
    rack_map: dict[tuple, float] = {}
    for row in result.mappings().all():
        if row["rack_id"]:
            rack_map[(row["rack_id"], row["sensor_type"])] = round(float(row["value"]), 2)

    rooms = []
    for room in ROOMS.get(site_id, []):
        racks = []
        for rack_id in room["racks"]:
            entry: dict = {"rack_id": rack_id, "room_id": room["room_id"]}
            for s_type, field in FIELD_MAP.items():
                entry[field] = rack_map.get((rack_id, s_type))
            racks.append(entry)
        rooms.append({"room_id": room["room_id"], "racks": racks})
    return rooms
|
|
|
|
|
|
@router.get("/redundancy")
async def power_redundancy(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Compute power redundancy level: 2N, N+1, or N."""
    # Count UPS units online (charge above the 10% "dead battery" threshold).
    ups_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'ups_charge'
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    ups_rows = ups_result.mappings().all()
    ups_online = sum(1 for r in ups_rows if float(r["value"]) > 10.0)
    ups_total = len(ups_rows)

    # ATS active feed — only the first ATS row is consulted.
    # NOTE(review): multi-ATS sites would ignore the others; confirm intent.
    ats_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'ats_active'
          AND recorded_at > NOW() - INTERVAL '2 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    ats_rows = ats_result.mappings().all()
    ats_active_feed = None
    if ats_rows:
        ats_active_feed = ACTIVE_FEED_MAP.get(round(float(ats_rows[0]["value"])), "utility-a")

    # Generator available (not fault)
    gen_result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type = 'gen_state'
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})
    gen_rows = gen_result.mappings().all()
    # NOTE(review): every non-negative gen_state counts as available, so this
    # is effectively "a recent gen_state reading exists" — confirm whether a
    # fault encoding should be excluded here.
    gen_available = any(float(r["value"]) >= 0.0 for r in gen_rows)

    # Derive level from redundant-path count.
    if ups_total >= 2 and ups_online >= 2 and gen_available:
        level = "2N"
    elif ups_online >= 1 and gen_available:
        level = "N+1"
    else:
        level = "N"

    return {
        "site_id": site_id,
        "level": level,
        "ups_total": ups_total,
        "ups_online": ups_online,
        "generator_ok": gen_available,
        "ats_active_feed": ats_active_feed,
        "notes": (
            "Dual UPS + generator = 2N" if level == "2N" else
            "Single path active — reduced redundancy" if level == "N" else
            "N+1 — one redundant path available"
        ),
    }
|
|
|
|
|
|
@router.get("/utility")
async def utility_power(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Current total IT load and estimated monthly energy cost."""
    # Latest total IT load: sum the freshest reading from every power sensor.
    kw_result = await session.execute(text("""
        SELECT ROUND(SUM(value)::numeric, 2) AS total_kw
        FROM (
            SELECT DISTINCT ON (sensor_id) sensor_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > NOW() - INTERVAL '10 minutes'
            ORDER BY sensor_id, recorded_at DESC
        ) latest
    """), {"site_id": site_id})
    kw_row = kw_result.mappings().first()
    total_kw = float(kw_row["total_kw"] or 0) if kw_row else 0.0

    # Estimated month-to-date kWh (from readings since start of month).
    # NOTE(review): the * 5.0 / 60.0 factor treats each per-minute sample as
    # covering 5 minutes — consistent only with a 5-minute reporting cadence;
    # confirm against the simulator's sampling interval.
    from_month = datetime.now(timezone.utc).replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    kwh_result = await session.execute(text("""
        SELECT ROUND((SUM(value) * 5.0 / 60.0)::numeric, 1) AS kwh_mtd
        FROM (
            SELECT DISTINCT ON (sensor_id, date_trunc('minute', recorded_at))
                sensor_id, value
            FROM readings
            WHERE site_id = :site_id
              AND sensor_type = 'power_kw'
              AND recorded_at > :from_month
            ORDER BY sensor_id, date_trunc('minute', recorded_at), recorded_at DESC
        ) bucketed
    """), {"site_id": site_id, "from_month": from_month})
    kwh_row = kwh_result.mappings().first()
    kwh_mtd = float(kwh_row["kwh_mtd"] or 0) if kwh_row else 0.0

    cost_mtd = round(kwh_mtd * TARIFF_SGD_KWH, 2)

    # Annualised from month-to-date pace. datetime.day is always >= 1, so the
    # division is safe (a previously unused days_in_month constant and an
    # always-true day > 0 guard were removed).
    day_of_month = datetime.now(timezone.utc).day
    kwh_annual_est = round(kwh_mtd / day_of_month * 365, 0)
    cost_annual_est = round(kwh_annual_est * TARIFF_SGD_KWH, 2)

    return {
        "site_id": site_id,
        "total_kw": total_kw,
        "tariff_sgd_kwh": TARIFF_SGD_KWH,
        "kwh_month_to_date": kwh_mtd,
        "cost_sgd_mtd": cost_mtd,
        "kwh_annual_est": kwh_annual_est,
        "cost_sgd_annual_est": cost_annual_est,
        "currency": "SGD",
    }
|