440 lines
17 KiB
Python
440 lines
17 KiB
Python
from datetime import datetime, timezone, timedelta
|
|
from fastapi import APIRouter, Depends, Query
|
|
from sqlalchemy import text
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from core.database import get_session
|
|
|
|
# Environmental-monitoring endpoints; mounted by the application under its own prefix.
router = APIRouter()
|
|
|
|
def _rack_range(prefix: str) -> list[str]:
    """Rack IDs ``prefix.01`` .. ``prefix.20`` for one row of a hall."""
    return [f"{prefix}.{n:02d}" for n in range(1, 21)]


# Static room layout per site: each room lists its rack IDs and the CRAC unit serving it.
ROOMS = {
    "sg-01": [
        {
            "room_id": "hall-a",
            "racks": _rack_range("SG1A01") + _rack_range("SG1A02"),
            "crac_id": "crac-01",
        },
        {
            "room_id": "hall-b",
            "racks": _rack_range("SG1B01") + _rack_range("SG1B02"),
            "crac_id": "crac-02",
        },
    ]
}
|
|
|
|
|
|
@router.get("/rack-readings")
async def rack_env_readings(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest temperature and humidity per rack, grouped by room.

    Only readings from the last 10 minutes are considered; a rack with no
    recent reading reports ``None`` for the missing metric.
    """
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            rack_id, room_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('temperature', 'humidity')
          AND rack_id IS NOT NULL
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # Pivot the latest readings into a (rack_id, sensor_type) -> value lookup.
    readings: dict[tuple, float] = {}
    for rec in result.mappings().all():
        readings[(rec["rack_id"], rec["sensor_type"])] = float(rec["value"])

    def metric(rack: str, kind: str):
        # One decimal place, or None when no recent reading exists.
        value = readings.get((rack, kind))
        return None if value is None else round(value, 1)

    return [
        {
            "room_id": room["room_id"],
            "racks": [
                {
                    "rack_id": rid,
                    "temperature": metric(rid, "temperature"),
                    "humidity": metric(rid, "humidity"),
                }
                for rid in room["racks"]
            ],
        }
        for room in ROOMS.get(site_id, [])
    ]
|
|
|
|
|
|
@router.get("/humidity-history")
async def humidity_history(
    site_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Average humidity per room, bucketed in time.

    Prefers TimescaleDB's ``time_bucket('5 minutes', ...)``; when that function
    is unavailable the query is retried with plain ``date_trunc('minute', ...)``
    (1-minute buckets). Values are averaged per sensor first so rooms with more
    sensors don't dominate, then averaged per room.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)

    def _sql(bucket_expr: str) -> str:
        # bucket_expr is one of two hard-coded SQL fragments — never user input.
        return f"""
            SELECT bucket, room_id, ROUND(AVG(avg_per_rack)::numeric, 1) AS avg_humidity
            FROM (
                SELECT
                    {bucket_expr} AS bucket,
                    sensor_id, room_id,
                    AVG(value) AS avg_per_rack
                FROM readings
                WHERE site_id = :site_id
                  AND sensor_type = 'humidity'
                  AND room_id IS NOT NULL
                  AND recorded_at > :from_time
                GROUP BY bucket, sensor_id, room_id
            ) per_rack
            GROUP BY bucket, room_id
            ORDER BY bucket ASC
        """

    params = {"site_id": site_id, "from_time": from_time}
    try:
        result = await session.execute(
            text(_sql("time_bucket('5 minutes', recorded_at)")), params
        )
    except Exception:
        # The failed statement leaves the transaction aborted; roll back first,
        # otherwise the fallback query fails with "current transaction is aborted".
        await session.rollback()
        result = await session.execute(
            text(_sql("date_trunc('minute', recorded_at)")), params
        )
    return [dict(r) for r in result.mappings().all()]
|
|
|
|
|
|
# All CRAC sensor types stored in the readings table (order preserved; used
# verbatim to build the SQL IN-list in the status endpoint).
CRAC_SENSOR_TYPES = (
    "cooling_supply",
    "cooling_return",
    "cooling_fan",
    "cooling_supply_hum",
    "cooling_return_hum",
    "cooling_airflow",
    "cooling_filter_dp",
    "cooling_cap_kw",
    "cooling_cap_pct",
    "cooling_cop",
    "cooling_shr",
    "cooling_comp_state",
    "cooling_comp_load",
    "cooling_comp_power",
    "cooling_comp_hours",
    "cooling_high_press",
    "cooling_low_press",
    "cooling_superheat",
    "cooling_subcooling",
    "cooling_fan_rpm",
    "cooling_fan_power",
    "cooling_fan_hours",
    "cooling_unit_power",
    "cooling_voltage",
    "cooling_current",
    "cooling_pf",
)
|
|
|
|
# sensor_type → response field name (keys are valid identifiers, so the
# keyword form of dict() keeps the table compact and typo-resistant).
CRAC_FIELD_MAP = dict(
    cooling_supply="supply_temp",
    cooling_return="return_temp",
    cooling_fan="fan_pct",
    cooling_supply_hum="supply_humidity",
    cooling_return_hum="return_humidity",
    cooling_airflow="airflow_cfm",
    cooling_filter_dp="filter_dp_pa",
    cooling_cap_kw="cooling_capacity_kw",
    cooling_cap_pct="cooling_capacity_pct",
    cooling_cop="cop",
    cooling_shr="sensible_heat_ratio",
    cooling_comp_state="compressor_state",
    cooling_comp_load="compressor_load_pct",
    cooling_comp_power="compressor_power_kw",
    cooling_comp_hours="compressor_run_hours",
    cooling_high_press="high_pressure_bar",
    cooling_low_press="low_pressure_bar",
    cooling_superheat="discharge_superheat_c",
    cooling_subcooling="liquid_subcooling_c",
    cooling_fan_rpm="fan_rpm",
    cooling_fan_power="fan_power_kw",
    cooling_fan_hours="fan_run_hours",
    cooling_unit_power="total_unit_power_kw",
    cooling_voltage="input_voltage_v",
    cooling_current="input_current_a",
    cooling_pf="power_factor",
)
|
|
|
|
# Nameplate cooling capacity per CRAC unit, in kW — presumably uniform across
# the fleet; verify against the equipment inventory if units differ.
RATED_CAPACITY_KW = 80.0
|
|
|
|
|
|
@router.get("/crac-status")
async def crac_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest CRAC readings — full sensor set.

    Returns one entry per CRAC unit seen in the last 10 minutes, plus a
    synthetic "fault" entry for each configured CRAC with no recent readings.
    A unit whose supply-temperature reading is missing is also reported as
    "fault".
    """
    # Safe interpolation: the IN-list comes from a module-level constant,
    # never from user input.
    types_sql = ", ".join(f"'{t}'" for t in CRAC_SENSOR_TYPES)
    result = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({types_sql})
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    crac_data: dict[str, dict] = {}
    for row in result.mappings().all():
        # sensor_id appears to follow "<site>/cooling/<crac_id>/<sensor>" —
        # TODO confirm against the ingestion side.
        parts = row["sensor_id"].split("/")
        if len(parts) < 3:
            continue  # malformed id: skip rather than crash the endpoint
        crac_id = parts[2]
        entry = crac_data.setdefault(crac_id, {"crac_id": crac_id})
        field = CRAC_FIELD_MAP.get(row["sensor_type"])
        if field:
            entry[field] = round(float(row["value"]), 3)

    room_map = {room["crac_id"]: room["room_id"] for room in ROOMS.get(site_id, [])}

    result_list = []
    # No need to sort here — the final sorted() below fixes the order.
    for crac_id, d in crac_data.items():
        supply = d.get("supply_temp")
        ret = d.get("return_temp")
        delta = round(ret - supply, 1) if (ret is not None and supply is not None) else None
        result_list.append({
            "crac_id": crac_id,
            "room_id": room_map.get(crac_id),
            # Missing supply temperature within the window ⇒ treat as faulted.
            "state": "online" if supply is not None else "fault",
            "delta": delta,
            "rated_capacity_kw": RATED_CAPACITY_KW,
            # Values were rounded to 3 decimals above; re-round to 2 for output.
            **{k: round(v, 2) if isinstance(v, float) else v
               for k, v in d.items() if k != "crac_id"},
        })

    # Surface configured CRACs with no recent readings at all as faulted.
    for room in ROOMS.get(site_id, []):
        if room["crac_id"] not in crac_data:
            result_list.append({
                "crac_id": room["crac_id"],
                "room_id": room["room_id"],
                "state": "fault",
                "delta": None,
                "rated_capacity_kw": RATED_CAPACITY_KW,
            })

    return sorted(result_list, key=lambda x: x["crac_id"])
|
|
|
|
|
|
@router.get("/crac-history")
async def crac_history(
    site_id: str = Query(...),
    crac_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Time-series history for a single CRAC unit — capacity, COP, compressor load, filter ΔP, temps.

    Buckets by 5 minutes via TimescaleDB's ``time_bucket``; falls back to
    1-minute ``date_trunc`` buckets when ``time_bucket`` is unavailable.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    metrics = (
        "cooling_supply", "cooling_return", "cooling_cap_kw",
        "cooling_cap_pct", "cooling_cop", "cooling_comp_load",
        "cooling_filter_dp", "cooling_fan",
    )
    # Safe interpolation: metric names are hard-coded above, never user input.
    types_sql = ", ".join(f"'{t}'" for t in metrics)

    def _sql(bucket_expr: str) -> str:
        # bucket_expr is one of two hard-coded SQL fragments.
        return f"""
            SELECT
                {bucket_expr} AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 3) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """

    params = {
        "site_id": site_id,
        "pattern": f"{site_id}/cooling/{crac_id}/%",
        "from_time": from_time,
    }
    try:
        result = await session.execute(
            text(_sql("time_bucket('5 minutes', recorded_at)")), params
        )
    except Exception:
        # The failed statement leaves the transaction aborted; roll back first,
        # otherwise the fallback query fails with "current transaction is aborted".
        await session.rollback()
        result = await session.execute(
            text(_sql("date_trunc('minute', recorded_at)")), params
        )

    # Pivot rows into one dict per time bucket.
    bucket_map: dict[str, dict] = {}
    for row in result.mappings().all():
        b = str(row["bucket"])
        bucket_map.setdefault(b, {"bucket": b})[row["sensor_type"]] = float(row["avg_val"])

    points = []
    for b, vals in sorted(bucket_map.items()):
        supply = vals.get("cooling_supply")
        ret = vals.get("cooling_return")
        points.append({
            "bucket": b,
            "supply_temp": round(supply, 1) if supply is not None else None,
            "return_temp": round(ret, 1) if ret is not None else None,
            "delta_t": round(ret - supply, 1) if (supply is not None and ret is not None) else None,
            "capacity_kw": vals.get("cooling_cap_kw"),
            "capacity_pct": vals.get("cooling_cap_pct"),
            "cop": vals.get("cooling_cop"),
            "comp_load": vals.get("cooling_comp_load"),
            "filter_dp": vals.get("cooling_filter_dp"),
            "fan_pct": vals.get("cooling_fan"),
        })
    return points
|
|
|
|
|
|
@router.get("/crac-delta-history")
async def crac_delta_history(
    site_id: str = Query(...),
    crac_id: str = Query(...),
    hours: int = Query(1, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """ΔT (return - supply) over time for a single CRAC unit.

    Buckets missing either temperature are dropped. Falls back to 1-minute
    ``date_trunc`` buckets when TimescaleDB's ``time_bucket`` is unavailable.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)

    def _sql(bucket_expr: str) -> str:
        # bucket_expr is one of two hard-coded SQL fragments.
        return f"""
            SELECT
                {bucket_expr} AS bucket,
                sensor_type,
                AVG(value) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ('cooling_supply', 'cooling_return')
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """

    params = {
        "site_id": site_id,
        "pattern": f"{site_id}/cooling/{crac_id}/%",
        "from_time": from_time,
    }
    try:
        result = await session.execute(
            text(_sql("time_bucket('5 minutes', recorded_at)")), params
        )
    except Exception:
        # Roll back the aborted transaction before retrying, otherwise the
        # fallback query fails with "current transaction is aborted".
        await session.rollback()
        result = await session.execute(
            text(_sql("date_trunc('minute', recorded_at)")), params
        )

    # Pivot rows into one dict per time bucket.
    bucket_map: dict[str, dict] = {}
    for row in result.mappings().all():
        b = str(row["bucket"])
        bucket_map.setdefault(b, {"bucket": b})[row["sensor_type"]] = float(row["avg_val"])

    points = [
        {"bucket": b, "delta": round(vals["cooling_return"] - vals["cooling_supply"], 2)}
        for b, vals in bucket_map.items()
        if "cooling_supply" in vals and "cooling_return" in vals
    ]
    return sorted(points, key=lambda x: x["bucket"])
|
|
|
|
|
|
@router.get("/rack-history")
async def rack_history(
    site_id: str = Query(...),
    rack_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Temperature, humidity and power history plus recent alarms for a single rack.

    Buckets by 5 minutes via TimescaleDB's ``time_bucket``; falls back to
    1-minute ``date_trunc`` buckets when ``time_bucket`` is unavailable.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)

    def _sql(bucket_expr: str) -> str:
        # bucket_expr is one of two hard-coded SQL fragments.
        return f"""
            SELECT
                {bucket_expr} AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 2) AS avg_value
            FROM readings
            WHERE site_id = :site_id
              AND rack_id = :rack_id
              AND sensor_type IN ('temperature', 'humidity', 'power_kw')
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """

    params = {"site_id": site_id, "rack_id": rack_id, "from_time": from_time}
    try:
        result = await session.execute(
            text(_sql("time_bucket('5 minutes', recorded_at)")), params
        )
    except Exception:
        # The failed statement leaves the transaction aborted; roll back first,
        # otherwise the fallback query fails with "current transaction is aborted".
        await session.rollback()
        result = await session.execute(
            text(_sql("date_trunc('minute', recorded_at)")), params
        )

    # Pivot into {bucket, temperature, humidity, power_kw}
    bucket_map: dict[str, dict] = {}
    for row in result.mappings().all():
        b = str(row["bucket"])
        bucket_map.setdefault(b, {"bucket": b})[row["sensor_type"]] = float(row["avg_value"])

    # Ten most recent alarms for this rack (any state), newest first.
    alarms = await session.execute(text("""
        SELECT id, severity, message, state, triggered_at
        FROM alarms
        WHERE site_id = :site_id AND rack_id = :rack_id
        ORDER BY triggered_at DESC
        LIMIT 10
    """), {"site_id": site_id, "rack_id": rack_id})

    return {
        "rack_id": rack_id,
        "site_id": site_id,
        "history": list(bucket_map.values()),
        "alarms": [dict(r) for r in alarms.mappings().all()],
    }
|
|
|
|
|
|
@router.get("/particles")
async def particle_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest particle counts per room, with a simplified ISO 14644-1 class."""
    result = await session.execute(text("""
        SELECT DISTINCT ON (sensor_id)
            room_id, sensor_type, value, recorded_at
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ('particles_0_5um', 'particles_5um')
          AND recorded_at > NOW() - INTERVAL '10 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    # Pivot latest readings into room_id -> {sensor_type: rounded count}.
    by_room: dict[str, dict] = {}
    for rec in result.mappings().all():
        by_room.setdefault(rec["room_id"], {})[rec["sensor_type"]] = round(float(rec["value"]))

    # (upper 0.5 µm count, ISO class) pairs; anything beyond the last limit
    # is class 9. Simplified mapping: class 8 = 3.52M @ 0.5 µm.
    iso_limits = ((10_000, 5), (100_000, 6), (1_000_000, 7), (3_520_000, 8))

    out = []
    for room in ROOMS.get(site_id, []):
        rid = room["room_id"]
        counts = by_room.get(rid, {})
        p05 = counts.get("particles_0_5um")
        if p05 is None:
            iso_class = None
        else:
            iso_class = next((cls for limit, cls in iso_limits if p05 <= limit), 9)
        out.append({
            "room_id": rid,
            "particles_0_5um": p05,
            "particles_5um": counts.get("particles_5um"),
            "iso_class": iso_class,
        })
    return out
|