"""Chiller plant API routes: latest per-chiller status and historical metrics."""
from datetime import datetime, timezone, timedelta
|
|
from fastapi import APIRouter, Depends, Query
|
|
from sqlalchemy import text
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from core.database import get_session
|
|
|
|
# Router mounted by the application; exposes /status and /history.
router = APIRouter()

# Known chiller units per site. /status emits a row for every chiller
# listed here even when no recent readings exist (state "unknown").
CHILLERS = {"sg-01": ["chiller-01"]}

# Maps raw sensor_type values (as stored in the readings table) to the
# field names used in the /status response payload.
CHILLER_FIELD_MAP = {
    "chiller_chw_supply": "chw_supply_c",
    "chiller_chw_return": "chw_return_c",
    "chiller_chw_delta": "chw_delta_c",
    "chiller_flow_gpm": "flow_gpm",
    "chiller_load_kw": "cooling_load_kw",
    "chiller_load_pct": "cooling_load_pct",
    "chiller_cop": "cop",
    "chiller_comp_load": "compressor_load_pct",
    "chiller_cond_press": "condenser_pressure_bar",
    "chiller_evap_press": "evaporator_pressure_bar",
    "chiller_cw_supply": "cw_supply_c",
    "chiller_cw_return": "cw_return_c",
    "chiller_run_hours": "run_hours",
}
|
|
|
|
|
|
@router.get("/status")
async def chiller_status(
    site_id: str = Query(...),
    session: AsyncSession = Depends(get_session),
):
    """Latest chiller plant readings.

    Returns one dict per chiller configured for ``site_id`` in ``CHILLERS``.
    A chiller with no reading in the last 5 minutes is reported with state
    "unknown"; one with readings but no state sensor defaults to "online".
    """
    # Safe to interpolate into SQL: the values come from the module-level
    # constant CHILLER_FIELD_MAP, never from user input.
    types_sql = ", ".join(f"'{t}'" for t in [*CHILLER_FIELD_MAP.keys(), "chiller_state"])
    # DISTINCT ON (sensor_id) + ORDER BY sensor_id, recorded_at DESC keeps
    # only the newest reading per sensor within the freshness window.
    result = await session.execute(text(f"""
        SELECT DISTINCT ON (sensor_id)
            sensor_id, sensor_type, value
        FROM readings
        WHERE site_id = :site_id
          AND sensor_type IN ({types_sql})
          AND recorded_at > NOW() - INTERVAL '5 minutes'
        ORDER BY sensor_id, recorded_at DESC
    """), {"site_id": site_id})

    chiller_data: dict[str, dict] = {}
    for row in result.mappings().all():
        # sensor_id scheme: {site}/cooling/chiller/{chiller_id}/{key}
        # (5 segments; chiller_id is parts[3] — matches the LIKE pattern
        # used by /history). Require the full scheme so a malformed
        # 4-segment id cannot have its last segment misread as a chiller id.
        parts = row["sensor_id"].split("/")
        if len(parts) < 5:
            continue
        chiller_id = parts[3]
        entry = chiller_data.setdefault(chiller_id, {"chiller_id": chiller_id})
        field = CHILLER_FIELD_MAP.get(row["sensor_type"])
        if field:
            entry[field] = round(float(row["value"]), 2)
        elif row["sensor_type"] == "chiller_state":
            # State sensor reports a numeric flag; > 0.5 means running.
            entry["state"] = "online" if float(row["value"]) > 0.5 else "fault"

    # Emit every configured chiller, even those with no fresh readings.
    out = []
    for chiller_id in CHILLERS.get(site_id, []):
        d = chiller_data.get(chiller_id, {"chiller_id": chiller_id, "state": "unknown"})
        d.setdefault("state", "online")
        out.append(d)
    return out
|
|
|
|
|
|
@router.get("/history")
async def chiller_history(
    site_id: str = Query(...),
    chiller_id: str = Query(...),
    hours: int = Query(6, ge=1, le=24),
    session: AsyncSession = Depends(get_session),
):
    """Time-series COP, load kW, and CHW temps for a chiller.

    Buckets readings with TimescaleDB's ``time_bucket`` when available,
    falling back to plain-Postgres ``date_trunc('minute', ...)`` otherwise.
    Returns a list of per-bucket points ordered by bucket time.
    """
    from_time = datetime.now(timezone.utc) - timedelta(hours=hours)
    METRICS = ("chiller_cop", "chiller_load_kw", "chiller_load_pct",
               "chiller_chw_supply", "chiller_chw_return", "chiller_comp_load")
    # Safe to interpolate: METRICS is a local constant, not user input.
    types_sql = ", ".join(f"'{t}'" for t in METRICS)
    params = {
        "site_id": site_id,
        "pattern": f"{site_id}/cooling/chiller/{chiller_id}/%",
        "from_time": from_time,
    }

    def _bucketed_query(bucket_expr: str) -> str:
        """Build the aggregation SQL with the given bucketing expression."""
        return f"""
            SELECT
                {bucket_expr} AS bucket,
                sensor_type,
                ROUND(AVG(value)::numeric, 3) AS avg_val
            FROM readings
            WHERE site_id = :site_id
              AND sensor_id LIKE :pattern
              AND sensor_type IN ({types_sql})
              AND recorded_at > :from_time
            GROUP BY bucket, sensor_type
            ORDER BY bucket ASC
        """

    try:
        # Preferred path: TimescaleDB 5-minute buckets.
        result = await session.execute(
            text(_bucketed_query("time_bucket('5 minutes', recorded_at)")), params)
    except Exception:
        # The failed statement leaves the transaction aborted; roll back
        # before retrying, otherwise the fallback query also fails with
        # "current transaction is aborted".
        await session.rollback()
        # Fallback for plain Postgres (no TimescaleDB extension).
        result = await session.execute(
            text(_bucketed_query("date_trunc('minute', recorded_at)")), params)

    # Pivot rows (bucket, sensor_type, avg_val) into one dict per bucket.
    bucket_map: dict[str, dict] = {}
    for row in result.mappings().all():
        b = str(row["bucket"])
        bucket_map.setdefault(b, {"bucket": b})[row["sensor_type"]] = float(row["avg_val"])

    # Missing metrics in a bucket come through as None.
    return [
        {
            "bucket": b,
            "cop": vals.get("chiller_cop"),
            "load_kw": vals.get("chiller_load_kw"),
            "load_pct": vals.get("chiller_load_pct"),
            "chw_supply_c": vals.get("chiller_chw_supply"),
            "chw_return_c": vals.get("chiller_chw_return"),
            "comp_load": vals.get("chiller_comp_load"),
        }
        for b, vals in sorted(bucket_map.items())
    ]
|