# unifi-access-dashboard/app.py
# Flask app: receives UniFi Access door-unlock webhooks, caches user names
# from the controller, and serves a first-badge-in dashboard API.
import os, hmac, hashlib, json, logging
from flask import Flask, request, jsonify
from datetime import datetime
import pytz, sqlite3
2026-03-04 19:42:38 -06:00
from apscheduler.schedulers.background import BackgroundScheduler
2026-03-04 21:43:49 -06:00
import requests, urllib3
2026-03-04 17:45:28 -06:00
2026-03-04 21:43:49 -06:00
# Silence the self-signed-certificate warnings urllib3 would otherwise emit on
# every controller call (requests below use verify=False).
urllib3.disable_warnings()

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

# Static assets (index.html, JS, CSS) are served from ./static at the web root.
app = Flask(__name__, static_folder="static", static_url_path="")

# --- Configuration: every value is overridable via the environment ----------
UNIFI_HOST = os.environ.get("UNIFI_HOST", "10.0.0.1")      # UniFi Access controller
UNIFI_PORT = int(os.environ.get("UNIFI_PORT", "12445"))    # developer API port
UNIFI_TOKEN = os.environ.get("UNIFI_API_TOKEN", "")        # Bearer token for the API
WEBHOOK_SECRET = os.environ.get("WEBHOOK_SECRET", "")      # HMAC secret for webhooks
DB_PATH = os.environ.get("DB_PATH", "/data/dashboard.db")  # SQLite database file
TZ = os.environ.get("TZ", "America/Chicago")               # dashboard display timezone

UNIFI_BASE = f"https://{UNIFI_HOST}:{UNIFI_PORT}/api/v1/developer"
2026-03-04 17:45:28 -06:00
def get_db(path=None):
    """Open a SQLite connection with name-based row access.

    Args:
        path: Optional database file path. Defaults to the module-level
            DB_PATH (from the DB_PATH environment variable); the parameter
            exists mainly so tests can point at ":memory:".

    Returns:
        sqlite3.Connection with row_factory set to sqlite3.Row, so query
        results support row["column"] access.
    """
    conn = sqlite3.connect(path if path is not None else DB_PATH)
    conn.row_factory = sqlite3.Row
    return conn
2026-03-04 22:03:30 -06:00
2026-03-04 17:45:28 -06:00
def init_db():
    """Create the dashboard's two tables if they do not exist yet (idempotent)."""
    ddl_statements = (
        # One row per door-unlock webhook; date is duplicated out of ts so the
        # per-day queries can filter without string slicing.
        """
        CREATE TABLE IF NOT EXISTS badge_events (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        actor_id TEXT NOT NULL,
        ts TEXT NOT NULL,
        date TEXT NOT NULL
        )
        """,
        # Display-name cache populated from the UniFi controller.
        """
        CREATE TABLE IF NOT EXISTS user_cache (
        actor_id TEXT PRIMARY KEY,
        full_name TEXT NOT NULL,
        updated_at TEXT NOT NULL
        )
        """,
    )
    with get_db() as db:
        for ddl in ddl_statements:
            db.execute(ddl)
        db.commit()
2026-03-04 19:42:38 -06:00
2026-03-04 22:03:30 -06:00
2026-03-04 19:42:38 -06:00
def sync_unifi_users():
    """Refresh the local user_cache table from the UniFi Access user list.

    Calls GET {UNIFI_BASE}/users with the configured bearer token and upserts
    (actor_id, full_name, updated_at) rows. Any network/HTTP/parse failure is
    logged and swallowed so the background scheduler keeps running.
    """
    try:
        r = requests.get(
            f"{UNIFI_BASE}/users",
            headers={"Authorization": f"Bearer {UNIFI_TOKEN}"},
            # Controller presents a self-signed cert; warnings are disabled at
            # import time. NOTE(review): pin the cert if the LAN is untrusted.
            verify=False,
            timeout=10,
        )
        if r.status_code != 200:
            log.warning("User sync failed: %s %s", r.status_code, r.text[:200])
            return
        users = r.json().get("data", [])
        # Timezone-aware UTC (datetime.utcnow() is deprecated and naive).
        now_iso = datetime.now(pytz.utc).isoformat()
        synced = 0
        with get_db() as db:
            for u in users:
                actor_id = u.get("id")
                if not actor_id:
                    # Previously u["id"] would raise KeyError and abort the
                    # whole sync via the outer except; skip just this record.
                    log.warning("Skipping user record with no id: %r", u)
                    continue
                full_name = (u.get("full_name") or "").strip()
                if not full_name:
                    full_name = f"{u.get('first_name','')} {u.get('last_name','')}".strip()
                db.execute(
                    """
                    INSERT INTO user_cache (actor_id, full_name, updated_at)
                    VALUES (?, ?, ?)
                    ON CONFLICT(actor_id) DO UPDATE SET
                    full_name = excluded.full_name,
                    updated_at = excluded.updated_at
                    """,
                    (
                        actor_id,
                        full_name or f"User {actor_id[:8]}",
                        now_iso,
                    ),
                )
                synced += 1
            db.commit()
        log.info("Synced %d users from UniFi Access", synced)
    except Exception as e:
        # Broad by design: this runs unattended on a timer; log and move on.
        log.error("sync_unifi_users error: %s", e)
2026-03-04 19:42:38 -06:00
2026-03-04 22:03:30 -06:00
2026-03-04 21:43:49 -06:00
def verify_signature(payload_bytes, sig_header, secret=None):
    """Validate a UniFi Access webhook signature.

    UniFi Access signature format (API docs section 11.7):
    Header name : Signature
    Header value: t=<unix_timestamp>,v1=<hex_hmac_sha256>
    Signed data : f"{timestamp}.{raw_body}"

    Args:
        payload_bytes: Raw request body, exactly as received.
        sig_header: Value of the "Signature" header ("" or None if absent).
        secret: HMAC secret; defaults to the module-level WEBHOOK_SECRET.

    Returns:
        True if the signature is valid, or if no secret is configured
        (useful for initial testing); False otherwise.
    """
    logger = logging.getLogger(__name__)
    if secret is None:
        secret = WEBHOOK_SECRET
    if not secret:
        # No secret configured: accept all (useful for initial testing).
        return True
    if not sig_header:
        logger.warning("No Signature header present")
        return False
    try:
        # Tolerate optional whitespace around the comma-separated fields
        # ("t=..., v1=..." would otherwise produce a " v1" key and fail).
        parts = dict(p.strip().split("=", 1) for p in sig_header.split(","))
        timestamp = parts.get("t", "")
        received = parts.get("v1", "")
        if not timestamp or not received:
            logger.warning("Signature header missing t or v1: %s", sig_header)
            return False
        signed_payload = f"{timestamp}.".encode() + payload_bytes
        expected = hmac.new(
            secret.encode(), signed_payload, hashlib.sha256
        ).hexdigest()
        # Constant-time compare to avoid a timing side channel.
        return hmac.compare_digest(expected, received)
    except Exception as e:
        logger.warning("Signature parse error: %s", e)
        return False
2026-03-04 19:42:38 -06:00
2026-03-04 22:03:30 -06:00
2026-03-04 21:43:49 -06:00
@app.route("/")
def index():
    """Serve the dashboard single-page app from static/index.html."""
    return app.send_static_file("index.html")
2026-03-04 22:03:30 -06:00
2026-03-04 21:43:49 -06:00
@app.route("/api/unifi-access", methods=["POST"])
def receive_webhook():
    """Receive a UniFi Access webhook and record door-unlock badge events.

    Verifies the HMAC "Signature" header (when WEBHOOK_SECRET is set),
    filters to "access.door.unlock" events, and inserts one row into
    badge_events with the event's local (TZ) date and time-of-day.

    Returns 200 for recorded/ignored events, 400 for bad payloads,
    401 for signature mismatches.
    """
    raw = request.get_data()

    sig = request.headers.get("Signature", "")
    if not verify_signature(raw, sig):
        log.warning("Webhook signature mismatch")
        return jsonify({"error": "invalid signature"}), 401

    try:
        payload = json.loads(raw)
    except Exception:
        return jsonify({"error": "bad json"}), 400

    log.info("Webhook received: %s", json.dumps(payload)[:300])

    event = payload.get("event") or payload.get("event_object_id", "") or ""
    # Data block per UniFi Access docs: payload["data"]["actor"], ["event"], etc.
    data = payload.get("data") or {}
    actor_obj = data.get("actor") or {}
    actor = actor_obj.get("id") or payload.get("actor_id", "")

    if "access.door.unlock" not in str(event):
        # Ignore other notification types
        return jsonify({"status": "ignored"}), 200

    if not actor:
        log.warning("Webhook has no actor id: %s", json.dumps(payload)[:300])
        return jsonify({"error": "no actor"}), 400

    # Prefer data.event.published (ms since epoch) if present.
    event_meta = data.get("event") or {}
    ts_ms = event_meta.get("published")
    if ts_ms:
        ts = datetime.fromtimestamp(ts_ms / 1000.0, tz=pytz.utc)
    else:
        ts_raw = payload.get("timestamp") or payload.get("created_at")
        if ts_raw:
            ts = datetime.fromisoformat(str(ts_raw).replace("Z", "+00:00"))
            if ts.tzinfo is None:
                # A bare ISO string is presumably UTC from the controller;
                # without this, astimezone() treats a naive datetime as
                # *server-local* time and silently shifts the recorded hour.
                ts = pytz.utc.localize(ts)
        else:
            # No timestamp anywhere in the payload: stamp it now, in aware
            # UTC. (The old fallback round-tripped a naive utcnow() string,
            # which astimezone() then misread as local time.)
            ts = datetime.now(pytz.utc)
    tz = pytz.timezone(TZ)
    ts_local = ts.astimezone(tz)
    date = ts_local.strftime("%Y-%m-%d")
    ts_str = ts_local.strftime("%H:%M:%S")

    with get_db() as db:
        db.execute(
            "INSERT INTO badge_events (actor_id, ts, date) VALUES (?, ?, ?)",
            (actor, ts_str, date),
        )
        db.commit()

    log.info("Badge-in recorded: actor=%s date=%s ts=%s", actor, date, ts_str)
    return jsonify({"status": "ok"}), 200
2026-03-04 19:42:38 -06:00
2026-03-04 22:03:30 -06:00
2026-03-04 21:43:49 -06:00
@app.route("/api/first-badge-status")
def first_badge_status():
    """Return, per person, their first and latest badge-in for one day.

    Query params:
        date:   YYYY-MM-DD (default: today, server-local clock).
        cutoff: HH:MM on-time threshold (default "09:00"); a first badge at
                or before cutoff:59 counts as ON TIME.
    """
    date = request.args.get("date", datetime.now().strftime("%Y-%m-%d"))
    cutoff = request.args.get("cutoff", "09:00")  # HH:MM

    query = """
        SELECT
        b.actor_id,
        MIN(b.ts) AS first_ts,
        MAX(b.ts) AS latest_ts,
        COALESCE(
        u.full_name,
        'Unknown (' || SUBSTR(b.actor_id,1,8) || '...)'
        ) AS name
        FROM badge_events b
        LEFT JOIN user_cache u ON u.actor_id = b.actor_id
        WHERE b.date = ?
        GROUP BY b.actor_id
        ORDER BY first_ts ASC
    """
    with get_db() as db:
        rows = db.execute(query, (date,)).fetchall()

    # HH:MM:SS strings compare correctly lexicographically; <= cutoff:59
    # treats the whole cutoff minute as on time.
    on_time_limit = cutoff + ":59"
    result = [
        {
            "actor_id": row["actor_id"],
            "name": row["name"],
            "first_ts": row["first_ts"],
            # Null when there was only a single badge-in for the day.
            "latest_ts": row["latest_ts"] if row["latest_ts"] != row["first_ts"] else None,
            "status": "ON TIME" if row["first_ts"] <= on_time_limit else "LATE",
        }
        for row in rows
    ]
    return jsonify(result)
2026-03-04 22:03:30 -06:00
2026-03-04 21:43:49 -06:00
@app.route("/api/sync-users")
def manual_sync():
    """Trigger an immediate user-cache refresh (normally runs on a 6h timer)."""
    sync_unifi_users()
    return jsonify({"status": "synced"})
2026-03-04 19:42:38 -06:00
2026-03-04 22:03:30 -06:00
2026-03-04 21:43:49 -06:00
@app.route("/api/reset-day", methods=["DELETE"])
def reset_day():
    """Delete every badge event for one day (?date=YYYY-MM-DD, default today)."""
    date = request.args.get("date", datetime.now().strftime("%Y-%m-%d"))
    with get_db() as db:
        deleted = db.execute(
            "DELETE FROM badge_events WHERE date = ?", (date,)
        ).rowcount
        db.commit()
    return jsonify({"status": "ok", "deleted": deleted, "date": date})
2026-03-04 19:42:38 -06:00
2026-03-04 22:03:30 -06:00
2026-03-04 20:56:30 -06:00
# Initialise DB and kick off background scheduler at import time, so it also
# happens when a WSGI server (gunicorn etc.) imports the module, not only
# under "python app.py".
with app.app_context():
    init_db()
    sync_unifi_users()

scheduler = BackgroundScheduler()
scheduler.add_job(sync_unifi_users, "interval", hours=6)
scheduler.start()

# Stop the scheduler's worker threads cleanly on interpreter exit; otherwise
# its thread-pool executor can stall process shutdown.
import atexit

atexit.register(scheduler.shutdown)
2026-03-04 17:45:28 -06:00
if __name__ == "__main__":
    # Development entry point; in production the module is imported by a WSGI
    # server and this branch never runs. Port overridable via PORT env var.
    app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 8000)))