import os, hmac, hashlib, json, logging

from flask import Flask, request, jsonify
from datetime import datetime
import pytz, sqlite3
from apscheduler.schedulers.background import BackgroundScheduler
import requests, urllib3

urllib3.disable_warnings()

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

app = Flask(__name__, static_folder="static", static_url_path="")

UNIFI_HOST = os.environ.get("UNIFI_HOST", "10.0.0.1")
UNIFI_PORT = int(os.environ.get("UNIFI_PORT", "12445"))
UNIFI_TOKEN = os.environ.get("UNIFI_API_TOKEN", "")
WEBHOOK_SECRET = os.environ.get("WEBHOOK_SECRET", "")
DB_PATH = os.environ.get("DB_PATH", "/data/dashboard.db")
TZ = os.environ.get("TZ", "America/Chicago")

UNIFI_BASE = f"https://{UNIFI_HOST}:{UNIFI_PORT}/api/v1/developer"


def get_db():
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row
    return conn


def init_db():
    with get_db() as db:
        db.execute(
            """
            CREATE TABLE IF NOT EXISTS badge_events (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                actor_id TEXT NOT NULL,
                ts TEXT NOT NULL,
                date TEXT NOT NULL
            )
            """
        )
        db.execute(
            """
            CREATE TABLE IF NOT EXISTS user_cache (
                actor_id TEXT PRIMARY KEY,
                full_name TEXT NOT NULL,
                updated_at TEXT NOT NULL
            )
            """
        )
        db.commit()
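
# Shape of the UniFi Access /users response this sync consumes (illustrative,
# inferred only from the fields read below -- the real API may return more):
#
#   {"data": [{"id": "...", "full_name": "Jane Doe",
#              "first_name": "Jane", "last_name": "Doe"}, ...]}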

def sync_unifi_users():
    try:
        r = requests.get(
            f"{UNIFI_BASE}/users",
            headers={"Authorization": f"Bearer {UNIFI_TOKEN}"},
            verify=False,
            timeout=10,
        )
        if r.status_code != 200:
            log.warning("User sync failed: %s %s", r.status_code, r.text[:200])
            return
        users = r.json().get("data", [])
        with get_db() as db:
            for u in users:
                actor_id = u.get("id")
                if not actor_id:
                    continue

                full_name = (u.get("full_name") or "").strip()
                if not full_name:
                    full_name = f"{u.get('first_name','')} {u.get('last_name','')}".strip()

                db.execute(
                    """
                    INSERT INTO user_cache (actor_id, full_name, updated_at)
                    VALUES (?, ?, ?)
                    ON CONFLICT(actor_id) DO UPDATE SET
                        full_name = excluded.full_name,
                        updated_at = excluded.updated_at
                    """,
                    (
                        actor_id,
                        full_name or f"User {actor_id[:8]}",
                        datetime.utcnow().isoformat(),
                    ),
                )
            db.commit()
        log.info("Synced %d users from UniFi Access", len(users))
    except Exception as e:
        log.error("sync_unifi_users error: %s", e)

def verify_signature(payload_bytes, sig_header):
    if not WEBHOOK_SECRET:
        return True
    if not sig_header:
        log.warning("No Signature header present")
        return False
    try:
        parts = dict(p.split("=", 1) for p in sig_header.split(","))
        timestamp = parts.get("t", "")
        received = parts.get("v1", "")
        if not timestamp or not received:
            log.warning("Signature header missing t or v1: %s", sig_header)
            return False
        signed_payload = f"{timestamp}.".encode() + payload_bytes
        expected = hmac.new(
            WEBHOOK_SECRET.encode(), signed_payload, hashlib.sha256
        ).hexdigest()
        return hmac.compare_digest(expected, received)
    except Exception as e:
        log.warning("Signature parse error: %s", e)
        return False
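
# Header format verify_signature() expects (illustrative values only -- not
# captured from a real controller). The HMAC-SHA256 is computed over
# "<t>.<raw request body>" with WEBHOOK_SECRET as the key:
#
#   Signature: t=1772847829,v1=<hex-encoded hmac digest>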
@app.route("/")
|
|
|
|
|
def index():
|
|
|
|
|
return app.send_static_file("index.html")
|
|
|
|
|
|
2026-03-04 22:03:30 -06:00
|
|
|
|
2026-03-04 21:43:49 -06:00
|
|
|
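
# Minimal webhook payload this handler relies on (an assumption based only on
# the fields read below; actual UniFi Access payloads carry additional fields):
#
#   {"event": "access.door.unlock",
#    "timestamp": 1772847829000,
#    "data": {"actor": {"id": "<user uuid>"}}}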
@app.route("/api/unifi-access", methods=["POST"])
|
|
|
|
|
def receive_webhook():
|
|
|
|
|
raw = request.get_data()
|
2026-03-04 17:45:28 -06:00
|
|
|
|
2026-03-04 21:55:13 -06:00
|
|
|
sig = request.headers.get("Signature", "")
|
2026-03-04 21:43:49 -06:00
|
|
|
if not verify_signature(raw, sig):
|
|
|
|
|
log.warning("Webhook signature mismatch")
|
|
|
|
|
return jsonify({"error": "invalid signature"}), 401
|
2026-03-04 17:45:28 -06:00
|
|
|
|
2026-03-04 21:43:49 -06:00
|
|
|
try:
|
|
|
|
|
payload = json.loads(raw)
|
|
|
|
|
except Exception:
|
|
|
|
|
return jsonify({"error": "bad json"}), 400
|
2026-03-04 19:42:38 -06:00
|
|
|
|
2026-03-04 23:04:55 -06:00
|
|
|
log.info("Webhook received: %s", json.dumps(payload)[:400])
|
2026-03-04 17:45:28 -06:00
|
|
|
|
2026-03-04 22:03:30 -06:00
|
|
|
event = payload.get("event") or payload.get("event_object_id", "") or ""
|
|
|
|
|
|
2026-03-04 23:04:55 -06:00
|
|
|
data = payload.get("data") or {}
|
2026-03-04 22:03:30 -06:00
|
|
|
actor_obj = data.get("actor") or {}
|
2026-03-04 23:04:55 -06:00
|
|
|
actor = actor_obj.get("id")
|
2026-03-04 19:42:38 -06:00
|
|
|
|
2026-03-04 22:03:30 -06:00
|
|
|
if "access.door.unlock" not in str(event):
|
2026-03-04 21:43:49 -06:00
|
|
|
return jsonify({"status": "ignored"}), 200
|
2026-03-04 19:42:38 -06:00
|
|
|
|
2026-03-04 21:43:49 -06:00
|
|
|
if not actor:
|
2026-03-04 22:03:30 -06:00
|
|
|
log.warning("Webhook has no actor id: %s", json.dumps(payload)[:300])
|
2026-03-04 21:43:49 -06:00
|
|
|
return jsonify({"error": "no actor"}), 400
|
2026-03-04 19:42:38 -06:00
|
|
|
|
2026-03-04 23:04:55 -06:00
|
|
|
# ----------------------------------------------------------------
|
|
|
|
|
# Timestamp resolution — checked in priority order:
|
|
|
|
|
# 1. Top-level "timestamp" key (milliseconds epoch) — UniFi Access standard
|
|
|
|
|
# 2. data.event.published (milliseconds epoch)
|
|
|
|
|
# 3. Top-level ISO string fields
|
|
|
|
|
# 4. Fall back to NOW in the configured local timezone
|
|
|
|
|
# ----------------------------------------------------------------
|
2026-03-04 22:03:30 -06:00
|
|
|
tz = pytz.timezone(TZ)
|
2026-03-04 23:04:55 -06:00
|
|
|
ts = None
|
|
|
|
|
|
|
|
|
|
# 1. Top-level timestamp (ms)
|
|
|
|
|
top_ts_ms = payload.get("timestamp")
|
|
|
|
|
if top_ts_ms and isinstance(top_ts_ms, (int, float)) and top_ts_ms > 1e10:
|
|
|
|
|
ts = datetime.fromtimestamp(top_ts_ms / 1000.0, tz=pytz.utc)
|
|
|
|
|
log.info("Timestamp source: top-level ms (%s)", top_ts_ms)
|
|
|
|
|
|
|
|
|
|
# 2. data.event.published (ms)
|
|
|
|
|
if ts is None:
|
|
|
|
|
event_meta = data.get("event") or {}
|
|
|
|
|
published = event_meta.get("published")
|
|
|
|
|
if published and isinstance(published, (int, float)) and published > 1e10:
|
|
|
|
|
ts = datetime.fromtimestamp(published / 1000.0, tz=pytz.utc)
|
|
|
|
|
log.info("Timestamp source: data.event.published (%s)", published)
|
|
|
|
|
|
|
|
|
|
# 3. ISO string fields
|
|
|
|
|
if ts is None:
|
|
|
|
|
for field in ("created_at", "time", "occurred_at"):
|
|
|
|
|
raw_ts = payload.get(field)
|
|
|
|
|
if raw_ts:
|
|
|
|
|
try:
|
|
|
|
|
ts = datetime.fromisoformat(str(raw_ts).replace("Z", "+00:00"))
|
|
|
|
|
log.info("Timestamp source: ISO field '%s' (%s)", field, raw_ts)
|
|
|
|
|
break
|
|
|
|
|
except Exception:
|
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
|
# 4. Fallback — use local now so the date bucket is always correct
|
|
|
|
|
if ts is None:
|
|
|
|
|
ts = datetime.now(tz=tz)
|
|
|
|
|
log.warning("Timestamp source: fallback to local now")
|
|
|
|
|
|
2026-03-04 22:03:30 -06:00
|
|
|
ts_local = ts.astimezone(tz)
|
2026-03-04 23:04:55 -06:00
|
|
|
date = ts_local.strftime("%Y-%m-%d")
|
|
|
|
|
ts_str = ts_local.strftime("%H:%M:%S")
|
2026-03-04 17:45:28 -06:00
|
|
|
|
2026-03-04 21:43:49 -06:00
|
|
|
with get_db() as db:
|
|
|
|
|
db.execute(
|
|
|
|
|
"INSERT INTO badge_events (actor_id, ts, date) VALUES (?, ?, ?)",
|
2026-03-04 22:03:30 -06:00
|
|
|
(actor, ts_str, date),
|
2026-03-04 21:43:49 -06:00
|
|
|
)
|
|
|
|
|
db.commit()
|
2026-03-04 17:45:28 -06:00
|
|
|
|
2026-03-04 23:04:55 -06:00
|
|
|
log.info("Badge-in recorded: actor=%s date=%s ts=%s (tz=%s)", actor, date, ts_str, TZ)
|
2026-03-04 21:43:49 -06:00
|
|
|
return jsonify({"status": "ok"}), 200
|
2026-03-04 19:42:38 -06:00
|
|
|
|
2026-03-04 22:03:30 -06:00
|
|
|
|
2026-03-04 21:43:49 -06:00
|
|
|
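
# Example request/response for the dashboard endpoint below (illustrative
# values; field names match what the handler returns):
#
#   GET /api/first-badge-status?date=2026-03-04&cutoff=09:00
#   -> [{"actor_id": "...", "name": "Jane Doe", "first_ts": "08:41:07",
#        "latest_ts": "17:02:33", "status": "ON TIME"}, ...]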
@app.route("/api/first-badge-status")
|
2026-03-04 17:45:28 -06:00
|
|
|
def first_badge_status():
|
2026-03-04 23:04:55 -06:00
|
|
|
date = request.args.get("date", datetime.now(pytz.timezone(TZ)).strftime("%Y-%m-%d"))
|
2026-03-04 22:03:30 -06:00
|
|
|
cutoff = request.args.get("cutoff", "09:00") # HH:MM
|
2026-03-04 17:45:28 -06:00
|
|
|
|
2026-03-04 21:43:49 -06:00
|
|
|
with get_db() as db:
|
2026-03-04 22:03:30 -06:00
|
|
|
rows = db.execute(
|
|
|
|
|
"""
|
2026-03-04 21:43:49 -06:00
|
|
|
SELECT
|
|
|
|
|
b.actor_id,
|
|
|
|
|
MIN(b.ts) AS first_ts,
|
|
|
|
|
MAX(b.ts) AS latest_ts,
|
2026-03-04 22:03:30 -06:00
|
|
|
COALESCE(
|
|
|
|
|
u.full_name,
|
|
|
|
|
'Unknown (' || SUBSTR(b.actor_id,1,8) || '...)'
|
|
|
|
|
) AS name
|
2026-03-04 21:43:49 -06:00
|
|
|
FROM badge_events b
|
|
|
|
|
LEFT JOIN user_cache u ON u.actor_id = b.actor_id
|
|
|
|
|
WHERE b.date = ?
|
|
|
|
|
GROUP BY b.actor_id
|
|
|
|
|
ORDER BY first_ts ASC
|
2026-03-04 22:03:30 -06:00
|
|
|
""",
|
|
|
|
|
(date,),
|
|
|
|
|
).fetchall()
|
2026-03-04 19:53:39 -06:00
|
|
|
|
2026-03-04 17:45:28 -06:00
|
|
|
result = []
|
2026-03-04 21:43:49 -06:00
|
|
|
for r in rows:
|
2026-03-04 23:04:55 -06:00
|
|
|
first = r["first_ts"]
|
2026-03-04 21:43:49 -06:00
|
|
|
latest = r["latest_ts"]
|
|
|
|
|
status = "ON TIME" if first <= cutoff + ":59" else "LATE"
|
2026-03-04 22:03:30 -06:00
|
|
|
result.append(
|
|
|
|
|
{
|
|
|
|
|
"actor_id": r["actor_id"],
|
2026-03-04 23:04:55 -06:00
|
|
|
"name": r["name"],
|
2026-03-04 22:03:30 -06:00
|
|
|
"first_ts": first,
|
|
|
|
|
"latest_ts": latest if latest != first else None,
|
2026-03-04 23:04:55 -06:00
|
|
|
"status": status,
|
2026-03-04 22:03:30 -06:00
|
|
|
}
|
|
|
|
|
)
|
2026-03-04 17:45:28 -06:00
|
|
|
|
|
|
|
|
return jsonify(result)
|
|
|
|
|
|
2026-03-04 22:03:30 -06:00
|
|
|
|
2026-03-04 21:43:49 -06:00
|
|
|
@app.route("/api/sync-users")
|
2026-03-04 19:42:38 -06:00
|
|
|
def manual_sync():
|
|
|
|
|
sync_unifi_users()
|
2026-03-04 21:43:49 -06:00
|
|
|
return jsonify({"status": "synced"})
|
2026-03-04 19:42:38 -06:00
|
|
|
|
2026-03-04 22:03:30 -06:00
|
|
|
|
2026-03-04 21:43:49 -06:00
|
|
|
@app.route("/api/reset-day", methods=["DELETE"])
|
2026-03-04 19:53:39 -06:00
|
|
|
def reset_day():
|
2026-03-04 23:04:55 -06:00
|
|
|
date = request.args.get("date", datetime.now(pytz.timezone(TZ)).strftime("%Y-%m-%d"))
|
2026-03-04 21:43:49 -06:00
|
|
|
with get_db() as db:
|
|
|
|
|
cur = db.execute("DELETE FROM badge_events WHERE date = ?", (date,))
|
|
|
|
|
db.commit()
|
|
|
|
|
return jsonify({"status": "ok", "deleted": cur.rowcount, "date": date})
|
2026-03-04 19:42:38 -06:00
|
|
|
|
2026-03-04 22:03:30 -06:00
|
|
|
|
2026-03-04 22:22:04 -06:00
|
|
|
@app.route("/api/debug-user-cache")
|
|
|
|
|
def debug_user_cache():
|
|
|
|
|
actor_id = request.args.get("actor_id", "").strip()
|
|
|
|
|
if not actor_id:
|
|
|
|
|
return jsonify({"error": "missing actor_id"}), 400
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
r = requests.get(
|
|
|
|
|
f"{UNIFI_BASE}/users/search",
|
|
|
|
|
headers={"Authorization": f"Bearer {UNIFI_TOKEN}"},
|
|
|
|
|
params={"userid": actor_id},
|
|
|
|
|
verify=False,
|
|
|
|
|
timeout=10,
|
|
|
|
|
)
|
|
|
|
|
try:
|
|
|
|
|
data = r.json()
|
|
|
|
|
except Exception:
|
|
|
|
|
data = {"raw": r.text[:500]}
|
|
|
|
|
return jsonify(
|
|
|
|
|
{
|
|
|
|
|
"status_code": r.status_code,
|
|
|
|
|
"actor_id_param": actor_id,
|
|
|
|
|
"response": data,
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
except Exception as e:
|
|
|
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
|
|
|
|
|
|
|
2026-03-04 20:56:30 -06:00
|
|
|
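
# Note (deployment assumption, not stated in the original source): the startup
# block below runs at import time, so it also executes when the module is
# loaded by a WSGI server such as gunicorn; with multiple workers, each worker
# would start its own BackgroundScheduler instance.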

with app.app_context():
    init_db()
    sync_unifi_users()

scheduler = BackgroundScheduler()
scheduler.add_job(sync_unifi_users, "interval", hours=6)
scheduler.start()

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 8000)))