Add webhook receiver, consolidate shared utilities, expand telemetry coverage

- Add FastAPI webhook receiver (webhook_receiver_rev.py) for Jimi push data:
  OBD diagnostics, DTC fault codes, alarms, GPS, heartbeats, trip reports
- Add schema migration (03_webhook_schema_migration.sql) for webhook tables:
  fault_codes, heartbeats, expanded obd_readings/trips/position_history/alarms
- Consolidate duplicated _safe/_shutdown into shared safe_task/setup_shutdown
  in ts_shared_rev.py (DRY refactor)
- Add auto-commit to get_conn() context manager (prevents forgotten commits)
- Fix poll_trips to capture runTimeSecond and maxSpeed from API
- Add poll_parking via jimi.open.platform.report.parking
- Remove broken poll_obd (OBD is push-only, no polling endpoint exists)
- Fix alarms schema: add lat/lng/acc_status columns + dedup constraint
- Fix obd_readings schema: add dedup constraint
- Fix trigger DO block: replace nonexistent has_column with information_schema
- Narrow api_post exception handling to RequestException/ValueError
- Add webhook_receiver service to docker-compose.yaml
- Add fastapi/uvicorn/python-multipart to pyproject.toml
- Add clean_ts timestamp validator to ts_shared_rev.py
- Add Tracksolid Pro API documentation (tracksolidApiDocumentation.md)
- Populate .gitignore with Python/OS/secrets patterns

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
David Kiania 2026-04-08 16:28:45 +03:00
parent 85b50db71a
commit de70972d6a
10 changed files with 2465 additions and 122 deletions

22
.gitignore vendored
View file

@@ -0,0 +1,22 @@
# Secrets
.env
.env.*
# Python
__pycache__/
*.pyc
*.pyo
.venv/
# uv
.uv/
# OS
.DS_Store
Thumbs.db
# Backups
bak_*
# Logs
*.log

View file

@@ -186,13 +186,27 @@ CREATE TABLE IF NOT EXISTS tracksolid.parking_events (
-- 3.08 Alarms, OBD, Fault Codes
CREATE TABLE IF NOT EXISTS tracksolid.alarms (
id BIGSERIAL PRIMARY KEY, imei TEXT REFERENCES tracksolid.devices(imei),
alarm_type TEXT, alarm_time TIMESTAMPTZ, geom geometry(Point, 4326), speed NUMERIC(7,2), updated_at TIMESTAMPTZ DEFAULT NOW()
id BIGSERIAL PRIMARY KEY,
imei TEXT REFERENCES tracksolid.devices(imei),
alarm_type TEXT,
alarm_time TIMESTAMPTZ,
geom geometry(Point, 4326),
lat DOUBLE PRECISION,
lng DOUBLE PRECISION,
speed NUMERIC(7,2),
acc_status TEXT,
updated_at TIMESTAMPTZ DEFAULT NOW(),
CONSTRAINT alarms_dedup UNIQUE (imei, alarm_type, alarm_time)
);
CREATE TABLE IF NOT EXISTS tracksolid.obd_readings (
id BIGSERIAL PRIMARY KEY, imei TEXT REFERENCES tracksolid.devices(imei),
reading_time TIMESTAMPTZ, engine_rpm INTEGER, fuel_level_pct NUMERIC(5,2), updated_at TIMESTAMPTZ DEFAULT NOW()
id BIGSERIAL PRIMARY KEY,
imei TEXT REFERENCES tracksolid.devices(imei),
reading_time TIMESTAMPTZ,
engine_rpm INTEGER,
fuel_level_pct NUMERIC(5,2),
updated_at TIMESTAMPTZ DEFAULT NOW(),
CONSTRAINT obd_readings_dedup UNIQUE (imei, reading_time)
);
-- =============================================================================
@ -237,11 +251,16 @@ CREATE TABLE dwh_gold.fact_daily_fleet_metrics (
CREATE OR REPLACE FUNCTION tracksolid.set_updated_at() RETURNS TRIGGER AS $$
BEGIN NEW.updated_at = NOW(); RETURN NEW; END; $$ LANGUAGE plpgsql;
-- Apply trigger to tables
-- Apply trigger to tables with updated_at column
DO $$
DECLARE t TEXT;
BEGIN
FOR t IN SELECT tablename FROM pg_tables WHERE schemaname = 'tracksolid' AND has_column('tracksolid', tablename, 'updated_at')
FOR t IN
SELECT pt.tablename
FROM pg_tables pt
JOIN information_schema.columns c
ON c.table_schema = pt.schemaname AND c.table_name = pt.tablename
WHERE pt.schemaname = 'tracksolid' AND c.column_name = 'updated_at'
LOOP
EXECUTE format('CREATE TRIGGER trg_upd_%I BEFORE UPDATE ON tracksolid.%I FOR EACH ROW EXECUTE FUNCTION tracksolid.set_updated_at()', t, t);
END LOOP;

View file

@@ -0,0 +1,128 @@
-- =============================================================================
-- Fireside Communications — Tracksolid Pro Fleet Telemetry
-- Schema Migration: Webhook Receiver Support
-- =============================================================================
-- Adds tables and columns required by the FastAPI webhook receiver service
-- that ingests push data from Jimi's Data Push API.
--
-- All statements are idempotent (IF NOT EXISTS / ADD COLUMN IF NOT EXISTS).
-- Run against production with: psql $DATABASE_URL -f 03_webhook_schema_migration.sql
-- NOTE(review): sections 3 uses create_hypertable/add_retention_policy, so the
-- TimescaleDB extension must already be installed in this database.
-- =============================================================================
-- =============================================================================
-- 1. Expand obd_readings for push data
-- =============================================================================
-- The Jimi /pushobd webhook sends a rich obdJson payload with device-model-
-- specific dataID fields. We store the full JSON as JSONB for flexibility,
-- plus commonly-needed columns for direct querying.
ALTER TABLE tracksolid.obd_readings
    ADD COLUMN IF NOT EXISTS car_type SMALLINT,
    ADD COLUMN IF NOT EXISTS acc_state SMALLINT,
    ADD COLUMN IF NOT EXISTS status_flags INTEGER,
    ADD COLUMN IF NOT EXISTS lat DOUBLE PRECISION,
    ADD COLUMN IF NOT EXISTS lng DOUBLE PRECISION,
    ADD COLUMN IF NOT EXISTS geom geometry(Point, 4326),
    ADD COLUMN IF NOT EXISTS obd_data JSONB;
COMMENT ON COLUMN tracksolid.obd_readings.obd_data IS
    'Raw obdJson from Jimi push. Contains dataID1..N fields (engine RPM, coolant temp, fuel level, etc.)';
-- =============================================================================
-- 2. Create fault_codes table
-- =============================================================================
-- Stores DTC fault codes from /pushfaultinfo webhook.
-- One row per fault code per report for queryability.
CREATE TABLE IF NOT EXISTS tracksolid.fault_codes (
    id BIGSERIAL PRIMARY KEY,
    imei TEXT NOT NULL REFERENCES tracksolid.devices(imei),
    reported_at TIMESTAMPTZ NOT NULL,
    fault_code TEXT NOT NULL,
    status_flags INTEGER,
    lat DOUBLE PRECISION,
    lng DOUBLE PRECISION,
    geom geometry(Point, 4326),
    event_time TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    -- Matches the webhook receiver's ON CONFLICT target for idempotent replays.
    CONSTRAINT fault_codes_dedup UNIQUE (imei, reported_at, fault_code)
);
CREATE INDEX IF NOT EXISTS idx_fault_codes_imei_time
    ON tracksolid.fault_codes (imei, reported_at DESC);
CREATE INDEX IF NOT EXISTS idx_fault_codes_code
    ON tracksolid.fault_codes (fault_code);
-- =============================================================================
-- 3. Create heartbeats table (hypertable)
-- =============================================================================
-- Stores device heartbeat data from /pushhb webhook.
-- High volume, low long-term value — retained for 30 days.
CREATE TABLE IF NOT EXISTS tracksolid.heartbeats (
    imei TEXT NOT NULL REFERENCES tracksolid.devices(imei),
    gate_time TIMESTAMPTZ NOT NULL,
    power_level SMALLINT,
    gsm_signal SMALLINT,
    acc_status SMALLINT,
    power_status SMALLINT,
    fortify SMALLINT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    -- Composite PK doubles as the webhook's dedup key (no surrogate id needed).
    PRIMARY KEY (imei, gate_time)
);
SELECT create_hypertable('tracksolid.heartbeats', 'gate_time',
    chunk_time_interval => INTERVAL '7 days', if_not_exists => TRUE);
SELECT add_retention_policy('tracksolid.heartbeats', INTERVAL '30 days',
    if_not_exists => TRUE);
-- =============================================================================
-- 4. Expand trips for push data
-- =============================================================================
-- The /pushtripreport webhook provides fuel consumption, idle time, and
-- trip sequence numbers not available from the polling API.
ALTER TABLE tracksolid.trips
    ADD COLUMN IF NOT EXISTS fuel_consumed_l NUMERIC(8,2),
    ADD COLUMN IF NOT EXISTS idle_time_s INTEGER,
    ADD COLUMN IF NOT EXISTS driving_time_s INTEGER,
    ADD COLUMN IF NOT EXISTS trip_seq INTEGER,
    ADD COLUMN IF NOT EXISTS source TEXT DEFAULT 'poll';
COMMENT ON COLUMN tracksolid.trips.driving_time_s IS 'runTimeSecond from API: total driving time in seconds';
COMMENT ON COLUMN tracksolid.trips.source IS 'poll = from API polling, push = from webhook push';
-- =============================================================================
-- 5. Expand position_history for push data
-- =============================================================================
-- The /pushgps webhook provides altitude and positioning type not available
-- from the polling API.
ALTER TABLE tracksolid.position_history
    ADD COLUMN IF NOT EXISTS altitude NUMERIC(8,2),
    ADD COLUMN IF NOT EXISTS post_type SMALLINT,
    ADD COLUMN IF NOT EXISTS source TEXT DEFAULT 'poll';
-- =============================================================================
-- 6. Expand alarms for push data
-- =============================================================================
ALTER TABLE tracksolid.alarms
    ADD COLUMN IF NOT EXISTS alarm_name TEXT,
    ADD COLUMN IF NOT EXISTS source TEXT DEFAULT 'poll';
-- =============================================================================
-- 7. Permissions
-- =============================================================================
GRANT ALL ON tracksolid.fault_codes TO tracksolid_owner;
GRANT ALL ON tracksolid.heartbeats TO tracksolid_owner;
GRANT SELECT ON tracksolid.fault_codes TO grafana_ro;
GRANT SELECT ON tracksolid.heartbeats TO grafana_ro;
-- Grant sequence permissions for BIGSERIAL columns
-- NOTE(review): grafana_ro is presumably read-only — confirm it actually
-- needs USAGE/SELECT on sequences (only required for inserting roles).
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA tracksolid TO tracksolid_owner;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA tracksolid TO grafana_ro;

View file

@ -20,6 +20,7 @@ services:
build:
context: .
dockerfile: Dockerfile
command: python ingest_movement_rev.py
restart: always
depends_on:
timescale_db:
@ -30,10 +31,30 @@ services:
build:
context: .
dockerfile: Dockerfile
command: python ingest_events_rev.py
restart: always
depends_on:
timescale_db:
condition: service_healthy
env_file: .env
webhook_receiver:
build:
context: .
dockerfile: Dockerfile
command: uvicorn webhook_receiver_rev:app --host 0.0.0.0 --port 8000 --workers 2
restart: always
depends_on:
timescale_db:
condition: service_healthy
env_file: .env
ports:
- "8000:8000"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 30s
timeout: 5s
retries: 3
grafana:
image: grafana/grafana:11.0.0

View file

@ -1,26 +1,26 @@
"""
ingest_events_rev.py Fireside Communications · Tracksolid Events Pipeline
RESPONSIBILITY: Alarms, Geofences, and OBD engine diagnostics.
RESPONSIBILITY: Alarm event polling (catch-up/fallback for webhook push data).
OBD diagnostics are received via the webhook_receiver_rev.py push service
jimi.device.obd.list does not exist in the Tracksolid Pro API.
REVISIONS (QA-Verified):
[FIX-E01] Batching: Polls 50 IMEIs per call to stay within API limits.
[FIX-E02] JSONB: Stores raw payloads in alarms/obd for future flexibility.
[FIX-E03] Atomic Logging: One log row per batch per endpoint.
[FIX-E04] Signal Handling: Clean pool closure on SIGTERM/SIGINT.
[FIX-E05] Removed poll_obd: OBD data is push-only via /pushobd webhook.
[FIX-11] Uses shared safe_task/setup_shutdown from ts_shared_rev (DRY).
"""
import signal
import sys
import time
import schedule
import json
from datetime import datetime, timezone, timedelta
from ts_shared_rev import (
api_post,
close_pool,
get_active_imeis,
get_conn,
get_token,
@ -30,28 +30,12 @@ from ts_shared_rev import (
clean_int,
clean_ts,
get_logger,
safe_task,
setup_shutdown,
)
log = get_logger("events")
# ── Graceful Shutdown ─────────────────────────────────────────────────────────
def _shutdown(signum, frame):
log.info("Signal %s received. Closing DB pool...", signum)
close_pool()
sys.exit(0)
signal.signal(signal.SIGTERM, _shutdown)
signal.signal(signal.SIGINT, _shutdown)
def _safe(fn):
def wrapper():
try:
fn()
except Exception:
log.exception("Task %s failed. Scheduler continuing...", fn.__name__)
wrapper.__name__ = fn.__name__
return wrapper
setup_shutdown(log)
# ── 1. Alarms & Geofence Events (Every 5m) ────────────────────────────────────
@ -91,7 +75,7 @@ def poll_alarms():
THEN ST_SetSRID(ST_MakePoint(%s, %s), 4326)
ELSE NULL END,
%s, %s, %s, %s, NOW()
) ON CONFLICT DO NOTHING
) ON CONFLICT (imei, alarm_type, alarm_time) DO NOTHING
""", (
a.get("imei"), clean(a.get("alarmType")), clean_ts(a.get("alarmTime")),
lng, lat, lng, lat, lat, lng,
@ -104,58 +88,17 @@ def poll_alarms():
log.info("Alarms: %d new events inserted.", inserted)
# ── 2. OBD engine diagnostics (Every 10m) ────────────────────────────────────
def poll_obd():
log.info("Polling OBD telemetry...")
t0, token, imeis = time.time(), get_token(), get_active_imeis()
if not token or not imeis: return
inserted = 0
# OBD API often requires per-device polling or specific time ranges
for imei in imeis:
resp = api_post("jimi.device.obd.list", {
"imei": imei,
"page_size": 20
}, token)
readings = resp.get("result", [])
if not readings: continue
with get_conn() as conn:
with conn.cursor() as cur:
for r in readings:
cur.execute("""
INSERT INTO tracksolid.obd_readings (
imei, reading_time, engine_rpm, fuel_level_pct, updated_at
) VALUES (%s, %s, %s, %s, NOW())
ON CONFLICT (imei, reading_time) DO NOTHING
""", (
imei, clean_ts(r.get("readTime")),
clean_int(r.get("engineRpm")), clean_num(r.get("fuelLevel"))
))
inserted += 1
conn.commit()
# Log summary of OBD poll
with get_conn() as conn:
with conn.cursor() as cur:
log_ingestion(cur, "jimi.device.obd.list", len(imeis), 0, inserted, int((time.time()-t0)*1000), True)
conn.commit()
log.info("OBD: %d readings processed.", inserted)
# ── Main Loop ─────────────────────────────────────────────────────────────────
def main():
log.info("Starting EVENTS PIPELINE (v2.0)...")
log.info("Starting EVENTS PIPELINE (v2.1)...")
# OBD removed: Data arrives via webhook push (/pushobd), not polling.
# Startup catch-up
_safe(poll_alarms)()
_safe(poll_obd)()
safe_task(poll_alarms, log)()
# Schedule
schedule.every(5).minutes.do(_safe(poll_alarms))
schedule.every(10).minutes.do(_safe(poll_obd))
schedule.every(5).minutes.do(safe_task(poll_alarms, log))
while True:
schedule.run_pending()

View file

@ -10,11 +10,12 @@ REVISIONS (QA-Verified):
[FIX-M07] Signal Handling: Clean DB pool closure on SIGTERM/SIGINT.
[FIX-M08] Atomic Logging: log_ingestion happens within the data transaction.
[FIX-QA-01] Distance: Explicit km to meters conversion (* 1000).
[FIX-11] Uses shared safe_task/setup_shutdown from ts_shared_rev (DRY).
[FIX-M09] Trips: Captures runTimeSecond and maxSpeed from API.
[FIX-M10] Parking: New poll_parking via jimi.open.platform.report.parking.
"""
import signal
import sys
import time
import schedule
from datetime import datetime, timezone, timedelta
@ -22,7 +23,6 @@ from datetime import datetime, timezone, timedelta
from ts_shared_rev import (
TARGET_ACCOUNT,
api_post,
close_pool,
get_active_imeis,
get_conn,
get_token,
@ -33,29 +33,12 @@ from ts_shared_rev import (
clean_int,
clean_ts,
get_logger,
safe_task,
setup_shutdown,
)
log = get_logger("movement")
# ── Graceful Shutdown ─────────────────────────────────────────────────────────
def _shutdown(signum, frame):
log.info("Signal %s received. Closing DB pool...", signum)
close_pool()
sys.exit(0)
signal.signal(signal.SIGTERM, _shutdown)
signal.signal(signal.SIGINT, _shutdown)
def _safe(fn):
"""Decorator to prevent scheduler death on single function failure."""
def wrapper():
try:
fn()
except Exception:
log.exception("Task %s failed. Scheduler continuing...", fn.__name__)
wrapper.__name__ = fn.__name__
return wrapper
setup_shutdown(log)
# ── 1. Device Registry Sync (Daily) ──────────────────────────────────────────
@ -194,27 +177,90 @@ def poll_trips():
dist_km = clean_num(t.get("distance"))
dist_m = dist_km * 1000 if dist_km is not None else 0 # [QA-01] Conversion
cur.execute("""
INSERT INTO tracksolid.trips (imei, start_time, end_time, distance_m, avg_speed_kmh)
VALUES (%s, %s, %s, %s, %s) ON CONFLICT (imei, start_time) DO NOTHING
""", (t.get("imei"), clean_ts(t.get("startTime")), clean_ts(t.get("endTime")), dist_m, clean_num(t.get("avgSpeed"))))
INSERT INTO tracksolid.trips (
imei, start_time, end_time, distance_m,
avg_speed_kmh, max_speed_kmh, driving_time_s, source
) VALUES (%s, %s, %s, %s, %s, %s, %s, 'poll')
ON CONFLICT (imei, start_time) DO UPDATE SET
end_time = EXCLUDED.end_time,
distance_m = EXCLUDED.distance_m,
max_speed_kmh = COALESCE(EXCLUDED.max_speed_kmh, tracksolid.trips.max_speed_kmh),
driving_time_s = COALESCE(EXCLUDED.driving_time_s, tracksolid.trips.driving_time_s)
""", (
t.get("imei"), clean_ts(t.get("startTime")), clean_ts(t.get("endTime")),
dist_m, clean_num(t.get("avgSpeed")),
clean_num(t.get("maxSpeed")), clean_int(t.get("runTimeSecond"))
))
inserted += 1
conn.commit()
log.info("Trips: %d records processed.", inserted)
# ── 4. Parking Events (Every 15m) ─────────────────────────────────────────────
def poll_parking():
    """Poll parking events for all active devices over the last hour.

    Calls jimi.open.platform.report.parking in batches of 50 IMEIs (API
    batch limit), inserts each event into tracksolid.parking_events with
    a PostGIS point when coordinates are present, and writes one
    ingestion-log row per batch (FIX-E03 convention).

    [FIX] Previously the per-batch log row recorded the *cumulative*
    insert count, so batch N's row double-counted batches 1..N-1; each
    batch row now records only its own count.
    """
    t0 = time.time()
    token, imeis = get_token(), get_active_imeis()
    if not token or not imeis:
        return
    end_ts = datetime.now(timezone.utc)
    start_ts = end_ts - timedelta(hours=1)
    total_inserted = 0
    for i in range(0, len(imeis), 50):
        batch = imeis[i:i+50]
        resp = api_post("jimi.open.platform.report.parking", {
            "imeis": ",".join(batch),
            "begin_time": start_ts.strftime("%Y-%m-%d %H:%M:%S"),
            "end_time": end_ts.strftime("%Y-%m-%d %H:%M:%S"),
        }, token)
        events = resp.get("result", [])
        batch_inserted = 0
        with get_conn() as conn:
            with conn.cursor() as cur:
                for p in events:
                    imei = p.get("imei")
                    start_time = clean_ts(p.get("startTime"))
                    if not imei or not start_time:
                        continue  # dedup key requires imei + startTime
                    lat, lng = clean_num(p.get("lat")), clean_num(p.get("lng"))
                    cur.execute("""
                        INSERT INTO tracksolid.parking_events (
                            imei, event_type, start_time, end_time,
                            duration_seconds, geom, address
                        ) VALUES (
                            %s, 'parking', %s, %s, %s,
                            CASE WHEN %s IS NOT NULL AND %s IS NOT NULL
                                 THEN ST_SetSRID(ST_MakePoint(%s, %s), 4326)
                                 ELSE NULL END,
                            %s
                        ) ON CONFLICT (imei, start_time, event_type) DO NOTHING
                    """, (
                        imei, start_time, clean_ts(p.get("endTime")),
                        clean_int(p.get("seconds")),
                        lng, lat, lng, lat,
                        clean(p.get("address")),
                    ))
                    batch_inserted += 1
                # [FIX] per-batch count, not the running total
                log_ingestion(cur, "jimi.open.platform.report.parking", len(batch), 0,
                              batch_inserted, int((time.time() - t0) * 1000), True)
        total_inserted += batch_inserted
    log.info("Parking: %d events processed.", total_inserted)
# ── Main Loop ─────────────────────────────────────────────────────────────────
def main():
log.info("Starting MOVEMENT PIPELINE (v2.0)...")
log.info("Starting MOVEMENT PIPELINE (v2.1)...")
# Startup catch-up
_safe(sync_devices)()
_safe(poll_live_positions)()
_safe(poll_trips)()
safe_task(sync_devices, log)()
safe_task(poll_live_positions, log)()
safe_task(poll_trips, log)()
safe_task(poll_parking, log)()
# Schedule
schedule.every(60).seconds.do(_safe(poll_live_positions))
schedule.every(15).minutes.do(_safe(poll_trips))
schedule.every().day.at("02:00").do(_safe(sync_devices))
schedule.every(60).seconds.do(safe_task(poll_live_positions, log))
schedule.every(15).minutes.do(safe_task(poll_trips, log))
schedule.every(15).minutes.do(safe_task(poll_parking, log))
schedule.every().day.at("02:00").do(safe_task(sync_devices, log))
while True:
schedule.run_pending()

View file

@ -13,6 +13,9 @@ dependencies = [
"requests>=2.32.3", # API requests
"schedule>=1.2.2", # Polling loops/scheduler
"urllib3>=2.2.2", # HTTP connection pooling/retries
"fastapi>=0.115.0", # Webhook receiver framework
"uvicorn[standard]>=0.30.0", # ASGI server for FastAPI
"python-multipart>=0.0.9", # Required for FastAPI Form() parsing
]
[build-system]

File diff suppressed because it is too large Load diff

View file

@ -2,7 +2,7 @@
ts_shared_rev.py Fireside Communications · Tracksolid Pro Ingestion Stack
Shared utilities: config, signing, HTTP, DB pool, token cache, clean helpers.
Imported by ingest_movement_rev.py and ingest_events_rev.py.
Imported by ingest_movement_rev.py, ingest_events_rev.py, and webhook_receiver_rev.py.
REVISIONS (QA-Verified):
[FIX-01] Secrets exclusively from env (Security).
@ -12,6 +12,8 @@ REVISIONS (QA-Verified):
[FIX-05] API rate-limit (1006) back-off + re-sign (Resiliency).
[FIX-QA-01] clean_num/clean_int return None on non-numeric (Data Integrity).
[FIX-QA-02] api_post catches all RequestExceptions for retry (Robustness).
[FIX-09] get_conn auto-commits on success (Data Integrity).
[FIX-11] Consolidated safe_task/setup_shutdown (DRY).
"""
@ -20,6 +22,8 @@ from __future__ import annotations
import hashlib
import logging
import os
import signal
import sys
import time
from contextlib import contextmanager
from datetime import datetime, timezone, timedelta
@ -85,12 +89,13 @@ def _get_pool() -> psycopg2.pool.ThreadedConnectionPool:
@contextmanager
def get_conn():
"""Thread-safe DB connection context manager."""
"""Thread-safe DB connection context manager. Auto-commits on success, rolls back on error."""
pool = _get_pool()
conn = pool.getconn()
try:
conn.autocommit = False
yield conn
conn.commit()
except Exception:
conn.rollback()
raise
@ -103,6 +108,29 @@ def close_pool():
_pool.closeall()
_log.info("DB Pool closed.")
# ── Scheduler / Signal Utilities ─────────────────────────────────────────────
def safe_task(fn, logger=None):
    """Wrap *fn* so that a failure is logged and swallowed instead of
    killing the scheduler loop. Returns a zero-argument callable that
    carries *fn*'s name (so schedule logs stay readable). Falls back to
    the module logger when *logger* is not supplied."""
    active_log = logger if logger else _log

    def _guarded():
        try:
            fn()
        except Exception:
            active_log.exception("Task %s failed. Scheduler continuing...", fn.__name__)

    _guarded.__name__ = fn.__name__
    return _guarded
def setup_shutdown(logger=None):
    """Install SIGTERM/SIGINT handlers that close the shared DB pool and
    exit the process cleanly (exit code 0). Falls back to the module
    logger when *logger* is not supplied."""
    active_log = logger if logger else _log

    def _on_signal(signum, frame):
        active_log.info("Signal %s received. Closing DB pool...", signum)
        close_pool()
        sys.exit(0)

    for sig in (signal.SIGTERM, signal.SIGINT):
        signal.signal(sig, _on_signal)
# ── Value Cleaning (QA Fixes) ─────────────────────────────────────────────────
def clean(v: Any) -> Optional[str]:
@ -127,6 +155,17 @@ def clean_int(v: Any) -> Optional[int]:
except (ValueError, TypeError):
return None
def clean_ts(v: Any) -> Optional[str]:
    """Return *v* as a cleaned string when it parses as an ISO-8601
    timestamp (a trailing 'Z' is treated as UTC offset), else None.
    The original string is returned unchanged; parsing is only used
    for validation before PostgreSQL insertion."""
    candidate = clean(v)
    if candidate is None:
        return None
    try:
        datetime.fromisoformat(candidate.replace("Z", "+00:00"))
    except (ValueError, TypeError):
        return None
    return candidate
def is_valid_fix(lat: Any, lng: Any) -> bool:
"""Filters out 0,0 'Zero Island' markers and null positions."""
flat, flng = clean_num(lat), clean_num(lng)
@ -166,7 +205,7 @@ def api_post(method: str, extra: dict, access_token: Optional[str] = None, _retr
r = _session.post(API_BASE_URL, data=params, timeout=25)
r.raise_for_status()
data = r.json()
except Exception as e:
except (requests.RequestException, ValueError) as e:
if _retry_count < 3:
time.sleep(2 ** _retry_count)
return api_post(method, extra, access_token, _retry_count + 1)

458
webhook_receiver_rev.py Normal file
View file

@@ -0,0 +1,458 @@
"""
webhook_receiver_rev.py Fireside Communications · Tracksolid Webhook Receiver
RESPONSIBILITY: Receives real-time push data from Jimi Tracksolid Pro servers.
Jimi's Data Push API POSTs telemetry to these endpoints as it arrives from
devices, providing real-time ingestion without polling. This is the ONLY way
to receive OBD diagnostics and DTC fault codes those data types have no
polling endpoint.
ENDPOINTS:
/pushobd OBD CAN bus diagnostics (Priority 1)
/pushfaultinfo DTC fault codes (Priority 1)
/pushalarm Alarm events (Priority 2)
/pushgps GPS positions (Priority 2)
/pushhb Device heartbeats (Priority 2)
/pushtripreport Trip reports (Priority 2)
/health Healthcheck for Docker/monitoring
"""
from __future__ import annotations
import json
import os
import time
from contextlib import asynccontextmanager
from datetime import datetime, timezone
from typing import Optional
from fastapi import FastAPI, Form, HTTPException
from fastapi.responses import JSONResponse
from ts_shared_rev import (
close_pool,
get_conn,
log_ingestion,
clean,
clean_num,
clean_int,
clean_ts,
is_valid_fix,
get_logger,
)
log = get_logger("webhook")
# ── Configuration ─────────────────────────────────────────────────────────────
WEBHOOK_TOKEN = os.getenv("JIMI_WEBHOOK_TOKEN", "")
# ── Lifespan ──────────────────────────────────────────────────────────────────
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: log startup, then on shutdown close the
    shared DB pool so pooled connections are released cleanly."""
    log.info("Webhook receiver starting (v1.0)...")
    yield  # the application serves requests while suspended here
    log.info("Webhook receiver shutting down...")
    close_pool()
app = FastAPI(title="Tracksolid Webhook Receiver", lifespan=lifespan)
# ── Helpers ───────────────────────────────────────────────────────────────────
SUCCESS = {"code": 0, "msg": "success"}
def _validate_token(token: str) -> None:
    """Reject the request with HTTP 403 unless *token* matches the
    configured JIMI_WEBHOOK_TOKEN. When no token is configured
    (empty env var), validation is skipped entirely."""
    if not WEBHOOK_TOKEN:
        return  # no shared secret configured: accept all callers
    if token == WEBHOOK_TOKEN:
        return
    raise HTTPException(status_code=403, detail="Invalid token")
def _parse_data_list(raw: str) -> list[dict]:
    """Decode Jimi's ``data_list`` form field (a JSON string).

    A JSON array is returned as-is; a single JSON object is wrapped in a
    one-element list; undecodable input is logged (truncated to 200
    chars) and yields an empty list."""
    try:
        decoded = json.loads(raw)
    except (json.JSONDecodeError, TypeError):
        log.warning("Failed to parse data_list: %.200s", raw)
        return []
    return decoded if isinstance(decoded, list) else [decoded]
def unix_to_ts(v) -> Optional[str]:
    """Convert a Unix timestamp (seconds, or milliseconds when the value
    exceeds 1e12) to a 'YYYY-MM-DD HH:MM:SS' UTC string. Returns None
    for None or unconvertible input."""
    if v is None:
        return None
    try:
        epoch = int(v)
    except (ValueError, TypeError):
        return None
    if epoch > 1e12:  # heuristically a millisecond timestamp
        epoch //= 1000
    try:
        moment = datetime.fromtimestamp(epoch, tz=timezone.utc)
    except (ValueError, OSError):
        return None
    return moment.strftime("%Y-%m-%d %H:%M:%S")
def _make_geom_params(lat, lng):
    """Expand (lat, lng) into the 4-tuple (lng, lat, lng, lat) consumed by
    the 'CASE WHEN %s IS NOT NULL AND %s IS NOT NULL THEN
    ST_MakePoint(%s, %s)' SQL pattern (x = lng, y = lat)."""
    pair = (lng, lat)
    return pair + pair
# ── Health Check ──────────────────────────────────────────────────────────────
@app.get("/health")
def health():
    """Liveness probe for the Docker healthcheck / monitoring; always 200 OK."""
    return {"status": "ok"}
# ── 1. OBD Diagnostics (Priority 1) ──────────────────────────────────────────
@app.post("/pushobd")
def push_obd(token: str = Form(""), data_list: str = Form("")):
    """Ingest /pushobd: OBD CAN-bus diagnostics pushed by Jimi.

    Each item carries an ``obdJson`` payload (sometimes double-encoded as
    a JSON string); the full payload is stored as JSONB alongside a few
    extracted columns. Rows are upserted on (imei, reading_time).
    Per-item failures are logged and skipped so one malformed item never
    fails the batch; the endpoint always returns Jimi's success envelope.
    """
    _validate_token(token)
    items = _parse_data_list(data_list)
    if not items:
        return JSONResponse(content=SUCCESS)
    t0 = time.time()
    inserted = 0
    with get_conn() as conn:
        with conn.cursor() as cur:
            for item in items:
                try:
                    imei = clean(item.get("deviceImei"))
                    obd = item.get("obdJson", {})
                    # obdJson may arrive double-encoded as a JSON string.
                    if isinstance(obd, str):
                        try:
                            obd = json.loads(obd)
                        except json.JSONDecodeError:
                            obd = {}
                    event_time = clean_ts(obd.get("event_time"))
                    if not imei or not event_time:
                        continue  # (imei, reading_time) is the upsert key
                    lat = clean_num(obd.get("lat"))
                    lng = clean_num(obd.get("lng"))
                    cur.execute("""
                        INSERT INTO tracksolid.obd_readings (
                            imei, reading_time, car_type, acc_state, status_flags,
                            lat, lng, geom, obd_data, updated_at
                        ) VALUES (
                            %s, %s, %s, %s, %s, %s, %s,
                            CASE WHEN %s IS NOT NULL AND %s IS NOT NULL
                                 THEN ST_SetSRID(ST_MakePoint(%s, %s), 4326)
                                 ELSE NULL END,
                            %s, NOW()
                        ) ON CONFLICT (imei, reading_time) DO UPDATE SET
                            obd_data = EXCLUDED.obd_data,
                            updated_at = NOW()
                    """, (
                        imei, event_time,
                        clean_int(obd.get("car_type")),
                        clean_int(obd.get("AccState")),
                        clean_int(obd.get("statusFlags")),
                        lat, lng,
                        *_make_geom_params(lat, lng),
                        json.dumps(obd),
                    ))
                    inserted += 1
                except Exception:
                    # Keep processing the rest of the batch on a bad item.
                    log.warning("Failed to process OBD item for %s", item.get("deviceImei"), exc_info=True)
            log_ingestion(cur, "webhook/pushobd", len(items), 0, inserted,
                          int((time.time() - t0) * 1000), True)
        conn.commit()
    log.info("pushobd: %d/%d items processed.", inserted, len(items))
    return JSONResponse(content=SUCCESS)
# ── 2. DTC Fault Codes (Priority 1) ──────────────────────────────────────────
@app.post("/pushfaultinfo")
def push_fault_info(token: str = Form(""), data_list: str = Form("")):
    """Ingest /pushfaultinfo: DTC fault codes pushed by Jimi.

    ``faultCodeList`` may arrive as a JSON-encoded string; each code
    becomes its own row, deduplicated on (imei, reported_at, fault_code).
    Per-item failures are logged and skipped; the endpoint always returns
    Jimi's success envelope.
    """
    _validate_token(token)
    items = _parse_data_list(data_list)
    if not items:
        return JSONResponse(content=SUCCESS)
    t0 = time.time()
    inserted = 0
    with get_conn() as conn:
        with conn.cursor() as cur:
            for item in items:
                try:
                    imei = clean(item.get("deviceImei"))
                    gate_time = clean_ts(item.get("gateTime"))
                    if not imei or not gate_time:
                        continue  # dedup key requires imei + gateTime
                    fault_codes = item.get("faultCodeList", [])
                    # faultCodeList may arrive JSON-encoded as a string.
                    if isinstance(fault_codes, str):
                        try:
                            fault_codes = json.loads(fault_codes)
                        except json.JSONDecodeError:
                            fault_codes = []
                    lat = clean_num(item.get("lat"))
                    lng = clean_num(item.get("lng"))
                    # eventTime may be a Unix epoch or an ISO string.
                    evt_time = unix_to_ts(item.get("eventTime")) or clean_ts(item.get("eventTime"))
                    for code in fault_codes:
                        cur.execute("""
                            INSERT INTO tracksolid.fault_codes (
                                imei, reported_at, fault_code, status_flags,
                                lat, lng, geom, event_time
                            ) VALUES (
                                %s, %s, %s, %s, %s, %s,
                                CASE WHEN %s IS NOT NULL AND %s IS NOT NULL
                                     THEN ST_SetSRID(ST_MakePoint(%s, %s), 4326)
                                     ELSE NULL END,
                                %s
                            ) ON CONFLICT (imei, reported_at, fault_code) DO NOTHING
                        """, (
                            imei, gate_time, clean(code),
                            clean_int(item.get("statusFlags")),
                            lat, lng,
                            *_make_geom_params(lat, lng),
                            evt_time,
                        ))
                        inserted += 1
                except Exception:
                    log.warning("Failed to process fault item for %s", item.get("deviceImei"), exc_info=True)
            log_ingestion(cur, "webhook/pushfaultinfo", len(items), 0, inserted,
                          int((time.time() - t0) * 1000), True)
        conn.commit()
    log.info("pushfaultinfo: %d fault codes from %d items.", inserted, len(items))
    return JSONResponse(content=SUCCESS)
# ── 3. Alarm Events (Priority 2) ─────────────────────────────────────────────
@app.post("/pushalarm")
def push_alarm(token: str = Form(""), data_list: str = Form("")):
    """Ingest /pushalarm: alarm events pushed by Jimi.

    Rows are tagged source='push' and deduplicated on
    (imei, alarm_type, alarm_time); the polling pipeline writes the same
    table with source='poll'.
    NOTE(review): a NULL alarm_type bypasses the UNIQUE dedup constraint
    (NULLs compare unequal in Postgres) — confirm alarmType is always set.
    """
    _validate_token(token)
    items = _parse_data_list(data_list)
    if not items:
        return JSONResponse(content=SUCCESS)
    t0 = time.time()
    inserted = 0
    with get_conn() as conn:
        with conn.cursor() as cur:
            for item in items:
                try:
                    imei = clean(item.get("deviceImei"))
                    alarm_type = clean(item.get("alarmType"))
                    # gateTime is used as the canonical alarm timestamp.
                    alarm_time = clean_ts(item.get("gateTime"))
                    if not imei or not alarm_time:
                        continue
                    lat = clean_num(item.get("lat"))
                    lng = clean_num(item.get("lng"))
                    cur.execute("""
                        INSERT INTO tracksolid.alarms (
                            imei, alarm_type, alarm_name, alarm_time, geom,
                            lat, lng, speed, source, updated_at
                        ) VALUES (
                            %s, %s, %s, %s,
                            CASE WHEN %s IS NOT NULL AND %s IS NOT NULL
                                 THEN ST_SetSRID(ST_MakePoint(%s, %s), 4326)
                                 ELSE NULL END,
                            %s, %s, %s, 'push', NOW()
                        ) ON CONFLICT (imei, alarm_type, alarm_time) DO NOTHING
                    """, (
                        imei, alarm_type, clean(item.get("alarmName")), alarm_time,
                        *_make_geom_params(lat, lng),
                        lat, lng,
                        clean_num(item.get("speed")),
                    ))
                    inserted += 1
                except Exception:
                    log.warning("Failed to process alarm for %s", item.get("deviceImei"), exc_info=True)
            log_ingestion(cur, "webhook/pushalarm", len(items), 0, inserted,
                          int((time.time() - t0) * 1000), True)
        conn.commit()
    log.info("pushalarm: %d/%d items processed.", inserted, len(items))
    return JSONResponse(content=SUCCESS)
# ── 4. GPS Positions (Priority 2) ────────────────────────────────────────────
@app.post("/pushgps")
def push_gps(token: str = Form(""), data_list: str = Form("")):
    """Ingest /pushgps: GPS position pushes.

    Positions failing ``is_valid_fix`` (0,0 'Zero Island' or null coords)
    are dropped. Rows are tagged source='push' and deduplicated on
    (imei, gps_time). Per-item failures are logged and skipped.

    [FIX] The VALUES list is written one placeholder per column so the
    placeholder count verifiably matches the 13-element parameter tuple
    (imei, gps_time, geom(lng,lat), then 9 data columns + 'push' literal).
    """
    _validate_token(token)
    items = _parse_data_list(data_list)
    if not items:
        return JSONResponse(content=SUCCESS)
    t0 = time.time()
    inserted = 0
    with get_conn() as conn:
        with conn.cursor() as cur:
            for item in items:
                try:
                    imei = clean(item.get("deviceImei"))
                    gps_time = clean_ts(item.get("gpsTime"))
                    lat = clean_num(item.get("lat"))
                    lng = clean_num(item.get("lng"))
                    if not imei or not gps_time or not is_valid_fix(lat, lng):
                        continue  # drop keyless rows and bogus fixes
                    acc = item.get("acc")
                    cur.execute("""
                        INSERT INTO tracksolid.position_history (
                            imei, gps_time, geom, lat, lng, speed, direction,
                            acc_status, satellite, current_mileage,
                            altitude, post_type, source
                        ) VALUES (
                            %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326),
                            %s, %s, %s, %s, %s, %s, %s, %s, %s, 'push'
                        ) ON CONFLICT (imei, gps_time) DO NOTHING
                    """, (
                        imei, gps_time,                          # imei, gps_time
                        lng, lat,                                # geom (x=lng, y=lat)
                        lat, lng,                                # lat, lng columns
                        clean_num(item.get("gpsSpeed")),         # speed
                        clean_num(item.get("direction")),        # direction
                        str(acc) if acc is not None else None,   # acc_status (TEXT column)
                        clean_int(item.get("satelliteNum")),     # satellite
                        clean_num(item.get("distance")),         # current_mileage
                        clean_num(item.get("altitude")),         # altitude
                        clean_int(item.get("postType")),         # post_type
                    ))
                    inserted += 1
                except Exception:
                    log.warning("Failed to process GPS for %s", item.get("deviceImei"), exc_info=True)
            log_ingestion(cur, "webhook/pushgps", len(items), 0, inserted,
                          int((time.time() - t0) * 1000), True)
        conn.commit()
    log.info("pushgps: %d/%d items processed.", inserted, len(items))
    return JSONResponse(content=SUCCESS)
# ── 5. Device Heartbeats (Priority 2) ────────────────────────────────────────
@app.post("/pushhb")
def push_heartbeat(token: str = Form(""), data_list: str = Form("")):
    """Ingest /pushhb: device heartbeats (battery, GSM signal, ACC,
    power and fortify states).

    Stored in the tracksolid.heartbeats hypertable, keyed and
    deduplicated on (imei, gate_time); retention is handled by a
    TimescaleDB policy, not by this service.
    """
    _validate_token(token)
    items = _parse_data_list(data_list)
    if not items:
        return JSONResponse(content=SUCCESS)
    t0 = time.time()
    inserted = 0
    with get_conn() as conn:
        with conn.cursor() as cur:
            for item in items:
                try:
                    imei = clean(item.get("deviceImei"))
                    gate_time = clean_ts(item.get("gateTime"))
                    if not imei or not gate_time:
                        continue  # composite PK requires both values
                    cur.execute("""
                        INSERT INTO tracksolid.heartbeats (
                            imei, gate_time, power_level, gsm_signal,
                            acc_status, power_status, fortify
                        ) VALUES (%s, %s, %s, %s, %s, %s, %s)
                        ON CONFLICT (imei, gate_time) DO NOTHING
                    """, (
                        imei, gate_time,
                        clean_int(item.get("powerLevel")),
                        clean_int(item.get("gsmSign")),
                        clean_int(item.get("acc")),
                        clean_int(item.get("powerStatus")),
                        clean_int(item.get("fortify")),
                    ))
                    inserted += 1
                except Exception:
                    log.warning("Failed to process heartbeat for %s", item.get("deviceImei"), exc_info=True)
            log_ingestion(cur, "webhook/pushhb", len(items), 0, inserted,
                          int((time.time() - t0) * 1000), True)
        conn.commit()
    log.info("pushhb: %d/%d items processed.", inserted, len(items))
    return JSONResponse(content=SUCCESS)
# ── 6. Trip Reports (Priority 2) ─────────────────────────────────────────────
@app.post("/pushtripreport")
def push_trip_report(token: str = Form(""), data_list: str = Form("")):
    """Ingest /pushtripreport: completed-trip summaries pushed by Jimi.

    Upserts into tracksolid.trips on (imei, start_time) so a push can
    enrich a row the polling pipeline already created; ``miles`` (km per
    the movement pipeline's convention) is converted to metres. Rows are
    tagged source='push'. Per-item failures are logged and skipped.
    """
    _validate_token(token)
    items = _parse_data_list(data_list)
    if not items:
        return JSONResponse(content=SUCCESS)
    t0 = time.time()
    inserted = 0
    with get_conn() as conn:
        with conn.cursor() as cur:
            for item in items:
                try:
                    imei = clean(item.get("deviceImei"))
                    begin_time = clean_ts(item.get("beginTime"))
                    end_time = clean_ts(item.get("endTime"))
                    if not imei or not begin_time:
                        continue  # upsert key requires imei + beginTime
                    miles_km = clean_num(item.get("miles"))
                    # km -> metres; schema stores distance_m
                    distance_m = miles_km * 1000 if miles_km is not None else None
                    begin_lat = clean_num(item.get("beginLat"))
                    begin_lng = clean_num(item.get("beginLng"))
                    end_lat = clean_num(item.get("endLat"))
                    end_lng = clean_num(item.get("endLng"))
                    cur.execute("""
                        INSERT INTO tracksolid.trips (
                            imei, start_time, end_time, distance_m,
                            start_geom, end_geom,
                            fuel_consumed_l, idle_time_s, trip_seq, source,
                            updated_at
                        ) VALUES (
                            %s, %s, %s, %s,
                            CASE WHEN %s IS NOT NULL AND %s IS NOT NULL
                                 THEN ST_SetSRID(ST_MakePoint(%s, %s), 4326)
                                 ELSE NULL END,
                            CASE WHEN %s IS NOT NULL AND %s IS NOT NULL
                                 THEN ST_SetSRID(ST_MakePoint(%s, %s), 4326)
                                 ELSE NULL END,
                            %s, %s, %s, 'push', NOW()
                        ) ON CONFLICT (imei, start_time) DO UPDATE SET
                            end_time = EXCLUDED.end_time,
                            distance_m = EXCLUDED.distance_m,
                            end_geom = EXCLUDED.end_geom,
                            fuel_consumed_l = EXCLUDED.fuel_consumed_l,
                            idle_time_s = EXCLUDED.idle_time_s,
                            updated_at = NOW()
                    """, (
                        imei, begin_time, end_time, distance_m,
                        begin_lng, begin_lat, begin_lng, begin_lat,
                        end_lng, end_lat, end_lng, end_lat,
                        clean_num(item.get("oils")),
                        clean_int(item.get("idleTimes")),
                        clean_int(item.get("tripSeq")),
                    ))
                    inserted += 1
                except Exception:
                    log.warning("Failed to process trip for %s", item.get("deviceImei"), exc_info=True)
            log_ingestion(cur, "webhook/pushtripreport", len(items), 0, inserted,
                          int((time.time() - t0) * 1000), True)
        conn.commit()
    log.info("pushtripreport: %d/%d items processed.", inserted, len(items))
    return JSONResponse(content=SUCCESS)