DWH pipeline (new):
- dwh/261001_dwh_control.sql — watermarks + per-run audit log schema
- dwh/261002_bronze_constraints_audit.sql — ON CONFLICT key assertion
- dwh/261003_dwh_roles.sql — dwh_owner / grafana_ro contract assertion
- dwh/261004_dwh_observability_views.sql — v_table_freshness,
v_recent_failures, v_watermark_lag (readable by grafana_ro)
- docs/DWH_PIPELINE.md — operations runbook (setup, troubleshooting,
manual re-run, back-fill, rotation)
- DWH_Execution_Manual.md — reusable playbook for future data
projects (extract → blob → load pattern, 7 design principles,
snapshot-vs-incremental matrix, verification gates)
- docs/superpowers/{specs,plans}/2026-04-24-n8n-dwh-bronze-pipeline-*
— design spec + 27-task implementation plan
Security:
- dwh/260423_dwh_ddl_v1.sql — redacted plaintext role passwords to
'CHANGE_ME_BEFORE_APPLY' placeholders; added SECURITY header
documenting generation + rotation flow
Docs:
- CLAUDE.md — §3 adds tracksolid_dwh@31.97.44.246:5888 target,
§4 adds dwh/ + docs/DWH_PIPELINE.md to codebase map, §5 adds
bronze + dwh_control schema roll-up, §10 adds deploy task +
password rotation follow-up
Also includes miscellaneous in-progress files accumulated on this
branch (workspace, analytics notes, vehicle CSVs, extract helpers,
renamed markdown archives).
Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
297 lines · 13 KiB · Python
#!/usr/bin/env python3
|
|
"""
|
|
Tracksolid Pro - Device List Extractor
|
|
Calls jimi.user.device.list and saves all vehicle/device data to CSV and JSON.
|
|
|
|
Uses the same signing approach as tracksolid_update.py (confirmed working):
|
|
- POST as x-www-form-urlencoded
|
|
- All parameter values cast to strings before signing
|
|
|
|
Usage:
|
|
python3 tracksolid_extract.py
|
|
python3 tracksolid_extract.py --target "Fireside Communications"
|
|
python3 tracksolid_extract.py --format json
|
|
python3 tracksolid_extract.py --format both
|
|
|
|
Environment variables (same .env file as tracksolid_update.py):
|
|
TS_USER_ID - Your Tracksolid account username
|
|
TS_USER_PWD_MD5 - MD5 hash of your password (lowercase)
|
|
TS_APP_KEY - Your appKey from JIMI
|
|
TS_APP_SECRET - Your appSecret from JIMI
|
|
TS_API_URL - API base URL (defaults to EU node)
|
|
TS_TARGET - Account to query (defaults to TS_USER_ID)
|
|
"""
|
|
|
|
import hashlib
|
|
import os
|
|
import sys
|
|
import json
|
|
import logging
|
|
import argparse
|
|
import time
|
|
from datetime import datetime, timezone
|
|
from pathlib import Path
|
|
|
|
import requests
|
|
import pandas as pd
|
|
|
|
# ──────────────────────────────────────────────────────────────────────────────
# CONFIGURATION — reads from environment / same .env as the updater
#
# SECURITY: earlier revisions shipped a real user_pwd_md5 / app_key /
# app_secret as fallback defaults in this file. Those values must be treated
# as leaked and rotated. Secrets are now env-only: with no .env the token
# call fails with an explicit auth error instead of silently using
# committed credentials.
# ──────────────────────────────────────────────────────────────────────────────
CONFIG = {
    # Account username — not a secret, default kept for convenience.
    "user_id": os.getenv("TS_USER_ID", "Fireside Communications"),
    # Lowercase MD5 of the account password — secret, supply via environment.
    "user_pwd_md5": os.getenv("TS_USER_PWD_MD5", ""),
    # JIMI appKey / appSecret — secrets, supply via environment.
    "app_key": os.getenv("TS_APP_KEY", ""),
    "app_secret": os.getenv("TS_APP_SECRET", ""),
    "api_url": os.getenv("TS_API_URL", "https://eu-open.tracksolidpro.com/route/rest"),
    "target": os.getenv("TS_TARGET", ""),  # account to query; defaults to user_id
    "expires_in": "7200",  # requested token TTL in seconds (jimi.oauth.token.get)
}
|
|
|
|
# ──────────────────────────────────────────────────────────────────────────────
# LOGGING — every record goes both to stdout and to a local text log file.
# ──────────────────────────────────────────────────────────────────────────────
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)-8s %(message)s",
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler("tracksolid_extract.log", encoding="utf-8"),
    ],
)

# Module-level logger shared by the whole script.
log = logging.getLogger(__name__)
|
|
|
|
|
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
# SIGNING UTILITIES (identical to tracksolid_update.py)
|
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
|
|
def utc_timestamp() -> str:
    """Return the current UTC time as 'YYYY-MM-DD HH:MM:SS' (JIMI API format)."""
    now = datetime.now(timezone.utc)
    return now.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
|
|
|
def build_sign(params: dict, app_secret: str) -> str:
    """Compute the JIMI request signature for *params*.

    Keys are sorted; the 'sign' key itself plus None/blank values are
    excluded; each remaining key is concatenated with its value; the
    result is wrapped in the secret on both sides and MD5-hashed
    (uppercase hex).
    """
    def _signable(key: str) -> bool:
        if key == "sign":
            return False
        value = params[key]
        return value is not None and str(value).strip() != ""

    body = "".join(f"{key}{params[key]}" for key in sorted(filter(_signable, params)))
    digest = hashlib.md5(f"{app_secret}{body}{app_secret}".encode("utf-8"))
    return digest.hexdigest().upper()
|
|
|
|
|
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
# TRACKSOLID CLIENT
|
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
|
|
class TracksolidClient:
    """Thin client for the JIMI / Tracksolid Pro open API.

    Handles MD5 request signing, access-token acquisition with in-memory
    caching, and the jimi.user.device.list call. All requests go through
    one requests.Session as form-encoded POSTs (see _post).
    """

    def __init__(self, cfg: dict):
        # cfg is the module-level CONFIG dict (user_id, user_pwd_md5,
        # app_key, app_secret, api_url, target, expires_in).
        self.cfg = cfg
        self._token: str | None = None        # cached access token, None until first auth
        self._token_expires_at: float = 0.0   # epoch seconds; 0.0 = no valid token yet
        self.session = requests.Session()

    def _post(self, params: dict) -> dict:
        """Sign *params*, POST them form-encoded, and return the parsed JSON.

        Raises requests.HTTPError on a non-2xx response. API-level errors
        (non-zero "code" in the body) are NOT raised here — callers check.
        """
        # Cast every value to str and drop None/blank entries BEFORE signing,
        # so the signed string matches exactly what goes on the wire.
        str_params = {
            k: str(v)
            for k, v in params.items()
            if v is not None and str(v).strip() != ""
        }
        str_params["sign"] = build_sign(str_params, self.cfg["app_secret"])

        log.debug("POST %s params=%s", self.cfg["api_url"], str_params)
        resp = self.session.post(
            self.cfg["api_url"],
            data=str_params,  # form-encoded — confirmed working
            timeout=30,
        )
        resp.raise_for_status()
        data = resp.json()
        log.debug("Response: %s", json.dumps(data))
        return data

    def _common_params(self, method: str) -> dict:
        """Return the boilerplate parameters every JIMI call requires."""
        return {
            "method": method,
            "timestamp": utc_timestamp(),
            "app_key": self.cfg["app_key"],
            "sign_method": "md5",
            "v": "1.0",
            "format": "json",
        }

    def get_token(self) -> str:
        """Return a valid access token, re-using the cached one when possible.

        A cached token is considered valid up to 60 seconds before its
        expiry. Raises RuntimeError when the API rejects the credentials.
        """
        if self._token and time.time() < self._token_expires_at - 60:
            return self._token

        log.info("Obtaining access token ...")
        params = self._common_params("jimi.oauth.token.get")
        params.update({
            "user_id": self.cfg["user_id"],
            "user_pwd_md5": self.cfg["user_pwd_md5"],
            "expires_in": self.cfg["expires_in"],
        })

        data = self._post(params)
        if data.get("code") != 0:
            raise RuntimeError(
                f"Auth failed — code={data.get('code')} message={data.get('message')}"
            )

        # Cache the token alongside its absolute expiry time.
        self._token = data["result"]["accessToken"]
        self._token_expires_at = time.time() + int(data["result"]["expiresIn"])
        log.info("Token acquired. Valid for %s seconds.", data["result"]["expiresIn"])
        return self._token

    def get_device_list(self, target: str) -> list[dict]:
        """
        Call jimi.user.device.list for the given target account.

        Returns the full list of device/vehicle records (raw API dicts).
        Raises RuntimeError when the API reports a non-zero "code".
        """
        log.info("Fetching device list for account: %s", target)
        token = self.get_token()

        params = self._common_params("jimi.user.device.list")
        params["access_token"] = token
        params["target"] = target

        data = self._post(params)

        if data.get("code") != 0:
            raise RuntimeError(
                f"Device list failed — code={data.get('code')} message={data.get('message')}"
            )

        # NOTE(review): assumes "result" is a flat list — presumably the API
        # does not paginate this endpoint; verify for large fleets.
        devices = data.get("result", [])
        log.info("Retrieved %d devices.", len(devices))
        return devices
|
|
|
|
|
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
# OUTPUT HELPERS
|
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
|
|
# Friendly column names for the CSV output (API field → human-readable header).
COLUMN_RENAME = dict(
    imei="IMEI",
    deviceName="Device Name",
    mcType="Model",
    mcTypeUseScope="Vehicle Type",
    sim="SIM",
    expiration="Platform Expiry",
    activationTime="Activated",
    reMark="Remarks",
    vehicleName="Vehicle Name",
    vehicleIcon="Icon",
    vehicleNumber="License Plate",
    vehicleModels="Vehicle Model",
    carFrame="VIN",
    driverName="Driver Name",
    driverPhone="Driver Phone",
    enabledFlag="Active",
    engineNumber="Engine Number",
    deviceGroupId="Group ID",
    deviceGroup="Group",
)
|
|
|
|
def save_csv(devices: list[dict], path: str):
    """Write *devices* to *path* as CSV with friendly, prioritised headers."""
    frame = pd.DataFrame(devices).rename(columns=COLUMN_RENAME)

    # Most useful columns first; any unexpected columns are appended after.
    priority = ["IMEI", "License Plate", "Driver Name", "Driver Phone",
                "Device Name", "Vehicle Model", "Vehicle Type", "Group",
                "SIM", "Platform Expiry", "Activated", "Active",
                "VIN", "Engine Number", "Remarks"]
    leading = [col for col in priority if col in frame.columns]
    trailing = [col for col in frame.columns if col not in leading]
    frame = frame[leading + trailing]

    frame.to_csv(path, index=False)
    log.info("CSV saved → %s (%d rows, %d columns)", path, len(frame), len(frame.columns))
|
|
|
|
|
|
def save_json(devices: list[dict], path: str):
    """Write the raw device records to *path* as pretty-printed UTF-8 JSON."""
    payload = json.dumps(devices, indent=2, ensure_ascii=False)
    with open(path, "w", encoding="utf-8") as fh:
        fh.write(payload)
    log.info("JSON saved → %s (%d records)", path, len(devices))
|
|
|
|
|
|
def print_summary(devices: list[dict]):
    """Print a console overview: totals, coverage counts, per-group breakdown."""
    df = pd.DataFrame(devices)

    def stat(column, fn):
        # "?" means the API response did not include that column at all.
        return fn(df[column]) if column in df.columns else "?"

    total = len(df)
    active = stat("enabledFlag", lambda col: col.eq(1).sum())
    groups = stat("deviceGroup", lambda col: col.nunique())
    with_plate = stat("vehicleNumber", lambda col: col.notna().sum())
    with_driver = stat("driverName", lambda col: col.notna().sum())

    rule = "=" * 50
    print()
    print(rule)
    print(" DEVICE LIST SUMMARY")
    print(rule)
    print(f" Total devices : {total}")
    print(f" Active : {active}")
    print(f" Device groups : {groups}")
    print(f" With plate no. : {with_plate}")
    print(f" With driver name : {with_driver}")
    print(rule)

    if "deviceGroup" in df.columns:
        print("\n Breakdown by group:")
        for group, count in df["deviceGroup"].value_counts().items():
            print(f" {group:<25} {count} devices")
    print()
|
|
|
|
|
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
# MAIN
|
|
# ──────────────────────────────────────────────────────────────────────────────
|
|
|
|
def main():
    """CLI entry point: parse arguments, fetch devices, write outputs."""
    parser = argparse.ArgumentParser(
        description="Extract Tracksolid device list to CSV / JSON."
    )
    parser.add_argument(
        "--target", default="",
        help="Account to query (default: same as TS_USER_ID / user_id in CONFIG)."
    )
    parser.add_argument(
        "--format", choices=["csv", "json", "both"], default="csv",
        help="Output format (default: csv)."
    )
    parser.add_argument(
        "--out", default="",
        help="Output filename without extension (default: tracksolid_devices_YYYYMMDD_HHMMSS)."
    )
    args = parser.parse_args()

    # Account to query: CLI flag wins, then TS_TARGET, then the login user.
    account = args.target or CONFIG["target"] or CONFIG["user_id"]

    # Output base name: explicit --out, otherwise a timestamped default.
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    stem = args.out or f"tracksolid_devices_{stamp}"

    # ── Fetch ──────────────────────────────────────────────────────────────────
    client = TracksolidClient(CONFIG)
    try:
        devices = client.get_device_list(account)
    except Exception as exc:
        log.error("Failed to fetch device list: %s", exc)
        sys.exit(1)

    if not devices:
        log.warning("No devices returned for account: %s", account)
        sys.exit(0)

    # ── Save ───────────────────────────────────────────────────────────────────
    # Insertion order keeps CSV first, matching the historical behaviour.
    writers = {"csv": save_csv, "json": save_json}
    for fmt, writer in writers.items():
        if args.format in (fmt, "both"):
            writer(devices, f"{stem}.{fmt}")

    # ── Summary ────────────────────────────────────────────────────────────────
    print_summary(devices)


if __name__ == "__main__":
    main()
|