Update run_migrations.py: add 04+05, idempotency tracking, expanded verify
- Add 04_bug_fix_migration.sql and 05_enhancement_migration.sql to list - Use schema_migrations table to skip already-applied migrations (prevents migration 04's RENAME from failing on re-run after first deployment) - Expand CRITICAL_TABLES to include all 5 new tables from migration 05 - record_applied() writes to schema_migrations after each success - Cleaner output: APPLY / SKIP / OK per file with summary count On next Coolify redeploy each container will skip 02-05 (already applied) and apply any new migrations added in future commits. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
20a98074a6
commit
aa290151ea
1 changed file with 101 additions and 30 deletions
|
|
@ -1,7 +1,18 @@
|
||||||
"""
|
"""
|
||||||
run_migrations.py — Idempotent SQL migration runner for Docker init.
|
run_migrations.py — Idempotent SQL migration runner for Docker init.
|
||||||
Uses psql (not psycopg2) so each statement runs independently —
|
|
||||||
one error doesn't roll back the entire file.
|
Runs automatically on every container startup via docker-compose command:
|
||||||
|
sh -c "python run_migrations.py && python <service>.py"
|
||||||
|
|
||||||
|
How it works:
|
||||||
|
1. Creates tracksolid.schema_migrations table on first run.
|
||||||
|
2. Skips any migration already recorded in that table.
|
||||||
|
3. Applies pending migrations in filename order.
|
||||||
|
4. Records each successful migration so it never runs twice.
|
||||||
|
5. Verifies critical tables exist before allowing the service to start.
|
||||||
|
|
||||||
|
To add a new migration: create NN_description.sql in the repo and add
|
||||||
|
the filename to MIGRATIONS below. Coolify will apply it on next deploy.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
@ -12,11 +23,15 @@ import psycopg2
|
||||||
|
|
||||||
# Connection string is injected by the container environment (docker-compose /
# Coolify). Fail fast with KeyError if it is missing — nothing works without it.
DATABASE_URL = os.environ["DATABASE_URL"]

# ── Add new migration filenames here in order ─────────────────────────────────
# Files are applied in list order and recorded in tracksolid.schema_migrations,
# so each one runs exactly once per database.
MIGRATIONS = [
    "02_tracksolid_full_schema_rev.sql",
    "03_webhook_schema_migration.sql",
    "04_bug_fix_migration.sql",  # distance_m → distance_km rename + correction
    "05_enhancement_migration.sql",  # new tables, OBD columns, dwh_gold expansion
]
|
||||||
|
|
||||||
|
# ── Tables that must exist before the service is allowed to start ─────────────
|
||||||
CRITICAL_TABLES = [
|
CRITICAL_TABLES = [
|
||||||
"tracksolid.devices",
|
"tracksolid.devices",
|
||||||
"tracksolid.api_token_cache",
|
"tracksolid.api_token_cache",
|
||||||
|
|
@ -26,62 +41,118 @@ CRITICAL_TABLES = [
|
||||||
"tracksolid.trips",
|
"tracksolid.trips",
|
||||||
"tracksolid.alarms",
|
"tracksolid.alarms",
|
||||||
"tracksolid.obd_readings",
|
"tracksolid.obd_readings",
|
||||||
|
"tracksolid.device_events",
|
||||||
|
"tracksolid.fuel_readings",
|
||||||
|
"tracksolid.temperature_readings",
|
||||||
|
"tracksolid.lbs_readings",
|
||||||
|
"tracksolid.geofences",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def get_conn():
    """Open and return a new psycopg2 connection to the configured database."""
    conn = psycopg2.connect(DATABASE_URL)
    return conn
||||||
|
|
||||||
|
|
||||||
|
def ensure_tracking_table(conn):
    """Create the schema_migrations bookkeeping table on first run.

    IF NOT EXISTS makes this safe to call on every container startup.
    """
    ddl = """
        CREATE TABLE IF NOT EXISTS tracksolid.schema_migrations (
            filename TEXT PRIMARY KEY,
            applied_at TIMESTAMPTZ NOT NULL DEFAULT now()
        )
    """
    with conn.cursor() as cur:
        cur.execute(ddl)
    conn.commit()
||||||
|
|
||||||
|
|
||||||
|
def already_applied(conn, filename):
    """Return True if *filename* is already recorded in schema_migrations."""
    query = "SELECT 1 FROM tracksolid.schema_migrations WHERE filename = %s"
    with conn.cursor() as cur:
        cur.execute(query, (filename,))
        row = cur.fetchone()
    return row is not None
||||||
|
|
||||||
|
|
||||||
|
def record_applied(conn, filename):
    """Record *filename* as applied so it is skipped on all future runs.

    ON CONFLICT DO NOTHING keeps this idempotent even if two containers
    race on the same migration.
    """
    sql = (
        "INSERT INTO tracksolid.schema_migrations (filename) "
        "VALUES (%s) ON CONFLICT DO NOTHING"
    )
    with conn.cursor() as cur:
        cur.execute(sql, (filename,))
    conn.commit()
||||||
|
|
||||||
|
|
||||||
def run_file(path, filename):
    """Execute a SQL file via psql. Returns True on success.

    Uses psql rather than psycopg2 so each statement runs independently —
    one failing statement doesn't roll back the entire file.
    """
    print(f"  APPLY {filename} ...")
    result = subprocess.run(
        ["psql", DATABASE_URL, "-f", path],
        capture_output=True, text=True,
    )
    # psql prints per-statement errors as "ERROR: ..." on stderr but keeps going.
    errors = [line for line in result.stderr.splitlines() if "ERROR:" in line]
    # BUG FIX: an invocation failure (bad URL, connection refused, unreadable
    # file) prints lowercase "psql: error: ..." and exits non-zero WITHOUT any
    # "ERROR:" lines — previously that was reported as success and the
    # migration got recorded as applied without ever running. Check the exit
    # code as well.
    if errors or result.returncode != 0:
        for e in errors:
            print(f"    ERROR: {e.strip()}")
        if not errors and result.stderr.strip():
            # Surface the psql-level failure message so the log isn't silent.
            print(f"    ERROR: {result.stderr.strip()}")
        return False
    print(f"  OK {filename}")
    return True
||||||
|
|
||||||
|
|
||||||
def verify_schema(conn):
    """Verify critical tables exist. Exit 1 if missing — blocks service start."""
    print("Verifying schema...")
    missing = []
    with conn.cursor() as cur:
        for table in CRITICAL_TABLES:
            # Entries are "schema.table"; information_schema wants them split.
            schema, name = table.split(".")
            cur.execute(
                "SELECT 1 FROM information_schema.tables "
                "WHERE table_schema=%s AND table_name=%s",
                (schema, name),
            )
            if cur.fetchone() is None:
                missing.append(table)
    if missing:
        print(f"FATAL: missing tables after migrations: {', '.join(missing)}")
        # Non-zero exit aborts the docker-compose command chain, so the
        # service never starts against a broken schema.
        sys.exit(1)
    print(f"  All {len(CRITICAL_TABLES)} critical tables verified.")
||||||
|
|
||||||
|
|
||||||
def main():
    """Apply any pending migrations, then gate service startup on schema checks."""
    print("=== Database Migration Runner ===")

    conn = get_conn()
    ensure_tracking_table(conn)

    applied, skipped = 0, 0
    for sql_file in MIGRATIONS:
        path = os.path.join("/app", sql_file)

        # Tolerate a missing file (image may predate the migration) and move on.
        if not os.path.exists(path):
            print(f"  SKIP {sql_file} (file not found in /app)")
            skipped += 1
            continue

        # Idempotency: anything recorded in schema_migrations never runs twice.
        if already_applied(conn, sql_file):
            print(f"  SKIP {sql_file} (already applied)")
            skipped += 1
            continue

        # A failed migration aborts startup — later migrations may depend on it.
        if not run_file(path, sql_file):
            print(f"FATAL: migration {sql_file} failed — aborting.")
            conn.close()
            sys.exit(1)
        record_applied(conn, sql_file)
        applied += 1

    print(f"\nMigrations: {applied} applied, {skipped} skipped.")

    # verify_schema exits the process itself if anything critical is missing.
    verify_schema(conn)
    conn.close()
    print("Startup checks passed.\n")
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue