diff --git a/docker-compose.yaml b/docker-compose.yaml
index 43d12e4..e042368 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -14,6 +14,17 @@ services:
       timeout: 5s
       retries: 5
 
+  db_migrate:
+    build:
+      context: .
+      dockerfile: Dockerfile
+    command: python run_migrations.py
+    depends_on:
+      timescale_db:
+        condition: service_healthy
+    env_file: .env
+    restart: "no"
+
   ingest_movement:
     build:
       context: .
@@ -21,9 +32,9 @@ services:
     command: python ingest_movement_rev.py
     restart: always
     depends_on:
-      timescale_db:
-        condition: service_healthy
-    env_file: .env # Coolify will inject variables here
+      db_migrate:
+        condition: service_completed_successfully
+    env_file: .env
 
   ingest_events:
     build:
@@ -32,8 +43,8 @@ services:
     command: python ingest_events_rev.py
     restart: always
     depends_on:
-      timescale_db:
-        condition: service_healthy
+      db_migrate:
+        condition: service_completed_successfully
     env_file: .env
 
   webhook_receiver:
@@ -43,8 +54,8 @@ services:
     command: uvicorn webhook_receiver_rev:app --host 0.0.0.0 --port 8000 --workers 2
     restart: always
     depends_on:
-      timescale_db:
-        condition: service_healthy
+      db_migrate:
+        condition: service_completed_successfully
     env_file: .env
     # No host port binding — Coolify's Traefik proxy routes traffic internally.
     # Set the webhook domain in Coolify UI pointing to this service on port 8000.
@@ -58,18 +69,18 @@ services:
     image: grafana/grafana:11.0.0
     restart: always
     depends_on:
-      timescale_db:
-        condition: service_healthy
+      db_migrate:
+        condition: service_completed_successfully
    environment:
       - GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_ADMIN_PASSWORD}
     volumes:
       - grafana-data:/var/lib/grafana
     # COOLIFY DOMAIN LOGIC:
-    # You will set the actual URL in the Coolify UI, 
+    # You will set the actual URL in the Coolify UI,
     # but the service needs to expose port 3000 internally.
 
 volumes:
   timescale-data:
     name: timescale-data
   grafana-data:
-    name: grafana-data
\ No newline at end of file
+    name: grafana-data
diff --git a/run_migrations.py b/run_migrations.py
new file mode 100644
index 0000000..f4d13a6
--- /dev/null
+++ b/run_migrations.py
@@ -0,0 +1,66 @@
+"""
+run_migrations.py — Idempotent SQL migration runner for Docker init service.
+Executes each .sql migration file in order using psycopg2.
+Tolerates re-run errors (e.g. "policy already exists") so deploys are safe.
+"""
+
+import os
+import sys
+import psycopg2
+
+DATABASE_URL = os.environ["DATABASE_URL"]
+
+MIGRATIONS = [
+    "02_tracksolid_full_schema_rev.sql",
+    "03_webhook_schema_migration.sql",
+]
+
+
+def run_file(conn, path, filename):
+    """Execute a SQL file. Returns True on success, False on error."""
+    with open(path) as f:
+        sql = f.read()
+    try:
+        with conn.cursor() as cur:
+            cur.execute(sql)
+        print(f" OK: {filename}")
+        return True
+    except psycopg2.Error as e:
+        msg = (e.pgerror or str(e)).strip().split("\n")[0]
+        print(f" WARN: {filename}: {msg}")
+        # Connection is now in error state — must reset
+        conn.close()
+        return False
+
+
+def main():
+    print("=== Database Migration Runner ===")
+    conn = psycopg2.connect(DATABASE_URL)
+    conn.autocommit = True
+
+    warnings = 0
+    for sql_file in MIGRATIONS:
+        path = os.path.join("/app", sql_file)
+        if not os.path.exists(path):
+            print(f" SKIP: {sql_file} (not found)")
+            continue
+
+        print(f"Running {sql_file}...")
+        ok = run_file(conn, path, sql_file)
+        if not ok:
+            warnings += 1
+            # Reconnect for the next file
+            conn = psycopg2.connect(DATABASE_URL)
+            conn.autocommit = True
+
+    conn.close()
+
+    if warnings:
+        print(f"Completed with {warnings} warning(s) (expected on re-deploy).")
+    else:
+        print("All migrations applied cleanly.")
+    sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()