tracksolid_timescale_grafan.../docker-compose.yaml
David Kiania 326764e1a0 Fix migration failures: switch to full TimescaleDB + use psql runner
- Change image from timescaledb-ha:pg16-ts2.15-oss to pg16-ts2.15
  (OSS edition lacks compression, retention, continuous aggregates)
- Add postgresql-client to Dockerfile for psql binary
- Rewrite run_migrations.py to use psql instead of psycopg2
  (psql runs each statement independently; psycopg2 wraps the
  entire file in one transaction so one error rolls back everything)
- Add schema verification: exits 1 if critical tables missing,
  preventing services from starting with broken schema

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-08 17:17:58 +03:00

75 lines
2 KiB
YAML

services:
  # TimescaleDB, full (non-OSS) edition: compression, retention policies and
  # continuous aggregates used by the migrations are not available in the
  # -oss image variant.
  timescale_db:
    image: timescale/timescaledb-ha:pg16-ts2.15
    restart: always
    environment:
      - POSTGRES_DB=${POSTGRES_DB}
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
    volumes:
      # NOTE(review): the timescaledb-ha image is documented to keep PGDATA
      # under /home/postgres/pgdata/data, not /var/lib/postgresql/data —
      # verify this mount actually persists the cluster across restarts.
      - timescale-data:/var/lib/postgresql/data
    healthcheck:
      # ${POSTGRES_USER}/${POSTGRES_DB} are interpolated by Compose from .env
      # at parse time, before the command reaches the container.
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
      interval: 10s
      timeout: 5s
      retries: 5
ingest_movement:
build:
context: .
dockerfile: Dockerfile
command: sh -c "python run_migrations.py && python ingest_movement_rev.py"
restart: always
depends_on:
timescale_db:
condition: service_healthy
env_file: .env
ingest_events:
build:
context: .
dockerfile: Dockerfile
command: sh -c "python run_migrations.py && python ingest_events_rev.py"
restart: always
depends_on:
timescale_db:
condition: service_healthy
env_file: .env
webhook_receiver:
build:
context: .
dockerfile: Dockerfile
command: sh -c "python run_migrations.py && uvicorn webhook_receiver_rev:app --host 0.0.0.0 --port 8000 --workers 2"
restart: always
depends_on:
timescale_db:
condition: service_healthy
env_file: .env
# No host port binding — Coolify's Traefik proxy routes traffic internally.
# Set the webhook domain in Coolify UI pointing to this service on port 8000.
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 30s
timeout: 5s
retries: 3
grafana:
image: grafana/grafana:11.0.0
restart: always
depends_on:
timescale_db:
condition: service_healthy
environment:
- GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_ADMIN_PASSWORD}
volumes:
- grafana-data:/var/lib/grafana
# COOLIFY DOMAIN LOGIC:
# You will set the actual URL in the Coolify UI,
# but the service needs to expose port 3000 internally.
# Named volumes with fixed names so the data survives project re-deploys
# regardless of the Compose project name prefix.
volumes:
  timescale-data:
    name: timescale-data
  grafana-data:
    name: grafana-data