refactor(docker-compose): apply DRY principle using YAML anchors for environment variables

Jean-Baptiste DONNETTE 2024-09-30 15:44:28 +02:00
parent 34d4d013fc
commit 7d1a2b6bf1

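This change relies on two standard YAML features that Compose supports: anchors (&name) with aliases (*name), and the merge key (<<:), which copies one mapping into another. A minimal sketch of the mechanism, with illustrative names that are not taken from this file:

x-base: &base
  FOO: one
  BAR: two

example-env:
  <<: *base          # pulls in FOO and BAR
  BAR: overridden    # keys declared locally win over merged ones
  BAZ: three

Running docker compose config prints the fully resolved file, which is a quick way to confirm that the anchors expand as expected.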

@@ -5,6 +5,53 @@ volumes:
lago_redis_data:
lago_storage_data:
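# Top-level keys prefixed with x- are extension fields that Compose ignores, which makes them a safe place to define reusable blocks; &common-environment names this mapping as a YAML anchor.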
x-common-environment: &common-environment
LAGO_API_URL: ${LAGO_API_URL}
DATABASE_URL: ${DATABASE_URL}
REDIS_URL: ${REDIS_URL}
REDIS_PASSWORD: ${REDIS_PASSWORD}
SECRET_KEY_BASE: ${SECRET_KEY_BASE}
RAILS_ENV: ${RAILS_ENV}
RAILS_LOG_TO_STDOUT: ${LAGO_RAILS_STDOUT}
SENTRY_DSN: ${SENTRY_DSN}
LAGO_FRONT_URL: ${LAGO_FRONT_URL}
RSA_PRIVATE_KEY: ${RSA_PRIVATE_KEY} # Should be base64 encoded
LAGO_RSA_PRIVATE_KEY: ${LAGO_RSA_PRIVATE_KEY} # Should be base64 encoded
LAGO_ENCRYPTION_PRIMARY_KEY: ${LAGO_ENCRYPTION_PRIMARY_KEY}
LAGO_ENCRYPTION_DETERMINISTIC_KEY: ${LAGO_ENCRYPTION_DETERMINISTIC_KEY}
LAGO_ENCRYPTION_KEY_DERIVATION_SALT: ${LAGO_ENCRYPTION_KEY_DERIVATION_SALT}
LAGO_USE_AWS_S3: ${LAGO_USE_AWS_S3}
LAGO_AWS_S3_ACCESS_KEY_ID: ${LAGO_AWS_S3_ACCESS_KEY_ID}
LAGO_AWS_S3_SECRET_ACCESS_KEY: ${LAGO_AWS_S3_SECRET_ACCESS_KEY}
LAGO_AWS_S3_REGION: ${LAGO_AWS_S3_REGION}
LAGO_AWS_S3_BUCKET: ${LAGO_AWS_S3_BUCKET}
LAGO_AWS_S3_ENDPOINT: ${LAGO_AWS_S3_ENDPOINT}
LAGO_USE_GCS: ${LAGO_USE_GCS}
LAGO_GCS_PROJECT: ${LAGO_GCS_PROJECT}
LAGO_GCS_BUCKET: ${LAGO_GCS_BUCKET}
LAGO_PDF_URL: ${LAGO_PDF_URL}
LAGO_REDIS_CACHE_URL: ${LAGO_REDIS_CACHE_URL}
LAGO_REDIS_CACHE_PASSWORD: ${LAGO_REDIS_CACHE_PASSWORD}
LAGO_DISABLE_SEGMENT: ${LAGO_DISABLE_SEGMENT}
LAGO_DISABLE_WALLET_REFRESH: ${LAGO_DISABLE_WALLET_REFRESH}
NANGO_SECRET_KEY: ${NANGO_SECRET_KEY}
LAGO_LICENSE: ${LAGO_LICENSE}
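# <<: *common-environment merges every key from the anchor above; the extra keys below extend it for the API containers.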
x-api-environment: &api-environment
<<: *common-environment
LAGO_SIDEKIQ_WEB: ${LAGO_SIDEKIQ_WEB}
LAGO_OAUTH_PROXY_URL: ${LAGO_OAUTH_PROXY_URL}
GOOGLE_AUTH_CLIENT_ID: ${GOOGLE_AUTH_CLIENT_ID}
GOOGLE_AUTH_CLIENT_SECRET: ${GOOGLE_AUTH_CLIENT_SECRET}
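# The front container expects different variable names (e.g. API_URL instead of LAGO_API_URL), so this block stands alone rather than merging the common anchor.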
x-front-environment: &front-environment
API_URL: ${LAGO_API_URL}
APP_ENV: ${APP_ENV}
LAGO_DISABLE_SIGNUP: ${LAGO_DISABLE_SIGNUP}
LAGO_OAUTH_PROXY_URL: ${LAGO_OAUTH_PROXY_URL}
SENTRY_DSN: ${SENTRY_DSN_FRONT}
NANGO_SECRET_KEY: ${NANGO_SECRET_KEY}
services:
db:
image: postgres:14-alpine
@@ -48,42 +95,8 @@ services:
# uncomment for a potentially faster startup if you have docker --version > 25.0.0
# start_interval: 2s
environment:
- LAGO_API_URL=${LAGO_API_URL}
- DATABASE_URL=${DATABASE_URL}
- REDIS_URL=${REDIS_URL}
- REDIS_PASSWORD=${REDIS_PASSWORD}
- SECRET_KEY_BASE=${SECRET_KEY_BASE}
- RAILS_ENV=${RAILS_ENV}
- RAILS_LOG_TO_STDOUT=${LAGO_RAILS_STDOUT}
- SENTRY_DSN=${SENTRY_DSN}
- LAGO_FRONT_URL=${LAGO_FRONT_URL}
- RSA_PRIVATE_KEY=${RSA_PRIVATE_KEY} # Should be base64 encoded
- LAGO_RSA_PRIVATE_KEY=${LAGO_RSA_PRIVATE_KEY} # Should be base64 encoded
- LAGO_SIDEKIQ_WEB=${LAGO_SIDEKIQ_WEB}
- LAGO_ENCRYPTION_PRIMARY_KEY=${LAGO_ENCRYPTION_PRIMARY_KEY}
- LAGO_ENCRYPTION_DETERMINISTIC_KEY=${LAGO_ENCRYPTION_DETERMINISTIC_KEY}
- LAGO_ENCRYPTION_KEY_DERIVATION_SALT=${LAGO_ENCRYPTION_KEY_DERIVATION_SALT}
- LAGO_USE_AWS_S3=${LAGO_USE_AWS_S3}
- LAGO_AWS_S3_ACCESS_KEY_ID=${LAGO_AWS_S3_ACCESS_KEY_ID}
- LAGO_AWS_S3_SECRET_ACCESS_KEY=${LAGO_AWS_S3_SECRET_ACCESS_KEY}
- LAGO_AWS_S3_REGION=${LAGO_AWS_S3_REGION}
- LAGO_AWS_S3_BUCKET=${LAGO_AWS_S3_BUCKET}
- LAGO_AWS_S3_ENDPOINT=${LAGO_AWS_S3_ENDPOINT}
- LAGO_USE_GCS=${LAGO_USE_GCS}
- LAGO_GCS_PROJECT=${LAGO_GCS_PROJECT}
- LAGO_GCS_BUCKET=${LAGO_GCS_BUCKET}
- LAGO_PDF_URL=${LAGO_PDF_URL}
- LAGO_REDIS_CACHE_URL=${LAGO_REDIS_CACHE_URL}
- LAGO_REDIS_CACHE_PASSWORD=${LAGO_REDIS_CACHE_PASSWORD}
- LAGO_DISABLE_SEGMENT=${LAGO_DISABLE_SEGMENT}
- LAGO_DISABLE_WALLET_REFRESH=${LAGO_DISABLE_WALLET_REFRESH}
- LAGO_OAUTH_PROXY_URL=${LAGO_OAUTH_PROXY_URL}
- LAGO_LICENSE=${LAGO_LICENSE}
- GOOGLE_AUTH_CLIENT_ID=${GOOGLE_AUTH_CLIENT_ID}
- GOOGLE_AUTH_CLIENT_SECRET=${GOOGLE_AUTH_CLIENT_SECRET}
- NANGO_SECRET_KEY=${NANGO_SECRET_KEY}
# - SIDEKIQ_EVENTS=${SIDEKIQ_EVENTS}
# - SIDEKIQ_PDFS=${SIDEKIQ_PDFS}
<<: *api-environment
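# The YAML merge key only works inside a mapping, so environment: switches here from the - VAR=value list form to map form.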
volumes:
- lago_storage_data:/app/storage
# If using GCS, you need to put the credentials keyfile here
@@ -101,34 +114,9 @@ services:
api:
condition: service_healthy
environment:
- API_URL=${LAGO_API_URL}
- APP_ENV=${APP_ENV}
- LAGO_DISABLE_SIGNUP=${LAGO_DISABLE_SIGNUP}
- LAGO_OAUTH_PROXY_URL=${LAGO_OAUTH_PROXY_URL}
- SENTRY_DSN=${SENTRY_DSN_FRONT}
- NANGO_SECRET_KEY=${NANGO_SECRET_KEY}
<<: *front-environment
ports:
- ${FRONT_PORT}:80
# - 443:443
# Using SSL with Let's Encrypt
# volumes:
# - ./extra/nginx-letsencrypt.conf:/etc/nginx/conf.d/default.conf
# - ./extra/certbot/conf:/etc/letsencrypt
# - ./extra/certbot/www:/var/www/certbot
# Using SSL with self-signed certificates
# volumes:
# - ./extra/nginx-selfsigned.conf:/etc/nginx/conf.d/default.conf
# - ./extra/ssl/nginx-selfsigned.crt:/etc/ssl/certs/nginx-selfsigned.crt
# - ./extra/ssl/nginx-selfsigned.key:/etc/ssl/private/nginx-selfsigned.key
# - ./extra/ssl/dhparam.pem:/etc/ssl/certs/dhparam.pem
# Only used for SSL support with Let's Encrypt
# certbot:
# image: certbot/certbot
# entrypoint: "/bin/sh -c 'trap exit TERM; while :; do certbot renew; sleep 12h & wait $${!}; done;'"
# volumes:
# - ./extra/certbot/conf:/etc/letsencrypt
# - ./extra/certbot/www:/var/www/certbot
api-worker:
container_name: lago-worker
@@ -141,127 +129,13 @@ services:
healthcheck:
test: ['CMD-SHELL', 'bundle exec sidekiqmon | grep $(hostname) || exit 1']
environment:
- LAGO_API_URL=${LAGO_API_URL}
- DATABASE_URL=${DATABASE_URL}
- REDIS_URL=${REDIS_URL}
- REDIS_PASSWORD=${REDIS_PASSWORD}
- SECRET_KEY_BASE=${SECRET_KEY_BASE}
- RAILS_ENV=${RAILS_ENV}
- RAILS_LOG_TO_STDOUT=${LAGO_RAILS_STDOUT}
- SENTRY_DSN=${SENTRY_DSN}
- LAGO_RSA_PRIVATE_KEY=${LAGO_RSA_PRIVATE_KEY} # Should be base64 encoded
- RSA_PRIVATE_KEY=${RSA_PRIVATE_KEY} # Should be base64 encoded
- LAGO_ENCRYPTION_PRIMARY_KEY=${LAGO_ENCRYPTION_PRIMARY_KEY}
- LAGO_ENCRYPTION_DETERMINISTIC_KEY=${LAGO_ENCRYPTION_DETERMINISTIC_KEY}
- LAGO_ENCRYPTION_KEY_DERIVATION_SALT=${LAGO_ENCRYPTION_KEY_DERIVATION_SALT}
- LAGO_FRONT_URL=${LAGO_FRONT_URL}
- LAGO_USE_AWS_S3=${LAGO_USE_AWS_S3}
- LAGO_AWS_S3_ACCESS_KEY_ID=${LAGO_AWS_S3_ACCESS_KEY_ID}
- LAGO_AWS_S3_SECRET_ACCESS_KEY=${LAGO_AWS_S3_SECRET_ACCESS_KEY}
- LAGO_AWS_S3_REGION=${LAGO_AWS_S3_REGION}
- LAGO_AWS_S3_BUCKET=${LAGO_AWS_S3_BUCKET}
- LAGO_AWS_S3_ENDPOINT=${LAGO_AWS_S3_ENDPOINT}
- LAGO_USE_GCS=${LAGO_USE_GCS}
- LAGO_GCS_PROJECT=${LAGO_GCS_PROJECT}
- LAGO_GCS_BUCKET=${LAGO_GCS_BUCKET}
- LAGO_PDF_URL=${LAGO_PDF_URL}
- LAGO_REDIS_CACHE_URL=${LAGO_REDIS_CACHE_URL}
- LAGO_REDIS_CACHE_PASSWORD=${LAGO_REDIS_CACHE_PASSWORD}
- LAGO_DISABLE_SEGMENT=${LAGO_DISABLE_SEGMENT}
- LAGO_DISABLE_WALLET_REFRESH=${LAGO_DISABLE_WALLET_REFRESH}
- NANGO_SECRET_KEY=${NANGO_SECRET_KEY}
- LAGO_LICENSE=${LAGO_LICENSE}
# - SIDEKIQ_EVENTS=${SIDEKIQ_EVENTS}
# - SIDEKIQ_PDFS=${SIDEKIQ_PDFS}
<<: *common-environment
# Worker-specific variables, if needed
# SIDEKIQ_EVENTS: ${SIDEKIQ_EVENTS}
# SIDEKIQ_PDFS: ${SIDEKIQ_PDFS}
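# If uncommented, these keys apply to this service only and take precedence over the merged values.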
volumes:
- lago_storage_data:/app/storage
# You can uncomment this if you want to use a dedicated Sidekiq worker for event ingestion.
# It is recommended if you have a high volume of events, so the other Sidekiq jobs are not impacted.
# api-events-worker:
# container_name: lago-events-worker
# image: getlago/api:v1.12.2
# restart: unless-stopped
# depends_on:
# api:
# condition: service_healthy
# command: ["./scripts/start.events.worker.sh"]
# environment:
# - LAGO_API_URL=${LAGO_API_URL}
# - DATABASE_URL=${DATABASE_URL}
# - REDIS_URL=${REDIS_URL}
# - REDIS_PASSWORD=${REDIS_PASSWORD}
# - SECRET_KEY_BASE=${SECRET_KEY_BASE}
# - RAILS_ENV=${RAILS_ENV}
# - RAILS_LOG_TO_STDOUT=${LAGO_RAILS_STDOUT}
# - SENTRY_DSN=${SENTRY_DSN}
# - LAGO_RSA_PRIVATE_KEY=${LAGO_RSA_PRIVATE_KEY} # Should be base64 encoded
# - RSA_PRIVATE_KEY=${RSA_PRIVATE_KEY} # Should be base64 encoded
# - LAGO_ENCRYPTION_PRIMARY_KEY=${LAGO_ENCRYPTION_PRIMARY_KEY}
# - LAGO_ENCRYPTION_DETERMINISTIC_KEY=${LAGO_ENCRYPTION_DETERMINISTIC_KEY}
# - LAGO_ENCRYPTION_KEY_DERIVATION_SALT=${LAGO_ENCRYPTION_KEY_DERIVATION_SALT}
# - LAGO_FRONT_URL=${LAGO_FRONT_URL}
# - LAGO_USE_AWS_S3=${LAGO_USE_AWS_S3}
# - LAGO_AWS_S3_ACCESS_KEY_ID=${LAGO_AWS_S3_ACCESS_KEY_ID}
# - LAGO_AWS_S3_SECRET_ACCESS_KEY=${LAGO_AWS_S3_SECRET_ACCESS_KEY}
# - LAGO_AWS_S3_REGION=${LAGO_AWS_S3_REGION}
# - LAGO_AWS_S3_BUCKET=${LAGO_AWS_S3_BUCKET}
# - LAGO_AWS_S3_ENDPOINT=${LAGO_AWS_S3_ENDPOINT}
# - LAGO_USE_GCS=${LAGO_USE_GCS}
# - LAGO_GCS_PROJECT=${LAGO_GCS_PROJECT}
# - LAGO_GCS_BUCKET=${LAGO_GCS_BUCKET}
# - LAGO_PDF_URL=${LAGO_PDF_URL}
# - LAGO_REDIS_CACHE_URL=${LAGO_REDIS_CACHE_URL}
# - LAGO_REDIS_CACHE_PASSWORD=${LAGO_REDIS_CACHE_PASSWORD}
# - LAGO_DISABLE_SEGMENT=${LAGO_DISABLE_SEGMENT}
# - LAGO_DISABLE_WALLET_REFRESH=${LAGO_DISABLE_WALLET_REFRESH}
# - NANGO_SECRET_KEY=${NANGO_SECRET_KEY}
# - SIDEKIQ_EVENTS=true
# - LAGO_LICENSE=${LAGO_LICENSE}
# You can uncomment this if you want to use a dedicated Sidekiq worker for invoice PDF creation.
# It is recommended if you generate a high volume of invoices, so the other Sidekiq jobs are not impacted.
# api-pdfs-worker:
# container_name: lago-pdfs-worker
# image: getlago/api:v1.12.2
# restart: unless-stopped
# depends_on:
# api:
# condition: service_healthy
# command: ["./scripts/start.pdfs.worker.sh"]
# environment:
# - LAGO_API_URL=${LAGO_API_URL}
# - DATABASE_URL=${DATABASE_URL}
# - REDIS_URL=${REDIS_URL}
# - REDIS_PASSWORD=${REDIS_PASSWORD}
# - SECRET_KEY_BASE=${SECRET_KEY_BASE}
# - RAILS_ENV=${RAILS_ENV}
# - RAILS_LOG_TO_STDOUT=${LAGO_RAILS_STDOUT}
# - SENTRY_DSN=${SENTRY_DSN}
# - LAGO_RSA_PRIVATE_KEY=${LAGO_RSA_PRIVATE_KEY} # Should be base64 encoded
# - RSA_PRIVATE_KEY=${RSA_PRIVATE_KEY} # Should be base64 encoded
# - LAGO_ENCRYPTION_PRIMARY_KEY=${LAGO_ENCRYPTION_PRIMARY_KEY}
# - LAGO_ENCRYPTION_DETERMINISTIC_KEY=${LAGO_ENCRYPTION_DETERMINISTIC_KEY}
# - LAGO_ENCRYPTION_KEY_DERIVATION_SALT=${LAGO_ENCRYPTION_KEY_DERIVATION_SALT}
# - LAGO_FRONT_URL=${LAGO_FRONT_URL}
# - LAGO_USE_AWS_S3=${LAGO_USE_AWS_S3}
# - LAGO_AWS_S3_ACCESS_KEY_ID=${LAGO_AWS_S3_ACCESS_KEY_ID}
# - LAGO_AWS_S3_SECRET_ACCESS_KEY=${LAGO_AWS_S3_SECRET_ACCESS_KEY}
# - LAGO_AWS_S3_REGION=${LAGO_AWS_S3_REGION}
# - LAGO_AWS_S3_BUCKET=${LAGO_AWS_S3_BUCKET}
# - LAGO_AWS_S3_ENDPOINT=${LAGO_AWS_S3_ENDPOINT}
# - LAGO_USE_GCS=${LAGO_USE_GCS}
# - LAGO_GCS_PROJECT=${LAGO_GCS_PROJECT}
# - LAGO_GCS_BUCKET=${LAGO_GCS_BUCKET}
# - LAGO_PDF_URL=${LAGO_PDF_URL}
# - LAGO_REDIS_CACHE_URL=${LAGO_REDIS_CACHE_URL}
# - LAGO_REDIS_CACHE_PASSWORD=${LAGO_REDIS_CACHE_PASSWORD}
# - LAGO_DISABLE_SEGMENT=${LAGO_DISABLE_SEGMENT}
# - LAGO_DISABLE_WALLET_REFRESH=${LAGO_DISABLE_WALLET_REFRESH}
# - NANGO_SECRET_KEY=${NANGO_SECRET_KEY}
# - SIDEKIQ_PDFS=true
# - LAGO_LICENSE=${LAGO_LICENSE}
api-clock:
container_name: lago-clock
image: getlago/api:v1.12.2
@@ -271,23 +145,7 @@ services:
condition: service_healthy
command: ['./scripts/start.clock.sh']
environment:
- LAGO_API_URL=${LAGO_API_URL}
- DATABASE_URL=${DATABASE_URL}
- REDIS_URL=${REDIS_URL}
- REDIS_PASSWORD=${REDIS_PASSWORD}
- SECRET_KEY_BASE=${SECRET_KEY_BASE}
- RAILS_ENV=${RAILS_ENV}
- RAILS_LOG_TO_STDOUT=${LAGO_RAILS_STDOUT}
- SENTRY_DSN=${SENTRY_DSN}
- LAGO_DISABLE_WALLET_REFRESH=${LAGO_DISABLE_WALLET_REFRESH}
- LAGO_REDIS_CACHE_URL=${LAGO_REDIS_CACHE_URL}
- LAGO_RSA_PRIVATE_KEY=${LAGO_RSA_PRIVATE_KEY} # Should be base64 encoded
- RSA_PRIVATE_KEY=${RSA_PRIVATE_KEY} # Should be base64 encoded
- LAGO_ENCRYPTION_PRIMARY_KEY=${LAGO_ENCRYPTION_PRIMARY_KEY}
- LAGO_ENCRYPTION_DETERMINISTIC_KEY=${LAGO_ENCRYPTION_DETERMINISTIC_KEY}
- LAGO_ENCRYPTION_KEY_DERIVATION_SALT=${LAGO_ENCRYPTION_KEY_DERIVATION_SALT}
- NANGO_SECRET_KEY=${NANGO_SECRET_KEY}
- LAGO_LICENSE=${LAGO_LICENSE}
<<: *common-environment
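# The clock previously received only a subset of these variables; merging the full common set means the extra entries are simply unused here.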
pdf:
image: getlago/lago-gotenberg:7.8.2
@@ -309,3 +167,32 @@ services:
- DATABASE_URL=${DATABASE_URL}
- REDIS_URL=${REDIS_URL}
- REDIS_PASSWORD=${REDIS_PASSWORD}
# You can uncomment this if you want to use a dedicated Sidekiq worker for event ingestion.
# It is recommended if you have a high volume of events, so the other Sidekiq jobs are not impacted.
# api-events-worker:
# container_name: lago-events-worker
# image: getlago/api:v1.12.2
# restart: unless-stopped
# depends_on:
# api:
# condition: service_healthy
# command: ["./scripts/start.events.worker.sh"]
# environment:
# <<: *api-environment
# You can uncomment this if you want to use a dedicated Sidekiq worker for invoice PDF creation.
# It is recommended if you generate a high volume of invoices, so the other Sidekiq jobs are not impacted.
# api-pdfs-worker:
# container_name: lago-pdfs-worker
# image: getlago/api:v1.12.2
# restart: unless-stopped
# depends_on:
# api:
# condition: service_healthy
# command: ["./scripts/start.pdfs.worker.sh"]
# environment:
# <<: *api-environment