Deploy #45

Workflow file for this run

name: Deploy
on:
  workflow_dispatch: ~
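  # '~' is YAML null: workflow_dispatch defines no inputs, so this workflow only runs when
  # triggered manually.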
jobs:
  deploy-proxy:
    name: Deploy proxy
    runs-on: ubuntu-latest
    environment: proxy-production
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Setup Fly
        uses: superfly/flyctl-actions/setup-flyctl@master
      - name: Deploy
        env:
          FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
        run: |
          flyctl deploy --config proxy.fly.toml --local-only
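      # --local-only makes flyctl build the image with the runner's local Docker daemon
      # instead of a remote Fly.io builder; FLY_API_TOKEN comes from the proxy-production
      # environment secrets.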
# Import steps
#      - name: Install Osmium
#        run: |
#          sudo apt install osmium-tool
#      - name: Generate cache key
#        id: cache-key
#        run: |
#          echo "key=$(date +%Y-%W)" >> $GITHUB_OUTPUT
#      - name: Cache downloaded OpenStreetMap data
#        id: cache-data
#        uses: actions/cache@v4
#        with:
#          key: 'data-europe-${{ steps.cache-key.outputs.key }}'
#          path: data/filtered/europe.osm.pbf
#
#      - name: Download data
#        if: ${{ steps.cache-data.outputs.cache-hit != 'true' }}
#        run: |
#          curl --progress-bar http://ftp.snt.utwente.nl/pub/misc/openstreetmap/europe-latest.osm.pbf --output data/europe.osm.pbf
#      - name: Import data
#        env:
#          # Available cores: 4
#          OSM2PGSQL_NUMPROC: 4
#          # Available memory: 16GB
#          OSM2PGSQL_CACHE: 14000
#          OSM2PGSQL_DATAFILE: europe.osm.pbf
#        run: |
#          docker compose pull import
#          docker compose build import
#          docker compose run import import
#          docker compose push import
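# The commented-out steps above document the original one-off import: install osmium-tool,
# cache and download the Europe extract, and run the Docker-based osm2pgsql import. They are
# disabled because, as noted in the update-data job below, GitHub-hosted runners do not have
# enough disk and memory for a full import.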
  update-data:
    name: Update data
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # The disk and memory of the GitHub runners are too small for large imports.
      # A Docker image with a pre-imported database state is used instead.
      # During the deployment run, the Docker image is updated through the replication
      # updates, committed to a new image and pushed for future use.
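      # In short: pull the previous database image, start it, apply the replication updates,
      # copy the refreshed data directory out of the container, bake it into a new image,
      # and push that image back to ghcr.io.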
      - name: Pull import database
        run: |
          docker compose pull db
          docker image ls ghcr.io/hiddewie/openrailwaymap-import-db:latest
          docker compose up --no-build --pull never --detach db
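      # --no-build --pull never ensures the db service starts from exactly the image pulled
      # above, instead of building it locally or pulling again.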
      - name: Ensure data is up to date
        env:
          # Available cores: 4
          OSM2PGSQL_NUMPROC: 4
          # Available memory: 16GB
          OSM2PGSQL_CACHE: 14000
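          # OSM2PGSQL_CACHE is presumably passed on to osm2pgsql --cache, which takes the
          # node cache size in megabytes, hence 14000 on a 16 GB runner.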
        run: |
          # docker compose pull import
          # docker compose build import
          docker compose run import update
          # docker compose push import
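      # 'run import update' starts the import service in update mode, which applies the OSM
      # replication updates to the pre-imported database started above.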
      - name: Commit and push new database image
        run: |
          docker compose stop db
          DB_CONTAINER_ID="$(docker compose ps --all --format json | jq -r 'select(.Service == "db") | .ID')"
          DB_IMAGE="$(docker compose ps --all --format json | jq -r 'select(.Service == "db") | .Image')"
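          # Recent Compose releases print one JSON object per service for --format json,
          # so jq can select the db service's container ID and image name directly.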
          # docker container commit "$DB_CONTAINER_ID" updated-db
          # docker image ls "$DB_IMAGE"
          # docker export "$DB_CONTAINER_ID" | docker import - "$DB_IMAGE"
          # docker image ls "$DB_IMAGE"
          # Persist and squash data in new image
          docker cp "$DB_CONTAINER_ID:/var/lib/postgresql/postgres-data" db/postgres-data
          echo 'COPY postgres-data /var/lib/postgresql/postgres-data' >> db/Dockerfile
          docker system prune --force --all --volumes
          # docker compose build --push db
          # Squash image to reduce size by removing duplicated added/removed files in Docker layers
          # Docker container commit does not support squashing, see https://github.com/docker/for-linux/issues/453#issuecomment-668645475
          # echo "FROM $DB_IMAGE" > Dockerfile
          # docker build --squash -t squashed .
          # docker tag squashed "$DB_IMAGE"
          # docker push "$DB_IMAGE"
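      # Instead of 'docker container commit' (which cannot squash layers), the updated
      # Postgres data directory is copied out of the stopped container and appended to
      # db/Dockerfile as a COPY instruction; 'docker system prune' presumably frees runner
      # disk space before the next step builds and pushes the new image.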
      - name: Push
        id: docker_build
        uses: docker/build-push-action@v5
        with:
          context: db
          load: false
          push: true
          tags: ghcr.io/hiddewie/openrailwaymap-import-db:latest
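      # 'context: db' builds db/Dockerfile, which now ends with the COPY of the refreshed
      # postgres-data directory, and pushes the result to ghcr.io for the next run.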
#  generate-tiles:
#    name: Generate tiles
#    runs-on: ubuntu-latest
##    needs: update-data
#
#    strategy:
#      matrix:
#        tiles:
#          - low-med
#          - standard
#          - speed
#          - signals
#          - electrification
#          - gauge
#
#    steps:
#      - name: Checkout
#        uses: actions/checkout@v4
#
#      - name: Set up Docker Buildx
#        uses: docker/setup-buildx-action@v3
#
#      - name: Login to GitHub Container Registry
#        uses: docker/login-action@v3
#        with:
#          registry: ghcr.io
#          username: ${{ github.actor }}
#          password: ${{ secrets.GITHUB_TOKEN }}
#
#      - name: Pull import database
#        run: |
#          docker compose pull db
#          docker compose up --no-build --pull never --detach db
#
#      - name: Generate tiles
#        env:
#          TILES: ${{ matrix.tiles }}
#        run: |
#          docker compose run martin-cp
#          ls -las tiles/
#
#      - uses: actions/upload-artifact@v4
#        with:
#          name: tiles-${{ matrix.tiles }}
#          path: |
#            tiles/*.mbtiles
#          if-no-files-found: error
#          retention-days: 3
#
#  deploy-tiles:
#    name: Deploy tiles
#    runs-on: ubuntu-latest
#    needs: generate-tiles
#    environment: tiles-production
#
#    steps:
#      - name: Checkout
#        uses: actions/checkout@v4
#
#      - name: Set up Docker Buildx
#        uses: docker/setup-buildx-action@v3
#
#      - name: Setup Fly
#        uses: superfly/flyctl-actions/setup-flyctl@master
#
#      - name: Login to GitHub Container Registry
#        uses: docker/login-action@v3
#        with:
#          registry: ghcr.io
#          username: ${{ github.actor }}
#          password: ${{ secrets.GITHUB_TOKEN }}
#
#      - uses: actions/download-artifact@v4
#        with:
#          # No name: download everything
#          path: tiles
#          merge-multiple: true
#
#      - name: List generated tiles
#        run: |
#          ls -lah tiles/
#
#      - name: Deploy
#        env:
#          FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
#        run: |
#          flyctl deploy --config martin-static.fly.toml --local-only
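# The generate-tiles and deploy-tiles jobs above are currently disabled. When enabled, they
# would pre-render one .mbtiles file per map style with martin-cp, exchange the files as
# build artifacts, and deploy them to Fly using martin-static.fly.toml.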