-
Notifications
You must be signed in to change notification settings - Fork 5
130 lines (115 loc) · 5.65 KB
/
reprepro.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
---
# Reusable workflow that ingests a .deb from the R2 incoming bucket into the
# reprepro-managed debian repository and republishes it.
name: Deploy Reprepro
on:
  workflow_call:
    inputs:
      incoming:
        type: string
        required: true
        description: Name of incoming deb file
    secrets:
      GPG_PRIVATE_KEY:
        description: GPG private key imported before running reprepro
        required: false
      GPG_PASSPHRASE:
        description: Passphrase for GPG_PRIVATE_KEY
        required: false
      CF_R2_ACCESS_KEY_ID:
        description: Access key id for the Cloudflare R2 S3-compatible API
        required: false
      CF_R2_TOKEN:
        description: Secret access key for the Cloudflare R2 S3-compatible API
        required: false
  repository_dispatch:
    types: [reprepro-incoming]
  workflow_dispatch:
    inputs:
      incoming:
        description: Name of incoming deb file
        required: true
        type: string
# Protect reprepro database using concurrency
concurrency: reprepro
jobs:
  reprepro:
    name: Deploy debian package
    environment: packages.element.io
    runs-on: ubuntu-24.04
    env:
      R2_BUCKET: ${{ vars.R2_BUCKET }}
      R2_DB_BUCKET: ${{ vars.R2_DB_BUCKET }}
      R2_INCOMING_BUCKET: ${{ vars.R2_INCOMING_BUCKET }}
      R2_URL: ${{ vars.CF_R2_S3_API }}
      # Supplied by workflow_call/workflow_dispatch input, or by the
      # repository_dispatch client payload.
      INCOMING_FILE: ${{ inputs.incoming || github.event.client_payload.incoming }}
    steps:
      # Pinned to a full commit SHA (v4.2.2) for consistency with the other
      # SHA-pinned actions below; the original used an unpinned, outdated @v3.
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: Fetch deb
        run: |
          mkdir dist
          aws s3 cp "s3://$R2_INCOMING_BUCKET/$INCOMING_FILE" dist/ --endpoint-url "$R2_URL" --region auto
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_TOKEN }}
      - name: Load GPG key
        uses: crazy-max/ghaction-import-gpg@111c56156bcc6918c056dbef52164cfa583dc549 # v5
        with:
          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.GPG_PASSPHRASE }}
          fingerprint: ${{ vars.GPG_FINGERPRINT }}
      - name: Install reprepro
        # Refresh package lists first so the install can't fail on the stale
        # metadata a fresh runner image may ship with.
        run: |
          sudo apt-get update
          sudo apt-get install -y reprepro
      - name: Fetch database
        run: aws s3 cp --recursive "s3://$R2_DB_BUCKET" debian/db/ --endpoint-url "$R2_URL" --region auto
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_TOKEN }}
      - name: Run reprepro
        id: reprepro
        run: |
          set -x
          # Find all configured suites which can accept a deb for this architecture, "all" can match all non-source
          ARCH=$(dpkg --info "dist/$INCOMING_FILE" | awk '/Architecture/ {print $2}')
          echo "arch=$ARCH" >> "$GITHUB_OUTPUT"
          DISTROS=$(reprepro -b debian _listconfidentifiers | grep -v source)
          if [[ "$ARCH" != "all" ]]; then
              DISTROS=$(echo "$DISTROS" | grep "$ARCH")
          fi
          echo "$DISTROS" | awk -F "|" '{print $1}' | uniq | while read -r target ; do
              reprepro -b debian includedeb "$target" "dist/$INCOMING_FILE"
          done
      - name: Host repository for testing
        uses: Eun/http-server-action@856e467dda36cd5d30e93bd7dd168cf3e1676301 # v1
        with:
          directory: packages.element.io
          port: 8000
      - name: Check repository works
        run: |
          # NOTE(review): original had `set +x`, a no-op (xtrace is off by
          # default) and presumably a typo for `-x` as used in the step above.
          set -x
          # Add architecture so apt will download for it. `all` is not a real
          # dpkg architecture so --add-architecture fails for it, and `exit 0`
          # deliberately skips the repo check for arch-independent packages.
          sudo dpkg --add-architecture "$ARCH" || exit 0
          # Copy signing keyring
          sudo cp debian/element-io-archive-keyring.gpg /usr/share/keyrings/element-io-archive-keyring.gpg
          # Point apt at local apt repo overwriting all default sources
          echo "deb [signed-by=/usr/share/keyrings/element-io-archive-keyring.gpg] http://localhost:8000/debian/ default main" | sudo tee /etc/apt/sources.list
          # Later ubuntu versions use the `conf.d` approach so we need to remove the default sources
          sudo rm -R /etc/apt/sources.list.d/*
          sudo apt-get update --allow-insecure-repositories
          # Validate the package in the repo quacks like the one we expect
          info=$(dpkg --info "../dist/$INCOMING_FILE")
          package=$(echo "$info" | grep "Package:" | sed -n 's/ Package: //p')
          version=$(echo "$info" | grep "Version:" | sed -n 's/ Version: //p')
          apt-cache show "$package" -o=APT::Architecture="$ARCH" | grep "Version: $version"
        working-directory: ./packages.element.io
        env:
          ARCH: ${{ steps.reprepro.outputs.arch }}
      - name: Deploy debian repo
        run: |
          aws s3 cp --recursive packages.element.io/debian/ "s3://$R2_BUCKET/debian" --endpoint-url "$R2_URL" --region auto
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_TOKEN }}
      - name: Store database
        run: aws s3 cp --recursive debian/db/ "s3://$R2_DB_BUCKET" --endpoint-url "$R2_URL" --region auto
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_TOKEN }}
      - name: Cleanup incoming
        run: aws s3 rm "s3://$R2_INCOMING_BUCKET/$INCOMING_FILE" --endpoint-url "$R2_URL" --region auto
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.CF_R2_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.CF_R2_TOKEN }}