name: interop

on:
  schedule:
    - cron: "0 */8 * * *" # every 8h

# Cache key for caching the Wireshark build.
# To trigger a rebuild of Wireshark, increment this value.
# The rebuild will then build the current master of Wireshark and save it under the new key.
env:
  WIRESHARK_CACHEKEY: 7
jobs:
  wireshark:
    runs-on: ubuntu-latest
    steps:
      - name: Restore from cache
        id: restore-cache
        uses: actions/cache@v3
        env:
          VERSION: ${{ env.WIRESHARK_CACHEKEY }}
        with:
          key: wireshark-${{ env.VERSION }}
          path: tshark.tar.gz
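      # actions/cache@v3 needs no explicit "save" step here: on a cache miss it saves
      # tshark.tar.gz under the key above in a post-job step, which is why the build
      # steps below only run when cache-hit != 'true'.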
      - name: Show tshark version information
        if: steps.restore-cache.outputs.cache-hit == 'true'
        run: |
          tar xfz tshark.tar.gz
          ./tshark -v
      - uses: actions/checkout@v3
        if: steps.restore-cache.outputs.cache-hit != 'true'
        with:
          repository: the-tcpdump-group/libpcap
      - name: Build libpcap
        if: steps.restore-cache.outputs.cache-hit != 'true'
        run: |
          ./autogen.sh
          ./configure --disable-dbus --disable-rdma
          sudo make install
      - uses: actions/checkout@v3
        if: steps.restore-cache.outputs.cache-hit != 'true'
        with:
          repository: wireshark/wireshark
      - name: Install dependencies
        if: steps.restore-cache.outputs.cache-hit != 'true'
        run: sudo apt-get install -y cmake libglib2.0-dev libc-ares-dev libgcrypt20-dev flex bison byacc ninja-build
      - name: Build Wireshark
        if: steps.restore-cache.outputs.cache-hit != 'true'
        run: |
          cmake -GNinja -DBUILD_wireshark=0 -DBUILD_qtshark=0 -DBUILD_editcap=0 -DBUILD_capinfos=0 -DBUILD_text2pcap=0 -DBUILD_rawshark=0 -DBUILD_sdjournal=0 -DBUILD_sshdump=0 -DBUILD_ciscodump=0 -DBUILD_sharkd=0 -DENABLE_STATIC=1 -DENABLE_PLUGINS=0 -DENABLE_LIBXML2=0 -DENABLE_BROTLI=0 -DUSE_STATIC=1 -DENABLE_GNUTLS=1 .
          ninja
      - run: run/tshark -v
        if: steps.restore-cache.outputs.cache-hit != 'true'
      - name: Compress
        if: steps.restore-cache.outputs.cache-hit != 'true'
        run: tar -czvf tshark.tar.gz -C run/ tshark
      - name: Upload
        uses: actions/upload-artifact@v3
        with:
          name: wireshark
          path: tshark.tar.gz
  config:
    runs-on: ubuntu-latest
    outputs:
      logname: ${{ steps.set-logname.outputs.logname }}
      starttime: ${{ steps.set-starttime.outputs.starttime }}
      servers: ${{ steps.set-servers.outputs.servers }}
      clients: ${{ steps.set-clients.outputs.clients }}
      images: ${{ steps.set-images.outputs.images }}
    steps:
      - name: Set log name
        id: set-logname
        run: |
          LOGNAME=$(date -u +"%Y-%m-%dT%H:%M")
          echo $LOGNAME
          echo "logname=$LOGNAME" >> $GITHUB_OUTPUT
      - name: Save start time
        id: set-starttime
        run: |
          STARTTIME=$(date +%s)
          echo $STARTTIME
          echo "starttime=$STARTTIME" >> $GITHUB_OUTPUT
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: 3.8
      - name: Determine servers
        id: set-servers
        run: |
          SERVERS=$(jq -c 'with_entries(select(.value.role == "server" or .value.role == "both")) | keys_unsorted' implementations.json)
          echo $SERVERS
          echo "servers=$SERVERS" >> $GITHUB_OUTPUT
      - name: Determine clients
        id: set-clients
        run: |
          CLIENTS=$(jq -c 'with_entries(select(.value.role == "client" or .value.role == "both")) | keys_unsorted' implementations.json)
          echo $CLIENTS
          echo "clients=$CLIENTS" >> $GITHUB_OUTPUT
      - name: Determine Docker images
        id: set-images
        run: |
          IMAGES=$(jq -c 'keys_unsorted' implementations.json)
          echo $IMAGES
          echo "images=$IMAGES" >> $GITHUB_OUTPUT
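  # The jq filters above assume implementations.json maps implementation names to
  # objects with a "role" ("server", "client" or "both") and an "image" field
  # (used by docker-pull-images below), along the lines of this illustrative entry:
  #   { "quic-go": { "image": "martenseemann/quic-go-interop:latest", "role": "both" } }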
  docker-pull-tools:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        image: [ 'quic-network-simulator', 'quic-interop-iperf-endpoint' ]
    steps:
      - uses: actions/checkout@v3
      - name: Pull
        run: |
          URL="martenseemann/${{ matrix.image }}"
          docker pull $URL
          echo "URL=$URL" >> $GITHUB_ENV
      - name: Docker inspect
        run: docker image inspect $URL
      - name: Save Docker image
        run: |
          docker save $URL | gzip --best > ${{ matrix.image }}.tar.gz
          du -sh ${{ matrix.image }}.tar.gz
      - name: Upload result
        uses: actions/upload-artifact@v3
        with:
          name: images-tools
          path: ${{ matrix.image }}.tar.gz
          if-no-files-found: error
  docker-pull-images:
    needs: [ config ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        image: ${{ fromJson(needs.config.outputs.images) }}
    name: Pull ${{ matrix.image }}
    steps:
      - uses: actions/checkout@v3
      - name: Run docker pull
        run: |
          URL=$(jq -r '.["${{ matrix.image }}"].image' implementations.json)
          echo $URL
          docker pull $URL
          echo "URL=$URL" >> $GITHUB_ENV
      - name: Docker inspect
        run: docker image inspect $URL
      - name: Save Docker image
        run: |
          docker save $URL | gzip --best > ${{ matrix.image }}.tar.gz
          du -sh ${{ matrix.image }}.tar.gz
      - name: Upload result
        uses: actions/upload-artifact@v3
        with:
          name: image-${{ matrix.image }}
          path: ${{ matrix.image }}.tar.gz
          if-no-files-found: error
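  # The gzipped images saved above are downloaded and `docker load`ed by the tests
  # job below, so each image is pulled from the registry only once per workflow run
  # rather than once per server/client combination.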
  tests:
    needs: [ wireshark, config, docker-pull-tools, docker-pull-images ]
    runs-on: ubuntu-latest
    continue-on-error: true
    strategy:
      fail-fast: false
      matrix:
        server: ${{ fromJson(needs.config.outputs.servers) }}
        client: ${{ fromJson(needs.config.outputs.clients) }}
    name: (${{ matrix.server }} - ${{ matrix.client }})
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: 3.8
      - name: Enable IPv6 support
        run: sudo modprobe ip6table_filter
      - run: docker image ls
      - name: Download tools Docker images
        uses: actions/download-artifact@v3
        with:
          name: images-tools
      - name: Download ${{ matrix.server }} Docker image
        uses: actions/download-artifact@v3
        with:
          name: image-${{ matrix.server }}
      - name: Download ${{ matrix.client }} Docker image
        if: ${{ matrix.server != matrix.client }}
        uses: actions/download-artifact@v3
        with:
          name: image-${{ matrix.client }}
      - name: Load docker images
        run: |
          docker load --input quic-network-simulator.tar.gz
          docker load --input quic-interop-iperf-endpoint.tar.gz
          docker load --input ${{ matrix.server }}.tar.gz
          docker load --input ${{ matrix.client }}.tar.gz
      - run: docker image ls
      - name: Download Wireshark
        uses: actions/download-artifact@v3
        with:
          name: wireshark
          path: wireshark
      - name: Install Wireshark
        run: |
          cd wireshark
          tar xfz tshark.tar.gz
          sudo mv tshark /usr/local/bin
          cd .. && rm -r wireshark
      - name: Install Python packages
        run: |
          pip install -U pip
          pip install -r requirements.txt
      - name: Run tests
        env:
          CRON: "true"
        run: |
          (python run.py --client ${{ matrix.client }} --server ${{ matrix.server }} --log-dir logs --json ${{ matrix.server }}_${{ matrix.client }}_results.json -t onlyTests || true) | tee output.txt
          mkdir -p logs/${{ matrix.server }}_${{ matrix.client }}
          mv output.txt logs/${{ matrix.server }}_${{ matrix.client }}/
      - name: Run measurements
        env:
          CRON: "true"
        run: |
          python run.py --client ${{ matrix.client }} --server ${{ matrix.server }} --log-dir logs_measurement --json ${{ matrix.server }}_${{ matrix.client }}_measurements.json -t onlyMeasurements || true
          if [ ! -d "logs_measurement" ]; then exit 0; fi
          find logs_measurement -depth -name "sim" -type d -exec rm -r "{}" \;
          find logs_measurement -depth -name "client" -type d -exec rm -r "{}" \;
          find logs_measurement -depth -name "server" -type d -exec rm -r "{}" \;
          mv logs_measurement/${{ matrix.server }}_${{ matrix.client }}/* logs/${{ matrix.server }}_${{ matrix.client }}/
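      # To reproduce a single combination outside of CI (sketch using only the flags
      # seen above; the implementation names are placeholders for any entry in
      # implementations.json):
      #   python run.py --client quic-go --server quic-go --log-dir logs --json results.json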
      - name: Upload logs to interop.seemann.io
        if: ${{ github.event_name == 'schedule' }}
        uses: appleboy/scp-action@master
        with:
          host: interop.seemann.io
          username: ${{ secrets.INTEROP_SEEMANN_IO_USER }}
          key: ${{ secrets.INTEROP_SEEMANN_IO_SSH_KEY }}
          source: logs/${{ matrix.server }}_${{ matrix.client }}
          target: /mnt/logs/${{ needs.config.outputs.logname }}
          strip_components: 1
      - name: Upload result
        uses: actions/upload-artifact@v3
        with:
          name: results
          if-no-files-found: error
          path: |
            ${{ matrix.server }}_${{ matrix.client }}_results.json
            ${{ matrix.server }}_${{ matrix.client }}_measurements.json
      - name: Install b2 cli
        if: ${{ github.event_name == 'schedule' }}
        uses: sylwit/[email protected]
        env:
          B2_APPLICATION_KEY_ID: ${{ secrets.BACKBLAZE_KEY_ID }}
          B2_APPLICATION_KEY: ${{ secrets.BACKBLAZE_APPLICATION_KEY }}
      - name: Upload logs to Backblaze
        if: ${{ github.event_name == 'schedule' }}
        run: |
          b2 sync logs/${{ matrix.server }}_${{ matrix.client }} b2://${{ vars.BACKBLAZE_BUCKET }}/${{ needs.config.outputs.logname }}/${{ matrix.server }}_${{ matrix.client }}
  aggregate:
    needs: [ config, tests ]
    runs-on: ubuntu-latest
    env:
      LOGNAME: ${{ needs.config.outputs.logname }}
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: 3.8
      - name: Download results
        uses: actions/download-artifact@v3
        with:
          name: results
      - name: Aggregate results
        run: |
          python .github/workflows/aggregate.py \
            --start-time ${{ needs.config.outputs.starttime }} \
            --server ${{ join(fromJson(needs.config.outputs.servers), ',') }} \
            --client ${{ join(fromJson(needs.config.outputs.clients), ',') }} \
            --log-dir=$LOGNAME \
            --output result.json
      - name: Print result
        run: jq '.' result.json
      - name: Upload result to artifacts
        uses: actions/upload-artifact@v3
        with:
          name: results
          path: result.json
      - name: Upload result to interop.seemann.io
        if: ${{ github.event_name == 'schedule' }}
        uses: appleboy/scp-action@master
        with:
          host: interop.seemann.io
          username: ${{ secrets.INTEROP_SEEMANN_IO_USER }}
          key: ${{ secrets.INTEROP_SEEMANN_IO_SSH_KEY }}
          source: result.json
          target: /mnt/logs/${{ needs.config.outputs.logname }}
      - name: Publish result
        if: ${{ github.event_name == 'schedule' }}
        uses: appleboy/ssh-action@master
        with:
          host: interop.seemann.io
          username: ${{ secrets.INTEROP_SEEMANN_IO_USER }}
          key: ${{ secrets.INTEROP_SEEMANN_IO_SSH_KEY }}
          envs: LOGNAME
          script: |
            cd /mnt/logs
            jq '. += [ "${{ needs.config.outputs.logname }}" ]' logs.json | sponge logs.json
            rm latest || true
            ln -s $LOGNAME latest
      - name: Install b2 cli
        if: ${{ github.event_name == 'schedule' }}
        uses: sylwit/[email protected]
        env:
          B2_APPLICATION_KEY_ID: ${{ secrets.BACKBLAZE_KEY_ID }}
          B2_APPLICATION_KEY: ${{ secrets.BACKBLAZE_APPLICATION_KEY }}
      - name: Upload result to Backblaze
        if: ${{ github.event_name == 'schedule' }}
        run: |
          b2 upload-file ${{ vars.BACKBLAZE_BUCKET }} result.json ${{ needs.config.outputs.logname }}/result.json
      - name: Remove old logs
        if: ${{ github.event_name == 'schedule' }}
        run: |
          # b2 ls (without --recursive) does not list every file in the bucket;
          # it returns exactly one (the oldest) file per folder.
          threshold_time=$((($(date +%s) * 1000) - (${{ vars.LOG_RETENTION_DAYS }} * 24 * 60 * 60 * 1000)))
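          # B2 upload timestamps are in milliseconds, hence the factor of 1000; e.g. with
          # LOG_RETENTION_DAYS=30 (an illustrative value) this subtracts 30 * 86,400,000 ms
          # from the current time.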
          # Delete every folder whose oldest file is older than the threshold.
          b2 ls --json ${{ vars.BACKBLAZE_BUCKET }} | jq -r ".[] | select(.uploadTimestamp < $threshold_time) | .fileName" | while read -r filename; do
            dir_name="${filename%%/*}"
            # skip files, we only care about directories
            if [[ "$dir_name" != "." ]]; then
              echo "Deleting $dir_name"
              # see https://github.com/Backblaze/B2_Command_Line_Tool/issues/495#issuecomment-413932585
              mkdir empty # create an empty directory
              b2 sync --delete --allowEmptySource empty b2://${{ vars.BACKBLAZE_BUCKET }}/"$dir_name"
              rmdir empty
            fi
          done
      - name: Generate logs.json and upload it
        if: ${{ github.event_name == 'schedule' }}
        run: |
          # First delete old version(s) of logs.json.
          # Otherwise, b2 would store multiple versions of this file.
          b2 ls --withWildcard --recursive --versions --json quic-interop-runner "logs.json" | jq -r ".[] | .fileId" | while read -r fileid; do
            b2 delete-file-version logs.json $fileid
          done
          b2 ls --json quic-interop-runner | jq '[sort_by(.uploadTimestamp) | .[] | select(.fileName | contains("/")) | .fileName | split("/")[0] | select(. != null)]' > logs.json
          b2 upload-file ${{ vars.BACKBLAZE_BUCKET }} logs.json logs.json
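      # logs.json ends up as a JSON array of run names (the top-level folder prefixes),
      # sorted by upload time, e.g. (values are illustrative):
      #   ["2023-08-01T00:00", "2023-08-01T08:00", "2023-08-01T16:00"]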