dump reco-icetray logs #2541

Workflow file for this run

name: tests
on:
push:
branches:
- '**'
tags-ignore:
- '**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
# don't cancel on main/master/default
cancel-in-progress: ${{ format('refs/heads/{0}', github.event.repository.default_branch) != github.ref }}
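# e.g. a push to the default branch ("main") gives github.ref == "refs/heads/main", so the
# format() call above matches it and cancel-in-progress evaluates to false; pushes to any other
# branch evaluate to true, cancelling superseded runs for that ref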
env:
PY_COLORS: "1"
BOT_NAME: wipacdevbot
BOT_EMAIL: [email protected]
REALTIME_EVENTS_DIR: /home/runner/work/skymap_scanner/skymap_scanner/tests/data/realtime_events
SKYSCAN_CACHE_DIR: /home/runner/work/skymap_scanner/skymap_scanner/cache
SKYSCAN_OUTPUT_DIR: /home/runner/work/skymap_scanner/skymap_scanner/output
SKYSCAN_DEBUG_DIR: /home/runner/work/skymap_scanner/skymap_scanner/debug
# see tests/env-vars.sh (sourced by the test jobs below)
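# (assumption: that script exports the shared test settings referenced below,
#  e.g. _CLIENTS_PER_CPU and CLIENT_STARTER_WAIT_FOR_STARTUP_JSON)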
jobs:
#############################################################################
# PACKAGING & LINTERS
#############################################################################
py-versions:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.versions.outputs.matrix }}
steps:
- uses: actions/checkout@v3
- id: versions
uses: WIPACrepo/[email protected]
# flake8:
# runs-on: ubuntu-latest
# steps:
# - uses: actions/checkout@v3
# - uses: actions/setup-python@v3
# - uses: WIPACrepo/[email protected]
mypy:
needs: [py-versions]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
py3: ${{ fromJSON(needs.py-versions.outputs.matrix) }}
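# the matrix value is a JSON list of Python versions produced by the py-versions job,
# e.g. something like ["3.9", "3.10", "3.11"] (illustrative only; the real list comes from that action)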
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v3
with:
python-version: ${{ matrix.py3 }}
- uses: WIPACrepo/[email protected]
py-setup:
# needs: [wait-for-tests] # NOTE: this would be needed if enabling "all tests to pass" branch protection
runs-on: ubuntu-latest
steps:
- uses: jlumbroso/free-disk-space@main
with:
tool-cache: 'true'
large-packages: false # TODO: remove once https://github.com/jlumbroso/free-disk-space/issues/4 is fixed
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- name: checkout
uses: actions/checkout@v3
with:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- uses: WIPACrepo/[email protected]
if: github.actor != env.BOT_NAME # no auto-updates for bots # should match all 'git_committer_name' uses
with:
git_committer_name: ${{ env.BOT_NAME }}
git_committer_email: ${{ env.BOT_EMAIL }}
#############################################################################
# TESTS
#############################################################################
test-build-docker:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: docker/setup-buildx-action@v2
- uses: docker/build-push-action@v3
with:
context: .
cache-from: type=gha
cache-to: type=gha,mode=min
file: Dockerfile
tags: icecube/skymap_scanner:local
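# the cache-from/cache-to type=gha lines above store Docker layer cache in the GitHub Actions
# cache backend; mode=min exports only the final image's layers (mode=max would also export
# intermediate build stages)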
test-build-docker-pulsar:
needs: test-build-docker
runs-on: ubuntu-latest
steps:
- uses: jlumbroso/free-disk-space@main
with:
tool-cache: 'true'
large-packages: false # TODO: remove once https://github.com/jlumbroso/free-disk-space/issues/4 is fixed
- uses: actions/checkout@v3
- uses: docker/setup-buildx-action@v2
- uses: docker/build-push-action@v3
with:
context: .
cache-from: type=gha
file: Dockerfile_pulsar
tags: icecube/skymap_scanner:local-pulsar
test-run-singularity-dummy-reco:
needs: test-build-docker
runs-on: ubuntu-latest
env:
SKYSCAN_BROKER_ADDRESS: user1:password@localhost/test
# SKYSCAN_BROKER_AUTH: user1 # using this would override password in address
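# the address embeds user:password@host/vhost and must match the RabbitMQ service credentials
# below (RABBITMQ_USERNAME/RABBITMQ_PASSWORD/RABBITMQ_VHOST) on the published port 5672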
services:
rabbitmq:
image: bitnami/rabbitmq:latest
env:
RABBITMQ_USERNAME: user1
RABBITMQ_PASSWORD: password
RABBITMQ_VHOST: test
BITNAMI_DEBUG: true
# Note: `--network` option is not supported.
options: >-
--name rabbitmq
--health-cmd "rabbitmqctl node_health_check"
--health-interval 5s
--health-timeout 5s
--health-retries 10
ports:
- 5672:5672
- 15672:15672
steps:
- uses: jlumbroso/free-disk-space@main
with:
tool-cache: 'true'
large-packages: false # TODO: remove once https://github.com/jlumbroso/free-disk-space/issues/4 is fixed
- uses: actions/checkout@v3
- uses: docker/setup-buildx-action@v2
- uses: docker/build-push-action@v3
with:
context: .
cache-from: type=gha
# cache-to: type=gha,mode=min
file: Dockerfile
tags: icecube/skymap_scanner:local
load: true
- uses: eWaterCycle/setup-apptainer@v2
with:
apptainer-version: 1.1.2
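# the next step converts the locally loaded Docker image (see load: true above) into a SIF;
# the docker-daemon:// source requires the image to already exist in the local Docker daemon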
- name: build singularity image
run: |
sudo singularity build skymap_scanner.sif docker-daemon://icecube/skymap_scanner:local
ls -lh skymap_scanner.sif
- name: run singularity container
run: |
source tests/env-vars.sh
mkdir $SKYSCAN_CACHE_DIR
mkdir $SKYSCAN_OUTPUT_DIR
# Launch Server
singularity run skymap_scanner.sif \
python -m skymap_scanner.server \
--reco-algo dummy \
--event-file $REALTIME_EVENTS_DIR/hese_event_01.json \
--cache-dir $SKYSCAN_CACHE_DIR \
--output-dir $SKYSCAN_OUTPUT_DIR \
--client-startup-json ./startup.json \
--nsides 1:0 \
--simulated-event \
&
./resources/launch_scripts/wait_for_file.sh ./startup.json $CLIENT_STARTER_WAIT_FOR_STARTUP_JSON
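# (assumption: wait_for_file.sh blocks until the server has written ./startup.json,
#  giving up after $CLIENT_STARTER_WAIT_FOR_STARTUP_JSON seconds)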
# Launch Clients
nclients=$(( $_CLIENTS_PER_CPU * $(nproc) ))
echo "Launching $nclients clients"
mkdir $SKYSCAN_DEBUG_DIR
export EWMS_PILOT_TASK_TIMEOUT=1800 # 30 mins
for i in $( seq 1 $nclients ); do
singularity run skymap_scanner.sif \
python -m skymap_scanner.client \
--client-startup-json ./startup.json \
--debug-directory $SKYSCAN_DEBUG_DIR \
&
echo -e "\tclient #$i launched"
done
wait -n # for server
for i in $( seq 1 $nclients ); do
wait -n # for client
done
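# note: `wait -n` returns when *any* one background job exits, not a specific one;
# the waits above simply collect nclients+1 completions in total (server + all clients)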
- name: look at results file (.npz)
run: |
ls .
ls $SKYSCAN_OUTPUT_DIR
outfile=$(ls -d $SKYSCAN_OUTPUT_DIR/*.npz)
echo $outfile
- name: broker docker logs
if: always()
run: |
docker logs rabbitmq
test-run-nsides-thresholds-dummy:
needs: test-build-docker
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
nsides: [
"1:0",
"1:0 2:12",
"1:0 2:12 4:12"
]
predictive_scanning_threshold: [
1.0,
0.65,
]
env:
_CLIENTS_PER_CPU: 1 # there isn't any improvement when >1
SKYSCAN_BROKER_ADDRESS: user1@localhost/test
SKYSCAN_BROKER_AUTH: password # provides the password separately; would override any password embedded in the address
services:
rabbitmq:
image: bitnami/rabbitmq:latest
env:
RABBITMQ_USERNAME: user1
RABBITMQ_PASSWORD: password
RABBITMQ_VHOST: test
BITNAMI_DEBUG: true
# Note: `--network` option is not supported.
options: >-
--name rabbitmq
--health-cmd "rabbitmqctl node_health_check"
--health-interval 5s
--health-timeout 5s
--health-retries 10
ports:
- 5672:5672
- 15672:15672
steps:
- uses: actions/checkout@v3
- uses: docker/setup-buildx-action@v2
- uses: docker/build-push-action@v3
with:
context: .
cache-from: type=gha
# cache-to: type=gha,mode=min
file: Dockerfile
tags: icecube/skymap_scanner:local
load: true
- name: run
run: |
set -x
source tests/env-vars.sh
export _RECO_ALGO=dummy
export _EVENTS_FILE=$(realpath $REALTIME_EVENTS_DIR/hese_event_01.json)
export _NSIDES="${{ matrix.nsides }}"
export _PREDICTIVE_SCANNING_THRESHOLD=${{ matrix.predictive_scanning_threshold }}
cd ./resources/launch_scripts
./local-scan.sh
- name: check no nsides skipped
run: |
ls $SKYSCAN_OUTPUT_DIR
# get newest run*.json
export outfile=$(find $SKYSCAN_OUTPUT_DIR -type f -name "run*.json" -exec stat -c '%y %n' {} + | sort | tail -1 | awk '{print $4}')
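# stat -c '%y %n' prints "<date> <time> <timezone> <path>", so after sorting by mtime
# awk '{print $4}' pulls the path of the most recently modified file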
echo $outfile
python3 -c '
import json
import os
with open(os.getenv("outfile")) as f:
pydict = json.load(f)
nsides = "${{ matrix.nsides }}"
assert len(pydict) == nsides.count(":")
'
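# e.g. for nsides "1:0 2:12 4:12" there are 3 colons, so the newest run*.json must contain 3 nside entries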
- name: reco-icetray logs
if: always()
run: |
tree $SKYSCAN_DEBUG_DIR || ls -R $SKYSCAN_DEBUG_DIR  # fall back to ls -R if tree is unavailable
find $SKYSCAN_DEBUG_DIR -type f -exec "more" {} + | cat # recursively cats with filenames (delimited by :::::::)
- name: broker docker logs
if: always()
run: |
docker logs rabbitmq
test-run-realistic:
needs: test-build-docker
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
reco_algo: [
millipede_original,
millipede_wilks,
splinempe
]
eventfile: [
hese_event_01.json,
run00136766-evt000007637140-GOLD.pkl,
run00136662-evt000035405932-BRONZE.pkl
]
ewms_pilot_concurrent_tasks: [
0,
3,
]
exclude:
# splinempe should not run on HESE
- reco_algo: splinempe
eventfile: hese_event_01.json
env:
_CLIENTS_PER_CPU: 1 # there isn't any improvement when >1
SKYSCAN_BROKER_ADDRESS: user1@localhost/test
SKYSCAN_BROKER_AUTH: password # provides the password separately; would override any password embedded in the address
services:
rabbitmq:
image: bitnami/rabbitmq:latest
env:
RABBITMQ_USERNAME: user1
RABBITMQ_PASSWORD: password
RABBITMQ_VHOST: test
BITNAMI_DEBUG: true
# Note: `--network` option is not supported.
options: >-
--name rabbitmq
--health-cmd "rabbitmqctl node_health_check"
--health-interval 5s
--health-timeout 5s
--health-retries 10
ports:
- 5672:5672
- 15672:15672
steps:
- uses: jlumbroso/free-disk-space@main
with:
tool-cache: 'true'
large-packages: false # TODO: remove once https://github.com/jlumbroso/free-disk-space/issues/4 is fixed
- uses: actionhippie/swap-space@v1
with:
size: 10G
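# extra swap so the memory-hungry reconstructions (millipede/splinempe) don't exhaust the
# runner's RAM (assumption about why 10G was chosen)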
- uses: actions/checkout@v3
- uses: docker/setup-buildx-action@v2
- uses: docker/build-push-action@v3
with:
context: .
cache-from: type=gha
# cache-to: type=gha,mode=min
file: Dockerfile
tags: icecube/skymap_scanner:local
load: true
- name: run
run: |
set -x
source tests/env-vars.sh
export _RECO_ALGO=${{ matrix.reco_algo }}
export _EVENTS_FILE=$(realpath $REALTIME_EVENTS_DIR/${{ matrix.eventfile }})
export _NSIDES="1:0"
export EWMS_PILOT_CONCURRENT_TASKS=${{ matrix.ewms_pilot_concurrent_tasks }}
free # dump memory stats
cd ./resources/launch_scripts
./local-scan.sh
free # dump memory stats
- name: test output against known result (.json)
run: |
ls $SKYSCAN_OUTPUT_DIR
# get newest run*.json
outfile=$(find $SKYSCAN_OUTPUT_DIR -type f -name "run*.json" -exec stat -c '%y %n' {} + | sort | tail -1 | awk '{print $4}')
echo $outfile
cat $outfile
pip install . # don't need icecube, so no docker container needed
python tests/compare_scan_results.py \
--actual $outfile \
--expected tests/data/results_json/${{ matrix.reco_algo }}/$(basename $outfile) \
--assert \
|| (cat $(ls *.diff.json) && false)
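# if the comparison fails, dump any generated *.diff.json files and then force the step to fail via `false`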
- name: reco-icetray logs
if: always()
run: |
tree $SKYSCAN_DEBUG_DIR || ls -R $SKYSCAN_DEBUG_DIR  # fall back to ls -R if tree is unavailable
find $SKYSCAN_DEBUG_DIR -type f -exec "more" {} + | cat # recursively cats with filenames (delimited by :::::::)
- name: broker docker logs
if: always()
run: |
docker logs rabbitmq
test-file-staging:
needs: test-build-docker
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: docker/setup-buildx-action@v2
- uses: docker/build-push-action@v3
with:
context: .
cache-from: type=gha
# cache-to: type=gha,mode=min
file: Dockerfile
tags: icecube/skymap_scanner:local
load: true
- name: run
run: |
docker run --rm -i \
icecube/skymap_scanner:local \
python tests/file_staging.py
test-run-single-pixel:
needs: test-build-docker
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
dir: [
"BRONZE",
"GOLD",
"JSON",
]
reco_algo:
[
millipede_original,
millipede_wilks,
splinempe
]
exclude:
# splinempe should not run on HESE
- reco_algo: splinempe
dir: "JSON"
steps:
- uses: actions/checkout@v3
- uses: docker/setup-buildx-action@v2
- uses: docker/build-push-action@v3
with:
context: .
cache-from: type=gha
# cache-to: type=gha,mode=min
file: Dockerfile
tags: icecube/skymap_scanner:local
load: true
- name: run
run: |
source tests/env-vars.sh
# grab the GCDQp_packet key and throw into a file
jq '.GCDQp_packet' \
tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}/startup.json > \
tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}/GCDQp_packet.json
# copy python file to subdir to make the bind mount simpler
cp tests/data/reco_pixel_pkls/get_toclient_msg_pkl.py \
tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}
# create the in.pkl
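# note: the `$(env | grep '^SKYSCAN_' | awk ...)` bits in the docker commands below turn every
# SKYSCAN_* variable in the runner environment into repeated `--env NAME=value` flags so the
# container inherits them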
docker run --network="host" --rm -i \
--shm-size=6gb \
--mount type=bind,source=$(readlink -f tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}),target=/local/pkls \
--env PY_COLORS=1 \
$(env | grep '^SKYSCAN_' | awk '$0="--env "$0') \
icecube/skymap_scanner:local \
python /local/pkls/get_toclient_msg_pkl.py \
--pframe-pkl /local/pkls/pframe.pkl \
--reco-algo ${{ matrix.reco_algo }}
hexdump -C tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}/in.pkl
# run
docker run --network="host" --rm -i \
--shm-size=6gb \
--mount type=bind,source=$(readlink -f tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}),target=/local/pkls \
--env PY_COLORS=1 \
$(env | grep '^SKYSCAN_' | awk '$0="--env "$0') \
icecube/skymap_scanner:local \
python -m skymap_scanner.client.reco_icetray \
--in-pkl /local/pkls/in.pkl \
--reco-algo ${{ matrix.reco_algo }} \
--gcdqp-packet-json /local/pkls/GCDQp_packet.json \
--baseline-gcd-file $(jq -r '.baseline_GCD_file' tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}/startup.json) \
--out-pkl /local/pkls/out.pkl
- name: test output against known result (.pkl)
run: |
ls tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}
# need icecube for depickling, so docker container needed
docker run --network="host" --rm -i \
--shm-size=6gb \
--mount type=bind,source=$(readlink -f tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}),target=/local/pkls \
--env PY_COLORS=1 \
$(env | grep '^SKYSCAN_' | awk '$0="--env "$0') \
icecube/skymap_scanner:local \
python tests/compare_reco_pixel_pkl.py \
--actual /local/pkls/out.pkl \
--expected /local/pkls/$(basename $(ls tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}/*.out.pkl)) \
--diff-out-dir /local/pkls/ \
--assert \
|| (cat $(ls tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}/*.diff.json) && false)
#############################################################################
# GITHUB RELEASE
#############################################################################
release:
# only run on main/master/default
if: format('refs/heads/{0}', github.event.repository.default_branch) == github.ref
# have to wait for tests so python-semantic-release can push (branch protection on main doesn't work w/ the bot)
needs: [
mypy,
py-setup,
test-build-docker,
test-build-docker-pulsar,
test-run-singularity-dummy-reco,
test-file-staging,
test-run-nsides-thresholds-dummy,
test-run-realistic,
test-run-single-pixel
]
runs-on: ubuntu-latest
concurrency: release
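# `concurrency: release` serializes release jobs so only one semantic-release run can tag/push at a time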
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Python Semantic Release
uses: python-semantic-release/[email protected]
with:
git_committer_name: ${{ env.BOT_NAME }}
git_committer_email: ${{ env.BOT_EMAIL }}
github_token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
# repository_username: __token__
# repository_password: ${{ secrets.PYPI_TOKEN }}
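# (left commented out: PyPI publishing is disabled here; presumably uncommenting these would have
#  python-semantic-release upload the package using the PYPI_TOKEN secret)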