# Skip to content
#
# revert to version d7ff45d #3974
#
# revert to version d7ff45d
#
# revert to version d7ff45d #3974
#
# Workflow file for this run

name: ci/cd

on:
  # only on branch pushes
  push:
    branches:
      - '**'
    tags-ignore:
      - '**'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  # don't cancel on main/master/default
  cancel-in-progress: ${{ format('refs/heads/{0}', github.event.repository.default_branch) != github.ref }}

env:
  # pilot image tags — versioned!
  _PILOT_IMAGE_FOR_DOCKER_SCANNER_CLIENT: "ghcr.io/observation-management-service/ewms-pilot:1.2.0-docker-tasks"
  _PILOT_IMAGE_FOR_APPTAINER_SCANNER_CLIENT: "ghcr.io/observation-management-service/ewms-pilot:1.2.0"
  #
  # url pointing to remote helper scripts — versioned!
  CI_SCRIPT_URL_INSTALL_APPTAINER: https://raw.githubusercontent.com/WIPACrepo/wipac-dev-tools/refs/tags/v1.17.8/resources/apptainer/install-apptainer.sh
  CI_SCRIPT_URL_DOOD_RUN: https://raw.githubusercontent.com/WIPACrepo/wipac-dev-tools/refs/tags/v1.17.8/resources/docker/run-docker-outside-of-docker.sh
  CI_SCRIPT_URL_RUN_MQ_BROKER: https://raw.githubusercontent.com/Observation-Management-Service/ewms-pilot/refs/tags/v1.1.2/tests/integration/run-broker.sh
  #
  # used for/by mq broker script
  RABBITMQ_IMAGE_TAG: bitnamilegacy/rabbitmq:3.13.5
  MQ_BROKER: "scanner_broker"
  #
  # used by local_scan.py
  CI_LOCAL_SCAN_TAIL: 20
  #
  _SCANNER_IMAGE_DOCKER: icecube/skymap_scanner:local
  #
  # a special network set up so docker scanner clients (inside of docker pilots) can talk to the mq broker
  DOOD_NETWORK: mynet
  #
  # used for configuring the scanner runtime
  CI_TEST_RUN_STDOUT_STDERR_DIR: /home/runner/work/skymap_scanner/testrun_outputs
  N_WORKERS: 2
  REALTIME_EVENTS_DIR: /home/runner/work/skymap_scanner/skymap_scanner/tests/data/realtime_events
  CI_SKYSCAN_CACHE_DIR: /home/runner/work/skymap_scanner/skymap_scanner/cache
  CI_SKYSCAN_OUTPUT_DIR: /home/runner/work/skymap_scanner/skymap_scanner/output
  CI_SKYSCAN_DEBUG_DIR: /home/runner/work/skymap_scanner/skymap_scanner/debug
  #
  # see source tests/env-vars.sh
jobs:

  # detect the supported python versions (consumed by downstream matrix jobs)
  py-versions:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.versions.outputs.matrix }}
    steps:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
      - id: versions
        uses: WIPACrepo/wipac-dev-py-versions-action@v2.8
# anchors-only pseudo-job: defines reusable steps aliased (`*name`) by the test jobs below
  _yaml_templates:
    if: ${{ false }}  # never runs; holds YAML anchors only — bit of a hack, but reduces code duplication
    runs-on: ubuntu-latest
    steps:
      - &df_disk_space
        name: "— Disk Space —"
        if: always()
        run: df -h /
      - &free_disk_space
        uses: Jayllyz/free-disk-space@3bda29d61d3f1fa7bf46c5a9a11f22dd20af07c9
        with:
          docker-images: false
      - &dump_output_central_server
        name: "dump output — central server"
        if: always()
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          cat $CI_TEST_RUN_STDOUT_STDERR_DIR/server.out
      - &dump_output_worker_1_pilot
        name: "dump output — worker #1 — pilot"
        if: always()
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          more $CI_TEST_RUN_STDOUT_STDERR_DIR/worker-1/pilot.out | cat
      - &dump_output_worker_1_clients
        name: "dump output — worker #1 — clients (icetray pixel reconstructions)"
        if: always()
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          find $CI_TEST_RUN_STDOUT_STDERR_DIR/worker-1/pilot-* -name "stderrfile" -o -name "stdoutfile" | xargs more | cat
          echo "::::::::::::::" && tree $CI_TEST_RUN_STDOUT_STDERR_DIR/worker-1/pilot-*
      - &dump_output_worker_2_pilot
        name: "dump output — worker #2 — pilot"
        if: always() && env.N_WORKERS == '2'
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          more $CI_TEST_RUN_STDOUT_STDERR_DIR/worker-2/pilot.out | cat
      - &dump_output_worker_2_clients
        name: "dump output — worker #2 — clients (icetray pixel reconstructions)"
        if: always() && env.N_WORKERS == '2'
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          find $CI_TEST_RUN_STDOUT_STDERR_DIR/worker-2/pilot-* -name "stderrfile" -o -name "stdoutfile" | xargs more | cat
          echo "::::::::::::::" && tree $CI_TEST_RUN_STDOUT_STDERR_DIR/worker-2/pilot-*
      - &dump_rabbitmq_diagnostics
        name: RabbitMQ diagnostics
        if: always()
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          echo "=== docker logs (rabbitmq) ==="
          docker logs "$MQ_BROKER" || true
          echo "=== rabbitmqctl status ==="
          docker exec "$MQ_BROKER" rabbitmqctl status || true
          echo "=== rabbitmq-diagnostics memory ==="
          docker exec "$MQ_BROKER" rabbitmq-diagnostics memory || true
          echo "=== rabbitmq-diagnostics environment ==="
          docker exec "$MQ_BROKER" rabbitmq-diagnostics environment || true
          echo "=== rabbitmq-diagnostics alarms ==="
          docker exec "$MQ_BROKER" rabbitmq-diagnostics alarms || true
#############################################################################
# LINTERS
#############################################################################
# lint: flake8 across all supported python versions
  flake8:
    needs: [ py-versions ]
    runs-on: ubuntu-latest
    strategy:
      # don't fail fast on default branch — so we can rerun just the one job in case of transient failure
      fail-fast: ${{ format('refs/heads/{0}', github.event.repository.default_branch) != github.ref }}
      matrix:
        py3: ${{ fromJSON(needs.py-versions.outputs.matrix) }}
    steps:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.py3 }}
      - uses: WIPACrepo/wipac-dev-flake8-action@v1.3
        with:
          max-complexity: 16  # ideal is ~10-15
# lint: mypy static type-checking across all supported python versions
  mypy:
    needs: [ py-versions ]
    runs-on: ubuntu-latest
    strategy:
      # don't fail fast on default branch — so we can rerun just the one job in case of transient failure
      fail-fast: ${{ format('refs/heads/{0}', github.event.repository.default_branch) != github.ref }}
      matrix:
        py3: ${{ fromJSON(needs.py-versions.outputs.matrix) }}
    steps:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
          fetch-depth: 0  # setuptools-scm needs to access git tags
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.py3 }}
      - uses: WIPACrepo/wipac-dev-mypy-action@v2.0
#############################################################################
# PACKAGING
#############################################################################
# packaging: regenerate/maintain package metadata (pushes back to the branch)
  py-setup:
    if: ${{ github.actor != 'dependabot[bot]' }}  # dependabot cannot access PAT
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
        with:
          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
          ref: ${{ github.ref }}  # don't lock to sha (action needs to push)
      - uses: WIPACrepo/wipac-dev-py-setup-action@v5.7
        with:
          mode: PACKAGING
          python_min: '3.10'
          keywords_comma: "WIPAC, IceCube, Skymap Scanner, Reconstruction, IceTray, EWMS"
          auto_mypy_option: True
# packaging: record pinned pip dependencies (builds the image only if the action's pre-check says so)
  py-dependencies:
    needs: [ flake8 ]
    runs-on: ubuntu-latest
    steps:
      # optimization: ask the action if it would proceed before building our expensive images
      - uses: WIPACrepo/wipac-dev-py-dependencies-action@v3.4
        id: pydep-precheck
        with:
          only_precheck: true
      # pre-check passed, so proceed...
      - if: ${{ steps.pydep-precheck.outputs.do_generation == 'true' }}
        uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
          fetch-depth: 0  # setuptools-scm needs to access git tags
      - if: ${{ steps.pydep-precheck.outputs.do_generation == 'true' }}
        uses: docker/setup-buildx-action@v3
      - if: ${{ steps.pydep-precheck.outputs.do_generation == 'true' }}
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          tags: skymap_scanner:py-dep-this
          load: true
          secrets: |
            github_token=${{ secrets.PERSONAL_ACCESS_TOKEN }}
          build-args: INCLUDE_GCD=0  # for pip recording we don't need GCD files
      - if: ${{ steps.pydep-precheck.outputs.do_generation == 'true' }}
        uses: WIPACrepo/wipac-dev-py-dependencies-action@v3.4
#############################################################################
# TESTS
#############################################################################
# smoke test: the scanner docker image builds (no load/run needed)
  test-build-docker:
    needs: [ flake8 ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
          fetch-depth: 0  # setuptools-scm needs to access git tags
      - uses: docker/setup-buildx-action@v3
      - uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          tags: ${{ env._SCANNER_IMAGE_DOCKER }}
          load: false  # for checking if this builds, no loading needed
          secrets: |
            github_token=${{ secrets.PERSONAL_ACCESS_TOKEN }}
# end-to-end scan with dummy reco algos, on both docker and apptainer client platforms
  test-run-dummy:
    needs: [ flake8 ]
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        reco_algo: [
          "dummy",
          "crash_dummy"
        ]
        container_platform: [
          "docker",
          "apptainer",
        ]
    env:
      _RECO_ALGO: "${{ matrix.reco_algo }}"
      _CI_SCANNER_CONTAINER_PLATFORM: "${{ matrix.container_platform }}"
      _NSIDES: "1:0 2:12 4:12"
      EWMS_PILOT_STOP_LISTENING_ON_TASK_ERROR: True  # one crash per pilot is enough
      EWMS_PILOT_TASK_TIMEOUT: 15
    steps:
      - *df_disk_space
      - *free_disk_space
      - *df_disk_space
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
          fetch-depth: 0  # setuptools-scm needs to access git tags
      - uses: docker/setup-buildx-action@v3
      - uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          tags: ${{ env._SCANNER_IMAGE_DOCKER }}
          load: true
          secrets: |
            github_token=${{ secrets.PERSONAL_ACCESS_TOKEN }}
          build-args: INCLUDE_GCD=0  # for dummy tests we don't need GCD files
      - if: ${{ matrix.container_platform == 'apptainer' }}
        name: "apptainer only: install apptainer"
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          curl -fsSL "$CI_SCRIPT_URL_INSTALL_APPTAINER" -o install-apptainer.sh && chmod +x install-apptainer.sh
          ./install-apptainer.sh --sif no  # we will run sandboxes (dirs), not .sif files
      - if: ${{ matrix.container_platform == 'apptainer' }}
        name: "apptainer only: build apptainer image"
        env:
          # keep caches off $HOME and easy to delete
          APPTAINER_CACHEDIR: ${{ runner.temp }}/apptainer-cache
          APPTAINER_TMPDIR: ${{ runner.temp }}
        run: ./resources/build_apptainer_image.sh
      - name: run
        timeout-minutes: 15  # on average ~9min
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          set -x
          # step 1: CREATE DOCKER NETWORK
          if [[ "${{ matrix.container_platform }}" == "docker" ]]; then
            docker network create "$DOOD_NETWORK"
          fi
          # step 2: START MQ BROKER — this is always in a docker container, even in 'apptainer' tests
          curl -fsSL "$CI_SCRIPT_URL_RUN_MQ_BROKER" -o run-mq-broker.sh && chmod +x run-mq-broker.sh
          if [[ "${{ matrix.container_platform }}" == "docker" ]]; then
            # USE THE DOCKER NETWORK CREATED IN PREVIOUS STEP
            source ./run-mq-broker.sh "rabbitmq" "$MQ_BROKER" "--network=$DOOD_NETWORK"
            export SKYSCAN_MQ_TOCLIENT_BROKER_ADDRESS="$MQ_BROKER"
            export SKYSCAN_MQ_FROMCLIENT_BROKER_ADDRESS="$MQ_BROKER"
          elif [[ "${{ matrix.container_platform }}" == "apptainer" ]]; then
            # USE THE HOST NETWORK SINCE APPTAINER DOESN'T HAVE AS ISOLATED NETWORKING AS DOCKER DOES
            source ./run-mq-broker.sh "rabbitmq" "$MQ_BROKER" "--network=host"
            export SKYSCAN_MQ_TOCLIENT_BROKER_ADDRESS="localhost"  # localhost b/c using --network=host
            export SKYSCAN_MQ_FROMCLIENT_BROKER_ADDRESS="localhost"  # localhost b/c using --network=host
          else
            echo "::error::unknown container_platform: ${{ matrix.container_platform }}"
            exit 2
          fi
          docker ps
          # step 3: SET ENV VARS — these require bash to set, see this job's and yaml's `env` blocks for others
          source tests/env-vars.sh
          export _EVENTS_FILE=$(realpath $REALTIME_EVENTS_DIR/hese_event_01.json)
          if [ "${{ matrix.container_platform }}" == "apptainer" ]; then
            export _SCANNER_IMAGE_APPTAINER="$(realpath skymap_scanner.sandbox)"
          fi
          if [ "${{ matrix.reco_algo }}" == "crash_dummy" ]; then
            export _SKYSCAN_CI_CRASH_DUMMY_PROBABILITY=0.75
          fi
          # step 4: RUN!
          cd ./resources/launch_scripts
          if [ "${{ matrix.reco_algo }}" == "crash_dummy" ]; then
            set +e  # allow `||` to suppress first non-zero exit
            python3 local_scan.py $N_WORKERS $CI_TEST_RUN_STDOUT_STDERR_DIR \
              || echo "::warning::scan failed, checking outputs to see if it failed as expected..."
          else
            python3 local_scan.py $N_WORKERS $CI_TEST_RUN_STDOUT_STDERR_DIR
          fi
      - if: ${{ matrix.reco_algo == 'crash_dummy' }}
        name: "crash_dummy only: see the crashes happened as expected"
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          set -x
          # check for fails/errors
          error_type_1='intentional crash-dummy error'
          error_type_2='\[Timeout-Error\] timed out after'
          pattern="$error_type_1|$error_type_2"
          if find "$CI_TEST_RUN_STDOUT_STDERR_DIR/worker-"*/pilot.out -type f -exec grep -qE "$pattern" {} +; then
            echo "Match(es) found: PilotSubprocessError and/or TimeoutError occurred."
          else
            echo "::error::Could not find the expected error(s) in worker/pilot outputs."
            exit 1
          fi
      - if: ${{ matrix.reco_algo == 'dummy' }}
        name: "dummy only: dump results file (.npz)"
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          ls .
          ls $CI_SKYSCAN_OUTPUT_DIR
          outfile=$(ls -d $CI_SKYSCAN_OUTPUT_DIR/*.npz)
          echo $outfile
      - *dump_output_central_server
      - *dump_output_worker_1_pilot
      - *dump_output_worker_1_clients
      - *dump_output_worker_2_pilot
      - *dump_output_worker_2_clients
      - *dump_rabbitmq_diagnostics
      - *df_disk_space
# end-to-end dummy scan across nside configurations and predictive-scanning thresholds
  test-run-nsides-thresholds-dummy:
    if: ${{ github.actor != 'dependabot[bot]' }}  # don't run expensive job when not needed
    needs: [ flake8 ]
    runs-on: ubuntu-latest
    strategy:
      # don't fail fast on default branch — so we can rerun just the one job in case of transient failure
      fail-fast: ${{ format('refs/heads/{0}', github.event.repository.default_branch) != github.ref }}
      matrix:
        nsides: [
          "1:0",
          "1:0 2:12",
          "1:0 2:12 4:12"
        ]
        predictive_scanning_threshold: [
          1.0,
          0.65,
        ]
    env:
      _CI_SCANNER_CONTAINER_PLATFORM: docker
      _RECO_ALGO: dummy
      _NSIDES: "${{ matrix.nsides }}"
      _PREDICTIVE_SCANNING_THRESHOLD: ${{ matrix.predictive_scanning_threshold }}
    steps:
      - *df_disk_space
      - *free_disk_space
      - *df_disk_space
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
          fetch-depth: 0  # setuptools-scm needs to access git tags
      - uses: docker/setup-buildx-action@v3
      - uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          tags: ${{ env._SCANNER_IMAGE_DOCKER }}
          load: true
          build-args: INCLUDE_GCD=0  # for dummy tests we don't need GCD files
      - name: run
        timeout-minutes: 12  # on average max~=8.5min
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          # step 1: CREATE DOCKER NETWORK
          docker network create "$DOOD_NETWORK"
          # step 2: START MQ BROKER — using the docker network created above
          curl -fsSL "$CI_SCRIPT_URL_RUN_MQ_BROKER" -o run-mq-broker.sh && chmod +x run-mq-broker.sh
          source ./run-mq-broker.sh "rabbitmq" "$MQ_BROKER" "--network=$DOOD_NETWORK"
          export SKYSCAN_MQ_TOCLIENT_BROKER_ADDRESS="$MQ_BROKER"
          export SKYSCAN_MQ_FROMCLIENT_BROKER_ADDRESS="$MQ_BROKER"
          # step 3: SET ENV VARS — these require bash to set, see this job's and yaml's `env` blocks for others
          set -x
          source tests/env-vars.sh
          export _EVENTS_FILE=$(realpath $REALTIME_EVENTS_DIR/hese_event_01.json)
          # step 4: RUN!
          cd ./resources/launch_scripts
          python3 local_scan.py $N_WORKERS $CI_TEST_RUN_STDOUT_STDERR_DIR
      - name: check no nsides skipped
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          ls $CI_SKYSCAN_OUTPUT_DIR
          # get newest run*.json
          export outfile=$(find $CI_SKYSCAN_OUTPUT_DIR -type f -name "run*.json" -exec stat -c '%y %n' {} + | sort | tail -1 | awk '{print $4}')
          echo $outfile
          python3 -c '
          import json
          import os
          with open(os.getenv("outfile")) as f:
              pydict = json.load(f)
          nsides = "${{ matrix.nsides }}"
          assert len(pydict) == nsides.count(":")
          '
      - *dump_output_central_server
      - *dump_output_worker_1_pilot
      - *dump_output_worker_1_clients
      - *dump_output_worker_2_pilot
      - *dump_output_worker_2_clients
      - *dump_rabbitmq_diagnostics
      - *df_disk_space
# full end-to-end scans with real reco algorithms, compared against known result files
  test-run-realistic:
    if: ${{ github.actor != 'dependabot[bot]' }}  # don't run expensive job when not needed
    needs: [ flake8 ]
    runs-on: ubuntu-latest
    strategy:
      # don't fail fast on default branch — so we can rerun just the one job in case of transient failure
      fail-fast: ${{ format('refs/heads/{0}', github.event.repository.default_branch) != github.ref }}
      matrix:
        reco_algo: [
          millipede_original,
          millipede_wilks,
          splinempe,
          splinempe_pointed
        ]
        eventfile: [
          hese_event_01.json,
          run00136766-evt000007637140-GOLD.pkl,
          run00136662-evt000035405932-BRONZE.pkl,
          138632_31747601.json
        ]
        exclude:
          # splinempe should not run on HESE
          - reco_algo: splinempe
            eventfile: hese_event_01.json
          - reco_algo: splinempe_pointed
            eventfile: hese_event_01.json
    env:
      _RECO_ALGO: "${{ matrix.reco_algo }}"
      _CI_SCANNER_CONTAINER_PLATFORM: "docker"
      _NSIDES: "1:0"
    steps:
      - *df_disk_space
      - *free_disk_space
      - *df_disk_space
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
          fetch-depth: 0  # setuptools-scm needs to access git tags
      - uses: docker/setup-buildx-action@v3
      - uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          tags: ${{ env._SCANNER_IMAGE_DOCKER }}
          load: true
          secrets: |
            github_token=${{ secrets.PERSONAL_ACCESS_TOKEN }}
      - name: run
        timeout-minutes: 55  # on average max~=35min
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          # step 1: CREATE DOCKER NETWORK
          docker network create "$DOOD_NETWORK"
          # step 2: START MQ BROKER — using the docker network created above
          curl -fsSL "$CI_SCRIPT_URL_RUN_MQ_BROKER" -o run-mq-broker.sh && chmod +x run-mq-broker.sh
          source ./run-mq-broker.sh "rabbitmq" "$MQ_BROKER" "--network=$DOOD_NETWORK"
          export SKYSCAN_MQ_TOCLIENT_BROKER_ADDRESS="$MQ_BROKER"
          export SKYSCAN_MQ_FROMCLIENT_BROKER_ADDRESS="$MQ_BROKER"
          # step 3: SET ENV VARS — these require bash to set, see this job's and yaml's `env` blocks for others
          source tests/env-vars.sh
          export _EVENTS_FILE=$(realpath $REALTIME_EVENTS_DIR/${{ matrix.eventfile }})
          # step 4: RUN!
          cd ./resources/launch_scripts
          python3 local_scan.py $N_WORKERS $CI_TEST_RUN_STDOUT_STDERR_DIR
      - name: test output against known result (.json)
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          ls $CI_SKYSCAN_OUTPUT_DIR
          # get newest run*.json
          outfile=$(find $CI_SKYSCAN_OUTPUT_DIR -type f -name "run*.json" -exec stat -c '%y %n' {} + | sort | tail -1 | awk '{print $4}')
          echo $outfile
          cat $outfile
          pip install .  # don't need icecube, so no docker container needed
          # --------------------------------------------------------------------
          # splinempe: special handling + gh-issue reminder timer
          if [[ "${{ matrix.reco_algo }}" == "splinempe" ]]; then
            python tests/compare_scan_results.py \
              --actual $outfile \
              --expected tests/data/results_json/${{ matrix.reco_algo }}/$(basename $outfile) \
              --assert \
              --compare-different-versions-ok \
              || (cat $(ls *.diff.json) && false)
            # put the timer after, so at least we know if the test passes
            start_time="2025-11-7"  # Set the starting date
            end_time=$(date -d "$start_time +2 months" +%s)
            current_time=$(date +%s)
            if (( current_time >= end_time )); then
              echo "::error::alarm went off, how's https://github.com/icecube/skymap_scanner/issues/242 going? Are version-0 result files still being used for splinempe?"
              exit 1
            fi
            exit 0
          fi
          # --------------------------------------------------------------------
          python tests/compare_scan_results.py \
            --actual $outfile \
            --expected tests/data/results_json/${{ matrix.reco_algo }}/$(basename $outfile) \
            --assert \
            || (cat $(ls *.diff.json) && false)
      - *dump_output_central_server
      - *dump_output_worker_1_pilot
      - *dump_output_worker_1_clients
      - *dump_output_worker_2_pilot
      - *dump_output_worker_2_clients
      - *dump_rabbitmq_diagnostics
      - *df_disk_space
      - name: Upload results as artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: test-run-realistic-${{ matrix.reco_algo }}-${{ matrix.eventfile }}-results
          path: |
            ${{ env.CI_SKYSCAN_OUTPUT_DIR }}**
            *.diff.json
          if-no-files-found: warn
          retention-days: 7
# run the file-staging test script inside the scanner image
  test-file-staging:
    needs: [ flake8 ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
          fetch-depth: 0  # setuptools-scm needs to access git tags
      - uses: docker/setup-buildx-action@v3
      - uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          tags: ${{ env._SCANNER_IMAGE_DOCKER }}
          load: true
          secrets: |
            github_token=${{ secrets.PERSONAL_ACCESS_TOKEN }}
      - name: run
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          # SET ENV VARS — these require bash to set, see this job's and yaml's `env` blocks for others
          source tests/env-vars.sh
          # RUN!
          docker run --rm -i \
            --mount type=bind,source=$(readlink -f tests/),target=/local/tests/ \
            $(env | grep -E '^(SKYSCAN_|_SKYSCAN_)' | cut -d'=' -f1 | sed 's/^/--env /') \
            $_SCANNER_IMAGE_DOCKER \
            python /local/tests/file_staging.py
# reconstruct a single pixel directly (no ewms pilot wrapper) and compare to known output
  test-run-single-pixel:
    needs: [ flake8 ]
    runs-on: ubuntu-latest
    strategy:
      # don't fail fast on default branch — so we can rerun just the one job in case of transient failure
      fail-fast: ${{ format('refs/heads/{0}', github.event.repository.default_branch) != github.ref }}
      matrix:
        dir: [
          "BRONZE",
          "GOLD",
          "JSON",
        ]
        reco_algo: [
          millipede_original,
          millipede_wilks,
          splinempe,
          splinempe_pointed
        ]
        exclude:
          # splinempe should not run on HESE
          - reco_algo: splinempe
            dir: "JSON"
          - reco_algo: splinempe_pointed
            dir: "JSON"
    steps:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.sha }}  # lock to triggered commit (github.ref is dynamic)
          fetch-depth: 0  # setuptools-scm needs to access git tags
      - uses: docker/setup-buildx-action@v3
      - uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          tags: ${{ env._SCANNER_IMAGE_DOCKER }}
          load: true
          secrets: |
            github_token=${{ secrets.PERSONAL_ACCESS_TOKEN }}
      - name: run
        timeout-minutes: 10  # on average max~=5min
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          set -e
          # SET ENV VARS — these require bash to set, see this job's and yaml's `env` blocks for others
          source tests/env-vars.sh
          # RUN!
          # -> run reco directly without an ewms pilot wrapper
          docker run --network="host" --rm -i \
            --shm-size=6gb \
            --mount type=bind,source=$(readlink -f tests/),target=/local/tests/ \
            --env PY_COLORS=1 \
            $(env | grep -E '^(SKYSCAN_|_SKYSCAN_)' | cut -d'=' -f1 | sed 's/^/--env /') \
            $_SCANNER_IMAGE_DOCKER \
            python -m skymap_scanner.client \
              --infile /local/tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/in.json \
              --client-startup-json /local/tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/startup.json \
              --outfile /local/tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/out-actual.json
      - name: test output against known result
        run: |
          set -euo pipefail; echo "now: $(date -u +"%Y-%m-%dT%H:%M:%S.%3N")"
          source tests/env-vars.sh
          ls tests/data/reco_pixel_pkls/${{ matrix.reco_algo }}/${{ matrix.dir }}
          ls tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}
          # need icecube for depickling, so docker container needed
          docker run --network="host" --rm -i \
            --shm-size=6gb \
            --mount type=bind,source=$(readlink -f tests/),target=/local/tests/ \
            --env PY_COLORS=1 \
            $(env | grep -E '^(SKYSCAN_|_SKYSCAN_)' | cut -d'=' -f1 | sed 's/^/--env /') \
            $_SCANNER_IMAGE_DOCKER \
            python /local/tests/compare_reco_pixel_single.py \
              --actual /local/tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/out-actual.json \
              --expected /local/tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/out.json \
              --diff-out-dir /local/tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/ \
              --assert \
            || (cat $(ls tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/*.diff.json) && false)
      - name: Upload results as artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: test-run-single-pixel-${{ matrix.reco_algo }}-${{ matrix.dir }}-results
          path: |
            tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/out-actual.json
            tests/data/reco_pixel_single/${{ matrix.reco_algo }}/${{ matrix.dir }}/*.diff.json
          if-no-files-found: warn
          retention-days: 7
#############################################################################
# GIT TAG & GITHUB RELEASE
#############################################################################
# cut a git tag + GitHub release once everything above passes on the default branch
  tag-and-release:
    # only run on main/default branch
    if: format('refs/heads/{0}', github.event.repository.default_branch) == github.ref
    needs: [
      py-versions,
      mypy,
      flake8,
      py-setup,
      py-dependencies,
      test-build-docker,
      test-run-dummy,
      test-file-staging,
      test-run-nsides-thresholds-dummy,
      test-run-realistic,
      test-run-single-pixel,
    ]
    uses: WIPACrepo/wipac-dev-workflows/.github/workflows/tag-and-release.yml@v1.25
    permissions:  # for GITHUB_TOKEN
      contents: write
    with:
      project-type: python
      python-version: "${{ fromJSON(needs.py-versions.outputs.matrix)[0] }}"
      release-artifacts: |
        py-dependencies-logs
    secrets:
      TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}  # trigger tag-event gha workflows