Merger trees artefact treatment: mass swapping and massive transients #167

name: Build and test
on: [push, pull_request]
env:
  CI_BUILD_DIR: build
  TEST_INPUTS_DIR: inputs
  TEST_OUTPUTS_DIR: outputs
  TEST_CONFIG_FILE: ci-tests.cfg
  TEST_SIM_NAME: mini-SURFS
  TEST_FIXED_SEED: 123456
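# These variables are shared by every job below: shell steps read them as
# $CI_BUILD_DIR, $TEST_INPUTS_DIR, etc., while workflow expressions use
# ${{ env.TEST_OUTPUTS_DIR }} and friends.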
jobs:
  build_and_test:
    name: Build and test shark. OS=${{ matrix.os }}, omp=${{ matrix.omp }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        #os: [ubuntu-latest, macos-latest]
        os: [ubuntu-latest]
        omp: [true, false]
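        # The omp dimension builds shark once with and once without OpenMP
        # (the Configure step below maps it to -DSHARK_NO_OPENMP=OFF/ON);
        # macOS is excluded from the os matrix for now.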
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true
      - name: Install system dependencies (Linux)
        if: ${{ matrix.os == 'ubuntu-latest' }}
        run: |
          sudo apt install ninja-build libhdf5-dev hdf5-tools libboost-filesystem-dev libboost-program-options-dev libboost-log-dev cxxtest libgsl-dev
      - name: Install system dependencies (MacOS)
        if: ${{ matrix.os == 'macos-latest' }}
        run: |
          brew install ninja hdf5 boost cxxtest gsl ${{ matrix.omp && 'libomp' || '' }}
      - name: Point CMake to libomp (MacOS)
        if: ${{ matrix.os == 'macos-latest' && matrix.omp }}
        run: |
          # libomp is installed as keg-only, so we need to manually point to it
          HOMEBREW_LIBOMP_PREFIX=`brew --prefix libomp`
          OMP_FLAGS="-Xpreprocessor -fopenmp -I${HOMEBREW_LIBOMP_PREFIX}/include"
          echo "EXTRA_CMAKE_ARGS=-DOpenMP_C_FLAGS=\"$OMP_FLAGS\" -DOpenMP_C_LIB_NAMES=omp -DOpenMP_CXX_FLAGS=\"$OMP_FLAGS\" -DOpenMP_CXX_LIB_NAMES=omp -DOpenMP_omp_LIBRARY=$HOMEBREW_LIBOMP_PREFIX/lib/libomp.dylib" >> "$GITHUB_ENV"
      - name: Configure
        run: |
          eval cmake -B ${CI_BUILD_DIR} -G Ninja \
            -DSHARK_TEST=ON -DSHARK_NO_OPENMP=${{ matrix.omp && 'OFF' || 'ON' }} -DCMAKE_CXX_FLAGS="-Wall -Werror" \
            $EXTRA_CMAKE_ARGS
      - name: Build
        run: cmake --build ${CI_BUILD_DIR}
      - name: Run unit tests
        run: |
          cd ${CI_BUILD_DIR}
          ctest --output-on-failure
      - name: Tar shark build before upload
        run: tar cf shark-build.tar.gz ${{ env.CI_BUILD_DIR }}
      - name: Upload shark build for next jobs (Linux)
        if: ${{ matrix.os == 'ubuntu-latest' && matrix.omp }}
        uses: actions/upload-artifact@v4
        with:
          name: shark-build
          path: shark-build.tar.gz
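      # Only the Linux + OpenMP matrix entry uploads its build; the local
      # download-shark-build action used by the jobs below fetches this single
      # artifact (and presumably unpacks shark-build.tar.gz).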

  initial_shark_run:
    name: Initial shark run (with fixed seed)
    needs: build_and_test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/download-shark-build
      - name: Download test datasets
        run: |
          mkdir -p ${TEST_INPUTS_DIR}
          curl -L -o ${TEST_INPUTS_DIR}/redshifts.txt 'https://docs.google.com/uc?export=download&id=1xvNmJB_KmoBHuQz-QzdPnY0HFs7smkUB'
          curl -L -o ${TEST_INPUTS_DIR}/tree_199.0.hdf5 'https://docs.google.com/uc?export=download&id=1JDK8ak13bEhzg9H9xt0uE8Fh_2LD3KpZ'
      - uses: ./.github/actions/setup-config-file
      - name: Run shark with fixed seed
        run: .ci/run_shark.sh my_model -o execution.seed=${TEST_FIXED_SEED}
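      # The 'my_model' output produced here is the baseline that the
      # check_reproducibility job compares its later runs against.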
      - name: Upload shark output for next jobs
        uses: actions/upload-artifact@v4
        with:
          name: shark-output
          path: ${{ env.TEST_OUTPUTS_DIR }}
      - name: Upload shark inputs for next jobs
        uses: actions/upload-artifact@v4
        with:
          name: shark-input
          path: ${{ env.TEST_INPUTS_DIR }}

  check_hdf5_docs:
    name: Check HDF5 properties documentation is up to date
    needs: initial_shark_run
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install HDF5 tools
        run: sudo apt install hdf5-tools
      - uses: actions/download-artifact@v4
        with:
          name: shark-output
          path: ${{ env.TEST_OUTPUTS_DIR }}
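      # The check script cross-checks the properties in each output file
      # against the corresponding .rst documentation page (presumably using the
      # hdf5-tools installed above to inspect the files).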
      - name: Check output properties' documentation
        run: |
          .ci/check_hdf5_docs.sh my_model/199/0/galaxies.hdf5 galaxies.rst
          .ci/check_hdf5_docs.sh my_model/156/0/star_formation_histories.hdf5 star_formation_histories.rst
          .ci/check_hdf5_docs.sh my_model/199/0/black_hole_histories.hdf5 black_hole_histories.rst

  generate_plots:
    name: Generate all standard plots
    needs: initial_shark_run
    runs-on: ubuntu-latest
    # Currently skipped because there are a few minor problems
    # with some plots
    if: false
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: shark-output
          path: ${{ env.TEST_OUTPUTS_DIR }}
      - name: Install Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install Python dependencies
        run: pip install matplotlib h5py scipy
      - uses: ./.github/actions/setup-config-file
      - name: Generate all plots
        run: |
          echo "backend: Agg" >> matplotlibrc
          python standard_plots/all.py -c ${{ env.TEST_CONFIG_FILE }}

  check_reproducibility:
    name: Check shark runs are reproducible
    needs: initial_shark_run
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/download-shark-build
      - uses: actions/download-artifact@v4
        with:
          name: shark-output
          path: ${{ env.TEST_OUTPUTS_DIR }}
      - uses: actions/download-artifact@v4
        with:
          name: shark-input
          path: ${{ env.TEST_INPUTS_DIR }}
      - name: Install Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install Python dependencies
        run: pip install h5py
      - uses: ./.github/actions/setup-config-file
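      # compare_galaxies.sh checks whether two runs produced the same galaxies
      # (presumably the reason h5py is installed above).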
      - name: Check fixed seed is reproducible
        run: |
          .ci/run_shark.sh my_model_same_seed -o execution.seed=${TEST_FIXED_SEED}
          .ci/compare_galaxies.sh my_model my_model_same_seed
      - name: Check fixed seed is reproducible when multithreaded
        run: |
          # "-t 0" lets shark use the maximum number of OpenMP threads,
          # which OpenMP implementations usually constrain to the available hardware
          .ci/run_shark.sh my_model_same_seed_parallel -o execution.seed=${TEST_FIXED_SEED} -t 0
          .ci/compare_galaxies.sh my_model my_model_same_seed_parallel
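      # Without a fixed execution.seed the run should produce different
      # galaxies, hence --expect-unequal.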
      - name: Check random seed gives different results
        run: |
          .ci/run_shark.sh my_model_random_seed -t 0
          .ci/compare_galaxies.sh my_model my_model_random_seed --expect-unequal