Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Trees issues fixes #15

Closed
wants to merge 27 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
0ee81fe
Remove unused variables in gas_cooling
rtobar Sep 14, 2023
72eb976
Receive exceptions by reference, not by value
rtobar Sep 14, 2023
d429e1e
Add documentation for black_hole_histories.hdf5 outputs
rtobar Sep 11, 2023
8c773fa
Prefer histogram's "density" argument over "normed"
rtobar Sep 12, 2023
4199fb3
Deal with OpenMP_CXX_FLAGS correctly
rtobar Sep 14, 2023
4bf3cd5
Improve OpenMP checks
rtobar Sep 14, 2023
ab93cda
Finish porting all tests to GitHub Actions
rtobar Sep 12, 2023
c887fff
Run build/tests on PRs too
rtobar Sep 14, 2023
4732bd6
Add black hole histories output documentation to TOC
rtobar Sep 14, 2023
e1d904a
Specify build.os to keep RTD happy
rtobar Sep 14, 2023
6176d6f
Add GitHub action to ensure docs are correctly built
rtobar Sep 14, 2023
fbe4fa7
Add build.tools section, needed by RTD
rtobar Sep 14, 2023
efa8b0a
Bump dependency on Boost to >= 1.68
rtobar Oct 3, 2023
e75f8ff
Fix documentation building in GHA
rtobar Jan 17, 2024
0f5a785
Include cstdint to ensure std::int64_t is visibile
rtobar Jan 17, 2024
0cf9462
Add spin_mass_dependence option to dark_matter_halo group
rtobar Jan 17, 2024
602e808
Actually read new dark_matter_halo.spin_mass_dependence
rtobar Jan 17, 2024
c0a4f42
Tidal stripping: lost mass considered and galaxy stellar mass at infa…
angel-chandro Jun 20, 2024
7ea18ad
Tidal stripping: lost mass considered and galaxy stellar mass at infa…
angel-chandro Jun 20, 2024
896dc0f
Fix mass swapping: use host halo properties for central subhalos and …
angel-chandro Jun 20, 2024
6282ba2
Mass swapping: use host halo properties for central subhalos and curr…
angel-chandro Jun 21, 2024
34507a6
Mass swapping: fix bugs in implementation
angel-chandro Jun 21, 2024
2cdcedb
Mass swapping: fix bugs in implementation (part 2)
angel-chandro Jun 21, 2024
bb2b39e
Made transients_prefix optional for easier use
rtobar Jun 26, 2024
22d1216
Modifications to mass swapping cases
cdplagos Jun 27, 2024
c666ece
Implementing the mass swap fix as an option
cdplagos Jun 27, 2024
3011fba
Fixed several small bugs
cdplagos Jun 27, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions .ci/check_hdf5_docs.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
#!/bin/bash
#
# Generate the HDF5 output documentation for a given output file and check it
# is up to date; otherwise tell the user how to update it and fail.
#
# $1: path of the HDF5 file, relative to the model output directory
# $2: name of the corresponding .rst file under doc/hdf5_properties/
#
# Requires TEST_OUTPUTS_DIR and TEST_SIM_NAME to be set in the environment,
# and must be run from the repository root.

hdf5_file="$1"
rst_file="$2"
hdf5_file_basename=$(basename "${hdf5_file}")
full_rst_file="doc/hdf5_properties/${rst_file}"

scripts/properties_as_list.sh "${TEST_OUTPUTS_DIR}/${TEST_SIM_NAME}/${hdf5_file}" > props.rst
_diff=$(diff -Naur "${full_rst_file}" props.rst)
if [ -n "${_diff}" ]; then
	# Use one echo per line instead of embedded "\n" escapes: echo's
	# handling of backslash escapes is not portable across shells
	# (bash would print the literal characters "\n")
	{
		echo
		echo "The file ${full_rst_file} is out of date. This probably means that you added a new"
		echo "dataset to shark's output, but forgot to update the corresponding documentation."
		echo "The full difference follows:"
		echo
		echo "${_diff}"
		echo
		echo "Please run the scripts/properties_as_list.sh script against a ${hdf5_file_basename} file"
		echo "to re-generate its documentation, then commit your changes. For example:"
		echo
		echo "scripts/properties_as_list.sh my-output/model/199/0/${hdf5_file_basename} > ${full_rst_file}"
	} >&2
	exit 1
fi
9 changes: 9 additions & 0 deletions .ci/compare_galaxies.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#!/bin/bash
#
# Compare the galaxies.hdf5 outputs of two shark models using the
# compare_galaxies.py script. Any extra arguments are forwarded verbatim
# (e.g. --expect-unequal).
#
# $1: name of the reference model
# $2: name of the model to compare against the reference
#
# Requires TEST_OUTPUTS_DIR and TEST_SIM_NAME to be set in the environment,
# and must be run from the repository root.

reference_model_name="$1"
shift
this_model_name="$1"
shift

# "$@" (quoted) forwards the remaining arguments without word-splitting them
# again; a bare $@ would break any argument containing whitespace
python scripts/compare_galaxies.py -m \
    "${TEST_OUTPUTS_DIR}/${TEST_SIM_NAME}/${reference_model_name}/199/0/galaxies.hdf5" \
    "${TEST_OUTPUTS_DIR}/${TEST_SIM_NAME}/${this_model_name}/199/0/galaxies.hdf5" \
    "$@"
10 changes: 10 additions & 0 deletions .ci/run_shark.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
#!/bin/bash
#
# Run shark over the test simulation data, storing the outputs under the
# given model name. Any extra arguments are forwarded verbatim to shark
# (e.g. -o execution.seed=..., -t 0).
#
# $1: name of the model under which outputs are stored
#
# Requires CI_BUILD_DIR, TEST_CONFIG_FILE, TEST_INPUTS_DIR and TEST_SIM_NAME
# to be set in the environment.

model_name="$1"
shift

# Quote all expansions so paths/values with whitespace survive intact;
# "$@" forwards the remaining arguments without re-splitting them
"${CI_BUILD_DIR}/shark" "${TEST_CONFIG_FILE}" \
    -o execution.output_bh_histories=true -o execution.snapshots_bh_histories=199 \
    -o "simulation.redshift_file=${TEST_INPUTS_DIR}/redshifts.txt" \
    -o "simulation.tree_files_prefix=${TEST_INPUTS_DIR}/tree_199" \
    -o "simulation.sim_name=${TEST_SIM_NAME}" \
    -o "execution.name_model=${model_name}" \
    "$@"
17 changes: 17 additions & 0 deletions .github/actions/download-shark-build/action.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Composite action: fetch the shark build tree produced by an earlier job,
# unpack it, and install the shared libraries shark needs at runtime.
name: Get shark up and running from previous build
runs:
  using: composite
  steps:
    # The build job uploads its build directory as the "shark-build" artifact
    - name: Download shark build tarball
      uses: actions/download-artifact@v3
      with:
        name: shark-build

    - name: Untar shark build after download
      run: tar xf shark-build.tar.gz
      shell: bash

    # Runtime (non -dev) counterparts of the packages used at build time
    - name: Install system runtime dependencies
      run: sudo apt install libhdf5-103 libboost-filesystem1.74.0 libboost-program-options1.74.0 libboost-log1.74.0 libgsl27
      shell: bash

12 changes: 12 additions & 0 deletions .github/actions/setup-config-file/action.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Composite action: derive the configuration file used by the CI tests from
# the sample.cfg shipped in the repository, pointing it at the CI test
# input/output locations via environment variables.
name: Setup the shark configuration file used for tests
runs:
  using: "composite"
  steps:
    # Rewrite the relevant sample.cfg settings in one sed pass; the result
    # is written to the file named by the TEST_CONFIG_FILE environment variable
    - run: |
        sed "
        s|output_directory.*|output_directory = ${TEST_OUTPUTS_DIR}|
        s|redshift_file.*|redshift_file = ${TEST_INPUTS_DIR}/redshifts.txt|
        s|tree_files_prefix.*|tree_files_prefix = ${TEST_INPUTS_DIR}/tree_199|
        s|sim_name.*|sim_name = ${TEST_SIM_NAME}|
        " sample.cfg > ${{ env.TEST_CONFIG_FILE }}
      shell: bash
184 changes: 170 additions & 14 deletions .github/workflows/build-and-test.yaml
Original file line number Diff line number Diff line change
@@ -1,40 +1,196 @@
name: Build and test
on: [push, pull_request]

# Build on every branch push, tag push, and pull request change:
on: [push]
env:
CI_BUILD_DIR: build
TEST_INPUTS_DIR: inputs
TEST_OUTPUTS_DIR: outputs
TEST_CONFIG_FILE: ci-tests.cfg
TEST_SIM_NAME: mini-SURFS
TEST_FIXED_SEED: 123456

jobs:
build_wheels:
name: Build and test shark on ${{ matrix.os }}
build_and_test:
name: Build and test shark. OS=${{ matrix.os }}, omp=${{ matrix.omp }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
#os: [ubuntu-latest, macos-latest]
os: [ubuntu-latest]
omp: [true, false]

steps:
- uses: actions/checkout@v3
with:
submodules: true

- name: Install dependencies
- name: Install system dependencies (Linux)
if: ${{ matrix.os == 'ubuntu-latest' }}
run: |
sudo apt install libhdf5-dev hdf5-tools libboost-filesystem-dev libboost-program-options-dev libboost-log-dev cxxtest libgsl-dev
sudo apt install ninja-build libhdf5-dev hdf5-tools libboost-filesystem-dev libboost-program-options-dev libboost-log-dev cxxtest libgsl-dev
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

suggestion: Consider adding -y flag to apt install

Adding the -y flag to apt install can help automate the installation process by automatically agreeing to the installation of packages.

Suggested change
sudo apt install ninja-build libhdf5-dev hdf5-tools libboost-filesystem-dev libboost-program-options-dev libboost-log-dev cxxtest libgsl-dev
sudo apt install -y ninja-build libhdf5-dev hdf5-tools libboost-filesystem-dev libboost-program-options-dev libboost-log-dev cxxtest libgsl-dev


- name: Install dependencies
- name: Install system dependencies (MacOS)
if: ${{ matrix.os == 'macos-latest' }}
run: |
brew install hdf5 boost cxxtest gsl
brew install ninja hdf5 boost cxxtest gsl ${{ matrix.omp && 'libomp' || '' }}

- name: Point CMake to libomp (MacOS)
if: ${{ matrix.os == 'macos-latest' && matrix.omp }}
run: |
# libomp is installed as keg-only, so we need to manually point to it
HOMEBREW_LIBOMP_PREFIX=`brew --prefix libomp`
OMP_FLAGS="-Xpreprocessor -fopenmp -I${HOMEBREW_LIBOMP_PREFIX}/include"
echo "EXTRA_CMAKE_ARGS=-DOpenMP_C_FLAGS=\"$OMP_FLAGS\" -DOpenMP_C_LIB_NAMES=omp -DOpenMP_CXX_FLAGS=\"$OMP_FLAGS\" -DOpenMP_CXX_LIB_NAMES=omp -DOpenMP_omp_LIBRARY=$HOMEBREW_LIBOMP_PREFIX/lib/libomp.dylib" >> "$GITHUB_ENV"

- name: Configure
# Leaving Werror out for now because there *are* errors
# and I don't know what the proper fix is.
run: cmake -B build/ -DSHARK_TEST=ON -DCMAKE_CXX_FLAGS="-Wall" #-Werror
run: |
eval cmake -B ${CI_BUILD_DIR} -G Ninja \
-DSHARK_TEST=ON -DSHARK_NO_OPENMP=${{ matrix.omp && 'OFF' || 'ON' }} -DCMAKE_CXX_FLAGS="-Wall -Werror" \
$EXTRA_CMAKE_ARGS

- name: Build
run: cmake --build build/
run: cmake --build ${CI_BUILD_DIR}

- name: Run unit tests
run: |
cd build
cd ${CI_BUILD_DIR}
ctest --output-on-failure

- name: Tar shark build before upload
run: tar cf shark-build.tar.gz ${{ env.CI_BUILD_DIR }}

- name: Upload shark build for next jobs (Linux)
if: ${{ matrix.os == 'ubuntu-latest' && matrix.omp }}
uses: actions/upload-artifact@v3
with:
name: shark-build
path: shark-build.tar.gz

initial_shark_run:
name: Initial shark run (with fixed seed)
needs: build_and_test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- uses: ./.github/actions/download-shark-build

- name: Download test datasets
run: |
mkdir -p ${TEST_INPUTS_DIR}
curl -L -o ${TEST_INPUTS_DIR}/redshifts.txt 'https://docs.google.com/uc?export=download&id=1xvNmJB_KmoBHuQz-QzdPnY0HFs7smkUB'
curl -L -o ${TEST_INPUTS_DIR}/tree_199.0.hdf5 'https://docs.google.com/uc?export=download&id=1JDK8ak13bEhzg9H9xt0uE8Fh_2LD3KpZ'

- uses: ./.github/actions/setup-config-file

- name: Run shark with fixed seed
run: .ci/run_shark.sh my_model -o execution.seed=${TEST_FIXED_SEED}

- name: Upload shark output for next jobs
uses: actions/upload-artifact@v3
with:
name: shark-output
path: ${{ env.TEST_OUTPUTS_DIR }}

- name: Upload shark inputs for next jobs
uses: actions/upload-artifact@v3
with:
name: shark-input
path: ${{ env.TEST_INPUTS_DIR }}

check_hdf5_docs:
name: Check HDF5 properties documentation is up to date
needs: initial_shark_run
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- name: Install HDF5 tools
run: sudo apt install hdf5-tools

- uses: actions/download-artifact@v3
with:
name: shark-output
path: ${{ env.TEST_OUTPUTS_DIR }}

- name: Check output properties' documentation
run: |
.ci/check_hdf5_docs.sh my_model/199/0/galaxies.hdf5 galaxies.rst
.ci/check_hdf5_docs.sh my_model/156/0/star_formation_histories.hdf5 star_formation_histories.rst
.ci/check_hdf5_docs.sh my_model/199/0/black_hole_histories.hdf5 black_hole_histories.rst

generate_plots:
name: Generate all standard plots
needs: initial_shark_run
runs-on: ubuntu-latest
# Currently skipped because there are a few minor problems
# with some plots
if: false
steps:
- uses: actions/checkout@v3

- uses: actions/download-artifact@v3
with:
name: shark-output
path: ${{ env.TEST_OUTPUTS_DIR }}

- name: Install Python
uses: actions/setup-python@v4
with:
python-version: "3.10"

- name: Install Python dependencies
run: pip install matplotlib h5py scipy

- uses: ./.github/actions/setup-config-file

- name: Generate all plots
run: |
echo "backend: Agg" >> matplotlibrc
python standard_plots/all.py -c ${{ env.TEST_CONFIG_FILE }}

check_reproducibility:
name: Check shark runs are reproducible
needs: initial_shark_run
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- uses: ./.github/actions/download-shark-build

- uses: actions/download-artifact@v3
with:
name: shark-output
path: ${{ env.TEST_OUTPUTS_DIR }}

- uses: actions/download-artifact@v3
with:
name: shark-input
path: ${{ env.TEST_INPUTS_DIR }}

- name: Install Python
uses: actions/setup-python@v4
with:
python-version: "3.10"

- name: Install Python dependencies
run: pip install h5py

- uses: ./.github/actions/setup-config-file

- name: Check fixed seed is reproducible
run: |
.ci/run_shark.sh my_model_same_seed -o execution.seed=${TEST_FIXED_SEED}
.ci/compare_galaxies.sh my_model my_model_same_seed

- name: Check fixed seed is reproducible when multithreaded
run: |
# "-t 0" lets shark use the maximum number of OpenMP threads,
# which OpenMP implementations usually constrain to the available hardware
.ci/run_shark.sh my_model_same_seed_parallel -o execution.seed=123456 -t 0
.ci/compare_galaxies.sh my_model my_model_same_seed_parallel

- name: Check random seed gives different results
run: |
.ci/run_shark.sh my_model_random_seed -t 0
.ci/compare_galaxies.sh my_model my_model_random_seed --expect-unequal
15 changes: 15 additions & 0 deletions .github/workflows/build-docs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Build the Sphinx documentation on every push and pull request so that
# documentation breakage is caught before it reaches Read the Docs.
name: Build documentation
on: [push, pull_request]
env:
  # -W turns warnings into errors; --keep-going makes sphinx-build report
  # all warnings before failing. Note the option is "--keep-going" (one
  # token): "--keep going" is not a valid sphinx-build option and leaves
  # a stray "going" positional argument.
  SPHINXOPTS: -W --keep-going

jobs:
  build-docs:
    name: Build documentation
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: ammaraskar/sphinx-action@master
        with:
          docs-folder: "doc"
          pre-build-command: 'pip install -U sphinx_rtd_theme docutils'
4 changes: 4 additions & 0 deletions .readthedocs.yaml
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.10"

submodules:
include: all
Expand Down
2 changes: 2 additions & 0 deletions .travis/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ curl -L -o input/tree_199.0.hdf5 'https://docs.google.com/uc?export=download&id=
run_shark() {
model_name=$1; shift
./shark ../sample.cfg \
-o execution.output_bh_histories=true -o execution.snapshots_bh_histories=199 \
-o simulation.redshift_file=input/redshifts.txt \
-o simulation.tree_files_prefix=input/tree_199 \
-o execution.name_model=$model_name $@ || fail "failure during execution of shark"
Expand All @@ -67,6 +68,7 @@ check_hdf5_doc() {

check_hdf5_doc 199/0/galaxies.hdf5 galaxies.rst
check_hdf5_doc 156/0/star_formation_histories.hdf5 star_formation_histories.rst
check_hdf5_doc 199/0/black_hole_histories.hdf5 black_hole_histories.rst

if [ -n "$PYTHON" ]; then

Expand Down
15 changes: 9 additions & 6 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ macro(find_boost)
# with homebrew). Let's thus not use Boost's cmake config file and stick to
# cmake's own version of it for the time being
set(Boost_NO_BOOST_CMAKE ON)
find_package(Boost 1.54 COMPONENTS filesystem log program_options system REQUIRED)
find_package(Boost 1.68 COMPONENTS filesystem log program_options system REQUIRED)
set(SHARK_LIBS ${SHARK_LIBS} ${Boost_LIBRARIES})
include_directories(${Boost_INCLUDE_DIRS})
endmacro()
Expand All @@ -185,7 +185,7 @@ endmacro()
#
macro(find_openmp)
find_package(OpenMP)
if (OPENMP_FOUND)
if (OpenMP_FOUND)

# We require at least OpenMP 2.0, so let's double check
# we have that at least
Expand All @@ -196,9 +196,11 @@ macro(find_openmp)
# and OpenMP_<lang>_LIBRARIES/LIBRARY/LIB_NAMES variables
# This is important for Intel compilers in particular, which require
# additional linking
if ((DECLARED OPENMP_VERSION) AND (NOT OPENMP_VERSION VERSION_LESS 2.0))
if ((DEFINED OpenMP_VERSION) AND (NOT OpenMP_VERSION VERSION_LESS 2.0))
set(SHARK_OPENMP ON)
elseif((DECLARED OPENMP_CXX_SPEC_DATE) AND (NOT OPENMP_CXX_SPEC_DATE LESS 200203))
elseif ((DEFINED OpenMP_CXX_VERSION) AND (NOT OpenMP_CXX_VERSION VERSION_LESS 2.0))
set(SHARK_OPENMP ON)
elseif ((DEFINED OpenMP_CXX_SPEC_DATE) AND (NOT OpenMP_CXX_SPEC_DATE LESS 200203))
set(SHARK_OPENMP ON)
else()
set(OPENMP_VERSION_CHECK_SOURCE "
Expand Down Expand Up @@ -237,9 +239,10 @@ int main(int argc, char *argv[]) {
endif()

if (SHARK_OPENMP)
add_compile_options("${OpenMP_CXX_FLAGS}")
string(REPLACE " " ";" OpenMP_CXX_FLAGS_as_list ${OpenMP_CXX_FLAGS})
add_compile_options(${OpenMP_CXX_FLAGS_as_list})
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${OpenMP_CXX_FLAGS}")
set(SHARK_LIBS ${SHARK_LIBS} ${OpenMP_CXX_LIBRARIES})
list(APPEND SHARK_LIBS ${OpenMP_CXX_LIBRARIES})
endif()
endmacro()

Expand Down
2 changes: 1 addition & 1 deletion doc/building.rst
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ Requirements

* `GSL <https://www.gnu.org/software/gsl/>`_ >= 2.0
* `HDF5 <https://support.hdfgroup.org/HDF5/>`_ >= 1.8.0
* `Boost <http://www.boost.org/>`_ >= 1.54
* `Boost <http://www.boost.org/>`_ >= 1.68

Optional requirements are:

Expand Down
Loading
Loading