Skip to content

Commit

Permalink
docs and dynamic versioning
Browse files Browse the repository at this point in the history
  • Loading branch information
bnb32 committed Oct 4, 2024
1 parent fc20841 commit 006f50e
Show file tree
Hide file tree
Showing 34 changed files with 889 additions and 182 deletions.
47 changes: 47 additions & 0 deletions .github/workflows/codecov.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
name: Codecov

on:
  push:
    branches: [main, master]

jobs:
  run:
    runs-on: ${{ matrix.os }}
    strategy:
      max-parallel: 3
      matrix:
        os: [ubuntu-latest]
    env:
      OS: ${{ matrix.os }}
      PYTHON: '3.9'
    steps:
      # NOTE(review): this workflow triggers on push, so
      # github.event.pull_request.head.ref is empty here and checkout falls
      # back to the pushed commit — confirm the ref input is intended.
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          fetch-depth: 0
          fetch-tags: true
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML keeps the version a string
          # (an unquoted 3.10 would parse as the float 3.1).
          python-version: '3.9'
      - name: install dependencies
        run: |
          python -m pip install --upgrade pip
          # NOTE(review): setup-python (not miniconda) is used above — confirm
          # `conda` is actually on PATH on this runner image.
          conda install hdf4
          python -m pip install pyhdf
          python -m pip install pytest
          python -m pip install pytest-cov
          python -m pip install .
      - name: Generate coverage report
        run: |
          python -m pytest --cov=./ --cov-report=xml:coverage.xml
      - name: Upload coverage to Codecov
        # NOTE(review): the action reference below was mangled by the page
        # scrape (email obfuscation); presumably codecov/codecov-action@<tag>
        # — restore the real tag from the repository before use.
        uses: codecov/[email protected]
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ./coverage.xml
          flags: unittests
          env_vars: OS,PYTHON
          name: codecov-umbrella
          fail_ci_if_error: false
          verbose: true
37 changes: 37 additions & 0 deletions .github/workflows/gh_pages.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
name: Documentation

on:
  push:
    branches: [main, master]

jobs:
  make-pages:
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): push-triggered workflow — pull_request.head.ref is empty
      # here, so checkout defaults to the pushed commit. Confirm intent.
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          fetch-depth: 0
          fetch-tags: true
      - name: select python version
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML keeps the version a string
          # (an unquoted 3.10 would parse as the float 3.1).
          python-version: '3.9'
      - name: install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install .
          python -m pip install sphinx
          python -m pip install sphinx_rtd_theme
          python -m pip install sphinx-click
      - name: build documentation
        run: |
          cd docs
          make html
      - name: deploy
        # NOTE(review): the action reference below was mangled by the page
        # scrape (email obfuscation); presumably
        # peaceiris/actions-gh-pages@<tag> — restore the real tag before use.
        uses: peaceiris/[email protected]
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs/_build/html
          force_orphan: true
          full_commit_message: ${{ github.event.head_commit.message }}
27 changes: 8 additions & 19 deletions .github/workflows/linter.yml
Original file line number Diff line number Diff line change
@@ -1,30 +1,19 @@
name: Lint Code Base

on:
workflow_dispatch:
push:
branches-ignore:
- 'gh-pages'

jobs:
build:
name: Lint Code Base
ruff:
name: Ruff
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: chartboost/ruff-action@v1
with:
fetch-depth: 0
- name: Lint Code Base
uses: super-linter/super-linter@v4
env:
VALIDATE_ALL_CODEBASE: false
VALIDATE_PYTHON_BLACK: false
VALIDATE_PYTHON_ISORT: false
VALIDATE_PYTHON_MYPY: false
VALIDATE_DOCKERFILE_HADOLINT: false
VALIDATE_JSCPD: false
VALIDATE_JSON: false
VALIDATE_MARKDOWN: false
VALIDATE_YAML: false
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
version: 0.4.10
args: check --output-format=github
src: "./mlclouds ./tests"
2 changes: 1 addition & 1 deletion .github/workflows/publish_to_pypi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install setuptools build
python -m build --sdist --wheel --outdir dist/ .
python -m build --wheel --outdir dist/ .
- name: Check distribution files
run: |
pip install twine
Expand Down
56 changes: 15 additions & 41 deletions .github/workflows/pull_request_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,62 +2,36 @@ name: Pytests

on: pull_request


jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
python-version: ['3.10']
python-version: ['3.11']
include:
- os: ubuntu-latest
python-version: 3.9
python-version: '3.10'
- os: ubuntu-latest
python-version: '3.9'

steps:
- name: checkout mlclouds
uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
path: mlclouds
- name: checkout rest2
uses: actions/checkout@v2
with:
repository: nrel/rest2
ssh-key: ${{ secrets.SSH_KEY }}
path: rest2
- name: checkout nsrdb
uses: actions/checkout@v2
with:
repository: nrel/nsrdb
ssh-key: ${{ secrets.SSH_KEY }}
path: nsrdb
fetch-depth: 0
fetch-tags: true
- name: Set up Python ${{ matrix.python-version }}
uses: conda-incubator/setup-miniconda@v2
uses: actions/setup-python@v5
with:
auto-update-conda: true
python-version: ${{ matrix.python-version }}
- name: Install rest2 dependencies
working-directory: ./rest2
shell: bash -l {0}
run: |
conda install pip
pip install -e .
- name: Install nsrdb dependencies
working-directory: ./nsrdb
shell: bash -l {0}
run: |
conda install hdf4
conda install -c conda-forge pyhdf
pip install -e .
- name: Install mlclouds dependencies
working-directory: ./mlclouds
shell: bash -l {0}
cache: 'pip'
- name: Install dependencies
run: |
pip install -e .
pip install pytest
- name: Run mlclouds pytest
working-directory: ./mlclouds
shell: bash -l {0}
python -m pip install --upgrade pip
python -m pip install .[test]
- name: Run pytest
run: |
pytest -v --disable-warnings
python -m pytest -v --disable-warnings
14 changes: 14 additions & 0 deletions .github/workflows/release_drafter.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
name: Release Drafter

on:
  push:
    branches: [main, master]

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      - name: Release Drafter
        # NOTE(review): the action reference below was mangled by the page
        # scrape (email obfuscation); presumably
        # release-drafter/release-drafter@<tag> — restore the real tag.
        uses: release-drafter/[email protected]
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ instance/

# Sphinx documentation
docs/_build/
docs/source/_autosummary/

# PyBuilder
target/
Expand Down
63 changes: 57 additions & 6 deletions README.rst
Original file line number Diff line number Diff line change
@@ -1,17 +1,68 @@
####################
Welcome to MLClouds!
####################
|Docs| |Tests| |Linter| |PyPi| |PythonV| |Codecov| |Zenodo|

.. |Docs| image:: https://github.com/NREL/mlclouds/workflows/Documentation/badge.svg
:target: https://nrel.github.io/mlclouds/

.. |Tests| image:: https://github.com/NREL/mlclouds/workflows/Pytests/badge.svg
:target: https://github.com/NREL/mlclouds/actions?query=workflow%3A%22Pytests%22

.. |Linter| image:: https://github.com/NREL/mlclouds/workflows/Lint%20Code%20Base/badge.svg
:target: https://github.com/NREL/mlclouds/actions?query=workflow%3A%22Lint+Code+Base%22

.. |PyPi| image:: https://img.shields.io/pypi/pyversions/NREL-mlclouds.svg
:target: https://pypi.org/project/NREL-mlclouds/

.. |PythonV| image:: https://badge.fury.io/py/NREL-mlclouds.svg
:target: https://badge.fury.io/py/NREL-mlclouds

.. |Codecov| image:: https://codecov.io/gh/nrel/mlclouds/branch/main/graph/badge.svg
:target: https://codecov.io/gh/nrel/mlclouds

.. |Zenodo| image:: https://zenodo.org/badge/340209614.svg
:target: https://zenodo.org/badge/latestdoi/340209614

.. inclusion-intro
A machine learning approach to predicting missing cloud properties in the National Solar Radiation Database (NSRDB)
====================================================================================================================
-------------------------------------------------------------------------------------------------------------------

The National Solar Radiation Database (NSRDB) is NRELs flagship solar data resource. With over 20 years of high-resolution surface irradiance
The National Solar Radiation Database (NSRDB) is NREL's flagship solar data resource. With over 20 years of high-resolution surface irradiance
data covering most of the western hemisphere, the NSRDB is a crucial public data asset. A fundamental input to accurate surface irradiance in the
NSRDB is high quality cloud property data. Cloud properties are used in radiative transfer calculations and are sourced from satellite imagery.
Improving the accuracy of cloud property inputs is a tractable method for improving the accuracy of the irradiance data in the NSRDB. For example,
in July of 2018, an average location in the Continental United States is missing cloud property data for nearly one quarter of all daylight cloudy timesteps.
This project aims to improve the cloud data inputs to the NSRDB by using machine learning techniques to exploit the NSRDBs massive data resources.
This project aims to improve the cloud data inputs to the NSRDB by using machine learning techniques to exploit the NSRDB's massive data resources.
More accurate cloud property input data will yield more accurate surface irradiance data in the NSRDB, providing direct benefit to researchers at NREL
and to public data users everywhere.

Installation
------------
============

It is recommended that you first follow the `install instructions for the NSRDB <https://github.com/NREL/nsrdb>`_.
Then run `pip install -e .` from the mlclouds directory containing setup.py.
If you are a developer, also run `pre-commit install` in the same directory.
Then run ``pip install -e .`` from the mlclouds directory containing ``setup.py``.
If you are a developer, also run ``pre-commit install`` in the same directory.


Acknowledgments
===============

This work (SWR-23-77) was authored by the National Renewable Energy Laboratory,
operated by Alliance for Sustainable Energy, LLC, for the U.S. Department of
Energy (DOE) under Contract No. DE-AC36-08GO28308. Funding provided by the DOE
Grid Deployment Office (GDO), the DOE Advanced Scientific Computing Research
(ASCR) program, the DOE Solar Energy Technologies Office (SETO), the DOE Wind
Energy Technologies Office (WETO), the United States Agency for International
Development (USAID), and the Laboratory Directed Research and Development
(LDRD) program at the National Renewable Energy Laboratory. The research was
performed using computational resources sponsored by the Department of Energy's
Office of Energy Efficiency and Renewable Energy and located at the National
Renewable Energy Laboratory. The views expressed in the article do not
necessarily represent the views of the DOE or the U.S. Government. The U.S.
Government retains and the publisher, by accepting the article for publication,
acknowledges that the U.S. Government retains a nonexclusive, paid-up,
irrevocable, worldwide license to publish or reproduce the published form of
this work, or allow others to do so, for U.S. Government purposes.
29 changes: 29 additions & 0 deletions docs/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Minimal makefile for Sphinx documentation
#
# NOTE(review): recipe lines below are tab-indented as make requires; the
# scraped page had flattened all indentation.

# You can set these variables from the command line.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
# Build output and autosummary stubs are removed first so every build
# starts clean (stale autosummary pages otherwise linger).
%: Makefile
	rm -rf _build
	rm -rf source/_autosummary
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

# Manual gh-pages publish: rebuild docs, recreate the gh-pages branch from
# the built HTML via ghp-import, push it, then return to the original branch.
# Leading "-" lets the branch-delete steps fail harmlessly on first run.
github: html
	-git branch -D gh-pages
	-git push origin --delete gh-pages
	ghp-import -n -b gh-pages -m "Update documentation" ./_build/html
	git checkout gh-pages
	git push --set-upstream origin gh-pages
	git checkout ${BRANCH}
Loading

0 comments on commit 006f50e

Please sign in to comment.