name: Python Bindings
on:
push:
branches: [ master, develop, actions_pypi ]
# Sequence of patterns matched against refs/tags
tags:
- 'v*' # Push events to matching v*, e.g. v1.0, v20.15.10
pull_request:
branches: [ master ]
env:
BUILD_TYPE: Release
BUILD_CLI: OFF
BUILD_RUBY_BINDINGS: OFF
MAKE_SPACE: false
jobs:
python_bindings:
name: Build ${{ matrix.name }}
runs-on: ${{ matrix.os }}
continue-on-error: ${{ matrix.allow_failure }}
strategy:
# fail-fast defaults to true; set it to false so a failure on one platform does not cancel the other jobs
fail-fast: false
matrix:
name: [Ubuntu, macOS, Windows_py37, Windows_py38, Windows_py39, Windows_py310]
include:
- name: Ubuntu
os: ubuntu-20.04
python-version: 3.8
allow_failure: false
extra_cmake_args: "-DCONAN_FIRST_TIME_BUILD_ALL:BOOL=ON"
- name: macOS
os: macos-11
python-version: 3.8
allow_failure: false
MACOSX_DEPLOYMENT_TARGET: 10.15
SDKROOT: /Applications/Xcode_13.2.1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
DEVELOPER_DIR: /Applications/Xcode_13.2.1.app/Contents/Developer
extra_cmake_args: ""
- name: Windows_py37
os: windows-2019
python-version: 3.7
allow_failure: false
- name: Windows_py38
os: windows-2019
python-version: 3.8
allow_failure: false
- name: Windows_py39
os: windows-2019
python-version: 3.9
allow_failure: false
- name: Windows_py310
os: windows-2019
python-version: '3.10'
allow_failure: true # This one may fail; not considered critical yet
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
id: setup-python
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install conan==1.59
pip install setuptools wheel twine requests packaging
conan --version
echo "Enabling conan revisions and setting parallel_download"
conan config set general.revisions_enabled=True
conan config set general.parallel_download=8
conan remote add --insert 0 --force nrel https://conan.openstudio.net/artifactory/api/conan/openstudio
- name: Install System dependencies
shell: bash
run: |
set -x
if [ "$RUNNER_OS" == "Linux" ]; then
echo "Using Apt to install ninja"
sudo apt update
sudo apt install ninja-build
# Oddly enough, Ninja makes the Ubuntu runner unresponsive, so the generator override below stays commented out
#echo CMAKE_GENERATOR='Ninja' >> $GITHUB_ENV
# Use preinstalled gcc-10 to support C++20
#echo CC=gcc-10 >> $GITHUB_ENV
#echo CXX=g++-10 >> $GITHUB_ENV
elif [ "$RUNNER_OS" == "macOS" ]; then
echo "Setting up MACOSX_DEPLOYMENT_TARGET and SDKROOT"
echo MACOSX_DEPLOYMENT_TARGET=${{ matrix.MACOSX_DEPLOYMENT_TARGET }} >> $GITHUB_ENV
echo SDKROOT=${{ matrix.SDKROOT }} >> $GITHUB_ENV
echo DEVELOPER_DIR=${{ matrix.DEVELOPER_DIR }} >> $GITHUB_ENV
# The MACOSX_DEPLOYMENT_TARGET environment variable sets the default value for the CMAKE_OSX_DEPLOYMENT_TARGET variable.
# echo CMAKE_MACOSX_DEPLOYMENT_TARGET='-DCMAKE_OSX_DEPLOYMENT_TARGET=$MACOSX_DEPLOYMENT_TARGET' >> $GITHUB_ENV
echo "Using brew to install ninja"
brew install ninja
echo CMAKE_GENERATOR='Ninja' >> $GITHUB_ENV
elif [ "$RUNNER_OS" == "Windows" ]; then
#echo "Setting CMAKE_GENERATOR options equivalent to ='-G \"Visual Studio 16 2019\" -A x64'"
#echo CMAKE_GENERATOR='Visual Studio 16 2019' >> $GITHUB_ENV
#echo CMAKE_GENERATOR_PLATFORM=x64 >> $GITHUB_ENV
echo "Using chocolatey to install ninja"
choco install ninja
# C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise
MSVC_DIR=$(cmd.exe /c "vswhere -products * -requires Microsoft.Component.MSBuild -property installationPath -latest")
echo "Latest is: $MSVC_DIR"
echo "MSVC_DIR=$MSVC_DIR" >> $GITHUB_ENV
# add folder containing vcvarsall.bat
echo "$MSVC_DIR\VC\Auxiliary\Build" >> $GITHUB_PATH
fi;
if [[ "$GITHUB_REF" == *"refs/tags"* ]]; then
PYTHON_PIP_REPOSITORY=pypi
else
PYTHON_PIP_REPOSITORY=testpypi
fi;
echo "PYTHON_PIP_REPOSITORY=$PYTHON_PIP_REPOSITORY" >> $GITHUB_ENV
- name: Create Build Directory
run: cmake -E make_directory ./build/
- name: Clean up preinstalled software to avoid running out of disk space
if: env.MAKE_SPACE == 'true'
shell: bash
run: |
set -x
df -h || true
if [ "$RUNNER_OS" == "Windows" ]; then
# du -sh /c/Program\ Files/* | sort -rh || true
# 20G /c/Program Files/dotnet
# 959M /c/Program Files/Java
# 830M /c/Program Files/Azure Cosmos DB Emulator
# 702M /c/Program Files/MongoDB
# 619M /c/Program Files/PostgreSQL
# 472M /c/Program Files/Microsoft Service Fabric
# 448M /c/Program Files/Google
# 309M /c/Program Files/Microsoft SDKs
# 272M /c/Program Files/Amazon
# 268M /c/Program Files/R
# 267M /c/Program Files/Microsoft SQL Server
# 266M /c/Program Files/Git
# 243M /c/Program Files/PowerShell
# 238M /c/Program Files/Docker
# 205M /c/Program Files/Mozilla Firefox
# 202M /c/Program Files/Unity Hub
# 147M /c/Program Files/Android
/bin/rm -Rf /c/Program\ Files/dotnet || true
/bin/rm -Rf /c/Program\ Files/Amazon/ || true
/bin/rm -Rf /c/Program\ Files/Azure\ Cosmos\ DB\ Emulator/ || true
/bin/rm -Rf /c/Program\ Files/Android/ || true
/bin/rm -Rf /c/Program\ Files/Google/ || true
/bin/rm -Rf /c/Program\ Files/Java/ || true
/bin/rm -Rf /c/Program\ Files/Microsoft\ Service\ Fabric || true
/bin/rm -Rf /c/Program\ Files/Microsoft\ SQL\ Server || true
/bin/rm -Rf /c/Program\ Files/PostgreSQL || true
/bin/rm -Rf /c/Program\ Files/Unity\ Hub/ || true
echo $BOOST_ROOT_1_72_0
/bin/rm -Rf $BOOST_ROOT_1_72_0 || true
elif [ "$RUNNER_OS" == "Linux" ]; then
sudo apt remove -y '^ghc-8.*'
sudo apt remove -y '^dotnet-.*'
sudo apt remove -y '^llvm-.*'
sudo apt remove -y 'php.*'
sudo apt remove -y azure-cli google-cloud-sdk hhvm google-chrome-stable firefox powershell mono-devel
sudo apt autoremove -y
sudo apt clean
sudo rm -rf /usr/local/share/boost/
sudo rm -rf /usr/share/dotnet/
fi;
df -h || true
- name: Configure CMake & build (Windows)
working-directory: ./build
if: runner.os == 'Windows'
shell: cmd
run: |
echo "Using vcvarsall to initialize the development environment"
call vcvarsall.bat x64
echo "Launching a build with BUILD_PYTHON_BINDINGS=ON and BUILD_PYTHON_PIP_PACKAGE=ON, turning off as many other things as possible to speed it up"
cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ^
-DBUILD_PYTHON_BINDINGS:BOOL=ON -DBUILD_PYTHON_PIP_PACKAGE:BOOL=ON -DPYTHON_PIP_REPOSITORY:STRING=${{ env.PYTHON_PIP_REPOSITORY }} ^
-DPYTHON_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} ^
-DBUILD_TESTING:BOOL=OFF -DBUILD_RUBY_BINDINGS:BOOL=${{ env.BUILD_RUBY_BINDINGS }} -DBUILD_CLI:BOOL=${{ env.BUILD_CLI }} ../
ninja
ninja python_package
- name: Configure CMake (Unix)
working-directory: ./build
if: runner.os != 'Windows'
shell: bash
run: |
set -x
echo "Launching a build with BUILD_PYTHON_BINDINGS=ON and BUILD_PYTHON_PIP_PACKAGE=ON, turning off as many other things as possible to speed it up"
cmake ${{ matrix.extra_cmake_args }} -DCMAKE_BUILD_TYPE:STRING=Release \
-DBUILD_PYTHON_BINDINGS:BOOL=ON -DBUILD_PYTHON_PIP_PACKAGE:BOOL=ON -DPYTHON_PIP_REPOSITORY:STRING=${{ env.PYTHON_PIP_REPOSITORY }} \
-DPYTHON_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} -DPython_ROOT_DIR:PATH=$RUNNER_TOOL_CACHE/Python/${{ steps.setup-python.outputs.python-version }}/x64/ \
-DBUILD_TESTING:BOOL=OFF -DBUILD_RUBY_BINDINGS:BOOL=${{ env.BUILD_RUBY_BINDINGS }} -DBUILD_CLI:BOOL=${{ env.BUILD_CLI }} \
../
- name: Build (Unix)
working-directory: ./build
if: runner.os != 'Windows'
shell: bash
run: |
set -x
if [ "$RUNNER_OS" == "Linux" ]; then
cmake --build . --target package -j 1
elif [ "$RUNNER_OS" == "macOS" ]; then
cmake --build . --target package -j 2
fi;
# We always upload to testpypi
- name: Build python wheel
shell: bash
working-directory: ./build
run: |
cmake --build . --target python_package -j 2
cd Products/python_package/
python setup.py bdist_wheel
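# Hedged alternative sketch: the same wheel could also be produced with the
# PyPA 'build' frontend, assuming that package is installed, e.g.:
#   python -m pip install build && python -m build --wheel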
- name: Tar the wheel to maintain case sensitivity and file permissions
working-directory: ./build/Products/python_package/
shell: bash
run: |
tar -cvzf openstudio-${{ matrix.name }}.tar.gz dist/
- name: Upload .whl to artifact
uses: actions/upload-artifact@v2
with:
name: openstudio-${{ matrix.name }} # something like openstudio-Windows_py37
path: ./build/Products/python_package/openstudio-${{ matrix.name }}.tar.gz
# Only upload if all builds succeed
upload_python_bindings_to_testpypi:
needs: python_bindings
name: Upload to TestPyPI
runs-on: ubuntu-20.04
steps:
# This job has no matrix, so pin a concrete Python version for the upload tooling
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.8'
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
# urllib3 2.0 breaks the upload tooling here, so pin to the 1.x series
pip install urllib3==1.26.15
pip install setuptools wheel twine requests packaging
mkdir wheels
- name: Download ALL wheels
uses: actions/download-artifact@v2
with:
path: ./wheels
- name: Display structure of downloaded files
working-directory: ./wheels
run: |
ls -R
- name: Extract all tar.gz, then upload
working-directory: ./wheels
env:
TWINE_USERNAME: __token__
TWINE_REPOSITORY: testpypi
# Add this token to the repository on GitHub under Settings > Secrets
TWINE_PASSWORD: ${{ secrets.TESTPYPI_TOKEN }}
shell: bash
run: |
for f in **/*.tar.gz; do
mv $f .
done;
for f in *.tar.gz; do
tar -xzvf "$f";
done;
ls -R
python -m twine upload --skip-existing --repository testpypi dist/*
test_python_bindings:
needs: upload_python_bindings_to_testpypi
name: Test ${{ matrix.name }}
runs-on: ${{ matrix.os }}
continue-on-error: ${{ matrix.allow_failure }}
strategy:
# fail-fast defaults to true; set it to false so a failure on one platform does not cancel the other jobs
fail-fast: false
matrix:
name: [Ubuntu, macOS, Windows_py37, Windows_py38, Windows_py39, Windows_py310]
include:
- name: Ubuntu
os: ubuntu-20.04
python-version: 3.8
allow_failure: false
- name: macOS
os: macos-11
python-version: 3.8
allow_failure: false
- name: Windows_py37
os: windows-2019
python-version: 3.7
allow_failure: false
- name: Windows_py38
os: windows-2019
python-version: 3.8
allow_failure: false
- name: Windows_py39
os: windows-2019
python-version: 3.9
allow_failure: false
- name: Windows_py310
os: windows-2019
python-version: '3.10'
allow_failure: true # This one may fail; not considered critical yet
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install openstudio bindings from testpypi
shell: bash
run: |
set -x
python -m pip install --upgrade pip
# Note: setuptools, wheel and twine are only needed if we ever upload from within this step; otherwise they could be dropped
pip install requests packaging setuptools wheel twine
if [[ "$GITHUB_REF" == *"refs/tags"* ]]; then
bindings_v=$(python ./python/module/find_pypi_tag.py --pypi)
else
bindings_v=$(python ./python/module/find_pypi_tag.py --current)
fi;
pip install -i https://test.pypi.org/simple/ openstudio==$bindings_v
mkdir wheel
- name: Test the bindings
shell: python
run: |
import openstudio
m = openstudio.model.exampleModel()
print(m)
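# A slightly broader smoke test (hedged sketch): openStudioLongVersion() is assumed
# to be exposed by the bindings, mirroring the C++ SDK helper of the same name.
print(openstudio.openStudioLongVersion())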
# if python_bindings succeeds but test_python_bindings fails and this is a release,
# remove that version from testpypi
upload_python_bindings_to_pypi:
if: contains(github.ref, 'refs/tags')
needs: test_python_bindings
name: Upload to PyPI
runs-on: ubuntu-20.04
steps:
# This job has no matrix, so pin a concrete Python version for the upload tooling
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.8'
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine requests packaging
mkdir wheels
- name: Download ALL wheels
uses: actions/download-artifact@v2
with:
path: ./wheels
- name: Display structure of downloaded files
working-directory: ./wheels
run: |
ls -R
- name: Extract all tar.gz, then upload
working-directory: ./wheels
env:
TWINE_USERNAME: __token__
TWINE_REPOSITORY: pypi
# Add this token to the repository on GitHub under Settings > Secrets
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
shell: bash
run: |
for f in **/*.tar.gz; do
mv $f .
done;
for f in *.tar.gz; do
tar -xzvf "$f";
done;
ls -R
python -m twine upload --skip-existing --repository pypi dist/*
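# Once this publishes, end users install the bindings with pip, e.g.
# (the version number shown is hypothetical):
#   pip install openstudio==3.5.0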