Updated os-extension and os-standards for rc3. #713

Workflow file for this run

name: Python Bindings

on:
  push:
    branches: [ master, actions_pypi ]
    # Sequence of patterns matched against refs/tags
    tags:
      - 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
  pull_request:
    branches: [ master ]

env:
  BUILD_TYPE: Release
  BUILD_CLI: OFF
  BUILD_RUBY_BINDINGS: OFF
  MAKE_SPACE: false
jobs:
  python_bindings:
    name: Build ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    continue-on-error: ${{ matrix.allow_failure }}
    strategy:
      # fail-fast: default is true; switch to false so one platform can fail while the others still run
      fail-fast: false
      matrix:
        name: [Ubuntu, macOS, macOS_arm64, Windows_py37, Windows_py38, Windows_py39, Windows_py310, Windows_py311, Windows_py312]
        include:
          - name: Ubuntu
            os: ubuntu-20.04
            python-version: 3.8
            allow_failure: false
          - name: macOS
            os: macos-11
            python-version: 3.8
            allow_failure: false
            MACOSX_DEPLOYMENT_TARGET: 10.15
            SDKROOT: /Applications/Xcode_13.2.1.app
          - name: macOS_arm64
            os: macos-14
            python-version: 3.8
            allow_failure: false
            MACOSX_DEPLOYMENT_TARGET: 12.1
            SDKROOT: /Applications/Xcode_14.3.1.app
          - name: Windows_py37
            os: windows-2019
            python-version: 3.7
            allow_failure: false
          - name: Windows_py38
            os: windows-2019
            python-version: 3.8
            allow_failure: false
          - name: Windows_py39
            os: windows-2019
            python-version: 3.9
            allow_failure: false
          - name: Windows_py310
            os: windows-2019
            python-version: '3.10'
            allow_failure: false
          - name: Windows_py311
            os: windows-2019
            python-version: '3.11'
            allow_failure: false
          - name: Windows_py312
            os: windows-2019
            python-version: '3.12'
            allow_failure: true # This may fail; don't care yet
    steps:
      - uses: actions/checkout@v4
        # with:
        #   ref: develop
      - name: Set up Python ${{ matrix.python-version }}
        id: setup-python
        # 3.10 is the first version with a prebuilt binary for macos-14 (arm64) on actions/setup-python, so I forked it and actions/python-versions to add 3.8
        uses: jmarrec/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System dependencies
        shell: bash
        run: |
          set -x
          if [ "$RUNNER_OS" == "Linux" ]; then
            echo "Using Apt to install ninja"
            sudo apt update
            sudo apt install ninja-build
            # Weirdly enough, ninja makes ubuntu unresponsive...
            #echo CMAKE_GENERATOR='Ninja' >> $GITHUB_ENV
            # Use preinstalled gcc-10 to support C++20
            #echo CC=gcc-10 >> $GITHUB_ENV
            #echo CXX=g++-10 >> $GITHUB_ENV
          elif [ "$RUNNER_OS" == "macOS" ]; then
            echo "Setting up MACOSX_DEPLOYMENT_TARGET and SDKROOT"
            sudo xcode-select -switch "${{ matrix.SDKROOT }}"
            sdk_path=$(xcrun --sdk macosx --show-sdk-path)
            echo "sdk_path=$sdk_path"
            # The MACOSX_DEPLOYMENT_TARGET environment variable sets the default value for the CMAKE_OSX_DEPLOYMENT_TARGET variable.
            echo MACOSX_DEPLOYMENT_TARGET=${{ matrix.MACOSX_DEPLOYMENT_TARGET }} >> $GITHUB_ENV
            echo "Using brew to install ninja"
            brew install ninja
            echo CMAKE_GENERATOR='Ninja' >> $GITHUB_ENV
          elif [ "$RUNNER_OS" == "Windows" ]; then
            #echo "Setting CMAKE_GENERATOR options equivalent to ='-G \"Visual Studio 16 2019\" -A x64'"
            #echo CMAKE_GENERATOR='Visual Studio 16 2019' >> $GITHUB_ENV
            #echo CMAKE_GENERATOR_PLATFORM=x64 >> $GITHUB_ENV
            echo "Using chocolatey to install ninja"
            choco install ninja
            # C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise
            MSVC_DIR=$(vswhere -products '*' -requires Microsoft.Component.MSBuild -property installationPath -latest)
            echo "Latest is: $MSVC_DIR"
            echo "MSVC_DIR=$MSVC_DIR" >> $GITHUB_ENV
            # add folder containing vcvarsall.bat
            echo "$MSVC_DIR\VC\Auxiliary\Build" >> $GITHUB_PATH
          fi;
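          # Pick the target pip repository: tagged builds (refs/tags) use pypi, everything else uses testpypi; this is passed to CMake below as PYTHON_PIP_REPOSITORY.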
if [[ "$GITHUB_REF" == *"refs/tags"* ]]; then
PYTHON_PIP_REPOSITORY=pypi
else
PYTHON_PIP_REPOSITORY=testpypi
fi;
echo "PYTHON_PIP_REPOSITORY=$PYTHON_PIP_REPOSITORY" >> $GITHUB_ENV
- name: Install Conan
shell: bash
run: |
begin_group() { echo -e "::group::\033[93m$1\033[0m"; }
begin_group "Install other python dependencies"
python -m pip install --upgrade pip
pip install setuptools wheel twine requests packaging
echo -e "::endgroup::"
set -x
begin_group "Install conan 2"
python --version
pip install 'conan>2'
conan --version
echo -e "::endgroup::"
begin_group "Default profile"
if [ "$RUNNER_OS" == "Windows" ]; then
DIR_SEP="\\"
else
DIR_SEP="/"
fi
echo DIR_SEP=$DIR_SEP >> $GITHUB_ENV
export CONAN_USER_HOME="${{ github.workspace }}${DIR_SEP}conan-cache"
echo CONAN_USER_HOME="$CONAN_USER_HOME" >> $GITHUB_ENV
export CONAN_HOME="$CONAN_USER_HOME${DIR_SEP}.conan2"
echo CONAN_HOME="$CONAN_HOME" >> $GITHUB_ENV
export CONAN_PROFILE_DEFAULT="$CONAN_HOME${DIR_SEP}profiles${DIR_SEP}default"
echo CONAN_PROFILE_DEFAULT="$CONAN_PROFILE_DEFAULT" >> $GITHUB_ENV
conan profile detect --force --name default
cat $CONAN_PROFILE_DEFAULT
# Mac has the FreeBSD flavor of sed and MUST take a backup suffix...
sed -i.bak 's/cppstd=.*$/cppstd=20/g' $CONAN_PROFILE_DEFAULT
sed -i.bak 's/build_type=.*$/build_type=${{ env.BUILD_TYPE }}/g' $CONAN_PROFILE_DEFAULT
# Windows only
sed -i.bak 's/compiler.runtime_type=.*$/compiler.runtime_type=${{ env.BUILD_TYPE }}/g' $CONAN_PROFILE_DEFAULT
rm -Rf $CONAN_PROFILE_DEFAULT.bak || true
conan profile show
echo -e "::endgroup::"
begin_group "Global.conf"
echo "core:non_interactive = True" >> $CONAN_HOME/global.conf
echo "core.download:parallel = {{os.cpu_count() - 2}}" >> $CONAN_HOME/global.conf
echo "core.sources:download_cache = $CONAN_USER_HOME/.conan-download-cache" >> $CONAN_HOME/global.conf
cat $CONAN_HOME/global.conf
echo -e "::endgroup::"
begin_group "Remotes"
conan remote add --force nrel-v2 https://conan.openstudio.net/artifactory/api/conan/conan-v2
conan remote list
echo -e "::endgroup::"
- name: Create Build Directory
run: cmake -E make_directory ./build/
- name: Clean up some stuff to avoid running out of disk space
if: env.MAKE_SPACE == 'true'
shell: bash
run: |
set -x
df -h || true
if [ "$RUNNER_OS" == "Windows" ]; then
# du -sh /c/Program\ Files/* | sort -rh || true
# 20G /c/Program Files/dotnet
# 959M /c/Program Files/Java
# 830M /c/Program Files/Azure Cosmos DB Emulator
# 702M /c/Program Files/MongoDB
# 619M /c/Program Files/PostgreSQL
# 472M /c/Program Files/Microsoft Service Fabric
# 448M /c/Program Files/Google
# 309M /c/Program Files/Microsoft SDKs
# 272M /c/Program Files/Amazon
# 268M /c/Program Files/R
# 267M /c/Program Files/Microsoft SQL Server
# 266M /c/Program Files/Git
# 243M /c/Program Files/PowerShell
# 238M /c/Program Files/Docker
# 205M /c/Program Files/Mozilla Firefox
# 202M /c/Program Files/Unity Hub
# 147M /c/Program Files/Android
/bin/rm -Rf /c/Program\ Files/dotnet || true
/bin/rm -Rf /c/Program\ Files/Amazon/ || true
/bin/rm -Rf /c/Program\ Files/Azure\ Cosmos\ DB\ Emulator/ || true
/bin/rm -Rf /c/Program\ Files/Android/ || true
/bin/rm -Rf /c/Program\ Files/Google/ || true
/bin/rm -Rf /c/Program\ Files/Java/ || true
/bin/rm -Rf /c/Program\ Files/Microsoft\ Service\ Fabric || true
/bin/rm -Rf /c/Program\ Files/Microsoft\ SQL\ Server || true
/bin/rm -Rf /c/Program\ Files/PostgreSQL || true
/bin/rm -Rf /c/Program\ Files/Unity\ Hub/ || true
echo $BOOST_ROOT_1_72_0
/bin/rm -Rf $BOOST_ROOT_1_72_0 || true
elif [ "$RUNNER_OS" == "Linux" ]; then
sudo apt remove -y '^ghc-8.*'
sudo apt remove -y '^dotnet-.*'
sudo apt remove -y '^llvm-.*'
sudo apt remove -y 'php.*'
sudo apt remove -y azure-cli google-cloud-sdk hhvm google-chrome-stable firefox powershell mono-devel
sudo apt autoremove -y
sudo apt clean
sudo rm -rf /usr/local/share/boost/
sudo rm -rf /usr/share/dotnet/
fi;
df -h || true
- name: Configure CMake & build (Windows)
if: runner.os == 'Windows'
shell: cmd
run: |
echo "::group::Conan Install"
          conan install . --output-folder=./build --build=missing -c tools.cmake.cmaketoolchain:generator=Ninja -s compiler.cppstd=20 -s build_type=Release -o with_testing=False -o with_benchmark=False
          echo "::endgroup::"
          echo "::group::CMake Configure"
          call ./build/conanbuild.bat
          cmake --preset conan-release ^
            -DBUILD_PYTHON_BINDINGS:BOOL=ON -DBUILD_PYTHON_PIP_PACKAGE:BOOL=ON -DPYTHON_PIP_REPOSITORY:STRING=${{ env.PYTHON_PIP_REPOSITORY }} ^
            -DPYTHON_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} ^
            -DBUILD_TESTING:BOOL=OFF -DBUILD_RUBY_BINDINGS:BOOL=${{ env.BUILD_RUBY_BINDINGS }} -DBUILD_CLI:BOOL=${{ env.BUILD_CLI }}
          echo "::endgroup::"
          echo "::group::Build"
          cmake --build --preset conan-release
          echo "::endgroup::"
      - name: Configure CMake & build (Unix)
        if: runner.os != 'Windows'
        shell: bash
        run: |
          begin_group() { echo -e "::group::\033[93m$1\033[0m"; }
          set -x
          begin_group "Conan install"
          if [ "$RUNNER_OS" == "macOS" ]; then
            # Avoid "builtin __has_nothrow_assign is deprecated; use __is_nothrow_assignable instead" in boost/1.79 with recent clang
            conan install . --output-folder=./build --build=missing -c tools.cmake.cmaketoolchain:generator=Ninja -s compiler.cppstd=20 -s build_type=Release -o with_testing=False -o with_benchmark=False -c tools.build:cxxflags="['-D_LIBCPP_ENABLE_CXX17_REMOVED_UNARY_BINARY_FUNCTION']"
          else
            conan install . --output-folder=./build --build=missing -c tools.cmake.cmaketoolchain:generator=Ninja -s compiler.cppstd=20 -s build_type=Release -o with_testing=False -o with_benchmark=False
          fi
          echo -e "::endgroup::"
          begin_group "CMake Configure"
          cmake --preset conan-release \
            -DBUILD_PYTHON_BINDINGS:BOOL=ON -DBUILD_PYTHON_PIP_PACKAGE:BOOL=ON -DPYTHON_PIP_REPOSITORY:STRING=${{ env.PYTHON_PIP_REPOSITORY }} \
            -DPYTHON_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} \
            -DPython_ROOT_DIR:PATH=$RUNNER_TOOL_CACHE/Python/${{ steps.setup-python.outputs.python-version }}/x64/ \
            -DBUILD_TESTING:BOOL=OFF -DBUILD_RUBY_BINDINGS:BOOL=${{ env.BUILD_RUBY_BINDINGS }} -DBUILD_CLI:BOOL=${{ env.BUILD_CLI }}
          echo -e "::endgroup::"
          begin_group "Build"
          cmake --build --preset conan-release
          echo -e "::endgroup::"
      # We always upload to testpypi
      - name: Build python wheel
        shell: bash
        working-directory: ./build
        run: |
          cmake --build --preset conan-release --target python_package
          cd Products/python_package/
          python setup.py bdist_wheel
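      # actions/upload-artifact stores files in a zip, which does not preserve execute permissions or case-only filename differences, hence the tar.gz in the next step.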
      - name: Tar the wheel to maintain case sensitivity and file permissions
        working-directory: ./build/Products/python_package/
        shell: bash
        run: |
          tar -cvzf openstudio-${{ matrix.name }}.tar.gz dist/
      - name: Upload .whl to artifact
        uses: actions/upload-artifact@v4
        with:
          name: openstudio-${{ matrix.name }} # something like openstudio-Windows_py37
          path: ./build/Products/python_package/openstudio-${{ matrix.name }}.tar.gz
  # Only upload if all builds succeed
  upload_python_bindings_to_testpypi:
    needs: python_bindings
    name: Upload to TestPyPI
    runs-on: ubuntu-20.04
    steps:
      - name: Set up Python
        uses: actions/setup-python@v5
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          # urllib3 2.0 breaks the upload tooling here, so pin to 1.x
          pip install urllib3==1.26.15
          pip install setuptools wheel twine requests packaging
          mkdir wheels
      - name: Download ALL wheels
        uses: actions/download-artifact@v4
        with:
          path: ./wheels
      - name: Display structure of downloaded files
        working-directory: ./wheels
        run: |
          ls -R
      - name: Extract all tar.gz, then upload
        working-directory: ./wheels
        env:
          TWINE_USERNAME: __token__
          TWINE_REPOSITORY: testpypi
          # Add this token under the repo's Settings > Secrets on GitHub
          TWINE_PASSWORD: ${{ secrets.TESTPYPI_TOKEN }}
        shell: bash
        run: |
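          # download-artifact puts each artifact in its own sub-directory; flatten them, extract every tarball (each contains a dist/ folder), then upload all wheels in one go.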
          for f in **/*.tar.gz; do
            mv $f .
          done;
          for f in *.tar.gz; do
            tar -xzvf "$f";
          done;
          ls -R
          python -m twine upload --skip-existing --repository testpypi dist/*
  test_python_bindings:
    needs: upload_python_bindings_to_testpypi
    name: Test ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    continue-on-error: ${{ matrix.allow_failure }}
    strategy:
      # fail-fast: default is true; switch to false so one platform can fail while the others still run
      fail-fast: false
      matrix:
        name: [Ubuntu, macOS, macOS_arm64, Windows_py37, Windows_py38, Windows_py39, Windows_py310, Windows_py311, Windows_py312]
        include:
          - name: Ubuntu
            os: ubuntu-20.04
            python-version: 3.11 # Deliberately using a newer version than the one built with for Unix
            allow_failure: false
          - name: macOS
            os: macos-11
            python-version: 3.11
            allow_failure: false
          - name: macOS_arm64
            os: macos-14
            python-version: 3.11
            allow_failure: false
          - name: Windows_py37
            os: windows-2019
            python-version: 3.7
            allow_failure: false
          - name: Windows_py38
            os: windows-2019
            python-version: 3.8
            allow_failure: false
          - name: Windows_py39
            os: windows-2019
            python-version: 3.9
            allow_failure: false
          - name: Windows_py310
            os: windows-2019
            python-version: '3.10'
            allow_failure: false
          - name: Windows_py311
            os: windows-2019
            python-version: '3.11'
            allow_failure: false
          - name: Windows_py312
            os: windows-2019
            python-version: '3.12'
            allow_failure: true # This may fail; don't care yet
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install openstudio bindings from testpypi
        shell: bash
        run: |
          set -x
          python -m pip install --upgrade pip
          # Note: if we choose to do ALL or nothing, remove setuptools wheel twine (not needed unless trying to upload within this step)
          pip install requests packaging setuptools wheel twine
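          # find_pypi_tag.py (helper script in this repo) resolves which openstudio version to install: --pypi for tagged releases, --current for the development build that was just pushed to TestPyPI.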
if [[ "$GITHUB_REF" == *"refs/tags"* ]]; then
bindings_v=$(python ./python/module/find_pypi_tag.py --pypi)
else
bindings_v=$(python ./python/module/find_pypi_tag.py --current)
fi;
pip install -i https://test.pypi.org/simple/ openstudio==$bindings_v
mkdir wheel
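      # Smoke test: import the wheel that was just installed from TestPyPI and build the example model.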
      - name: Test the bindings
        shell: python
        run: |
          import openstudio
          m = openstudio.model.exampleModel()
          print(m)
  # if python_bindings succeeds but test_python_bindings fails and this is a release,
  # remove that version from testpypi
  upload_python_bindings_to_pypi:
    if: contains(github.ref, 'refs/tags')
    needs: test_python_bindings
    name: Upload to PyPI
    runs-on: ubuntu-20.04
    steps:
      - name: Set up Python
        uses: actions/setup-python@v5
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install setuptools wheel twine requests packaging
          mkdir wheels
      - name: Download ALL wheels
        uses: actions/download-artifact@v4
        with:
          path: ./wheels
      - name: Display structure of downloaded files
        working-directory: ./wheels
        run: |
          ls -R
      - name: Extract all tar.gz, then upload
        working-directory: ./wheels
        env:
          TWINE_USERNAME: __token__
          TWINE_REPOSITORY: pypi
          # Add this token under the repo's Settings > Secrets on GitHub
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
        shell: bash
        run: |
          for f in **/*.tar.gz; do
            mv $f .
          done;
          for f in *.tar.gz; do
            tar -xzvf "$f";
          done;
          ls -R
          python -m twine upload --skip-existing --repository pypi dist/*