name: Publish sdist and wheels for macos, manylinux, and windows, publish to pypi if a release
on: [pull_request, push]
env:
  apt_options: -o Acquire::Retries=3
  CIBW_BUILD_VERBOSITY: 3
  CIBW_BUILD: 'cp*'
  CIBW_SKIP: 'cp35-* cp36-* cp37-* *-manylinux_i686 *-musllinux_* *-win32'
  CIBW_BEFORE_TEST: pip install -r {project}/tests/requirement_tests.txt
  CIBW_TEST_COMMAND: pytest -s -v {project}/tests
  UNIXY_AEC_VERSION: 1.0.4
  UNIXY_HDF5_VERSION: 1.14.2
  WINDOWS_HDF5: 1.14.2
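# Note: CIBW_BUILD 'cp*' combined with CIBW_SKIP above means cibuildwheel
# effectively builds 64-bit, glibc-based wheels for CPython 3.8 and newer,
# and each wheel is smoke-tested with pytest via CIBW_BEFORE_TEST /
# CIBW_TEST_COMMAND. The "if:" guards on the jobs below keep the
# [pull_request, push] trigger from running everything twice for pull
# requests opened from branches of this repository.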
jobs:
  build_wheels:
    name: Build wheels on ${{ matrix.os }}
    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-20.04, macos-11, windows-2022]
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: 'true'
      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Install cibuildwheel
        run: |
          python -m pip install cibuildwheel
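      # On Linux and macOS, HDF5 (and, presumably, libaec, given
      # UNIXY_AEC_VERSION above) is built from source by ci/hdf5-build.sh;
      # the resulting src-cache/ directory is cached per-OS so later runs
      # can skip the rebuild.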
      - name: Cache HDF5 On Linux/macOS
        if: runner.os == 'Linux' || runner.os == 'macOS'
        uses: actions/cache@v3
        id: cache-hdf5-posix
        env:
          cache-name: cache-hdf5-posix
        with:
          path: src-cache/
          key: ${{ runner.os }}-build-${{ env.cache-name }}
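      # Linux wheels are built inside the manylinux2014 container;
      # CIBW_BEFORE_ALL builds HDF5 into the cached directory, and HDF5_ROOT
      # is forwarded into the container via CIBW_ENVIRONMENT_PASS so CMake
      # can locate that install.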
      - name: Build wheels on Linux
        if: runner.os == 'Linux'
        env:
          CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
          CIBW_BEFORE_ALL: |
            yum -y install zlib-devel
            # CMake complains if the dependencies come from within the same tree
            # as the source, so we'll just pretend they are elsewhere
            mkdir -p $PWD/src-cache
            ln -s $PWD/src-cache /opt/src-cache
            bash ci/hdf5-build.sh /opt/src-cache
        run: |
          export HDF5_ROOT=/opt/src-cache/install-
          export CIBW_ENVIRONMENT_PASS=HDF5_ROOT
          python -m cibuildwheel --output-dir dist
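      # macOS runs cibuildwheel twice: one pass cross-compiles arm64 wheels
      # against an 11.0 deployment target, a second pass builds x86_64 wheels.
      # CIBW_ENVIRONMENT_PASS forwards HDF5_ROOT and the architecture/target
      # variables into the build environment so CMake picks them up.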
      - name: Build wheels Mac OS
        if: runner.os == 'macOS'
        env:
          CIBW_ENVIRONMENT_PASS: "HDF5_ROOT CMAKE_OSX_ARCHITECTURES MACOSX_DEPLOYMENT_TARGET"
          CIBW_BEFORE_BUILD: |
            # CMake complains if the dependencies come from within the same tree
            # as the source, so we'll just pretend they are elsewhere
            mkdir -p $PWD/src-cache
            ln -s $PWD/src-cache /Users/runner/work/src-cache
            bash ci/hdf5-build.sh /Users/runner/work/src-cache
        run: |
          # x86_64 macOS allows for cross compilation; first we do arm64, only
          # for the 11.0 target, and store the compiled code in the cache...
          export CIBW_ARCHS_MACOS="arm64"
          export CMAKE_OSX_ARCHITECTURES="arm64"
          export MACOSX_DEPLOYMENT_TARGET="11.0"
          export HDF5_ROOT=/Users/runner/work/src-cache/install-$CIBW_ARCHS_MACOS
          python -m cibuildwheel --output-dir dist
          # ...and now we do both targets for x86_64
          export CIBW_ARCHS_MACOS="x86_64"
          export CMAKE_OSX_ARCHITECTURES="x86_64"
          unset MACOSX_DEPLOYMENT_TARGET
          export HDF5_ROOT=/Users/runner/work/src-cache/install-$CIBW_ARCHS_MACOS
          python -m cibuildwheel --output-dir dist
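      # On Windows a prebuilt HDF5 binary release is downloaded once and
      # cached, keyed on the WINDOWS_HDF5 version, instead of building HDF5
      # from source.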
      - name: Cache HDF5 On Windows
        if: runner.os == 'Windows'
        id: cache-hdf5-windows
        uses: actions/cache@v3
        env:
          cache-name: cache-hdf5-windows
        with:
          path: C:\cache\hdf5\${{ env.WINDOWS_HDF5 }}
          key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.WINDOWS_HDF5 }}
      - name: Fill Windows Cache
        if: runner.os == 'Windows' && steps.cache-hdf5-windows.outputs.cache-hit != 'true'
        continue-on-error: false
        shell: cmd
        run: |
          mkdir C:\cache\hdf5\${{ env.WINDOWS_HDF5 }}
          curl -L -o C:\cache\hdf5\${{ env.WINDOWS_HDF5 }}\hdf5.zip https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.14/hdf5-1.14.2/bin/windows/hdf5-1.14.2-Std-win10_64-vs16.zip
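      # The cached zip is unpacked, the HDF5 MSI inside it is installed to
      # C:\hdf5 via an msiexec administrative install, and HDF5_DIR points
      # the CMake builds run by cibuildwheel at the installed package
      # configuration files.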
      - name: Build wheels Windows
        if: runner.os == 'Windows'
        shell: cmd
        # an older MSVC toolset (14.1) is pinned so that fewer vc_redist installs
        # are required; some of the information is available here:
        # https://docs.microsoft.com/en-us/cpp/build/building-on-the-command-line
        # but tracking down the 14.x magic numbers (and especially the newer ones)
        # isn't always easy; this has a partial table:
        # https://devblogs.microsoft.com/cppblog/side-by-side-minor-version-msvc-toolsets-in-visual-studio-2017/
        run: |
          C:\windows\system32\tar.exe xf C:\cache\hdf5\${{ env.WINDOWS_HDF5 }}\hdf5.zip
          start /wait msiexec /a "%cd%\hdf\HDF5-${{ env.WINDOWS_HDF5 }}-win64.msi" /qn TARGETDIR="c:\hdf5\"
          call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat" -vcvars_ver=14.1
          set HDF5_DIR=C:\hdf5\HDF_Group\HDF5\${{ env.WINDOWS_HDF5 }}\cmake
          python -m cibuildwheel --output-dir dist
      - name: Store wheel as artifact
        uses: actions/upload-artifact@v3
        with:
          name: dist
          path: dist/*.whl
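  # The sdist is built separately on Ubuntu and smoke-tested by installing
  # the tarball against the distribution's libhdf5-dev and running the same
  # pytest suite.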
  build_sdist:
    name: Build sdist
    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: 'true'
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Build a source tarball
        run: |
          python setup.py sdist
      - name: Test tarball
        run: |
          sudo apt-get ${{ env.apt_options }} update -y
          sudo apt-get ${{ env.apt_options }} install -y libhdf5-dev
          pip install -r tests/requirement_tests.txt
          pip install dist/*
          pytest -s -v tests
      - name: Store sdist as artifact
        uses: actions/upload-artifact@v3
        with:
          name: dist
          path: dist/*.tar.gz
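  # Publishing only happens for pushed tags; both build jobs upload into the
  # same "dist" artifact, so a single download collects every wheel plus the
  # sdist before they are handed to the PyPI publish action, which
  # authenticates with the API token stored in the PYPI_PASSWORD secret.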
  upload_artifacts:
    name: Upload wheels to PyPI
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
    runs-on: ubuntu-latest
    needs: [build_wheels, build_sdist]
    steps:
      - name: Download artifacts produced during the build_wheels and build_sdist jobs
        uses: actions/download-artifact@v3
        with:
          name: dist
          path: dist/
      - name: Display structure of downloaded files
        run: ls -R
        working-directory: dist
      - name: Publish package to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_PASSWORD }}
          packages_dir: dist/