
Commit

Merge pull request #3 from IntelPython/transition-to-scikit-build
Transition from numpy.distutils to scikit-build
oleksandr-pavlyk authored Sep 24, 2024
2 parents 417010a + 6e73a4c commit 0eb645b
Showing 34 changed files with 2,203 additions and 986 deletions.
1 change: 1 addition & 0 deletions .github/CODEOWNERS
@@ -0,0 +1 @@
* @oleksandr-pavlyk @xaleryb @ekomarova
6 changes: 6 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
290 changes: 290 additions & 0 deletions .github/workflows/conda-package.yml
@@ -0,0 +1,290 @@
name: Conda package

on: push

env:
  PACKAGE_NAME: mkl_umath
  MODULE_NAME: mkl_umath

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python: ['3.10', '3.11', '3.12']
    steps:
      - uses: actions/[email protected]
        with:
          fetch-depth: 0

      - name: Set pkgs_dirs
        run: |
          echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc
      - name: Cache conda packages
        uses: actions/cache@v4
        env:
          CACHE_NUMBER: 0 # Increase to reset cache
        with:
          path: ~/.conda/pkgs
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
      - name: Add conda to system path
        run: echo $CONDA/bin >> $GITHUB_PATH

      - name: Install conda-build
        run: conda install conda-build

      - name: Build conda package
        run: |
          CHANNELS="-c conda-forge -c https://software.repos.intel.com/python/conda --override-channels"
          VERSIONS="--python ${{ matrix.python }}"
          TEST="--no-test"
          echo "CONDA_BLD=${CONDA}/conda-bld/linux-64" >> $GITHUB_ENV
          conda build \
            $TEST \
            $VERSIONS \
            $CHANNELS \
            conda-recipe-cf
      - name: Upload artifact
        uses: actions/[email protected]
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.CONDA_BLD }}/${{ env.PACKAGE_NAME }}-*.tar.bz2

  test:
    needs: build
    runs-on: ${{ matrix.runner }}

    strategy:
      matrix:
        python: ['3.10', '3.11', '3.12']
        experimental: [false]
        runner: [ubuntu-latest]
    continue-on-error: ${{ matrix.experimental }}
    env:
      CHANNELS: -c conda-forge -c https://software.repos.intel.com/python/conda --override-channels

    steps:
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
      - name: Add conda to system path
        run: echo $CONDA/bin >> $GITHUB_PATH
      - name: Install conda-build
        run: conda install conda-build
      - name: Create conda channel
        run: |
          mkdir -p $GITHUB_WORKSPACE/channel/linux-64
          mv ${PACKAGE_NAME}-*.tar.bz2 $GITHUB_WORKSPACE/channel/linux-64
          conda index $GITHUB_WORKSPACE/channel
          # Test channel
          conda search $PACKAGE_NAME -c $GITHUB_WORKSPACE/channel --override-channels
      - name: Collect dependencies
        run: |
          CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
          conda create -n test_mkl_umath $PACKAGE_NAME python=${{ matrix.python }} $CHANNELS --only-deps --dry-run > lockfile
      - name: Display lockfile
        run: cat lockfile

      - name: Set pkgs_dirs
        run: |
          echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc
      - name: Cache conda packages
        uses: actions/cache@v4
        env:
          CACHE_NUMBER: 0 # Increase to reset cache
        with:
          path: ~/.conda/pkgs
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
      - name: Install mkl_umath
        run: |
          CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
          conda create -n test_mkl_umath python=${{ matrix.python }} $PACKAGE_NAME pytest $CHANNELS
          # Test installed packages
          conda list -n test_mkl_umath
      - name: Run tests
        run: |
          source $CONDA/etc/profile.d/conda.sh
          conda activate test_mkl_umath
          python -c "import mkl_umath, numpy as np; mkl_umath.use_in_numpy(); np.sin(np.linspace(0, 1, num=10**6));"
  build_windows:
    runs-on: windows-2019

    strategy:
      matrix:
        python: ['3.10', '3.11', '3.12']
    env:
      conda-bld: C:\Miniconda\conda-bld\win-64\
    steps:
      - uses: actions/[email protected]
        with:
          fetch-depth: 0

      - uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-variant: Miniforge3
          miniforge-version: latest
          activate-environment: build
          channels: conda-forge
          python-version: ${{ matrix.python }}

      - name: Cache conda packages
        uses: actions/cache@v4
        env:
          CACHE_NUMBER: 3 # Increase to reset cache
        with:
          path: /home/runner/conda_pkgs_dir
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
      - name: Store conda paths as envs
        shell: bash -l {0}
        run: |
          echo "CONDA_BLD=$CONDA/conda-bld/win-64/" | tr "\\\\" '/' >> $GITHUB_ENV
      - name: Install conda build
        run: |
          conda activate
          conda install -y conda-build
          conda list -n base
      - name: Build conda package
        run: |
          conda activate
          conda build --no-test --python ${{ matrix.python }} -c conda-forge -c https://software.repos.intel.com/python/conda --override-channels conda-recipe-cf
      - name: Upload artifact
        uses: actions/[email protected]
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
          path: ${{ env.CONDA_BLD }}${{ env.PACKAGE_NAME }}-*.tar.bz2

  test_windows:
    needs: build_windows
    runs-on: ${{ matrix.runner }}
    defaults:
      run:
        shell: cmd /C CALL {0}
    strategy:
      matrix:
        python: ['3.10', '3.11', '3.12']
        experimental: [false]
        runner: [windows-2019]
    continue-on-error: ${{ matrix.experimental }}
    env:
      workdir: '${{ github.workspace }}'
      CHANNELS: -c conda-forge -c https://software.repos.intel.com/python/conda --override-channels

    steps:
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}

      - uses: conda-incubator/setup-miniconda@v3
        with:
          auto-update-conda: true
          conda-build-version: '*'
          miniforge-variant: Miniforge3
          miniforge-version: latest
          activate-environment: mkl_umath_test
          channels: conda-forge
          python-version: ${{ matrix.python }}

      - name: Create conda channel with the artifact bit
        shell: cmd /C CALL {0}
        run: |
          echo ${{ env.workdir }}
          mkdir ${{ env.workdir }}\channel\win-64
          move ${{ env.PACKAGE_NAME }}-*.tar.bz2 ${{ env.workdir }}\channel\win-64
          dir ${{ env.workdir }}\channel\win-64
      - name: Index the channel
        shell: cmd /C CALL {0}
        run: |
          conda index ${{ env.workdir }}\channel
      - name: Dump mkl_umath version info from created channel to STDOUT
        shell: cmd /C CALL {0}
        run: |
          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.workdir }}/channel --override-channels --info --json
      - name: Dump mkl_umath version info from created channel into ver.json
        shell: cmd /C CALL {0}
        run: |
          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.workdir }}/channel --override-channels --info --json > ${{ env.workdir }}\ver.json
      - name: Output content of workdir
        shell: pwsh
        run: Get-ChildItem -Path ${{ env.workdir }}
      - name: Output content of produced ver.json
        shell: pwsh
        run: Get-Content -Path ${{ env.workdir }}\ver.json
      - name: Collect dependencies
        shell: cmd /C CALL {0}
        run: |
          IF NOT EXIST ver.json (
              copy /Y ${{ env.workdir }}\ver.json .
          )
          SET "SCRIPT=%VER_SCRIPT1% %VER_SCRIPT2%"
          FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO (
              SET PACKAGE_VERSION=%%F
          )
          conda install -n mkl_umath_test ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% python=${{ matrix.python }} -c ${{ env.workdir }}/channel ${{ env.CHANNELS }} --only-deps --dry-run > lockfile
      - name: Display lockfile content
        shell: pwsh
        run: Get-Content -Path .\lockfile
      - name: Cache conda packages
        uses: actions/cache@v4
        env:
          CACHE_NUMBER: 0 # Increase to reset cache
        with:
          path: /home/runner/conda_pkgs_dir
          key:
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
          restore-keys: |
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
      - name: Install mkl_umath
        shell: cmd /C CALL {0}
        run: |
          @ECHO ON
          IF NOT EXIST ver.json (
              copy /Y ${{ env.workdir }}\ver.json .
          )
          set "SCRIPT=%VER_SCRIPT1% %VER_SCRIPT2%"
          FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO (
              SET PACKAGE_VERSION=%%F
          )
          SET "TEST_DEPENDENCIES=pytest pytest-cov"
          conda install -n mkl_umath_test ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% %TEST_DEPENDENCIES% python=${{ matrix.python }} -c ${{ env.workdir }}/channel ${{ env.CHANNELS }}
      - name: Report content of test environment
        shell: cmd /C CALL {0}
        run: |
          conda activate
          echo "Value of CONDA environment variable was: " %CONDA%
          echo "Value of CONDA_PREFIX environment variable was: " %CONDA_PREFIX%
          conda info && conda list -n mkl_umath_test
      - name: Run tests
        shell: cmd /C CALL {0}
        run: >-
          conda activate mkl_umath_test && python -c "import mkl_umath, numpy as np; mkl_umath.use_in_numpy(); np.sin(np.linspace(0, 1, num=10**6));"
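
Both the Linux and Windows test jobs above validate the built package with the same one-line smoke test rather than a pytest run. A slightly expanded local equivalent is sketched below; use_in_numpy() is taken verbatim from the workflow commands, while is_patched() and restore() are assumed helpers from mkl_umath's public API and are not part of this diff.

    # Sketch of the CI "Run tests" step, to be run inside the test environment.
    import mkl_umath
    import numpy as np

    mkl_umath.use_in_numpy()        # route supported NumPy ufunc loops through Intel MKL
    print("patched:", mkl_umath.is_patched())   # assumed helper: reports whether NumPy is patched

    x = np.linspace(0, 1, num=10**6)
    y = np.sin(x)                   # this ufunc call now exercises the MKL-backed loop
    print(y[:5])

    mkl_umath.restore()             # assumed helper: restores NumPy's default loops
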
74 changes: 74 additions & 0 deletions .github/workflows/openssf-scorecard.yml
@@ -0,0 +1,74 @@
# This workflow uses actions that are not certified by GitHub. They are provided
# by a third-party and are governed by separate terms of service, privacy
# policy, and support documentation.

name: Scorecard supply-chain security
on:
  # For Branch-Protection check. Only the default branch is supported. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
  branch_protection_rule:
  # To guarantee Maintained check is occasionally updated. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
  schedule:
    - cron: '28 2 * * 1'
    - cron: '28 2 * * 4'
  push:
    branches: [ "master" ]

# Declare default permissions as read only.
permissions: read-all

jobs:
  analysis:
    name: Scorecard analysis
    runs-on: ubuntu-latest
    timeout-minutes: 30
    permissions:
      # Needed to upload the results to code-scanning dashboard.
      security-events: write
      # Needed to publish results and get a badge (see publish_results below).
      id-token: write
      # Uncomment the permissions below if installing in a private repository.
      # contents: read
      # actions: read

    steps:
      - name: "Checkout code"
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
        with:
          persist-credentials: false

      - name: "Run analysis"
        uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
        with:
          results_file: results.sarif
          results_format: sarif
          # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
          # - you want to enable the Branch-Protection check on a *public* repository, or
          # - you are installing Scorecard on a *private* repository
          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
          # repo_token: ${{ secrets.SCORECARD_TOKEN }}

          # Public repositories:
          # - Publish results to OpenSSF REST API for easy access by consumers
          # - Allows the repository to include the Scorecard badge.
          # - See https://github.com/ossf/scorecard-action#publishing-results.
          # For private repositories:
          # - `publish_results` will always be set to `false`, regardless
          #   of the value entered here.
          publish_results: true

      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
        uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
        with:
          name: SARIF file
          path: results.sarif
          retention-days: 14

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
        uses: github/codeql-action/upload-sarif@294a9d92911152fe08befb9ec03e240add280cb3 # v3.26.8
        with:
          sarif_file: results.sarif