diff --git a/.github/workflows/conda-package-cf.yml b/.github/workflows/conda-package-cf.yml
new file mode 100644
index 0000000..364ac5f
--- /dev/null
+++ b/.github/workflows/conda-package-cf.yml
@@ -0,0 +1,238 @@
+name: Conda package using conda-forge
+
+on: push
+
+permissions: read-all
+
+env:
+  PACKAGE_NAME: mkl_random
+  MODULE_NAME: mkl_random
+  TEST_ENV_NAME: test_mkl_random
+  VER_SCRIPT1: "import json; f = open('ver.json', 'r'); j = json.load(f); f.close(); "
+  VER_SCRIPT2: "d = j['mkl_random'][0]; print('='.join((d[s] for s in ('version', 'build'))))"
+
+jobs:
+  build_linux:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python: ["3.9", "3.10", "3.11", "3.12"]
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Set pkgs_dirs
+        run: |
+          echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc
+      - name: Cache conda packages
+        uses: actions/cache@v4
+        env:
+          CACHE_NUMBER: 0 # Increase to reset cache
+        with:
+          path: ~/.conda/pkgs
+          key:
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }}
+          restore-keys: |
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
+
+      - name: Add conda to system path
+        run: echo $CONDA/bin >> $GITHUB_PATH
+      - name: Install conda-build
+        run: conda install conda-build
+      - name: Build conda package
+        run: |
+          CHANNELS="-c conda-forge --override-channels"
+          VERSIONS="--python ${{ matrix.python }} --numpy 2"
+          TEST="--no-test"
+
+          conda build \
+            $TEST \
+            $VERSIONS \
+            $CHANNELS \
+            conda-recipe-cf
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+          path: /usr/share/miniconda/conda-bld/linux-64/${{ env.PACKAGE_NAME }}-*.tar.bz2
+
+  build_windows:
+    runs-on: windows-2019
+
+    strategy:
+      matrix:
+        python: ["3.9", "3.10", "3.11", "3.12"]
+    env:
+      conda-bld: C:\Miniconda\conda-bld\win-64\
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - uses: conda-incubator/setup-miniconda@v3
+        with:
+          auto-activate-base: true
+          activate-environment: ""
+
+      - name: Cache conda packages
+        uses: actions/cache@v4
+        env:
+          CACHE_NUMBER: 3 # Increase to reset cache
+        with:
+          path: /home/runner/conda_pkgs_dir
+          key:
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }}
+          restore-keys: |
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
+      - name: Install conda-build
+        run: conda install conda-build
+      - name: Build conda package
+        run: conda build --no-test --python ${{ matrix.python }} --numpy 2 -c conda-forge --override-channels conda-recipe-cf
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+          path: ${{ env.conda-bld }}${{ env.PACKAGE_NAME }}-*.tar.bz2
+
+  test_linux:
+    needs: build_linux
+    runs-on: ${{ matrix.runner }}
+
+    strategy:
+      matrix:
+        python: ["3.9", "3.10", "3.11", "3.12"]
+        numpy: ["1.26*", "2*"]
+        experimental: [false]
+        runner: [ubuntu-latest]
+    continue-on-error: ${{ matrix.experimental }}
+    env:
+      CHANNELS: -c conda-forge --override-channels
+
+    steps:
+      - name: Download artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+      - name: Add conda to system path
+        run: echo $CONDA/bin >> $GITHUB_PATH
+      - name: Install conda-build
+        run: conda install conda-build
+      - name: Create conda channel
+        run: |
+          mkdir -p $GITHUB_WORKSPACE/channel/linux-64
+          conda index $GITHUB_WORKSPACE/channel || exit 1
+          mv ${PACKAGE_NAME}-*.tar.bz2 $GITHUB_WORKSPACE/channel/linux-64 || exit 1
+          conda index $GITHUB_WORKSPACE/channel || exit 1
+          # Test channel
+          conda search $PACKAGE_NAME -c $GITHUB_WORKSPACE/channel --override-channels --info --json > $GITHUB_WORKSPACE/ver.json
+          cat ver.json
+      - name: Collect dependencies
+        run: |
+          . $CONDA/etc/profile.d/conda.sh
+          CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
+          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
+          conda create -n ${{ env.TEST_ENV_NAME }} $PACKAGE_NAME=${PACKAGE_VERSION} python=${{ matrix.python }} numpy=${{ matrix.numpy }} $CHANNELS --only-deps --dry-run > lockfile
+          cat lockfile
+      - name: Set pkgs_dirs
+        run: |
+          echo "pkgs_dirs: [~/.conda/pkgs]" >> ~/.condarc
+      - name: Cache conda packages
+        uses: actions/cache@v4
+        env:
+          CACHE_NUMBER: 0 # Increase to reset cache
+        with:
+          path: ~/.conda/pkgs
+          key:
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
+          restore-keys: |
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
+
+      - name: Install mkl_random
+        run: |
+          . $CONDA/etc/profile.d/conda.sh
+          CHANNELS="-c $GITHUB_WORKSPACE/channel ${{ env.CHANNELS }}"
+          export PACKAGE_VERSION=$(python -c "${VER_SCRIPT1} ${VER_SCRIPT2}")
+          conda create -n ${{ env.TEST_ENV_NAME }} $PACKAGE_NAME=${PACKAGE_VERSION} pytest python=${{ matrix.python }} numpy=${{ matrix.numpy }} $CHANNELS
+          # Test installed packages
+          conda list
+      - name: Run tests
+        run: |
+          . $CONDA/etc/profile.d/conda.sh
+          conda activate ${{ env.TEST_ENV_NAME }}
+          pytest -vv --pyargs ${{ env.MODULE_NAME }}
+
+  test_windows:
+    needs: build_windows
+    runs-on: ${{ matrix.runner }}
+
+    strategy:
+      matrix:
+        python: ["3.9", "3.10", "3.11", "3.12"]
+        numpy: ["1.26*", "2*"]
+        experimental: [false]
+        runner: [windows-2019]
+    continue-on-error: ${{ matrix.experimental }}
+    env:
+      CHANNELS: -c conda-forge --override-channels
+
+    steps:
+      - name: Download artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+      - uses: conda-incubator/setup-miniconda@v3
+        with:
+          auto-activate-base: true
+          activate-environment: ""
+      - name: Install conda-build
+        # Needed to be able to run conda index
+        run: conda install conda-build
+      - name: Create conda channel
+        run: |
+          mkdir ${{ env.GITHUB_WORKSPACE }}\channel\win-64
+          move ${{ env.PACKAGE_NAME }}-*.tar.bz2 ${{ env.GITHUB_WORKSPACE }}\channel\win-64
+          conda index ${{ env.GITHUB_WORKSPACE }}/channel
+          # Test channel
+          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.GITHUB_WORKSPACE }}/channel --override-channels --info --json > ${{ env.GITHUB_WORKSPACE }}\ver.json
+          more ${{ env.GITHUB_WORKSPACE }}\ver.json
+      - name: Collect dependencies
+        shell: cmd
+        run: |
+          @ECHO ON
+          copy /Y ${{ env.GITHUB_WORKSPACE }}\ver.json .
+ set "SCRIPT=%VER_SCRIPT1% %VER_SCRIPT2%" + FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO ( + SET PACKAGE_VERSION=%%F + ) + conda create -n ${{ env.TEST_ENV_NAME }} ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% python=${{ matrix.python }} numpy=${{ matrix.numpy }} -c ${{ env.GITHUB_WORKSPACE }}/channel ${{ env.CHANNELS }} --only-deps --dry-run > lockfile + more lockfile + - name: Cache conda packages + uses: actions/cache@v4 + env: + CACHE_NUMBER: 3 # Increase to reset cache + with: + path: /home/runner/conda_pkgs_dir + key: + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }} + restore-keys: | + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- + - name: Install mkl_random + shell: cmd + run: | + @ECHO ON + copy /Y ${{ env.GITHUB_WORKSPACE }}\ver.json . + set "SCRIPT=%VER_SCRIPT1% %VER_SCRIPT2%" + FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO ( + SET PACKAGE_VERSION=%%F + ) + conda create -n ${{ env.TEST_ENV_NAME }} ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% pytest python=${{ matrix.python }} numpy=${{ matrix.numpy }} -c ${{ env.GITHUB_WORKSPACE }}/channel ${{ env.CHANNELS }} + # Test installed packages + conda list + - name: Run tests + run: | + conda activate -n ${{ env.TEST_ENV_NAME }} + pytest -v --pyargs ${{ env.MODULE_NAME }} diff --git a/conda-recipe-cf/bld.bat b/conda-recipe-cf/bld.bat new file mode 100644 index 0000000..1b04207 --- /dev/null +++ b/conda-recipe-cf/bld.bat @@ -0,0 +1,5 @@ +@rem Remember to source the compiler + +set MKLROOT=%CONDA_PREFIX% +%PYTHON% setup.py install +if errorlevel 1 exit 1 diff --git a/conda-recipe-cf/build.sh b/conda-recipe-cf/build.sh new file mode 100644 index 0000000..34241cc --- /dev/null +++ b/conda-recipe-cf/build.sh @@ -0,0 +1,5 @@ +#!/bin/bash -x + +export CFLAGS="-I$PREFIX/include $CFLAGS" +export MKLROOT=$CONDA_PREFIX +$PYTHON setup.py install diff --git a/conda-recipe-cf/meta.yaml b/conda-recipe-cf/meta.yaml new file mode 100644 index 0000000..3a4ddcb --- /dev/null +++ b/conda-recipe-cf/meta.yaml @@ -0,0 +1,48 @@ +{% set version = "1.2.6" %} +{% set buildnumber = 0 %} + +package: + name: mkl_random + version: {{ version }} + +source: + path: .. + +build: + number: {{buildnumber}} + ignore_run_exports: + - blas + +requirements: + build: + - {{ compiler('c') }} + - {{ compiler('cxx') }} + host: + - python + - setuptools + - mkl-devel + - cython + - numpy + - pip + run: + - python + - numpy + - {{ pin_compatible('mkl', min_pin="x.x", max_pin="x") }} + +test: + commands: + - pytest --pyargs mkl_random + requires: + - pytest + - mkl-service + - numpy + imports: + - mkl_random + - mkl_random.mklrand + +about: + home: http://github.com/IntelPython/mkl_random + license: BSD-3-Clause + license_family: BSD + license_file: LICENSE.txt + summary: NumPy-based implementation of random number generation sampling using Intel (R) Math Kernel Library, mirroring numpy.random, but exposing all choices of sampling algorithms available in MKL. 
diff --git a/mkl_random/mklrand.pyx b/mkl_random/mklrand.pyx
index b307899..66459db 100644
--- a/mkl_random/mklrand.pyx
+++ b/mkl_random/mklrand.pyx
@@ -450,25 +450,6 @@ cdef object vec_cont3_array(irk_state *state, irk_cont3_vec func, object size,
 
     return arr_obj
 
-cdef object vec_disc0_array(irk_state *state, irk_disc0_vec func, object size,
-                            object lock):
-    cdef int *array_data
-    cdef int res
-    cdef cnp.ndarray array "arrayObject"
-    cdef cnp.npy_intp length
-    cdef cnp.npy_intp i
-
-    if size is None:
-        func(state, 1, &res)
-        return res
-    else:
-        array = np.empty(size, np.int32)
-        length = cnp.PyArray_SIZE(array)
-        array_data = cnp.PyArray_DATA(array)
-        with lock, nogil:
-            func(state, length, array_data)
-
-    return array
 
 cdef object vec_long_disc0_array(
     irk_state *state, irk_disc0_vec_long func,
@@ -483,14 +464,13 @@
     if size is None:
         func(state, 1, &res)
         return res
-    else:
-        array = np.empty(size, np.uint)
-        length = cnp.PyArray_SIZE(array)
-        array_data = cnp.PyArray_DATA(array)
-        with lock, nogil:
-            func(state, length, array_data)
+    array = np.empty(size, np.dtype("long"))
+    length = cnp.PyArray_SIZE(array)
+    array_data = cnp.PyArray_DATA(array)
+    with lock, nogil:
+        func(state, length, array_data)
 
-        return array
+    return array
 
 
 cdef object vec_discnp_array_sc(
@@ -507,7 +487,7 @@
         func(state, 1, &res, n, p)
         return res
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         length = cnp.PyArray_SIZE(array)
         array_data = cnp.PyArray_DATA(array)
         with lock, nogil:
@@ -544,7 +524,7 @@ cdef object vec_discnp_array(irk_state *state, irk_discnp_vec func, object size,
             cnp.PyArray_MultiIter_NEXT(multi)
         arr_obj = array
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         array_data = cnp.PyArray_DATA(array)
         multi = cnp.PyArray_MultiIterNew(3, array, on, op)
         res_size = cnp.PyArray_SIZE(array)
@@ -585,7 +565,7 @@ cdef object vec_discdd_array_sc(irk_state *state, irk_discdd_vec func, object si
         func(state, 1, &res, n, p)
         return res
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         length = cnp.PyArray_SIZE(array)
         array_data = cnp.PyArray_DATA(array)
         with lock, nogil:
@@ -619,7 +599,7 @@ cdef object vec_discdd_array(irk_state *state, irk_discdd_vec func, object size,
             cnp.PyArray_MultiIter_NEXT(multi)
         arr_obj = array
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         array_data = cnp.PyArray_DATA(array)
         res_size = cnp.PyArray_SIZE(array)
         multi = cnp.PyArray_MultiIterNew(3, array, on, op)
@@ -660,7 +640,7 @@ cdef object vec_discnmN_array_sc(irk_state *state, irk_discnmN_vec func, object
         func(state, 1, &res, n, m, N)
         return res
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         length = cnp.PyArray_SIZE(array)
         array_data = cnp.PyArray_DATA(array)
         with lock, nogil:
@@ -696,7 +676,7 @@ cdef object vec_discnmN_array(irk_state *state, irk_discnmN_vec func, object siz
             cnp.PyArray_MultiIter_NEXT(multi)
         arr_obj = array
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         array_data = cnp.PyArray_DATA(array)
         multi = cnp.PyArray_MultiIterNew(4, array, on, om, oN)
 
@@ -738,7 +718,7 @@ cdef object vec_discd_array_sc(irk_state *state, irk_discd_vec func, object size
         func(state, 1, &res, a)
        return res
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         length = cnp.PyArray_SIZE(array)
         array_data = cnp.PyArray_DATA(array)
         with lock, nogil:
@@ -758,7 +738,7 @@ cdef object vec_long_discd_array_sc(irk_state *state, irk_discd_long_vec func, o
         func(state, 1, &res, a)
         return res
     else:
-        array = np.empty(size, int)
+        array = np.empty(size, np.dtype("long"))
         length = cnp.PyArray_SIZE(array)
         array_data = cnp.PyArray_DATA(array)
         with lock, nogil:
@@ -779,7 +759,7 @@ cdef object vec_discd_array(irk_state *state, irk_discd_vec func, object size, c
 
     if size is None:
         array = cnp.PyArray_SimpleNew(cnp.PyArray_NDIM(oa),
-                                      cnp.PyArray_DIMS(oa), cnp.NPY_INT)
+                                      cnp.PyArray_DIMS(oa), cnp.NPY_INT32)
         length = cnp.PyArray_SIZE(array)
         array_data = cnp.PyArray_DATA(array)
         itera = cnp.PyArray_IterNew(oa)
@@ -789,7 +769,7 @@ cdef object vec_discd_array(irk_state *state, irk_discd_vec func, object size, c
             cnp.PyArray_ITER_NEXT(itera)
         arr_obj = array
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         array_data = cnp.PyArray_DATA(array)
         multi = cnp.PyArray_MultiIterNew(2, array, oa)
         res_size = cnp.PyArray_SIZE(array)
@@ -832,7 +812,7 @@ cdef object vec_long_discd_array(irk_state *state, irk_discd_long_vec func, obje
             cnp.PyArray_ITER_NEXT(itera)
         arr_obj = array
     else:
-        array = np.empty(size, int)
+        array = np.empty(size, np.dtype("long"))
         array_data = cnp.PyArray_DATA(array)
         multi = cnp.PyArray_MultiIterNew(2, array, oa)
         res_size = cnp.PyArray_SIZE(array)
@@ -873,7 +853,7 @@ cdef object vec_Poisson_array(irk_state *state, irk_discdptr_vec func1, irk_disc
             func1(state, length, array_data, oa_data)
         arr_obj = array
     else:
-        array = np.empty(size, np.int32)
+        array = np.empty(size, np.intc)
         array_data = cnp.PyArray_DATA(array)
         multi = cnp.PyArray_MultiIterNew(2, array, olambda)
         res_size = cnp.PyArray_SIZE(array)
@@ -1396,10 +1376,8 @@ cdef class RandomState:
         """
         tomaxint(size=None)
 
-        Random integers between 0 and ``sys.maxint``, inclusive.
-
         Return a sample of uniformly distributed random integers in the interval
-        [0, ``sys.maxint``].
+        [0, ``np.iinfo("long").max``].
 
         Parameters
         ----------
diff --git a/mkl_random/tests/test_random.py b/mkl_random/tests/test_random.py
index 1236fff..abb311a 100644
--- a/mkl_random/tests/test_random.py
+++ b/mkl_random/tests/test_random.py
@@ -95,7 +95,7 @@ def test_non_deterministic_brng():
 
 
 def test_binomial_n_zero():
-    zeros = np.zeros(2, dtype='int')
+    zeros = np.zeros(2, dtype='int32')
     for p in [0, .5, 1]:
         assert rnd.binomial(0, p) == 0
         actual = rnd.binomial(zeros, p)
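A note on the dtype changes in mklrand.pyx above: the output buffers are filled through C `int *` and `long *` pointers, so the Python-side allocations switch from `np.int32` / builtin `int` to `np.intc` (the C `int` dtype) and `np.dtype("long")` (the C `long` dtype), presumably to keep the NumPy dtype matched to the C type the MKL kernels actually write, regardless of platform or NumPy version. The snippet below is only an illustration of why that distinction matters and is not part of the patch; the sizes in the comments are the usual ones for 64-bit Linux and 64-bit Windows.

    # Illustrative only: how these dtype spellings map onto C integer types.
    import numpy as np

    print(np.dtype(np.intc).itemsize)   # C int:  4 bytes on common 64-bit platforms
    print(np.dtype("long").itemsize)    # C long: 8 on 64-bit Linux/macOS, 4 on 64-bit Windows
    print(np.dtype(int).itemsize)       # NumPy's default integer: 8 on 64-bit platforms under
                                        # NumPy 2 (including Windows), so it no longer tracks
                                        # C long there

The companion test change from dtype='int' to dtype='int32' similarly avoids relying on the platform- and version-dependent meaning of the builtin int dtype.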