This repository was archived by the owner on Jan 5, 2024. It is now read-only.

Commit 3246392: First commit (0 parents)

20 files changed, +10822 -0 lines changed

.github/workflows/build-wheels.sh

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
#!/bin/bash
set -e -x

# Install a system package required by our library
yum install -y atlas-devel

# Install requirements
PYBIN=/opt/python/${PYABI}/bin
REQ_FILE=/io/requirements-wheel.txt
"${PYBIN}/pip" install -r $REQ_FILE

# Compile wheels
cd /io
"${PYBIN}/python" setup.py bdist_wheel


# Bundle external shared libraries into the wheels
for whl in dist/*.whl; do
    auditwheel repair "$whl" -w dist/
done

rm dist/*-linux*.whl

.github/workflows/cd.yml

Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
name: CD

on:
  push:
    tags:
      - '*'

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macOS-latest, windows-latest]
        python-version: [3.6, 3.7]
        include:
          - { os: ubuntu-latest, python-version: 3.6, python-abis: "cp36-cp36m" }
          - { os: ubuntu-latest, python-version: 3.7, python-abis: "cp37-cp37m" }
          - { os: windows-latest, python-version: 3.7, build-static: 1 }

    steps:
      - name: Check out code
        uses: actions/checkout@v2

      - name: Set up conda ${{ matrix.python-version }}
        env:
          PYTHON: ${{ matrix.python-version }}
        shell: bash
        run: |
          source ./.github/workflows/install-conda.sh
          python -m pip install --upgrade pip setuptools wheel coverage;

      - name: Deploy packages
        if: startsWith(github.ref, 'refs/tags/') && matrix.no-deploy != '1'
        shell: bash
        env:
          DOCKER_IMAGE: "quay.io/pypa/manylinux1_x86_64"
          PYABI: ${{ matrix.python-abis }}
          BUILD_STATIC: ${{ matrix.build-static }}
          PYPI_PWD: ${{ secrets.PYPI_PASSWORD }}
        run: |
          source ./.github/workflows/reload-env.sh
          source ./.github/workflows/upload-packages.sh

.github/workflows/ci.yml

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
name: CI

on: [push, pull_request]

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macOS-latest, windows-latest]
        python-version: [3.6, 3.7]

    steps:
      - name: Check out code
        uses: actions/checkout@v2
        with:
          fetch-depth: 2

      - name: Set up conda ${{ matrix.python-version }}
        env:
          PYTHON: ${{ matrix.python-version }}
        shell: bash
        run: |
          source ./.github/workflows/install-conda.sh
          python -m pip install --upgrade pip setuptools wheel coverage;

      - name: Build extensions
        shell: bash
        run: |
          source ./.github/workflows/reload-env.sh
          python setup.py build_ext -i

      - name: Test with unittest
        env:
          WITH_HADOOP: ${{ matrix.with-hadoop }}
          WITH_KUBERNETES: ${{ matrix.with-kubernetes }}
          WITH_CYTHON: ${{ matrix.with-cython }}
          NO_COMMON_TESTS: ${{ matrix.no-common-tests }}
          NUMPY_EXPERIMENTAL_ARRAY_FUNCTION: 1
          CHANGE_MINIKUBE_NONE_USER: true
        shell: bash
        run: |
          source ./.github/workflows/reload-env.sh
          python -m unittest shared_memory.tests.test_shared_memory

.github/workflows/install-conda.sh

Lines changed: 37 additions & 0 deletions
@@ -0,0 +1,37 @@
#!/bin/bash
PYTHON=$(cut -d '-' -f 1 <<< "$PYTHON")
UNAME="$(uname | awk '{print tolower($0)}')"
FILE_EXT="sh"
if [[ "$UNAME" == "darwin" ]]; then
    set -e
    ulimit -n 1024
    CONDA_OS="MacOSX"
elif [[ $UNAME == "linux" ]]; then
    sudo apt-get install -y liblz4-dev
    CONDA_OS="Linux"
elif [[ $UNAME == "mingw"* ]] || [[ $UNAME == "msys"* ]]; then
    CONDA_OS="Windows"
    FILE_EXT="exe"
fi

CONDA_FILE="Miniconda3-latest-${CONDA_OS}-x86_64.${FILE_EXT}"

TEST_PACKAGES="virtualenv psutil pyyaml"

if [[ "$FILE_EXT" == "sh" ]]; then
    curl -L -o "miniconda.${FILE_EXT}" https://repo.continuum.io/miniconda/$CONDA_FILE
    bash miniconda.sh -b -p $HOME/miniconda && rm -f miniconda.*
    CONDA_BIN_PATH=$HOME/miniconda/bin
    TEST_PACKAGES="$TEST_PACKAGES nomkl libopenblas"
    export PATH="$HOME/miniconda/envs/test/bin:$HOME/miniconda/bin:$PATH"
else
    CONDA=$(echo "/$CONDA" | sed -e 's/\\/\//g' -e 's/://')
    echo "Using installed conda at $CONDA"
    CONDA_BIN_PATH=$CONDA/Scripts
    export PATH="$CONDA/envs/test/Scripts:$CONDA/envs/test:$CONDA/Scripts:$CONDA:$PATH"
fi
$CONDA_BIN_PATH/conda create --quiet --yes -n test python=$PYTHON $TEST_PACKAGES

# Check Python version
export PYTHON=$(python -c "import sys; print('.'.join(str(v) for v in sys.version_info[:3]))")
echo "Installed Python version: $PYTHON"

.github/workflows/reload-env.sh

Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
#!/bin/bash

export UNAME="$(uname | awk '{print tolower($0)}')"
export PYTEST_CONFIG="--log-level=DEBUG --cov-report= --cov=mars --timeout=1500 -W ignore::PendingDeprecationWarning"

if [[ "$GITHUB_REF" =~ ^"refs/tags/" ]]; then
    export GITHUB_TAG_REF="$GITHUB_REF"
    unset CYTHON_TRACE
    export GIT_TAG=$(echo "$GITHUB_REF" | sed -e "s/refs\/tags\///g")
fi

if [[ $UNAME == "mingw"* ]] || [[ $UNAME == "msys"* ]]; then
    export UNAME="windows"
    CONDA=$(echo "/$CONDA" | sed -e 's/\\/\//g' -e 's/://')
    export PATH="$CONDA/Library:$CONDA/Library/bin:$CONDA/Scripts:$CONDA:$PATH"
    export PATH="$CONDA/envs/test/Library:$CONDA/envs/test/Library/bin:$CONDA/envs/test/Scripts:$CONDA/envs/test:$PATH"
else
    export CONDA="$HOME/miniconda"
    export PATH="$HOME/miniconda/envs/test/bin:$HOME/miniconda/bin:$PATH"
fi

export PYTHON=$(python -c "import sys; print('.'.join(str(v) for v in sys.version_info[:3]))")

# Rerun a command until it succeeds, up to a retry count (default 5, or -n N)
function retry {
    retrial=5
    if [ $1 == "-n" ]; then
        retrial=$2
        shift; shift
    fi
    r=0
    while true; do
        r=$((r+1))
        if [ "$r" -ge $retrial ]; then
            $@
            return $?
        else
            $@ && break || true
            sleep 1
        fi
    done
}
alias pip="retry pip"
shopt -s expand_aliases
.github/workflows/upload-packages.sh

Lines changed: 72 additions & 0 deletions
@@ -0,0 +1,72 @@
#!/bin/bash
set -e

if [ -z "$GITHUB_TAG_REF" ]; then
    echo "Not on a tag, won't deploy to pypi"
elif [ -n "$NO_DEPLOY" ]; then
    echo "Not on a build config, won't deploy to pypi"
else
    git clean -f -x
    source activate test

    if [ "$UNAME" = "linux" ]; then
        conda create --quiet --yes -n wheel python=$PYTHON
        conda activate wheel

        docker pull $DOCKER_IMAGE
        pyabis=$(echo $PYABI | tr ":" "\n")
        for abi in $pyabis; do
            git clean -f -x
            docker run --rm -e "PYABI=$abi" -e "GIT_TAG=$GIT_TAG" -v `pwd`:/io \
                $DOCKER_IMAGE $PRE_CMD /io/.github/workflows/build-wheels.sh
            sudo chown -R $(id -u):$(id -g) ./*
            mv dist/*.whl /tmp
        done
        mv /tmp/*.whl dist/

        conda activate test
    else
        conda create --quiet --yes -n wheel python=$PYTHON
        conda activate wheel

        pip install -r requirements-wheel.txt
        pip wheel --no-deps .

        conda activate test

        mkdir -p dist
        cp *.whl dist/

        if [[ "$UNAME" == "darwin" ]]; then
            pip install delocate
            delocate-wheel dist/*.whl
            delocate-addplat --rm-orig -x 10_9 -x 10_10 dist/*.whl
        fi
    fi

    if [ -n "$BUILD_STATIC" ]; then
        python setup.py sdist --formats=gztar
    fi

    echo ""
    echo "Generated files:"
    ls dist/
    echo ""

    if [[ "$GITHUB_REPOSITORY" == "mars-project/shared_memory38" ]]; then
        PYPI_REPO="https://upload.pypi.org/legacy/"
    else
        PYPI_REPO="https://test.pypi.org/legacy/"
    fi

    echo "[distutils]" > ~/.pypirc
    echo "index-servers =" >> ~/.pypirc
    echo " pypi" >> ~/.pypirc
    echo "[pypi]" >> ~/.pypirc
    echo "repository=$PYPI_REPO" >> ~/.pypirc
    echo "username=pyodps" >> ~/.pypirc
    echo "password=$PYPI_PWD" >> ~/.pypirc

    python -m pip install twine
    python -m twine upload -r pypi --skip-existing dist/*
fi

.gitignore

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
*.py[cod]

# C extensions
*.so

# sqlite3 db files
*.db

# Packages
*.egg
*.egg-info
dist
build/
eggs
parts
var
sdist
develop-eggs
.installed.cfg
lib64
__pycache__

# Unit test / coverage reports
.coverage
.coverage.*
htmlcov
.tox
nosetests.xml
.cache
.pytest*
.dist-coverage
test.conf

# IDEs
.idea
.vscode
*.iml

# Generated files
clinic/*.h

MAINFEST.in

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
global-exclude *.c*
global-exclude clinic

README.md

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
Backport of `multiprocessing.shared_memory` for Python 3.6 and 3.7. Simply import everything from `shared_memory` to make existing code work on these Python versions.

Note that `multiprocessing.managers.SharedMemoryManager` is also included in the `shared_memory` package.
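
A rough usage sketch is shown below. The `pip install shared_memory38` command, the top-level import of `SharedMemoryManager`, and the two-handle pattern are illustrative assumptions based on the note above and the standard Python 3.8 API, not something this commit documents:

```python
# Minimal sketch of using the backport on Python 3.6/3.7.
# Assumes the package is installed, e.g. via `pip install shared_memory38`.
from shared_memory import SharedMemory, SharedMemoryManager

# Create a block, write into it, then attach to it by name
# (the second handle would normally live in another process).
shm = SharedMemory(create=True, size=16)
try:
    shm.buf[:5] = b"hello"

    attached = SharedMemory(name=shm.name)
    print(bytes(attached.buf[:5]))  # b'hello'
    attached.close()
finally:
    shm.close()
    shm.unlink()  # release the block once every user has closed it

# SharedMemoryManager mirrors the Python 3.8 API: blocks it creates are
# cleaned up automatically when the manager shuts down.
with SharedMemoryManager() as smm:
    managed = smm.SharedMemory(size=16)
    managed.buf[:2] = b"ok"
```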

setup.py

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
import os
import sys
import subprocess
from setuptools import setup, find_packages, Extension

if sys.version_info[:2] == (3, 6):
    clinic_file = "tools/py36_clinic.py"
elif sys.version_info[:2] == (3, 7):
    clinic_file = "tools/py37_clinic.py"
else:
    raise ValueError("Must run on Python 3.6 or 3.7")

if sys.platform != 'win32':
    tool_env = os.environ.copy()
    tool_env['PYTHONPATH'] = f'{os.getcwd()}/tools:' + tool_env.get('PYTHONPATH', '')
    subprocess.run([sys.executable, clinic_file, "shared_memory/posixshmem.c"],
                   env=tool_env)

posix_shm_mod = Extension(
    "shared_memory._posixshmem",
    define_macros=[
        ("HAVE_SHM_OPEN", "1"),
        ("HAVE_SHM_UNLINK", "1"),
        ("HAVE_SHM_MMAN_H", 1),
    ],
    libraries=["rt"] if sys.platform == 'linux' else [],
    sources=["shared_memory/posixshmem.c"],
)

setup(
    name="shared_memory38",
    version="0.1.0",
    description="Backport of multiprocessing.shared_memory in Python 3.8",
    classifiers=[
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Software Development :: Libraries',
    ],
    url="https://github.com/mars-project/shared_memory38",
    packages=find_packages(exclude=('*.tests.*', '*.tests')),
    ext_modules=[posix_shm_mod] if sys.platform != 'win32' else [],
)
