
Commit f4c3ce4

Merge branch 'main' into master
2 parents 9651c10 + 8a3a958

15 files changed: +1105 −109 lines

.github/PULL_REQUEST_TEMPLATE/new_container.md

Lines changed: 3 additions & 3 deletions
@@ -2,7 +2,7 @@ You have implemented a new container and would like to contribute it? Great! Her
 
 - [ ] Create a new feature directory and populate it with the package structure [described in the documentation](https://testcontainers-python.readthedocs.io/en/latest/#package-structure). Copying one of the existing features is likely the best way to get started.
 - [ ] Implement the new feature (typically in `__init__.py`) and corresponding tests.
-- [ ] Add a line `-e file:[feature name]` to `requirements.in` and run `make requirements`. This command will find any new requirements and generate lock files to ensure reproducible builds (see the [pip-tools documentation](https://pip-tools.readthedocs.io/en/latest/) for details). Then run `pip install -r requirements/[your python version].txt` to install the new requirements.
 - [ ] Update the feature `README.rst` and add it to the table of contents (`toctree` directive) in the top-level `README.rst`.
-- [ ] Add a line `[feature name]` to the list of components in the GitHub Action workflow in `.github/workflows/main.yml` to run tests, build, and publish your package when pushed to the `master` branch.
-- [ ] Rebase your development branch on `master` (or merge `master` into your development branch).
+- [ ] Add a line `[feature name]` to the list of components in the GitHub Action workflow in `.github/workflows/main.yml` to run tests, build, and publish your package when pushed to the `main` branch.
+- [ ] Rebase your development branch on `main` (or merge `main` into your development branch).
+- [ ] Add a line `-e file:[feature name]` to `requirements.in` and open a pull request. Opening a pull request will automatically generate lock files to ensure reproducible builds (see the [pip-tools documentation](https://pip-tools.readthedocs.io/en/latest/) for details). Finally, run `python get_requirements.py --pr=[your PR number]` to fetch the updated requirement files (the build needs to have succeeded).
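
For a rough sense of how the revised checklist plays out on the command line, here is a sketch (not part of the commit); `my_feature` is a hypothetical feature name and the PR number stays a placeholder:

mkdir my_feature                                   # new feature directory with the documented package structure
# implement the feature (typically in __init__.py), its tests, and its README.rst
echo "-e file:my_feature" >> requirements.in       # register the new package in the requirements input file
git push origin my_feature_branch                  # open a pull request; CI regenerates the lock files
python get_requirements.py --pr=[your PR number]   # fetch the regenerated requirement files once the build succeeds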

.github/workflows/docs.yml

Lines changed: 4 additions & 4 deletions
@@ -1,9 +1,9 @@
 name: testcontainers documentation
 on:
   push:
-    branches: [master]
+    branches: [main]
   pull_request:
-    branches: [master]
+    branches: [main]
 
 jobs:
   build:
@@ -15,10 +15,10 @@ jobs:
       with:
         python-version: "3.10"
         cache: pip
-        cache-dependency-path: requirements/3.10.txt
+        cache-dependency-path: requirements/ubuntu-latest-3.10.txt
     - name: Install Python dependencies
       run: |
         pip install --upgrade pip
-        pip install -r requirements/3.10.txt
+        pip install -r requirements/ubuntu-latest-3.10.txt
     - name: Build documentation
       run: make docs

.github/workflows/main.yml

Lines changed: 44 additions & 38 deletions
@@ -1,57 +1,62 @@
 name: testcontainers packages
 on:
   push:
-    branches: [master]
+    branches: [main]
   pull_request:
-    branches: [master]
+    branches: [main]
 
 jobs:
   build:
     strategy:
       matrix:
-        python-version:
-        - "3.7"
-        - "3.8"
-        - "3.9"
-        - "3.10"
-        - "3.11"
+        runtime:
+        - machine: ubuntu-latest
+          python-version: "3.7"
+        - machine: ubuntu-latest
+          python-version: "3.8"
+        - machine: ubuntu-latest
+          python-version: "3.9"
+        - machine: ubuntu-latest
+          python-version: "3.10"
+        - machine: ubuntu-latest
+          python-version: "3.11"
         component:
-        - arangodb
-        - azurite
-        - clickhouse
-        - compose
-        - core
-        - elasticsearch
-        - google
-        - kafka
-        - keycloak
-        - localstack
-        - meta
-        - minio
-        - mongodb
-        - mssql
-        - mysql
-        - neo4j
-        - nginx
-        - opensearch
-        - oracle
-        - postgres
-        - rabbitmq
-        - redis
-        - selenium
-    runs-on: ubuntu-latest
+        - arangodb
+        - azurite
+        - clickhouse
+        - compose
+        - core
+        - elasticsearch
+        - google
+        - kafka
+        - keycloak
+        - localstack
+        - meta
+        - minio
+        - mongodb
+        - mssql
+        - mysql
+        - neo4j
+        - nginx
+        - opensearch
+        - oracle
+        - postgres
+        - rabbitmq
+        - redis
+        - selenium
+    runs-on: ${{ matrix.runtime.machine }}
     steps:
     - uses: actions/checkout@v3
-    - name: Setup python ${{ matrix.python-version }}
+    - name: Setup python ${{ matrix.runtime.python-version }}
      uses: actions/setup-python@v4
       with:
-        python-version: ${{ matrix.python-version }}
+        python-version: ${{ matrix.runtime.python-version }}
         cache: pip
-        cache-dependency-path: ${{ format('requirements/{0}.txt', matrix.python-version) }}
+        cache-dependency-path: ${{ format('requirements/{0}-{1}.txt', matrix.runtime.machine, matrix.runtime.python-version) }}
     - name: Install Python dependencies
       run: |
         pip install --upgrade pip
-        pip install -r requirements/${{ matrix.python-version }}.txt
+        pip install -r requirements/${{ matrix.runtime.machine }}-${{ matrix.runtime.python-version }}.txt
     - name: Run docker diagnostics
       if: matrix.component == 'core'
       run: |
@@ -76,9 +81,10 @@ jobs:
     - name: Upload the package to pypi
       if: >
         github.event_name == 'push'
-        && github.ref == 'refs/heads/master'
+        && github.ref == 'refs/heads/main'
         && github.repository_owner == 'testcontainers'
-        && matrix.python-version == '3.10'
+        && matrix.runtime.python-version == '3.10'
+        && matrix.runtime.machine == 'ubuntu-latest'
       env:
         TWINE_USERNAME: __token__
         TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}

.github/workflows/requirements.yml

Lines changed: 24 additions & 16 deletions
@@ -1,35 +1,43 @@
 name: testcontainers requirements
 on:
   push:
-    branches: [master]
+    branches: [main]
   pull_request:
-    branches: [master]
+    branches: [main]
 
 jobs:
   requirements:
     strategy:
+      fail-fast: false
       matrix:
-        python-version:
-        - "3.7"
-        - "3.8"
-        - "3.9"
-        - "3.10"
-        - "3.11"
-    runs-on: ubuntu-latest
+        runtime:
+        - machine: ubuntu-latest
+          python-version: "3.7"
+        - machine: ubuntu-latest
+          python-version: "3.8"
+        - machine: ubuntu-latest
+          python-version: "3.9"
+        - machine: ubuntu-latest
+          python-version: "3.10"
+        - machine: ubuntu-latest
+          python-version: "3.11"
+        - machine: windows-latest
+          python-version: "3.10"
+        - machine: macos-latest
+          python-version: "3.10"
+    runs-on: ${{ matrix.runtime.machine }}
     steps:
     - uses: actions/checkout@v3
-    - name: Setup python ${{ matrix.python-version }}
+    - name: Setup python ${{ matrix.runtime.python-version }}
       uses: actions/setup-python@v4
       with:
-        python-version: ${{ matrix.python-version }}
+        python-version: ${{ matrix.runtime.python-version }}
     - name: Update pip and install pip-tools
       run: pip install --upgrade pip pip-tools
     - name: Build requirements
-      run: |
-        rm requirements/${{ matrix.python-version }}.txt
-        pip-compile --resolver=backtracking -v --upgrade -o requirements/${{ matrix.python-version }}.txt
+      run: pip-compile --resolver=backtracking -v --upgrade -o requirements.txt
     - name: Store requirements as artifact
       uses: actions/upload-artifact@v3
       with:
-        name: requirements-${{ matrix.python-version }}.txt
-        path: requirements/${{ matrix.python-version }}.txt
+        name: requirements-${{ matrix.runtime.machine }}-${{ matrix.runtime.python-version }}.txt
+        path: requirements.txt

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -72,3 +72,4 @@ venv
 .DS_Store
 .python-version
 .env
+.github-token

Makefile

Lines changed: 2 additions & 10 deletions
@@ -1,7 +1,6 @@
 PYTHON_VERSIONS = 3.7 3.8 3.9 3.10 3.11
 PYTHON_VERSION ?= 3.10
 IMAGE = testcontainers-python:${PYTHON_VERSION}
-REQUIREMENTS = $(addprefix requirements/,${PYTHON_VERSIONS:=.txt})
 RUN = docker run --rm -it
 # Get all directories that contain a setup.py and get the directory name.
 PACKAGES = $(subst /,,$(dir $(wildcard */setup.py)))
@@ -26,7 +25,7 @@ ${DISTRIBUTIONS} : %/dist : %/setup.py
 # Targets to run the test suite for each package.
 tests : ${TESTS}
 ${TESTS} : %/tests :
-	pytest -svx --cov-report=term-missing --cov=testcontainers.$* --tb=short $*/tests
+	pytest -svx --cov-report=term-missing --cov=testcontainers.$* --tb=short --strict-markers $*/tests
 
 # Targets to lint the code.
 lint : ${LINT}
@@ -43,7 +42,7 @@ ${UPLOAD} : %/upload :
 	fi
 
 # Targets to build docker images
-image: requirements/${PYTHON_VERSION}.txt
+image: requirements/ubunut-latest-${PYTHON_VERSION}.txt
 	docker build --build-arg version=${PYTHON_VERSION} -t ${IMAGE} .
 
 # Targets to run tests in docker containers
@@ -63,13 +62,6 @@ doctest : ${DOCTESTS}
 ${DOCTESTS} : %/doctest :
 	sphinx-build -b doctest -c doctests $* docs/_build
 
-# Targets to build requirement files
-requirements : ${REQUIREMENTS}
-${REQUIREMENTS} : requirements/%.txt : requirements.in */setup.py
-	mkdir -p $(dir $@)
-	${RUN} -w /workspace -v `pwd`:/workspace --platform=linux/amd64 python:$* bash -c \
-		"pip install pip-tools && pip-compile --resolver=backtracking -v --upgrade -o $@ $<"
-
 # Remove any generated files.
 clean :
 	rm -rf docs/_build

README.rst

Lines changed: 1 addition & 8 deletions
@@ -108,11 +108,4 @@ Testcontainers is a collection of `implicit namespace packages <https://peps.pyt
 Contributing a New Feature
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-You want to contribute a new feature or container? Great! You can do that in six steps.
-
-1. Create a new feature directory and populate it with the [package structure]_ as described above. Copying one of the existing features is likely the best way to get started.
-2. Implement the new feature (typically in :code:`__init__.py`) and corresponding tests.
-3. Add a line :code:`-e file:[feature name]` to :code:`requirements.in` and run :code:`make requirements`. This command will find any new requirements and generate lock files to ensure reproducible builds (see the `pip-tools <https://pip-tools.readthedocs.io/en/latest/>`__ documentation for details). Then run :code:`pip install -r requirements/[your python version].txt` to install the new requirements.
-4. Update the feature :code:`README.rst` and add it to the table of contents (:code:`toctree` directive) in the top-level :code:`README.rst`.
-5. Add a line :code:`[feature name]` to the list of components in the GitHub Action workflow in :code:`.github/workflows/main.yml` to run tests, build, and publish your package when pushed to the :code:`master` branch.
-6. Rebase your development branch on :code:`master` (or merge :code:`master` into your development branch).
+You want to contribute a new feature or container? Great! You can do that in six steps as outlined `here <https://github.com/testcontainers/testcontainers-python/blob/main/.github/PULL_REQUEST_TEMPLATE/new_container.md>__`.

get_requirements.py

Lines changed: 94 additions & 0 deletions
@@ -0,0 +1,94 @@
import argparse
import io
import pathlib
import requests
import shutil
import tempfile
import zipfile


def __main__() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument("--owner", default="testcontainers")
    parser.add_argument("--repo", default="testcontainers-python")
    parser.add_argument("--run", help="GitHub Action run id")
    parser.add_argument("--pr", help="GitHub PR number")
    parser.add_argument("--branch", default="main")
    parser.add_argument("--token", help="GitHub autentication token")
    args = parser.parse_args()

    # Get an access token.
    if args.token:
        token = args.token
    elif (path := pathlib.Path(".github-token")).is_file():
        token = path.read_text().strip()
    else:
        token = input("we need a GitHub access token to fetch the requirements; please visit "
                      "https://github.com/settings/tokens/new, create a token with `public_repo` "
                      "scope, and paste it here: ").strip()
        cache = input("do you want to cache the token in a `.github-token` file [Ny]? ")
        if cache.lower().startswith("y"):
            path.write_text(token)

    headers = {
        "Authorization": f"Bearer {token}",
    }
    base_url = f"https://api.github.com/repos/{args.owner}/{args.repo}"

    if args.run:  # Run id was specified.
        run = args.run
    elif args.pr:  # PR was specified, let's get the most recent run id.
        print(f"fetching most recent commit for PR #{args.pr}")
        response = requests.get(f"{base_url}/pulls/{args.pr}", headers=headers)
        response.raise_for_status()
        response = response.json()
        head_sha = response["head"]["sha"]
    else:  # Nothing was specified, let's get the most recent run id on the main branch.
        print(f"fetching most recent commit for branch `{args.branch}`")
        response = requests.get(f"{base_url}/branches/{args.branch}", headers=headers)
        response.raise_for_status()
        response = response.json()
        head_sha = response["commit"]["sha"]

    # List all completed runs and find the one that generated the requirements.
    response = requests.get(f"{base_url}/actions/runs", headers=headers, params={
        "head_sha": head_sha,
        "status": "success",
    })
    response.raise_for_status()
    response = response.json()

    # Get the requirements run.
    runs = [run for run in response["workflow_runs"] if
            run["path"].endswith("requirements.yml")]
    if len(runs) != 1:
        raise RuntimeError(f"could not identify unique workflow run: {runs}")
    run = runs[0]["id"]

    # Get all the artifacts.
    print(f"fetching artifacts for run {run} ...")
    url = f"{base_url}/actions/runs/{run}/artifacts"
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    response = response.json()
    artifacts = response["artifacts"]
    print(f"discovered {len(artifacts)} artifacts")

    # Get the content for each artifact and save it.
    for artifact in artifacts:
        name: str = artifact["name"]
        name = name.removeprefix("requirements-")
        print(f"fetching artifact {name} ...")
        response = requests.get(artifact["archive_download_url"], headers=headers)
        response.raise_for_status()
        with zipfile.ZipFile(io.BytesIO(response.content)) as zip, \
                tempfile.TemporaryDirectory() as tempdir:
            zip.extract("requirements.txt", tempdir)
            shutil.move(pathlib.Path(tempdir) / "requirements.txt",
                        pathlib.Path("requirements") / name)

    print("done")


if __name__ == "__main__":
    __main__()
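
For orientation (not part of the commit), the helper above is run from the repository root after the requirements workflow has succeeded; the flags are those defined by its argument parser, and bracketed values remain placeholders:

python get_requirements.py                         # lock files from the latest successful run on `main`
python get_requirements.py --pr=[your PR number]   # lock files generated for a specific pull request
python get_requirements.py --token=[token]         # pass a token explicitly instead of `.github-token` or the interactive prompt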
