Skip to content

Commit 2d6ae56

Browse files
authored
[NEW] Python Project Generator v2.7.0
Release v2.7.0
2 parents 8dfe749 + 0f22f7b commit 2d6ae56

File tree

35 files changed

+1820
-305
lines changed

35 files changed

+1820
-305
lines changed

.github/workflows/dev_pr_validation.yml

Lines changed: 52 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -42,17 +42,65 @@ jobs:
4242

4343
# Run Integration tests
4444
- name: 'Run Integration tests'
45+
shell: bash
4546
run: |
47+
set -o pipefail
4648
# leverage uv to install other runtime test dependencies in the system site-packages!
4749
4850
uv pip install 'tox<4.0' # integration tests dependency
4951
5052
# Isolate flaky tests
51-
uv run pytest -ra -vvs --run-slow -k via_build_module
52-
uv run pytest -ra -vvs --run-slow -k test_build_creates_artifacts
53+
uv run pytest -ra -vvs --run-slow -k via_build_module 2>&1 | tee integration_test_via_build_module.log
54+
uv run pytest -ra -vvs --run-slow -k test_build_creates_artifacts 2>&1 | tee integration_test_build_creates_artifacts.log
55+
56+
# Isolate lint tests to prevent race condition with build tests
57+
uv run pytest -ra -vvs --run-slow -k test_lint_passes 2>&1 | tee integration_test_lint_passes.log
58+
59+
# Isolate problematic build_backend_sdist tests for debugging
60+
uv run pytest -ra -vvs --run-slow -k build_backend_sdist 2>&1 | tee integration_test_build_backend_sdist.log
61+
62+
# Run remaining Integration Tests and Unit Tests for sanity
63+
uv run pytest -ra -n auto --run-requires_uv --run-slow --run-network_bound -vvs -k 'test_cli' 2>&1 | tee integration_test_remaining.log
5364
54-
# Run everything once again for sanity
55-
uv run pytest -ra -n auto --run-requires_uv --run-slow --run-network_bound -vvs -k 'test_cli or build_backend_sdist or test_build_creates_artifacts or test_lint_passes'
65+
- name: "Combine Integration Test Logs with Headers"
66+
if: always() # Process logs even if tests fail
67+
run: |
68+
echo "================================================================================" > combined_integration_tests.log
69+
echo "PYTEST INVOCATION 1: via_build_module tests" >> combined_integration_tests.log
70+
echo "================================================================================" >> combined_integration_tests.log
71+
cat integration_test_via_build_module.log >> combined_integration_tests.log
72+
echo "" >> combined_integration_tests.log
73+
74+
echo "================================================================================" >> combined_integration_tests.log
75+
echo "PYTEST INVOCATION 2: test_build_creates_artifacts tests" >> combined_integration_tests.log
76+
echo "================================================================================" >> combined_integration_tests.log
77+
cat integration_test_build_creates_artifacts.log >> combined_integration_tests.log
78+
echo "" >> combined_integration_tests.log
79+
80+
echo "================================================================================" >> combined_integration_tests.log
81+
echo "PYTEST INVOCATION 3: test_lint_passes tests (isolated to prevent race condition)" >> combined_integration_tests.log
82+
echo "================================================================================" >> combined_integration_tests.log
83+
cat integration_test_lint_passes.log >> combined_integration_tests.log
84+
echo "" >> combined_integration_tests.log
85+
86+
echo "================================================================================" >> combined_integration_tests.log
87+
echo "PYTEST INVOCATION 4: build_backend_sdist tests (isolated for debugging)" >> combined_integration_tests.log
88+
echo "================================================================================" >> combined_integration_tests.log
89+
cat integration_test_build_backend_sdist.log >> combined_integration_tests.log
90+
echo "" >> combined_integration_tests.log
91+
92+
echo "================================================================================" >> combined_integration_tests.log
93+
echo "PYTEST INVOCATION 5: Remaining test suite (test_cli only)" >> combined_integration_tests.log
94+
echo "================================================================================" >> combined_integration_tests.log
95+
cat integration_test_remaining.log >> combined_integration_tests.log
96+
97+
- name: "Upload Combined Integration Test Log as Artifact"
98+
if: always() # Upload logs even if tests fail
99+
uses: actions/upload-artifact@v4
100+
with:
101+
name: integration-tests-log
102+
path: combined_integration_tests.log
103+
if-no-files-found: warn
56104

57105
# CROSS PLATFORM TESTING: 15s on Ubuntu, 25s on macOS, 35s on Windows
58106
cross_platform_tests:

.github/workflows/test-job.yml

Lines changed: 54 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -88,13 +88,13 @@ jobs:
8888

8989
- name: Install uv
9090
if: matrix.platform != 'windows-latest'
91-
run: curl -LsSf https://astral.sh/uv/install.sh | sh
91+
run: curl -LsSf https://astral.sh/uv/0.8.0/install.sh | sh
9292

9393
- name: Install uv
9494
if: matrix.platform == 'windows-latest'
9595
uses: astral-sh/setup-uv@v5
9696
with:
97-
version: "latest"
97+
version: "0.8.0"
9898

9999
# ### Sanity Check that folder is Compatible for Python Distro / Build ###
100100
# - run: uvx pyroma --directory .
@@ -237,16 +237,65 @@ jobs:
237237
run: |
238238
uv venv check-env
239239
. check-env/${{ env.VENV_ACTIVATE }}
240-
uv pip install poetry-core pyroma 'twine >=5.0.0, <6.0.0'
241-
# --active
240+
uv pip install poetry-core pyroma 'twine >=6.0.0, <7.0.0'
241+
242+
echo "# ENVIRONMENT INFO" >> $GITHUB_STEP_SUMMARY
243+
echo "- Python version in verification env: $(python --version)" >> $GITHUB_STEP_SUMMARY
244+
echo "- UV version: $(uv --version)" >> $GITHUB_STEP_SUMMARY
245+
echo "- Twine version: $(python -m twine --version)" >> $GITHUB_STEP_SUMMARY
246+
247+
echo "## WHEEL ANALYSIS" >> $GITHUB_STEP_SUMMARY
248+
echo "[DEBUG] Checking wheel contents with unzip:"
249+
unzip -l ../dist/*.whl | head -30 || echo "Failed to list wheel contents"
250+
251+
echo "[DEBUG] === METADATA EXTRACTION ==="
252+
253+
echo "[DEBUG] === METADATA VERSION CHECK ==="
254+
METADATA_VERSION=$(unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null | grep '^Metadata-Version:' || echo "Metadata-Version: NOT FOUND")
255+
echo "[DEBUG] $METADATA_VERSION" >> $GITHUB_STEP_SUMMARY
256+
257+
echo "[DEBUG] Raw METADATA content from wheel:"
258+
unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null | head -20 || echo "Could not extract METADATA"
259+
260+
echo '---' >> $GITHUB_STEP_SUMMARY
261+
echo "**Raw Metadata File**" >> $GITHUB_STEP_SUMMARY
262+
echo '```' >> $GITHUB_STEP_SUMMARY
263+
unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null >> $GITHUB_STEP_SUMMARY
264+
echo '```' >> $GITHUB_STEP_SUMMARY
265+
echo '---' >> $GITHUB_STEP_SUMMARY
266+
267+
268+
269+
echo "[DEBUG] === NAME/VERSION FIELD CHECK ==="
270+
NAME_FIELD=$(unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null | grep '^Name:' || echo "Name: NOT FOUND")
271+
VERSION_FIELD=$(unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null | grep '^Version:' || echo "Version: NOT FOUND")
272+
echo "[DEBUG] $NAME_FIELD"
273+
echo "[DEBUG] $VERSION_FIELD"
242274
243275
# Check .tar.gz file using 'pyroma' command
244276
echo "[INFO] Running pyroma on the sdist"
245277
pyroma --file "$(ls ../dist/*.tar.gz)"
246278
247279
# Check both .whl and .tar.gz files using 'twine check' command
248280
echo "[INFO] Running twine check on the sdist and wheel"
249-
python -m twine check ../dist/*
281+
python -m twine check ../dist/* || {
282+
echo "[ERROR] ========================================"
283+
echo "[ERROR] TWINE CHECK FAILED - DIAGNOSTIC INFO"
284+
echo "[ERROR] ========================================"
285+
echo "[ERROR] Environment: Python $(python --version), UV $(uv --version), Twine $(python -m twine --version)"
286+
echo "[ERROR] Wheel file: $(ls ../dist/*.whl)"
287+
echo "[ERROR] Metadata-Version: $(unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null | grep '^Metadata-Version:' || echo 'NOT FOUND')"
288+
echo "[ERROR] Name field: $(unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null | grep '^Name:' || echo 'NOT FOUND')"
289+
echo "[ERROR] Version field: $(unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null | grep '^Version:' || echo 'NOT FOUND')"
290+
echo "[ERROR] ========================================"
291+
echo "[ERROR] Full METADATA content (first 30 lines):"
292+
unzip -p ../dist/*.whl '*/METADATA' 2>/dev/null | head -30 || echo "Could not extract METADATA"
293+
echo "[ERROR] ========================================"
294+
echo "[ERROR] Wheel structure (metadata files only):"
295+
unzip -l ../dist/*.whl | grep -E "(METADATA|PKG-INFO|WHEEL)" || echo "No metadata files found"
296+
echo "[ERROR] ========================================"
297+
exit 1
298+
}
250299
251300
252301
######## PHASE 4: INSTALL WHEELS AND RUN SANITY CHECKS ########

CHANGELOG.md

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,42 @@
44
55
This project adheres to [Semantic Versioning](https://semver.org/).
66

7+
Only Major releases (bumps) contain breaking changes.
8+
9+
10+
## [2.7.0] (2025-11-02)
11+
12+
The release introduces a complete observability stack
13+
with Grafana + Loki + Promtail integration, delivers critical bug fixes for interactive configuration, and makes automated tests more robust.
14+
15+
### Changes
16+
17+
#### feature
18+
19+
- add complete observability stack integration via `include_observability` parameter
20+
- generate Grafana + Loki + Promtail Docker Compose orchestration for production monitoring
21+
22+
#### fix
23+
24+
- preserve user config values for derived fields in interactive mode
25+
- use template variables to replace hard-coded relics in generated projects
26+
27+
#### ci
28+
29+
- enhance PR validation pipeline with isolated test execution and race condition prevention
30+
- isolate build backend tests for debugging complex build issues
31+
- add cross-worker conflict prevention in pytest-xdist
32+
33+
#### test
34+
35+
- verify more cookiecutter behaviors and sanity checks
36+
- reorganize tests with context-specific modules for better maintainability
37+
38+
#### documentation
39+
40+
- add comprehensive observability stack documentation with setup guides
41+
- improve template consistency across generated projects
42+
743

844
## [2.6.3] (2025-06-21)
945

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
[![Build Status](https://img.shields.io/github/actions/workflow/status/boromir674/cookiecutter-python-package/test.yaml?link=https%3A%2F%2Fgithub.com%2Fboromir674%2Fcookiecutter-python-package%2Factions%2Fworkflows%2Ftest.yaml%3Fquery%3Dbranch%253Amaster)](https://github.com/boromir674/cookiecutter-python-package/actions/workflows/test.yaml?query=branch%3Amaster) [![Coverage](https://img.shields.io/codecov/c/github/boromir674/cookiecutter-python-package/master?logo=codecov)](https://app.codecov.io/gh/boromir674/cookiecutter-python-package) [![Docs](https://img.shields.io/readthedocs/python-package-generator/master?logo=readthedocs&logoColor=lightblue)](https://python-package-generator.readthedocs.io/en/master/) [![Maintainability](https://api.codeclimate.com/v1/badges/1d347d7dfaa134fd944e/maintainability)](https://codeclimate.com/github/boromir674/cookiecutter-python-package/maintainability)
44
[![Release Version](https://img.shields.io/pypi/v/cookiecutter_python)](https://pypi.org/project/cookiecutter-python/) [![Wheel](https://img.shields.io/pypi/wheel/cookiecutter-python?color=green&label=wheel)](https://pypi.org/project/cookiecutter-python) [![Tech Debt](https://img.shields.io/codeclimate/tech-debt/boromir674/cookiecutter-python-package)](https://codeclimate.com/github/boromir674/cookiecutter-python-package/) [![Codacy](https://app.codacy.com/project/badge/Grade/5be4a55ff1d34b98b491dc05e030f2d7)](https://app.codacy.com/gh/boromir674/cookiecutter-python-package/dashboard?utm_source=github.com&utm_medium=referral&utm_content=boromir674/cookiecutter-python-package&utm_campaign=Badge_Grade)
55
[![Supported Versions](https://img.shields.io/pypi/pyversions/cookiecutter-python?color=blue&label=python&logo=python&logoColor=%23ccccff)](https://pypi.org/project/cookiecutter-python)
6-
[![PyPI Stats](https://img.shields.io/pypi/dm/cookiecutter-python?logo=pypi&logoColor=%23849ED9&color=%23849ED9)](https://pypistats.org/packages/cookiecutter-python) [![Commits Since Tag](https://img.shields.io/github/commits-since/boromir674/cookiecutter-python-package/v2.6.3/master?color=blue&logo=github)](https://github.com/boromir674/cookiecutter-python-package/compare/v2.6.3..master) [![Commits Since Release](https://img.shields.io/github/commits-since/boromir674/cookiecutter-python-package/latest?color=blue&logo=semver&sort=semver)](https://github.com/boromir674/cookiecutter-python-package/releases)
6+
[![PyPI Stats](https://img.shields.io/pypi/dm/cookiecutter-python?logo=pypi&logoColor=%23849ED9&color=%23849ED9)](https://pypistats.org/packages/cookiecutter-python) [![Commits Since Tag](https://img.shields.io/github/commits-since/boromir674/cookiecutter-python-package/v2.7.0/master?color=blue&logo=github)](https://github.com/boromir674/cookiecutter-python-package/compare/v2.7.0..master) [![Commits Since Release](https://img.shields.io/github/commits-since/boromir674/cookiecutter-python-package/latest?color=blue&logo=semver&sort=semver)](https://github.com/boromir674/cookiecutter-python-package/releases)
77
[![License](https://img.shields.io/github/license/boromir674/cookiecutter-python-package)](https://github.com/boromir674/cookiecutter-python-package/blob/master/LICENSE) [![OpenSSF](https://bestpractices.coreinfrastructure.org/projects/5988/badge)](https://bestpractices.coreinfrastructure.org/en/projects/5988) [![Ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://docs.astral.sh/ruff/) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
88

99

pyproject.toml

Lines changed: 3 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ build-backend = "poetry.core.masonry.api"
1414
name = "cookiecutter_python"
1515
### ... ###
1616

17-
version = "2.6.3"
17+
version = "2.7.0"
1818
description = "1-click Generator of Python Project, from Template with streamlined \"DevOps\" using a powerful CI/CD Pipeline."
1919
readme = "README.md"
2020
license = "AGPL-3.0-only"
@@ -41,23 +41,13 @@ include = [
4141
"LICENSE",
4242
"README.md",
4343
"CONTRIBUTING.md",
44-
"CHANGELOG.rst",
44+
"CHANGELOG.md",
4545
# "Dockerfile",
4646
]
4747
exclude = [
4848
"docs/*",
4949
"requirements/*",
5050
"scripts/*.py",
51-
"tox.ini",
52-
".circleci/config.yml",
53-
".coveragerc",
54-
".DS_Store",
55-
".gitignore",
56-
".prospector.yml",
57-
".pylintrc",
58-
".readthedocs.yml",
59-
".scrutinizer.yml",
60-
".travis.yml"
6151
]
6252

6353
[tool.poetry.scripts]
@@ -74,7 +64,7 @@ maintainers = [
7464
license = {text = "AGPL-3.0-only"}
7565

7666
name = "cookiecutter_python"
77-
version = "2.6.3"
67+
version = "2.7.0"
7868
description = "1-click Generator of Python Project, from Template with streamlined \"DevOps\" using a powerful CI/CD Pipeline."
7969
readme = "README.md"
8070
# keywords = []
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
1-
__version__ = '2.6.3'
1+
__version__ = '2.7.0'
22

33
from . import _logging # noqa

src/cookiecutter_python/backend/helpers.py

Lines changed: 68 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
"""Interactive Dialog wizards to gather User Input for Context variables."""
2+
13
import json
24
import logging
35
import typing as t
@@ -69,51 +71,73 @@ def parse_context(config_file: str):
6971
**{k: v[0] for k, v in choices.items() if k not in user_default_context},
7072
)
7173

74+
# Render cookiecutter.json again with context to resolve derived fields
75+
# This ensures derived fields like pkg_name get computed properly
76+
template = env.get_template('cookiecutter.json')
77+
rendered_with_context = template.render({'cookiecutter': context_defaults})
78+
resolved_cook_json: t.Mapping[str, t.Any] = json.loads(rendered_with_context)
79+
80+
# Update context_defaults with resolved derived fields, but preserve user config values
81+
for key, value in resolved_cook_json.items():
82+
# Only update if the key wasn't explicitly provided by user
83+
if key not in user_default_context:
84+
context_defaults[key] = value
85+
7286
pipe = InteractiveDialogsPipeline()
7387

74-
# assert choices['rtd_python_version'] == [], f"DEBUG: {choices['rtd_python_version']}"
75-
res = pipe.process(
76-
[
77-
{
78-
"project_name": context_defaults['project_name'],
79-
"project_type": {
80-
'default': context_defaults['project_type'],
81-
'choices': choices['project_type'],
82-
},
83-
"full_name": context_defaults['full_name'],
84-
"author_email": context_defaults['author_email'],
85-
"github_username": context_defaults['github_username'],
86-
"project_short_description": context_defaults['project_short_description'],
87-
# "release_date": context_defaults['release_date'],
88-
# "year": context_defaults['year'],
89-
"version": context_defaults['version'],
90-
"initialize_git_repo": {
91-
'default': context_defaults['initialize_git_repo'],
92-
'choices': choices['initialize_git_repo'],
93-
},
94-
"supported-interpreters": {
95-
# 'default': context_defaults['initialize_git_repo'],
96-
'choices': [
97-
(choice, True)
98-
for choice in user_interpreters.get(
99-
'supported-interpreters',
100-
["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"],
101-
)
102-
],
103-
},
104-
"docs_builder": {
105-
'default': context_defaults['docs_builder'],
106-
'choices': choices['docs_builder'],
107-
},
108-
"rtd_python_version": {
109-
'default': context_defaults['rtd_python_version'],
110-
'choices': choices['rtd_python_version'],
111-
},
112-
"cicd": {
113-
'default': context_defaults['cicd'],
114-
'choices': choices['cicd'],
115-
},
88+
# Build the context dynamically to include all fields
89+
interactive_context = {}
90+
91+
# Simple fields (no choices)
92+
simple_fields = [
93+
'project_name',
94+
'project_slug',
95+
'pkg_name',
96+
'repo_name',
97+
'readthedocs_project_slug',
98+
'docker_image',
99+
'full_name',
100+
'author',
101+
'author_email',
102+
'github_username',
103+
'project_short_description',
104+
'pypi_subtitle',
105+
'version',
106+
]
107+
108+
for field in simple_fields:
109+
if field in context_defaults:
110+
interactive_context[field] = context_defaults[field]
111+
112+
# Choice fields (with options)
113+
choice_fields = [
114+
'project_type',
115+
'initialize_git_repo',
116+
'docs_builder',
117+
'rtd_python_version',
118+
'cicd',
119+
# Include Grafana, Loki stack no/yes
120+
'include_observability', # ["no", "yes"]
121+
]
122+
123+
for field in choice_fields:
124+
if field in choices:
125+
interactive_context[field] = {
126+
'default': context_defaults[field],
127+
'choices': choices[field],
116128
}
117-
]
118-
)
129+
130+
# Special handling for interpreters
131+
interactive_context["supported-interpreters"] = {
132+
'choices': [
133+
(choice, True)
134+
for choice in user_interpreters.get(
135+
'supported-interpreters',
136+
["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"],
137+
)
138+
],
139+
}
140+
141+
# assert choices['rtd_python_version'] == [], f"DEBUG: {choices['rtd_python_version']}"
142+
res = pipe.process([interactive_context])
119143
return res

src/cookiecutter_python/cookiecutter.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,5 +28,6 @@
2828
},
2929
"docs_builder": ["sphinx", "mkdocs"],
3030
"rtd_python_version": ["3.8", "3.9", "3.10", "3.11", "3.12"],
31-
"cicd": ["stable", "experimental"]
31+
"cicd": ["stable", "experimental"],
32+
"include_observability": ["no", "yes"]
3233
}

0 commit comments

Comments
 (0)