diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..7c5e50a --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,77 @@ +name: CI + +on: + pull_request: + branches: [main] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: astral-sh/setup-uv@v4 + with: + version: "latest" + + - run: uv sync + + - name: Ruff check + run: uv run ruff check src/ tests/ + + - name: Ruff format check + run: uv run ruff format --check src/ tests/ + + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.11", "3.12", "3.13", "3.14"] + steps: + - uses: actions/checkout@v4 + + - uses: astral-sh/setup-uv@v4 + with: + version: "latest" + + - run: uv sync --python ${{ matrix.python-version }} + + - name: Run unit tests + run: uv run pytest -m "not integration" -v + + version-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Check version has been bumped + run: | + PR_VERSION=$(grep '^version' pyproject.toml | head -1 | sed 's/.*"\(.*\)".*/\1/') + LATEST_TAG=$(git tag --sort=-v:refname | head -1 | sed 's/^v//') + + if [ -z "$LATEST_TAG" ]; then + echo "No existing tags. Version $PR_VERSION will be the first release." + exit 0 + fi + + echo "PR version: $PR_VERSION" + echo "Latest release: $LATEST_TAG" + + if [ "$PR_VERSION" = "$LATEST_TAG" ]; then + echo "" + echo "Error: Version has not been bumped." + echo "Update the version in pyproject.toml before merging." + exit 1 + fi + + # Compare versions: PR version must be greater than latest tag + HIGHER=$(printf '%s\n%s' "$LATEST_TAG" "$PR_VERSION" | sort -V | tail -1) + if [ "$HIGHER" != "$PR_VERSION" ]; then + echo "" + echo "Error: PR version ($PR_VERSION) is not higher than latest release ($LATEST_TAG)." 
+ exit 1 + fi + + echo "Version bump confirmed: $LATEST_TAG -> $PR_VERSION" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..70e4363 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,29 @@ +name: Release + +on: + push: + branches: [main] + +permissions: + contents: write + +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Get version from pyproject.toml + id: version + run: | + VERSION=$(grep '^version' pyproject.toml | head -1 | sed 's/.*"\(.*\)".*/\1/') + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + echo "tag=v$VERSION" >> "$GITHUB_OUTPUT" + + - name: Create tag and release + run: | + gh release create "${{ steps.version.outputs.tag }}" \ + --title "${{ steps.version.outputs.tag }}" \ + --generate-notes + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c2e2e9d --- /dev/null +++ b/.gitignore @@ -0,0 +1,220 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[codz] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py.cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +# Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +# poetry.lock +# poetry.toml + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. 
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control +# pdm.lock +# pdm.toml +.pdm-python +.pdm-build/ + +# pixi +# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. +# pixi.lock +# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one +# in the .venv directory. It is recommended not to include this directory in version control. +.pixi + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# Redis +*.rdb +*.aof +*.pid + +# RabbitMQ +mnesia/ +rabbitmq/ +rabbitmq-data/ + +# ActiveMQ +activemq-data/ + +# SageMath parsed files +*.sage.py + +# Environments +.env +.envrc +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +# .idea/ + +# Abstra +# Abstra is an AI-powered process automation framework. +# Ignore directories containing user credentials, local state, and settings. 
+# Learn more at https://abstra.io/docs +.abstra/ + +# Visual Studio Code +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore +# and can be added to the global gitignore or merged into this file. However, if you prefer, +# you could uncomment the following to ignore the entire vscode folder +# .vscode/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Marimo +marimo/_static/ +marimo/_lsp/ +__marimo__/ + +# Streamlit +.streamlit/secrets.toml + +# CDK (generated during integration tests) +.cdk.staging +cdk.out diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..9b38853 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} \ No newline at end of file diff --git a/PLAN.md b/PLAN.md new file mode 100644 index 0000000..54913b8 --- /dev/null +++ b/PLAN.md @@ -0,0 +1,112 @@ +# gds-idea-app-kit - Implementation Plan + +## Overview + +CLI tool (`idea-app`) for scaffolding and maintaining GDS IDEA web apps +deployed to AWS with Cognito auth behind an ALB. Replaces the template +repository pattern with a package-based approach. + +Install: `uv tool install "gds-idea-app-kit @ git+https://github.com/co-cddo/gds-idea-app-kit"` + +## Commands + +### `idea-app init [--python 3.13]` + +Creates `gds-idea-app-{app-name}/` with a fully scaffolded project. + +1. Validate inputs (framework: streamlit/dash/fastapi, name: alphanumeric + hyphens) +2. Strip `gds-idea-app-` prefix if user accidentally included it +3. mkdir `gds-idea-app-{app-name}/` +4. `cdk init app --language python --generate-only` (catch missing cdk, print install instructions) +5. `uv init` +6. 
Delete: requirements.txt, requirements-dev.txt, source.bat, hello.py, generated stack module +7. Overwrite app.py with our template +8. Rewrite test file to reference WebApp stack +9. Merge into pyproject.toml: CDK deps, gds-idea-app-kit as dev dep, [tool.gds-idea] manifest +10. Append to .gitignore +11. Copy framework files into app_src/ +12. Copy .devcontainer/, dev_mocks/ +13. Apply template variables ({{app_name}}, {{python_version}}) +14. uv sync +15. git add && git commit +16. Print next steps (including gh repo create / manual git remote instructions for co-cddo org) + +Default Python: 3.13, overridable with --python. +Directory naming: always `gds-idea-app-{name}`. + +### `idea-app update [--dry-run]` + +Updates tool-owned files in existing projects. + +1. Read [tool.gds-idea] from pyproject.toml +2. Warn if tool version is newer than manifest version (suggest upgrading) +3. For each tool-owned file: compare hash to manifest, overwrite if unchanged, skip if modified +4. Update manifest version +5. --dry-run shows changes without applying + +File ownership: + +| Category | Files | Behavior | +|----------|-------|----------| +| Tool-owned | app_src/Dockerfile, .devcontainer/*, dev_mocks/* | Overwrite if hash matches | +| Shared | Both pyproject.toml files | Skip if modified, warn | +| User-owned | app.py, app_src/*_app.py, cdk.json, tests/ | Never touch | + +### `idea-app smoke-test [--build-only] [--wait]` + +Docker build + health check. --build-only skips health check. --wait keeps container running. + +### `idea-app provide-role [--use-profile] [--duration N]` + +AWS credential provisioning for dev container. Default duration: 1 hour. 
+ +## Package structure + +``` +gds-idea-app-kit/ +├── pyproject.toml +├── src/ +│ └── gds_idea_app_kit/ +│ ├── __init__.py +│ ├── cli.py +│ ├── init.py +│ ├── update.py +│ ├── smoke_test.py +│ ├── provide_role.py +│ ├── manifest.py +│ └── templates/ +│ ├── common/ (app.py, gitignore-extra, devcontainer.json, docker-compose.yml) +│ ├── dev_mocks/ (dev_mock_authoriser.json, dev_mock_user.json) +│ ├── streamlit/ (Dockerfile, pyproject.toml, streamlit_app.py) +│ ├── dash/ (Dockerfile, pyproject.toml, dash_app.py) +│ └── fastapi/ (Dockerfile, pyproject.toml, fastapi_app.py) +└── tests/ + ├── conftest.py + ├── test_init.py + ├── test_update.py + ├── test_manifest.py + └── test_smoke_test.py +``` + +## Tech decisions + +- click for CLI framework (CliRunner for testing) +- tomlkit for pyproject.toml read/write (preserves formatting) +- boto3 for provide-role (direct dependency, not optional) +- Simple str.replace() for template variables, no Jinja2 +- cdk init runs first (requires empty dir), then uv init on top +- cdk.json comes from cdk init (always current feature flags), never updated by idea-app update +- Manifest stored in pyproject.toml [tool.gds-idea] section +- Repo naming enforced: gds-idea-app-{name} +- GitHub org: co-cddo (in printed instructions) +- Default Python: 3.13 (hardcoded in package, overridable with --python) +- On missing cdk: catch error, print install instructions (npm/brew) + +## Implementation phases + +1. Package skeleton (pyproject.toml, cli.py with click group, Ruff config, git init) +2. Template files (copy from dumper repo, add {{placeholders}}) +3. Core modules (manifest.py, init.py, update.py) +4. Port smoke_test.py and provide_role.py to click +5. Tests (unit: init/update/manifest, integration: smoke-test with Docker) +6. 
CI (GitHub Actions: lint, unit tests, integration tests with Docker) diff --git a/README.md b/README.md new file mode 100644 index 0000000..6adab68 --- /dev/null +++ b/README.md @@ -0,0 +1,197 @@ +# gds-idea-app-kit + +CLI tool for scaffolding and maintaining GDS IDEA web apps on AWS. + +Generates projects with: +- **Streamlit**, **Dash**, or **FastAPI** framework +- AWS CDK infrastructure (ECS Fargate behind ALB with Cognito auth) +- VS Code dev container for local development +- Production-ready multi-stage Dockerfile + +## Prerequisites + +Install with [Homebrew](https://brew.sh/): + +```bash +brew install uv node git docker aws-cdk +``` + +You also need SSH access to the `co-cddo` GitHub organisation (for private CDK construct dependencies). + +## Installation + +`idea-app` is installed as a global CLI tool, not as a per-project dependency: + +```bash +uv tool install "gds-idea-app-kit @ git+https://github.com/co-cddo/gds-idea-app-kit" +``` + +To upgrade to the latest version: + +```bash +uv tool upgrade gds-idea-app-kit +``` + +Verify it's working: + +```bash +idea-app --version +``` + +## New project + +Scaffold a new project with `idea-app init`: + +```bash +idea-app init streamlit my-dashboard +``` + +This creates a directory `gds-idea-app-my-dashboard/` containing: + +- `app.py` -- CDK entry point +- `cdk.json` -- CDK configuration +- `app_src/` -- your application code, Dockerfile, and dependencies +- `.devcontainer/` -- VS Code dev container configuration +- `dev_mocks/` -- mock auth data for local development +- `.gitignore` -- pre-configured for Python, CDK, and dev artifacts + +The tool runs `cdk init`, `uv init`, copies template files, installs CDK dependencies, and makes an initial git commit. All of this happens automatically. + +### Options + +```bash +idea-app init [--python 3.13] +``` + +- `framework`: `streamlit`, `dash`, or `fastapi` +- `app-name`: short name for your app (lowercase, hyphens ok). 
The `gds-idea-app-` prefix is added automatically. +- `--python`: Python version for the project (default: 3.13) + +### After init + +```bash +cd gds-idea-app-my-dashboard + +# Create the GitHub repo (requires gh CLI): +gh repo create co-cddo/gds-idea-app-my-dashboard --private --source . --push + +# Or add a remote manually: +git remote add origin git@github.com:co-cddo/gds-idea-app-my-dashboard.git +git push -u origin main +``` + +Then open the project in VS Code and reopen in the dev container when prompted. + +## Migrating an existing project + +If you have a project created from the [gds-idea-app-templates](https://github.com/co-cddo/gds-idea-app-templates) template repository, migrate it to `idea-app`: + +```bash +cd gds-idea-app-my-existing-project +idea-app migrate +``` + +The command is interactive and will: + +1. Read your existing `[tool.webapp]` configuration +2. Ask you to confirm before making changes +3. Build a manifest from your current tracked files +4. Remove old `template/` directory, `[project.scripts]`, and `[build-system]` sections +5. Offer to update your files to the latest templates (with a dry-run preview first) + +Run this on a clean branch so you can review the changes: + +```bash +git checkout -b migrate-to-idea-app +idea-app migrate +git diff +git add -A && git commit -m "Migrate to idea-app" +``` + +## Updating template files + +When `idea-app` is upgraded with new template changes (Dockerfile improvements, devcontainer updates, etc.), update your project: + +```bash +cd gds-idea-app-my-dashboard +idea-app update +``` + +The update command manages files like the Dockerfile, devcontainer config, and docker-compose. It does not touch your application code, `cdk.json`, or `pyproject.toml`. + +### How updates work + +Each tracked file is compared against the manifest hash from the last update: + +| File state | What happens | +|---|---| +| Unchanged since last update | Overwritten with the latest template | +| Locally modified | Skipped. 
A `.new` file is written alongside for you to review | +| Missing from project | Created | + +When files are skipped, you'll see instructions to compare and merge: + +``` +diff app_src/Dockerfile app_src/Dockerfile.new +``` + +### Options + +```bash +idea-app update [--dry-run] [--force] +``` + +- `--dry-run`: show what would change without writing anything +- `--force`: overwrite all files, including ones you've modified locally + +## Other commands + +### smoke-test + +Build and health-check the production Docker image: + +```bash +idea-app smoke-test # build + health check +idea-app smoke-test --build-only # just build, skip health check +idea-app smoke-test --wait # keep running after health check, press Enter to stop +``` + +### provide-role + +Provide AWS credentials to the dev container by assuming the configured IAM role: + +```bash +idea-app provide-role # assume role from [tool.webapp.dev] +idea-app provide-role --use-profile # pass through current AWS profile instead +idea-app provide-role --duration 7200 # session duration in seconds (default: 3600) +``` + +Configure the role ARN in your project's `pyproject.toml`: + +```toml +[tool.webapp.dev] +aws_role_arn = "arn:aws:iam::123456789012:role/your-dev-role" +aws_region = "eu-west-2" +``` + +## Development + +```bash +# Clone and install dev dependencies +git clone git@github.com:co-cddo/gds-idea-app-kit.git +cd gds-idea-app-kit +uv sync + +# Run unit tests +uv run pytest + +# Run integration tests (requires CDK and network access) +uv run pytest -m integration + +# Run all tests +uv run pytest -m "" + +# Lint and format +uv run ruff check src/ tests/ +uv run ruff format src/ tests/ +``` diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..f4c0646 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,57 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "gds-idea-app-kit" +version = "0.1.0" +description = "CLI tool for scaffolding and 
maintaining GDS IDEA web apps on AWS" +readme = "README.md" +requires-python = ">=3.11" +dependencies = [ + "click>=8.0", + "tomlkit>=0.12.0", + "boto3>=1.26.0", +] + +[project.scripts] +idea-app = "gds_idea_app_kit.cli:cli" + +[tool.hatch.build.targets.wheel] +packages = ["src/gds_idea_app_kit"] + +[tool.hatch.build] +include = [ + "src/gds_idea_app_kit/**/*.py", + "src/gds_idea_app_kit/templates/**", +] + +[tool.uv] +dev-dependencies = [ + "pytest>=8.0", + "ruff>=0.8.0", +] + +[tool.ruff] +line-length = 100 +target-version = "py311" +extend-exclude = ["src/gds_idea_app_kit/templates/**/*.template"] + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "N", # pep8-naming + "UP", # pyupgrade +] + +[tool.ruff.lint.isort] +known-first-party = ["gds_idea_app_kit"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +markers = [ + "integration: tests that require Docker (deselect with '-m \"not integration\"')", +] diff --git a/src/gds_idea_app_kit/__init__.py b/src/gds_idea_app_kit/__init__.py new file mode 100644 index 0000000..eaaa3d6 --- /dev/null +++ b/src/gds_idea_app_kit/__init__.py @@ -0,0 +1,14 @@ +"""GDS IDEA App Kit - CLI tool for scaffolding and maintaining web apps on AWS.""" + +from importlib.metadata import version + +__version__ = version("gds-idea-app-kit") + +# Default Python version for new projects. Update this when a new stable CPython is released. +DEFAULT_PYTHON_VERSION = "3.13" + +# GitHub org used in printed instructions for repo creation. 
+GITHUB_ORG = "co-cddo" + +# Prefix applied to all generated project directories: gds-idea-app-{name} +REPO_PREFIX = "gds-idea-app" diff --git a/src/gds_idea_app_kit/cli.py b/src/gds_idea_app_kit/cli.py new file mode 100644 index 0000000..a4b30bf --- /dev/null +++ b/src/gds_idea_app_kit/cli.py @@ -0,0 +1,72 @@ +"""CLI entry point for idea-app.""" + +import click + +from gds_idea_app_kit import DEFAULT_PYTHON_VERSION, __version__ + + +@click.group() +@click.version_option(version=__version__, prog_name="idea-app") +def cli(): + """GDS IDEA App Kit - scaffold and maintain web apps on AWS.""" + + +@cli.command() +@click.argument("framework", type=click.Choice(["streamlit", "dash", "fastapi"])) +@click.argument("app_name") +@click.option( + "--python", + "python_version", + default=DEFAULT_PYTHON_VERSION, + show_default=True, + help="Python version for the project.", +) +def init(framework: str, app_name: str, python_version: str): + """Scaffold a new project: idea-app init .""" + from gds_idea_app_kit.init import run_init + + run_init(framework=framework, app_name=app_name, python_version=python_version) + + +@cli.command() +@click.option("--dry-run", is_flag=True, help="Show what would change without applying.") +@click.option("--force", is_flag=True, help="Overwrite locally modified files.") +def update(dry_run: bool, force: bool): + """Update tool-owned files in an existing project.""" + from gds_idea_app_kit.update import run_update + + run_update(dry_run=dry_run, force=force) + + +@cli.command("smoke-test") +@click.option("--build-only", is_flag=True, help="Only build the Docker image, skip health check.") +@click.option("--wait", is_flag=True, help="Keep container running until Enter is pressed.") +def smoke_test(build_only: bool, wait: bool): + """Build and health-check the production Docker image.""" + from gds_idea_app_kit.smoke_test import run_smoke_test + + run_smoke_test(build_only=build_only, wait=wait) + + +@cli.command("provide-role") 
+@click.option("--use-profile", is_flag=True, help="Use current AWS profile directly.") +@click.option( + "--duration", + type=int, + default=3600, + show_default=True, + help="Session duration in seconds for role assumption.", +) +def provide_role(use_profile: bool, duration: int): + """Provide AWS credentials to the dev container.""" + from gds_idea_app_kit.provide_role import run_provide_role + + run_provide_role(use_profile=use_profile, duration=duration) + + +@cli.command() +def migrate(): + """Migrate an existing project from the old template to idea-app.""" + from gds_idea_app_kit.migrate import run_migrate + + run_migrate() diff --git a/src/gds_idea_app_kit/init.py b/src/gds_idea_app_kit/init.py new file mode 100644 index 0000000..88fca3d --- /dev/null +++ b/src/gds_idea_app_kit/init.py @@ -0,0 +1,352 @@ +"""Implementation of the init command. + +Scaffolds a new project by running cdk init, uv init, copying template files, +installing dependencies, and making the initial commit. +""" + +import re +import shutil +import subprocess +import sys +from importlib.resources import files +from pathlib import Path + +import click +import tomlkit + +from gds_idea_app_kit import ( + GITHUB_ORG, + REPO_PREFIX, + __version__, +) +from gds_idea_app_kit.manifest import build_manifest, write_manifest + + +def _sanitize_app_name(name: str) -> str: + """Sanitize and validate an app name for use as a DNS subdomain label. + + The name will become part of a domain: {name}.gds-idea.click + + Args: + name: The raw app name from the user. + + Returns: + The cleaned app name. + + Raises: + click.BadParameter: If the name is invalid. 
+ """ + # Strip the repo prefix if the user accidentally included it + prefix = f"{REPO_PREFIX}-" + if name.startswith(prefix): + name = name[len(prefix) :] + + # Lowercase + name = name.lower() + + # Validate DNS label rules + if not name: + raise click.BadParameter("App name cannot be empty.") + + if len(name) > 63: + raise click.BadParameter("App name must be 63 characters or fewer (DNS label limit).") + + if not re.match(r"^[a-z0-9]([a-z0-9-]*[a-z0-9])?$", name): + raise click.BadParameter( + "App name must contain only lowercase letters, numbers, and hyphens, " + "and must start and end with a letter or number." + ) + + if "--" in name: + raise click.BadParameter("App name must not contain consecutive hyphens (--).") + + if name.isdigit(): + raise click.BadParameter("App name must not be purely numeric.") + + return name + + +def _get_templates_dir() -> Path: + """Get the path to the bundled templates directory.""" + return Path(str(files("gds_idea_app_kit") / "templates")) + + +def _apply_template_vars(content: str, variables: dict[str, str]) -> str: + """Apply template variable substitution to content. + + Replaces {{key}} with value for each entry in variables. + + Args: + content: The template content with {{placeholders}}. + variables: Mapping of placeholder names to values. + + Returns: + Content with all placeholders replaced. + """ + for key, value in variables.items(): + content = content.replace(f"{{{{{key}}}}}", value) + return content + + +def _copy_template(src: Path, dest: Path, variables: dict[str, str] | None = None) -> None: + """Copy a template file to a destination, optionally applying variable substitution. + + Args: + src: Path to the source template file. + dest: Path to the destination file. + variables: Optional mapping of placeholder names to values. 
+ """ + dest.parent.mkdir(parents=True, exist_ok=True) + content = src.read_text() + if variables: + content = _apply_template_vars(content, variables) + dest.write_text(content) + + +def _run_command( + cmd: list[str], + cwd: Path, + project_dir: Path | None = None, +) -> subprocess.CompletedProcess: + """Run a subprocess command with error handling. + + Args: + cmd: The command and arguments to run. + cwd: Working directory for the command. + project_dir: The project directory (for cleanup message on failure). + If not provided, uses cwd. + + Returns: + The completed process result. + """ + cleanup_dir = project_dir or cwd + try: + return subprocess.run(cmd, cwd=cwd, check=True, capture_output=True, text=True) + except FileNotFoundError: + if cmd[0] == "cdk": + click.echo("Error: 'cdk' is not installed.", err=True) + click.echo("", err=True) + click.echo("Install it with one of:", err=True) + click.echo(" npm install -g aws-cdk", err=True) + click.echo(" brew install aws-cdk", err=True) + else: + click.echo(f"Error: '{cmd[0]}' is not installed.", err=True) + sys.exit(1) + except subprocess.CalledProcessError as e: + click.echo(f"Error running: {' '.join(cmd)}", err=True) + if e.stderr: + click.echo(e.stderr, err=True) + click.echo("", err=True) + click.echo("To clean up the failed project:", err=True) + click.echo(f" rm -rf {cleanup_dir}", err=True) + sys.exit(1) + + +def _delete_cdk_artifacts(project_dir: Path) -> None: + """Delete files generated by cdk init that we don't need. + + Args: + project_dir: The project root directory. + """ + # Files to delete + for name in ("requirements.txt", "requirements-dev.txt", "source.bat", "README.md"): + path = project_dir / name + if path.exists(): + path.unlink() + + # CDK generates a stack module directory named after the project dir. + # e.g. gds-idea-app-foo → gds_idea_app_foo/gds_idea_app_foo_stack.py + # We replace it with our own app.py, so delete the whole thing. 
+ dir_name = project_dir.name.replace("-", "_") + stack_module = project_dir / dir_name + if stack_module.is_dir(): + shutil.rmtree(stack_module) + + # CDK's generated app.py imports the stack module above -- delete it too. + cdk_app = project_dir / "app.py" + if cdk_app.exists(): + cdk_app.unlink() + + # CDK's generated tests/ directory + tests_dir = project_dir / "tests" + if tests_dir.is_dir(): + shutil.rmtree(tests_dir) + + +def _write_webapp_config(project_dir: Path, app_name: str, framework: str) -> None: + """Write [tool.webapp] section to pyproject.toml for AppConfig.from_pyproject(). + + Args: + project_dir: The project root directory. + app_name: The application name. + framework: The web framework. + """ + pyproject_path = project_dir / "pyproject.toml" + with open(pyproject_path) as f: + config = tomlkit.load(f) + + if "tool" not in config: + config["tool"] = {} + + webapp = tomlkit.table() + webapp.add("app_name", app_name) + webapp.add("framework", framework) + config["tool"]["webapp"] = webapp + + with open(pyproject_path, "w") as f: + tomlkit.dump(config, f) + + +def run_init(framework: str, app_name: str, python_version: str) -> None: + """Scaffold a new project. + + Creates a fully configured CDK + web app project with the given framework. + The project directory will be named gds-idea-app-{app_name}. + + Args: + framework: The web framework (streamlit, dash, fastapi). + app_name: Name for the application. + python_version: Python version for the project. 
+ """ + # -- Validate inputs -- + app_name = _sanitize_app_name(app_name) + repo_name = f"{REPO_PREFIX}-{app_name}" + project_dir = Path.cwd() / repo_name + + if project_dir.exists(): + click.echo(f"Error: Directory already exists: {project_dir}", err=True) + sys.exit(1) + + click.echo(f"Scaffolding {framework} app: {app_name}") + click.echo(f" Directory: {repo_name}/") + click.echo(f" Python: {python_version}") + click.echo() + + # -- Create directory and run cdk init (must be first, needs empty dir) -- + project_dir.mkdir() + click.echo("Running cdk init...") + _run_command( + ["cdk", "init", "app", "--language", "python", "--generate-only"], + cwd=project_dir, + project_dir=project_dir, + ) + + # -- Run uv init on top of cdk output -- + click.echo("Running uv init...") + _run_command(["uv", "init", "--no-workspace"], cwd=project_dir, project_dir=project_dir) + + # -- Clean up CDK artifacts we don't need -- + click.echo("Cleaning up CDK artifacts...") + _delete_cdk_artifacts(project_dir) + + # -- Prepare template variables -- + python_version_nodot = python_version.replace(".", "") + template_vars = { + "app_name": app_name, + "python_version": python_version, + "python_version_nodot": python_version_nodot, + } + templates = _get_templates_dir() + + # -- Copy app.py (CDK entry point) -- + click.echo("Copying template files...") + _copy_template(templates / "common" / "app.py", project_dir / "app.py") + + # -- Copy framework files into app_src/ -- + app_src = project_dir / "app_src" + app_src.mkdir(exist_ok=True) + + # Framework app file (e.g. 
streamlit_app.py) + framework_app = f"{framework}_app.py" + _copy_template(templates / framework / framework_app, app_src / framework_app) + + # Dockerfile (has template vars for python version) + _copy_template( + templates / framework / "Dockerfile", + app_src / "Dockerfile", + variables=template_vars, + ) + + # App pyproject.toml (from .toml.template with substitution) + _copy_template( + templates / framework / "pyproject.toml.template", + app_src / "pyproject.toml", + variables=template_vars, + ) + + # -- Copy .devcontainer/ files -- + _copy_template( + templates / "common" / "devcontainer.json", + project_dir / ".devcontainer" / "devcontainer.json", + ) + _copy_template( + templates / "common" / "docker-compose.yml", + project_dir / ".devcontainer" / "docker-compose.yml", + ) + + # -- Copy dev_mocks/ -- + dev_mocks_src = templates / "dev_mocks" + for mock_file in dev_mocks_src.iterdir(): + if mock_file.is_file(): + _copy_template(mock_file, project_dir / "dev_mocks" / mock_file.name) + + # -- Append to .gitignore -- + gitignore = project_dir / ".gitignore" + extra = (templates / "common" / "gitignore-extra").read_text() + with open(gitignore, "a") as f: + f.write("\n") + f.write(extra) + + # -- Install CDK dependencies -- + click.echo("Installing CDK dependencies...") + _run_command( + [ + "uv", + "add", + "aws-cdk-lib", + "constructs", + "gds-idea-cdk-constructs @ git+ssh://git@github.com/co-cddo/gds-idea-cdk-constructs.git", + ], + cwd=project_dir, + project_dir=project_dir, + ) + + # -- Write [tool.webapp] config for AppConfig.from_pyproject() -- + click.echo("Writing project configuration...") + _write_webapp_config(project_dir, app_name, framework) + + # -- Build and write manifest -- + manifest = build_manifest( + framework=framework, + app_name=app_name, + tool_version=__version__, + project_dir=project_dir, + ) + write_manifest(project_dir, manifest) + + # -- Sync dependencies -- + click.echo("Syncing dependencies...") + _run_command(["uv", "sync"], 
cwd=project_dir, project_dir=project_dir) + + # -- Initial git commit -- + click.echo("Creating initial commit...") + _run_command(["git", "add", "."], cwd=project_dir, project_dir=project_dir) + _run_command( + ["git", "commit", "-m", f"Initial scaffold ({framework}, Python {python_version})"], + cwd=project_dir, + project_dir=project_dir, + ) + + # -- Print next steps -- + click.echo() + click.echo(f"Project created: {repo_name}/") + click.echo() + click.echo("Next steps:") + click.echo(f" cd {repo_name}") + click.echo() + click.echo(" # Create the GitHub repo (requires gh CLI):") + click.echo(f" gh repo create {GITHUB_ORG}/{repo_name} --private --source . --push") + click.echo() + click.echo(" # Or add a remote manually:") + click.echo(f" git remote add origin git@github.com:{GITHUB_ORG}/{repo_name}.git") + click.echo(" git push -u origin main") diff --git a/src/gds_idea_app_kit/manifest.py b/src/gds_idea_app_kit/manifest.py new file mode 100644 index 0000000..7e49c6d --- /dev/null +++ b/src/gds_idea_app_kit/manifest.py @@ -0,0 +1,135 @@ +"""Manifest management for tracking tool-owned files in [tool.gds-idea-app-kit]. + +The manifest lives in pyproject.toml under the [tool.gds-idea-app-kit] section and tracks: +- Project metadata (framework, app_name, tool_version) +- SHA256 hashes of tool-owned files (for change detection during updates) +""" + +import hashlib +from pathlib import Path + +import tomlkit + +# Key used in pyproject.toml [tool.*] section +MANIFEST_KEY = "gds-idea-app-kit" + +# Files that `update` manages, keyed by source location in the templates directory. +# The dict maps template source path -> destination path in the project. 
+TOOL_OWNED_FILES = { + "common/devcontainer.json": ".devcontainer/devcontainer.json", + "common/docker-compose.yml": ".devcontainer/docker-compose.yml", + "dev_mocks/dev_mock_authoriser.json": "dev_mocks/dev_mock_authoriser.json", + "dev_mocks/dev_mock_user.json": "dev_mocks/dev_mock_user.json", +} + +# Framework-specific files that `update` manages. +# The framework name is substituted at runtime. +FRAMEWORK_OWNED_FILES = { + "Dockerfile": "app_src/Dockerfile", +} + + +def hash_file(path: Path) -> str: + """Compute SHA256 hash of a file. + + Args: + path: Path to the file to hash. + + Returns: + Hash string in the format "sha256:". + """ + content = path.read_bytes() + digest = hashlib.sha256(content).hexdigest() + return f"sha256:{digest}" + + +def get_tracked_files(framework: str) -> dict[str, str]: + """Get the full mapping of template source -> project destination for a framework. + + Args: + framework: The framework name (streamlit, dash, fastapi). + + Returns: + Dict mapping template source paths to project destination paths. + """ + files = dict(TOOL_OWNED_FILES) + for template_name, dest_path in FRAMEWORK_OWNED_FILES.items(): + files[f"{framework}/{template_name}"] = dest_path + return files + + +def read_manifest(project_dir: Path) -> dict: + """Read [tool.gds-idea-app-kit] from pyproject.toml. + + Args: + project_dir: Root directory of the project. + + Returns: + The manifest dict, or empty dict if the section doesn't exist. + """ + pyproject_path = project_dir / "pyproject.toml" + if not pyproject_path.exists(): + return {} + + with open(pyproject_path) as f: + config = tomlkit.load(f) + + return dict(config.get("tool", {}).get(MANIFEST_KEY, {})) + + +def write_manifest(project_dir: Path, manifest: dict) -> None: + """Write/update [tool.gds-idea-app-kit] in pyproject.toml, preserving other content. + + Args: + project_dir: Root directory of the project. + manifest: The manifest dict to write. 
+ """ + pyproject_path = project_dir / "pyproject.toml" + + with open(pyproject_path) as f: + config = tomlkit.load(f) + + # Ensure [tool] section exists + if "tool" not in config: + config["tool"] = {} + + # Write the manifest section + config["tool"][MANIFEST_KEY] = manifest + + with open(pyproject_path, "w") as f: + tomlkit.dump(config, f) + + +def build_manifest( + framework: str, + app_name: str, + tool_version: str, + project_dir: Path, +) -> dict: + """Build a manifest dict by hashing the tracked files in project_dir. + + Args: + framework: The framework name (streamlit, dash, fastapi). + app_name: The application name. + tool_version: The version of gds-idea-app-kit that generated the project. + project_dir: Root directory of the project. + + Returns: + Complete manifest dict ready to write to pyproject.toml. + """ + tracked = get_tracked_files(framework) + + file_hashes = {} + for _template_src, dest_path in sorted(tracked.items()): + full_path = project_dir / dest_path + if full_path.exists(): + file_hashes[dest_path] = hash_file(full_path) + + manifest = { + "framework": framework, + "app_name": app_name, + "tool_version": tool_version, + "files": file_hashes, + } + + return manifest diff --git a/src/gds_idea_app_kit/migrate.py b/src/gds_idea_app_kit/migrate.py new file mode 100644 index 0000000..665ed0f --- /dev/null +++ b/src/gds_idea_app_kit/migrate.py @@ -0,0 +1,225 @@ +"""Implementation of the migrate command. + +Converts an existing project created from the old dumper template to work with +idea-app. Creates the [tool.gds-idea-app-kit] manifest from the current state +of tracked files, removes old template/ directory and entry points, and +optionally updates files to the latest templates. 
+ +Usage: + idea-app migrate # interactive, run from project root +""" + +import re +import shutil +import sys +import tomllib +from pathlib import Path + +import click +import tomlkit + +from gds_idea_app_kit import __version__ +from gds_idea_app_kit.manifest import build_manifest, read_manifest, write_manifest +from gds_idea_app_kit.update import run_update + + +def _detect_python_version(project_dir: Path) -> str: + """Detect the Python version from project files. + + Checks the Dockerfile for a FROM python:X.Y line first, then falls back + to requires-python in app_src/pyproject.toml. + + Args: + project_dir: The project root directory. + + Returns: + The Python version string (e.g. "3.13"). + """ + # Try Dockerfile first: FROM python:X.Y-slim + dockerfile = project_dir / "app_src" / "Dockerfile" + if dockerfile.exists(): + match = re.search(r"FROM python:(\d+\.\d+)", dockerfile.read_text()) + if match: + return match.group(1) + + # Try app_src/pyproject.toml: requires-python = ">=X.Y" + app_pyproject = project_dir / "app_src" / "pyproject.toml" + if app_pyproject.exists(): + with open(app_pyproject, "rb") as f: + config = tomllib.load(f) + requires = config.get("project", {}).get("requires-python", "") + match = re.search(r"(\d+\.\d+)", requires) + if match: + return match.group(1) + + return "3.13" + + +def _read_webapp_config(project_dir: Path) -> dict[str, str]: + """Read framework and app_name from [tool.webapp] in pyproject.toml. + + Args: + project_dir: The project root directory. + + Returns: + Dict with "framework" and "app_name". + """ + pyproject_path = project_dir / "pyproject.toml" + + if not pyproject_path.exists(): + click.echo("Error: No pyproject.toml found. 
Are you in a project root?", err=True) + sys.exit(1) + + with open(pyproject_path, "rb") as f: + config = tomllib.load(f) + + webapp = config.get("tool", {}).get("webapp", {}) + framework = webapp.get("framework", "") + app_name = webapp.get("app_name", "") + + if not framework or not app_name: + click.echo("Error: No [tool.webapp] section with framework and app_name found.", err=True) + click.echo(" This doesn't look like a project created from the template.", err=True) + sys.exit(1) + + return {"framework": framework, "app_name": app_name} + + +def _remove_old_config(project_dir: Path) -> None: + """Remove old template entry points and build config from pyproject.toml. + + Removes: + - [project.scripts] entries (configure, smoke_test, provide_role) + - [build-system] section + - [tool.uv.build-backend] section + - Sets package = false in [tool.uv] + + Preserves all other content. + + Args: + project_dir: The project root directory. + """ + pyproject_path = project_dir / "pyproject.toml" + with open(pyproject_path) as f: + config = tomlkit.load(f) + + # Remove [build-system] + if "build-system" in config: + del config["build-system"] + + # Remove [project.scripts] + if "project" in config and "scripts" in config["project"]: + del config["project"]["scripts"] + + # Remove [tool.uv.build-backend] and set package = false + if "tool" in config and "uv" in config["tool"]: + uv_config = config["tool"]["uv"] + if "build-backend" in uv_config: + del uv_config["build-backend"] + uv_config["package"] = False + + with open(pyproject_path, "w") as f: + tomlkit.dump(config, f) + + +def _remove_template_dir(project_dir: Path) -> None: + """Remove the old template/ directory if it exists. + + Args: + project_dir: The project root directory. + """ + template_dir = project_dir / "template" + if template_dir.is_dir(): + shutil.rmtree(template_dir) + + +def run_migrate() -> None: + """Migrate an existing project to use idea-app. + + Interactive command that: + 1. 
Reads existing [tool.webapp] config + 2. Builds a manifest from current tracked files + 3. Removes old template/ directory and entry points + 4. Optionally runs update to get latest template files + """ + project_dir = Path.cwd() + pyproject_path = project_dir / "pyproject.toml" + + # -- Pre-flight checks -- + if not pyproject_path.exists(): + click.echo("Error: No pyproject.toml found. Are you in a project root?", err=True) + sys.exit(1) + + manifest = read_manifest(project_dir) + if manifest: + click.echo("This project has already been migrated.", err=True) + click.echo(" Use 'idea-app update' instead.", err=True) + sys.exit(1) + + webapp_config = _read_webapp_config(project_dir) + framework = webapp_config["framework"] + app_name = webapp_config["app_name"] + python_version = _detect_python_version(project_dir) + + # -- Summary -- + click.echo(f"Migrating project: {app_name} ({framework})") + click.echo(f" Python version: {python_version}") + click.echo() + click.echo("This will:") + click.echo(" - Add [tool.gds-idea-app-kit] manifest to pyproject.toml") + click.echo(" - Remove old template/ directory and entry points") + click.echo(" - Set package = false in [tool.uv]") + click.echo(" - Remove [build-system]") + click.echo() + click.echo("Recommendation: run this on a clean branch.") + click.echo(" git checkout -b migrate-to-idea-app") + click.echo() + + if not click.confirm("Continue?", default=False): + click.echo("Aborted.") + return + + # -- Execute migration -- + click.echo() + click.echo("Building manifest from current files...") + new_manifest = build_manifest( + framework=framework, + app_name=app_name, + tool_version=__version__, + project_dir=project_dir, + ) + new_manifest["python_version"] = python_version + write_manifest(project_dir, new_manifest) + + click.echo("Removing old template configuration...") + _remove_old_config(project_dir) + + template_dir = project_dir / "template" + if template_dir.is_dir(): + click.echo("Removing template/ 
directory...") + _remove_template_dir(project_dir) + + click.echo("Migration complete.") + click.echo() + + # -- Offer update -- + if not click.confirm("Would you like to update to the latest template files?", default=True): + click.echo("Run 'idea-app update' when ready.") + return + + click.echo() + run_update(dry_run=True) + + click.echo() + if not click.confirm("Apply these changes?", default=True): + click.echo("Run 'idea-app update' when ready.") + return + + click.echo() + run_update(dry_run=False) + + # -- Next steps -- + click.echo() + click.echo("Next steps:") + click.echo(" 1. Review changes: git diff") + click.echo(' 2. Commit: git add -A && git commit -m "Migrate to idea-app"') diff --git a/src/gds_idea_app_kit/provide_role.py b/src/gds_idea_app_kit/provide_role.py new file mode 100644 index 0000000..cee2c04 --- /dev/null +++ b/src/gds_idea_app_kit/provide_role.py @@ -0,0 +1,305 @@ +"""Implementation of the provide-role command. + +Runs on the host machine to provide AWS credentials to the dev container by +writing temporary credentials to .aws-dev/, which is volume-mounted into the +container via docker-compose. + +Two modes: +1. Role assumption (default): assumes the container role from your current + profile credentials via STS. Requires aws_role_arn in [tool.webapp.dev]. +2. Pass-through: extracts and writes your current profile credentials directly. + Used when --use-profile is given or no aws_role_arn is configured. 
+ +Usage: + AWS_PROFILE=aws-dev idea-app provide-role + AWS_PROFILE=aws-dev idea-app provide-role --use-profile + AWS_PROFILE=aws-dev idea-app provide-role --duration 7200 +""" + +import os +import sys +import tomllib +from pathlib import Path + +import boto3 +import click +from botocore.exceptions import ClientError, NoCredentialsError + +AWS_DEV_DIR = ".aws-dev" +CREDENTIALS_FILE = "credentials" +CONFIG_FILE = "config" +DEFAULT_REGION = "eu-west-2" + + +def _check_aws_profile() -> str: + """Check that AWS_PROFILE is set and return the profile name. + + Returns: + The AWS profile name. + """ + profile = os.environ.get("AWS_PROFILE", "") + if not profile: + click.echo("Error: AWS_PROFILE environment variable is not set.", err=True) + click.echo( + " Run: export AWS_PROFILE= && idea-app provide-role", + err=True, + ) + sys.exit(1) + return profile + + +def _get_role_config(project_dir: Path) -> dict[str, str]: + """Read AWS role configuration from [tool.webapp.dev] in pyproject.toml. + + Args: + project_dir: The project root directory. + + Returns: + Dict with "role_arn" (empty string if not configured) and "region". + """ + pyproject_path = project_dir / "pyproject.toml" + + if not pyproject_path.exists(): + click.echo("Error: No pyproject.toml found. Are you in a project root?", err=True) + sys.exit(1) + + with open(pyproject_path, "rb") as f: + config = tomllib.load(f) + + dev_config = config.get("tool", {}).get("webapp", {}).get("dev", {}) + + return { + "role_arn": dev_config.get("aws_role_arn", ""), + "region": dev_config.get("aws_region", DEFAULT_REGION), + } + + +def _select_mode(role_arn: str, use_profile: bool) -> tuple[bool, str]: + """Determine whether to use pass-through or role assumption mode. + + Args: + role_arn: The role ARN from config (empty string if not configured). + use_profile: Whether --use-profile flag was given. + + Returns: + Tuple of (use_pass_through, reason). 
+ """ + if use_profile: + return True, "--use-profile flag" + if role_arn: + return False, "aws_role_arn configured in pyproject.toml" + return True, "no aws_role_arn in pyproject.toml" + + +def _get_current_identity(session: boto3.Session) -> dict: + """Get current AWS identity to verify credentials are active. + + Args: + session: A boto3 session. + + Returns: + The caller identity response dict. + """ + try: + sts = session.client("sts") + return sts.get_caller_identity() + except NoCredentialsError as e: + raise RuntimeError("No AWS credentials found.") from e + except ClientError as e: + error_msg = e.response.get("Error", {}).get("Message", str(e)) + raise RuntimeError(f"Failed to verify AWS credentials: {error_msg}") from e + + +def _assume_role(session: boto3.Session, role_arn: str, duration: int) -> dict: + """Assume the specified AWS role from current credentials. + + Args: + session: A boto3 session. + role_arn: The ARN of the role to assume. + duration: Session duration in seconds. + + Returns: + The STS assume_role response dict. + """ + try: + sts = session.client("sts") + return sts.assume_role( + RoleArn=role_arn, + RoleSessionName="dev-container", + DurationSeconds=duration, + ) + except ClientError as e: + error_code = e.response.get("Error", {}).get("Code", "Unknown") + error_msg = e.response.get("Error", {}).get("Message", str(e)) + raise RuntimeError(f"Failed to assume role ({error_code}): {error_msg}") from e + + +def _get_session_credentials(session: boto3.Session) -> dict: + """Extract credentials from the current boto3 session. + + Returns credentials in the same format as STS responses for consistency + with _assume_role output. + + Args: + session: A boto3 session. + + Returns: + Dict with AccessKeyId, SecretAccessKey, SessionToken, and Expiration. 
+ """ + try: + credentials = session.get_credentials() + frozen = credentials.get_frozen_credentials() + + creds = { + "AccessKeyId": frozen.access_key, + "SecretAccessKey": frozen.secret_key, + "SessionToken": frozen.token, + } + + # Try to get expiration from the credential provider + expiration = None + if hasattr(credentials, "_expiry_time"): + expiration = credentials._expiry_time + + creds["Expiration"] = expiration + return creds + except Exception as e: + raise RuntimeError(f"Failed to extract session credentials: {e}") from e + + +def _write_credentials( + project_dir: Path, + creds: dict, + region: str, + source_description: str, +) -> None: + """Write credentials to .aws-dev/ in standard AWS format. + + Creates the .aws-dev directory if it doesn't exist. Writes both a + credentials file and a config file. + + Args: + project_dir: The project root directory. + creds: Credentials dict with AccessKeyId, SecretAccessKey, SessionToken, + and optionally Expiration. + region: AWS region for the config file. + source_description: Description of credential source for the comment header. + """ + aws_dev_dir = project_dir / AWS_DEV_DIR + aws_dev_dir.mkdir(exist_ok=True) + + expiration = creds.get("Expiration") + if expiration: + expiration_line = f"# Expires: {expiration}" + else: + expiration_line = "# Expires: unknown" + + credentials_content = ( + f"# Auto-generated by idea-app provide-role\n" + f"# {source_description}\n" + f"{expiration_line}\n" + f"[default]\n" + f"aws_access_key_id = {creds['AccessKeyId']}\n" + f"aws_secret_access_key = {creds['SecretAccessKey']}\n" + f"aws_session_token = {creds['SessionToken']}\n" + ) + + config_content = f"[default]\nregion = {region}\noutput = json\n" + + (aws_dev_dir / CREDENTIALS_FILE).write_text(credentials_content) + (aws_dev_dir / CONFIG_FILE).write_text(config_content) + + +def _format_expiration(creds: dict) -> str: + """Format the expiration from a credentials dict for display. 
+ + Args: + creds: Credentials dict with an optional Expiration key. + + Returns: + A human-readable expiration string. + """ + expiration = creds.get("Expiration") + return str(expiration) if expiration else "unknown" + + +def run_provide_role(use_profile: bool, duration: int) -> None: + """Provide AWS credentials to the dev container. + + Args: + use_profile: If True, use the current AWS profile directly. + duration: Session duration in seconds for role assumption. + """ + project_dir = Path.cwd() + + # -- Check prerequisites -- + profile_name = _check_aws_profile() + role_config = _get_role_config(project_dir) + role_arn = role_config["role_arn"] + region = role_config["region"] + + # -- Verify credentials -- + click.echo("Checking AWS credentials...") + click.echo(f" AWS_PROFILE: {profile_name}") + + session = boto3.Session() + try: + identity = _get_current_identity(session) + current_arn = identity.get("Arn", "") + click.echo(f" Current identity: {current_arn}") + except RuntimeError as e: + click.echo() + click.echo(f"Error: {e}", err=True) + click.echo( + f" Run 'aws sso login --profile {profile_name}' first.", + err=True, + ) + sys.exit(1) + + # -- Determine mode -- + use_pass_through, reason = _select_mode(role_arn, use_profile) + + click.echo() + if use_pass_through: + click.echo(f"Mode: Pass-through ({reason})") + click.echo(f" Using credentials from profile: {profile_name}") + else: + click.echo(f"Mode: Role assumption ({reason})") + click.echo(f" Role: {role_arn}") + click.echo(f" Duration: {duration}s ({duration / 3600:.1f} hours)") + + # -- Get credentials -- + click.echo() + if not use_pass_through: + click.echo("Assuming role...") + try: + response = _assume_role(session, role_arn, duration) + creds = response["Credentials"] + assumed_arn = response.get("AssumedRoleUser", {}).get("Arn", "") + source_description = f"Role: {role_arn}" + if assumed_arn: + click.echo(f" Assumed identity: {assumed_arn}") + except RuntimeError as e: + 
click.echo(f"Error: {e}", err=True) + click.echo( + " Check that your profile has permission to assume the container role.", + err=True, + ) + sys.exit(1) + else: + click.echo("Extracting session credentials...") + try: + creds = _get_session_credentials(session) + source_description = f"Source: AWS_PROFILE={profile_name} (pass-through)" + click.echo(f" Expires: {_format_expiration(creds)}") + except RuntimeError as e: + click.echo(f"Error: {e}", err=True) + sys.exit(1) + + # -- Write credentials -- + _write_credentials(project_dir, creds, region, source_description) + + click.echo() + click.echo(f"Credentials written to {AWS_DEV_DIR}/") + click.echo(f" Expires: {_format_expiration(creds)}") + click.echo(" No container restart needed - credentials update live.") diff --git a/src/gds_idea_app_kit/smoke_test.py b/src/gds_idea_app_kit/smoke_test.py new file mode 100644 index 0000000..3963b21 --- /dev/null +++ b/src/gds_idea_app_kit/smoke_test.py @@ -0,0 +1,242 @@ +"""Implementation of the smoke-test command. + +Builds the production Docker image using docker compose and optionally runs +a health check to verify the container starts and responds correctly. This +validates that a cdk deploy will succeed before pushing. + +Usage: + idea-app smoke-test # build, health check, teardown + idea-app smoke-test --build-only # build image only + idea-app smoke-test --wait # build, health check, keep running +""" + +import os +import subprocess +import sys +import time +import tomllib +import urllib.error +import urllib.request +from pathlib import Path + +import click + +COMPOSE_FILE = ".devcontainer/docker-compose.yml" +SERVICE_NAME = "app" +CONTAINER_PORT = 8080 +MAX_WAIT_SECONDS = 120 +POLL_INTERVAL_SECONDS = 2 + +HEALTH_PATHS: dict[str, str] = { + "streamlit": "/_stcore/health", + "dash": "/health", + "fastapi": "/health", +} + + +def _get_framework(project_dir: Path) -> str: + """Read the framework from [tool.webapp] in pyproject.toml. 
+ + Args: + project_dir: The project root directory. + + Returns: + The framework name (e.g. "streamlit"). + """ + pyproject_path = project_dir / "pyproject.toml" + + if not pyproject_path.exists(): + click.echo("Error: No pyproject.toml found. Are you in a project root?", err=True) + sys.exit(1) + + with open(pyproject_path, "rb") as f: + config = tomllib.load(f) + + framework = config.get("tool", {}).get("webapp", {}).get("framework", "") + + if not framework: + click.echo("Error: No framework configured in [tool.webapp].", err=True) + click.echo(" This doesn't look like a project created by idea-app.", err=True) + sys.exit(1) + + return framework + + +def _get_health_path(framework: str) -> str: + """Get the health check URL path for a framework. + + Args: + framework: The framework name (e.g. "streamlit"). + + Returns: + The health check path (e.g. "/_stcore/health"). + """ + return HEALTH_PATHS.get(framework, "/health") + + +def _compose( + *args: str, + stream: bool = False, + check: bool = True, +) -> subprocess.CompletedProcess: + """Run a docker compose command targeting the production image. + + Args: + *args: Arguments to pass to docker compose (e.g. "build", "up", "-d"). + stream: If True, inherit stdout/stderr so output streams to terminal. + check: If True, raise CalledProcessError on non-zero exit. + + Returns: + The CompletedProcess result. + """ + cmd = ["docker", "compose", "-f", COMPOSE_FILE, *args] + + env = os.environ.copy() + env["DOCKER_TARGET"] = "production" + + if stream: + return subprocess.run(cmd, check=check, env=env) + else: + return subprocess.run(cmd, check=check, capture_output=True, text=True, env=env) + + +def _get_host_port() -> str: + """Get the host port mapped to the container's port 8080. + + Returns: + The host port as a string (e.g. "8080"). 
+ """ + result = _compose("port", SERVICE_NAME, str(CONTAINER_PORT)) + # Output format: "0.0.0.0:8080" or ":::8080" + return result.stdout.strip().split(":")[-1] + + +def _check_health(url: str) -> bool: + """Check if a health endpoint responds with HTTP 200. + + Args: + url: The full URL to check (e.g. "http://localhost:8080/health"). + + Returns: + True if the endpoint responds with 200, False otherwise. + """ + try: + with urllib.request.urlopen(url, timeout=2) as response: + return response.status == 200 + except (urllib.error.URLError, urllib.error.HTTPError, TimeoutError, OSError): + return False + + +def _poll_health(url: str, timeout: int = MAX_WAIT_SECONDS) -> bool: + """Poll a health endpoint until it responds or the timeout expires. + + Prints dots while waiting. + + Args: + url: The health check URL. + timeout: Maximum seconds to wait. + + Returns: + True if the health check passed, False if it timed out. + """ + seconds_waited = 0 + while seconds_waited < timeout: + if _check_health(url): + click.echo() + return True + click.echo(".", nl=False) + time.sleep(POLL_INTERVAL_SECONDS) + seconds_waited += POLL_INTERVAL_SECONDS + + click.echo() + return False + + +def _show_failure_logs() -> None: + """Print container logs to help debug a health check failure.""" + click.echo() + click.echo("Container logs:") + _compose("logs", stream=True, check=False) + + +def _cleanup() -> None: + """Stop and remove containers.""" + click.echo() + click.echo("Cleaning up...") + _compose("down", check=False) + + +def run_smoke_test(build_only: bool, wait: bool = False) -> None: + """Build and health-check the production Docker image. + + Args: + build_only: If True, only build the image without running a container. + wait: If True, keep the container running after health check until Enter. 
+ """ + project_dir = Path.cwd() + + # -- Validate configuration -- + click.echo("Loading configuration...") + framework = _get_framework(project_dir) + health_path = _get_health_path(framework) + click.echo(f" Framework: {framework}") + click.echo(f" Health check: {health_path}") + click.echo() + + # -- Check compose file exists -- + compose_path = project_dir / COMPOSE_FILE + if not compose_path.exists(): + click.echo(f"Error: {COMPOSE_FILE} not found.", err=True) + click.echo(" Run 'idea-app update' to restore missing files.", err=True) + sys.exit(1) + + # -- Build -- + click.echo("Building production image...") + try: + _compose("build", stream=True) + except subprocess.CalledProcessError: + click.echo("Error: Docker build failed.", err=True) + sys.exit(1) + except FileNotFoundError: + click.echo("Error: docker not found. Is Docker installed and running?", err=True) + sys.exit(1) + click.echo("Build complete.") + + if build_only: + return + + # -- Start, health check, teardown -- + container_started = False + try: + click.echo() + click.echo("Starting container...") + _compose("up", "-d") + container_started = True + + host_port = _get_host_port() + health_url = f"http://localhost:{host_port}{health_path}" + click.echo(f" Health check URL: {health_url}") + click.echo() + click.echo("Tip: To see container logs, run in another terminal:") + click.echo(f" docker compose -f {COMPOSE_FILE} logs -f") + click.echo() + + click.echo(f"Waiting for health check (up to {MAX_WAIT_SECONDS}s)...") + passed = _poll_health(health_url) + + if not passed: + click.echo("Health check failed. 
The container did not respond in time.", err=True) + _show_failure_logs() + sys.exit(1) + + click.echo("Health check passed.") + + if wait: + click.echo() + click.echo(f"Container running at http://localhost:{host_port}") + click.echo("Press Enter to stop and clean up...") + input() + + finally: + if container_started: + _cleanup() diff --git a/src/gds_idea_app_kit/templates/common/app.py b/src/gds_idea_app_kit/templates/common/app.py new file mode 100644 index 0000000..423653a --- /dev/null +++ b/src/gds_idea_app_kit/templates/common/app.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 +import os + +import aws_cdk as cdk +from aws_cdk import ( + Tags, +) +from gds_idea_cdk_constructs import AppConfig, DeploymentConfig +from gds_idea_cdk_constructs.web_app import AuthType, WebApp + +app = cdk.App() + +cdk_env = cdk.Environment( + account=os.environ["CDK_DEFAULT_ACCOUNT"], + region=os.environ["CDK_DEFAULT_REGION"], +) + +app_config = AppConfig.from_pyproject() +dep_config = DeploymentConfig(cdk_env) + +# +Tags.of(app).add("Environment", dep_config.environment.friendly_name) +Tags.of(app).add("ManagedBy", "cdk") +Tags.of(app).add("Repository", "TBA") +Tags.of(app).add("AppName", app_config.app_name) + + +stack = WebApp( + app, + deployment_config=dep_config, + app_config=app_config, + authentication=AuthType.INTERNAL_ACCESS, +) + +app.synth() diff --git a/src/gds_idea_app_kit/templates/common/devcontainer.json b/src/gds_idea_app_kit/templates/common/devcontainer.json new file mode 100644 index 0000000..ee39b28 --- /dev/null +++ b/src/gds_idea_app_kit/templates/common/devcontainer.json @@ -0,0 +1,60 @@ +{ + "name": "Web App Dev Container", + "dockerComposeFile": "docker-compose.yml", + "service": "app", + "workspaceFolder": "/app", + // ======================================================================== + // VS CODE CONFIGURATION - CUSTOMIZE AS NEEDED + // ======================================================================== + // This file contains VS Code-specific 
settings: + // - Extensions and editor settings + // - Dev container features (git, gh CLI, etc.) + // + // For runtime configuration (env vars, ports, volumes), + // edit docker-compose.yml instead. + // ======================================================================== + // Additional dev container features + // These are incompatible with our build method DO NOT add features. + // "features": { + // }, + // VS Code customizations (ADD YOUR EXTENSIONS HERE) + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python", + "ms-python.vscode-pylance", + "charliermarsh.ruff", + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ms-toolsai.jupyter" + ], + "settings": { + "python.defaultInterpreterPath": "/app/.venv/bin/python", + "python.terminal.activateEnvironment": true, + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff", + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.fixAll": "explicit", + "source.organizeImports": "explicit" + } + }, + "ruff.configuration": "/app/pyproject.toml" + } + } + }, + // Forward application port + "forwardPorts": [ + 8080 + ], + "portsAttributes": { + "8080": { + "label": "Application", + "onAutoForward": "notify" + } + }, + // Post-create command: sync dependencies + "postCreateCommand": "uv sync", + // Run as non-root user + "remoteUser": "appuser" +} diff --git a/src/gds_idea_app_kit/templates/common/docker-compose.yml b/src/gds_idea_app_kit/templates/common/docker-compose.yml new file mode 100644 index 0000000..9587f80 --- /dev/null +++ b/src/gds_idea_app_kit/templates/common/docker-compose.yml @@ -0,0 +1,43 @@ +# ============================================================================ +# DOCKER COMPOSE CONFIGURATION - SOURCE OF TRUTH +# ============================================================================ +# This file defines ALL runtime configuration (ports, volumes, env vars). 
+# Used by both: +# - VS Code dev containers (.devcontainer/devcontainer.json) +# - Smoke test script (idea-app smoke-test) +# +# For VS Code-specific settings (extensions, editor config), +# edit .devcontainer/devcontainer.json instead. +# ============================================================================ + +services: + app: + build: + context: .. + dockerfile: app_src/Dockerfile + target: ${DOCKER_TARGET:-development} # Use development by default, override with DOCKER_TARGET env var + + volumes: + # Mount app source for live editing and auto-reload + - ../app_src:/app + + # Mount dev mock files directory (can be empty) + - ../dev_mocks:/app/dev_mocks + + # Mount AWS credentials for container (generated by provide_role) + - ../.aws-dev:/home/appuser/.aws:ro + + # Persist UV cache across container rebuilds (vscode user, not root) + - uv-cache:/home/appuser/.cache/uv + + environment: + # Dev mode authentication (ADD YOUR ENV VARS HERE) + - COGNITO_AUTH_DEV_MODE=true + - COGNITO_AUTH_CONFIG_PATH=/app/dev_mocks/dev_mock_authoriser.json + - COGNITO_AUTH_DEV_CONFIG=/app/dev_mocks/dev_mock_user.json + + ports: + - "8080:8080" # Map host port 8080 to container port 8080 + +volumes: + uv-cache: diff --git a/src/gds_idea_app_kit/templates/common/gitignore-extra b/src/gds_idea_app_kit/templates/common/gitignore-extra new file mode 100644 index 0000000..be3e323 --- /dev/null +++ b/src/gds_idea_app_kit/templates/common/gitignore-extra @@ -0,0 +1,6 @@ +# AWS dev container credentials (generated by idea-app provide-role) +.aws-dev/* +!.aws-dev/README.md + +# Template review files (generated by idea-app update) +*.new diff --git a/src/gds_idea_app_kit/templates/dash/Dockerfile b/src/gds_idea_app_kit/templates/dash/Dockerfile new file mode 100644 index 0000000..ed72639 --- /dev/null +++ b/src/gds_idea_app_kit/templates/dash/Dockerfile @@ -0,0 +1,72 @@ +# ============================================================================ +# Multi-stage Dockerfile for Dash 
Application +# ============================================================================ +# Production build (default): docker build -t app . +# Dev container build: docker build --target development -t app:dev . +# ============================================================================ + +# Base stage - common setup +FROM python:{{python_version}}-slim AS base + +WORKDIR /app + +ARG USERNAME=appuser +ARG USER_UID=1000 +ARG USER_GID=$USER_UID + +# Install UV (pinned to major version) +COPY --from=ghcr.io/astral-sh/uv:0.9 /uv /usr/local/bin/uv + +# Create non-root user and set ownership of /app +RUN groupadd --gid $USER_GID $USERNAME \ + && useradd --uid $USER_UID --gid $USER_GID -m $USERNAME \ + && chown -R $USERNAME:$USERNAME /app + +# Copy dependency files with correct ownership +COPY --chown=$USERNAME:$USERNAME app_src/pyproject.toml ./ +COPY --chown=$USERNAME:$USERNAME app_src/uv.loc[k] ./ + +# Install minimal base packages (needed by both dev and prod) +# If your build fails with 'gcc not found' or similar compilation errors, +# uncomment build-essential below to add C/C++ compilers: +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ + git \ + # build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Switch to non-root user for dependency installation +USER $USERNAME + +# Install dependencies (as non-root user) +RUN uv sync + +# Copy application code +COPY --chown=$USERNAME:$USERNAME app_src/ . 
+ +EXPOSE 8080 + +CMD ["uv", "run", "gunicorn", "dash_app:server", "-b", "0.0.0.0:8080", "--workers", "1"] + +# ============================================================================ +# Development stage - for dev containers (NOT DEFAULT) +# ============================================================================ +FROM base AS development + +# Switch back to root to install dev-only packages +USER root + +# Grant passwordless sudo for development flexibility +RUN apt-get update \ + && apt-get install -y --no-install-recommends sudo \ + && rm -rf /var/lib/apt/lists/* \ + && echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \ + && chmod 0440 /etc/sudoers.d/$USERNAME + +USER $USERNAME + +# ============================================================================ +# Production stage - secure, minimal, non-root (DEFAULT) +# ============================================================================ +FROM base AS production diff --git a/src/gds_idea_app_kit/templates/dash/dash_app.py b/src/gds_idea_app_kit/templates/dash/dash_app.py new file mode 100644 index 0000000..81f615d --- /dev/null +++ b/src/gds_idea_app_kit/templates/dash/dash_app.py @@ -0,0 +1,71 @@ +import json +import logging + +from cognito_auth.dash import DashAuth +from dash import Dash, Input, Output, dcc, html +from flask import jsonify, request + +# Configure logging - quiet noisy libraries +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) + +# Quiet noisy third-party loggers +logging.getLogger("watchdog").setLevel(logging.WARNING) +logging.getLogger("urllib3").setLevel(logging.WARNING) +logging.getLogger("botocore").setLevel(logging.WARNING) +logging.getLogger("boto3").setLevel(logging.WARNING) +logging.getLogger("werkzeug").setLevel(logging.WARNING) # Flask logger + +# Your app logger +logger = logging.getLogger(__name__) + +REDIRECT_URL = "https://gds-idea.click/401.html" + +app = Dash(__name__) + + +# create 
the health check endpoint for the ALB +@app.server.before_request +def maybe_handle_health(): + if request.path == "/health": + return jsonify({"status": "ok"}), 200 + + +auth = DashAuth() +auth.protect_app(app) # protects the entire app. + + +# Layout with dynamic content that will be populated by callback +app.layout = html.Div( + [ + html.H1("You are Authorised!"), + html.Div(id="user-info"), + # Hidden interval to trigger initial load + dcc.Interval(id="interval", interval=1000, n_intervals=0, max_intervals=1), + ] +) + + +# Callback to fetch and display user info within app context +@app.callback(Output("user-info", "children"), Input("interval", "n_intervals")) +def display_user_info(n): + user = auth.get_auth_user() + + return html.Div( + [ + html.P(f"Welcome {user.email}"), + html.H2("OIDC Claims:"), + html.Pre(json.dumps(user.oidc_claims, indent=2)), + html.H2("Access Claims:"), + html.Pre(json.dumps(user.access_claims, indent=2)), + ] + ) + + +# Expose server for gunicorn (production) +server = app.server + +if __name__ == "__main__": + # Development mode: run Flask dev server with auto-reload + app.run(debug=True, host="0.0.0.0", port=8080) diff --git a/src/gds_idea_app_kit/templates/dash/pyproject.toml.template b/src/gds_idea_app_kit/templates/dash/pyproject.toml.template new file mode 100644 index 0000000..98cfccc --- /dev/null +++ b/src/gds_idea_app_kit/templates/dash/pyproject.toml.template @@ -0,0 +1,27 @@ +[project] +name = "{{app_name}}" +version = "0.1.0" +description = "Dash app with Cognito authentication" +requires-python = ">={{python_version}}" +dependencies = [ + "cognito-auth[dash] @ git+https://github.com/co-cddo/gds-idea-app-auth.git", + "gunicorn>=21.0.0", +] + +[tool.ruff] +line-length = 100 +target-version = "py{{python_version_nodot}}" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "N", # pep8-naming + "UP", # pyupgrade +] +ignore = [] + 
+[tool.ruff.lint.isort] +known-first-party = ["{{app_name}}"] diff --git a/src/gds_idea_app_kit/templates/dev_mocks/dev_mock_authoriser.json b/src/gds_idea_app_kit/templates/dev_mocks/dev_mock_authoriser.json new file mode 100644 index 0000000..69072aa --- /dev/null +++ b/src/gds_idea_app_kit/templates/dev_mocks/dev_mock_authoriser.json @@ -0,0 +1,5 @@ +{ + "allowed_groups": ["gds-idea"], + "allowed_users": ["developer@example.com"], + "require_all": false +} diff --git a/src/gds_idea_app_kit/templates/dev_mocks/dev_mock_user.json b/src/gds_idea_app_kit/templates/dev_mocks/dev_mock_user.json new file mode 100644 index 0000000..8ee8429 --- /dev/null +++ b/src/gds_idea_app_kit/templates/dev_mocks/dev_mock_user.json @@ -0,0 +1,6 @@ +{ + "email": "developer@example.com", + "sub": "12345678-1234-1234-1234-123456789abc", + "username": "12345678-1234-1234-1234-123456789abc", + "groups": ["gds-idea"] +} diff --git a/src/gds_idea_app_kit/templates/fastapi/Dockerfile b/src/gds_idea_app_kit/templates/fastapi/Dockerfile new file mode 100644 index 0000000..dc76804 --- /dev/null +++ b/src/gds_idea_app_kit/templates/fastapi/Dockerfile @@ -0,0 +1,72 @@ +# ============================================================================ +# Multi-stage Dockerfile for FastAPI Application +# ============================================================================ +# Production build (default): docker build -t app . +# Dev container build: docker build --target development -t app:dev . 
+# ============================================================================ + +# Base stage - common setup +FROM python:{{python_version}}-slim AS base + +WORKDIR /app + +ARG USERNAME=appuser +ARG USER_UID=1000 +ARG USER_GID=$USER_UID + +# Install UV (pinned to major version) +COPY --from=ghcr.io/astral-sh/uv:0.9 /uv /usr/local/bin/uv + +# Create non-root user and set ownership of /app +RUN groupadd --gid $USER_GID $USERNAME \ + && useradd --uid $USER_UID --gid $USER_GID -m $USERNAME \ + && chown -R $USERNAME:$USERNAME /app + +# Copy dependency files with correct ownership +COPY --chown=$USERNAME:$USERNAME app_src/pyproject.toml ./ +COPY --chown=$USERNAME:$USERNAME app_src/uv.loc[k] ./ + +# Install minimal base packages (needed by both dev and prod) +# If your build fails with 'gcc not found' or similar compilation errors, +# uncomment build-essential below to add C/C++ compilers: +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ + git \ + # build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Switch to non-root user for dependency installation +USER $USERNAME + +# Install dependencies (as non-root user) +RUN uv sync + +# Copy application code +COPY --chown=$USERNAME:$USERNAME app_src/ . 
+ +EXPOSE 8080 + +CMD ["uv", "run", "uvicorn", "fastapi_app:app", "--host", "0.0.0.0", "--port", "8080"] + +# ============================================================================ +# Development stage - for dev containers (NOT DEFAULT) +# ============================================================================ +FROM base AS development + +# Switch back to root to install dev-only packages +USER root + +# Grant passwordless sudo for development flexibility +RUN apt-get update \ + && apt-get install -y --no-install-recommends sudo \ + && rm -rf /var/lib/apt/lists/* \ + && echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \ + && chmod 0440 /etc/sudoers.d/$USERNAME + +USER $USERNAME + +# ============================================================================ +# Production stage - secure, minimal, non-root (DEFAULT) +# ============================================================================ +FROM base AS production diff --git a/src/gds_idea_app_kit/templates/fastapi/fastapi_app.py b/src/gds_idea_app_kit/templates/fastapi/fastapi_app.py new file mode 100644 index 0000000..7583e60 --- /dev/null +++ b/src/gds_idea_app_kit/templates/fastapi/fastapi_app.py @@ -0,0 +1,49 @@ +import logging + +from cognito_auth.fastapi import FastAPIAuth +from fastapi import FastAPI, Request + +# Configure logging - quiet noisy libraries +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) + +# Quiet noisy third-party loggers +logging.getLogger("watchdog").setLevel(logging.WARNING) +logging.getLogger("urllib3").setLevel(logging.WARNING) +logging.getLogger("botocore").setLevel(logging.WARNING) +logging.getLogger("boto3").setLevel(logging.WARNING) +logging.getLogger("uvicorn.access").setLevel(logging.WARNING) # Uvicorn access logs + +# Your app logger +logger = logging.getLogger(__name__) + +app = FastAPI() +auth = FastAPIAuth(app) + + +# Health check endpoint for ECS/ALB (unprotected) 
+@app.get("/health") +def health_check(): + return {"status": "healthy"} + + +# Main route - protected by app-wide auth middleware +@app.get("/") +def index(request: Request): + user = auth.get_current_user(request) + + return { + "message": "You are Authorised!", + "email": user.email, + "oidc_claims": user.oidc_claims, + "access_claims": user.access_claims, + } + + +# Additional example route - also automatically protected +@app.get("/api/user") +def get_user(request: Request): + user = auth.get_current_user(request) + + return {"email": user.email, "groups": user.groups if hasattr(user, "groups") else []} diff --git a/src/gds_idea_app_kit/templates/fastapi/pyproject.toml.template b/src/gds_idea_app_kit/templates/fastapi/pyproject.toml.template new file mode 100644 index 0000000..02e47a0 --- /dev/null +++ b/src/gds_idea_app_kit/templates/fastapi/pyproject.toml.template @@ -0,0 +1,27 @@ +[project] +name = "{{app_name}}" +version = "0.1.0" +description = "FastAPI app with Cognito authentication" +requires-python = ">={{python_version}}" +dependencies = [ + "cognito-auth[fastapi] @ git+https://github.com/co-cddo/gds-idea-app-auth.git", + "uvicorn[standard]>=0.27.0", +] + +[tool.ruff] +line-length = 100 +target-version = "py{{python_version_nodot}}" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "N", # pep8-naming + "UP", # pyupgrade +] +ignore = [] + +[tool.ruff.lint.isort] +known-first-party = ["{{app_name}}"] diff --git a/src/gds_idea_app_kit/templates/streamlit/Dockerfile b/src/gds_idea_app_kit/templates/streamlit/Dockerfile new file mode 100644 index 0000000..6b5c063 --- /dev/null +++ b/src/gds_idea_app_kit/templates/streamlit/Dockerfile @@ -0,0 +1,77 @@ +# ============================================================================ +# Multi-stage Dockerfile for Streamlit Application +# ============================================================================ +# Production build 
(default): docker build -t app . +# Dev container build: docker build --target development -t app:dev . +# ============================================================================ + +# Base stage - common setup +FROM python:{{python_version}}-slim AS base + +WORKDIR /app + +ARG USERNAME=appuser +ARG USER_UID=1000 +ARG USER_GID=$USER_UID + +# Install UV (pinned to major version) +COPY --from=ghcr.io/astral-sh/uv:0.9 /uv /usr/local/bin/uv + +# Create non-root user and set ownership of /app +RUN groupadd --gid $USER_GID $USERNAME \ + && useradd --uid $USER_UID --gid $USER_GID -m $USERNAME \ + && chown -R $USERNAME:$USERNAME /app + +# Copy dependency files with correct ownership +COPY --chown=$USERNAME:$USERNAME app_src/pyproject.toml ./ +COPY --chown=$USERNAME:$USERNAME app_src/uv.loc[k] ./ + +# Install minimal base packages (needed by both dev and prod) +# If your build fails with 'gcc not found' or similar compilation errors, +# uncomment build-essential below to add C/C++ compilers: +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ + git \ + # build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Switch to non-root user for dependency installation +USER $USERNAME + +# Install dependencies (as non-root user) +RUN uv sync + +# Copy application code +COPY --chown=$USERNAME:$USERNAME app_src/ . 
+ +EXPOSE 8080 + +CMD ["uv", "run", "streamlit", "run", "streamlit_app.py", \ + "--server.port", "8080", \ + "--server.address", "0.0.0.0", \ + "--server.headless", "true", \ + "--server.enableCORS", "false", \ + "--browser.gatherUsageStats", "false"] + +# ============================================================================ +# Development stage - for dev containers (NOT DEFAULT) +# ============================================================================ +FROM base AS development + +# Switch back to root to install dev-only packages +USER root + +# Grant passwordless sudo for development flexibility +RUN apt-get update \ + && apt-get install -y --no-install-recommends sudo \ + && rm -rf /var/lib/apt/lists/* \ + && echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \ + && chmod 0440 /etc/sudoers.d/$USERNAME + +USER $USERNAME + +# ============================================================================ +# Production stage - secure, minimal, non-root (DEFAULT) +# ============================================================================ +FROM base AS production diff --git a/src/gds_idea_app_kit/templates/streamlit/pyproject.toml.template b/src/gds_idea_app_kit/templates/streamlit/pyproject.toml.template new file mode 100644 index 0000000..83234b8 --- /dev/null +++ b/src/gds_idea_app_kit/templates/streamlit/pyproject.toml.template @@ -0,0 +1,26 @@ +[project] +name = "{{app_name}}" +version = "0.1.0" +description = "Streamlit app with Cognito authentication" +requires-python = ">={{python_version}}" +dependencies = [ + "cognito-auth[streamlit] @ git+https://github.com/co-cddo/gds-idea-app-auth.git", +] + +[tool.ruff] +line-length = 100 +target-version = "py{{python_version_nodot}}" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "N", # pep8-naming + "UP", # pyupgrade +] +ignore = [] + +[tool.ruff.lint.isort] +known-first-party = ["{{app_name}}"] diff --git 
a/src/gds_idea_app_kit/templates/streamlit/streamlit_app.py b/src/gds_idea_app_kit/templates/streamlit/streamlit_app.py new file mode 100644 index 0000000..d12380d --- /dev/null +++ b/src/gds_idea_app_kit/templates/streamlit/streamlit_app.py @@ -0,0 +1,37 @@ +import logging + +import streamlit as st +from cognito_auth.streamlit import StreamlitAuth + +# Configure logging - quiet noisy libraries +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) + +# Quiet noisy third-party loggers +logging.getLogger("watchdog").setLevel(logging.WARNING) +logging.getLogger("urllib3").setLevel(logging.WARNING) +logging.getLogger("botocore").setLevel(logging.WARNING) +logging.getLogger("boto3").setLevel(logging.WARNING) + +# Your app logger +logger = logging.getLogger(__name__) + +REDIRECT_URL = "https://gds-idea.click/401.html" + +auth = StreamlitAuth() + +user = auth.get_auth_user() + + +st.write("You are Authorised!") +st.write(f"Welcome {user.email}") + +st.write("OIDC_claims:") +st.json(user.oidc_claims) + +st.write("Access Claims:") +st.json(user.access_claims) + +st.write("All Headers:") +st.json(dict(st.context.headers)) diff --git a/src/gds_idea_app_kit/update.py b/src/gds_idea_app_kit/update.py new file mode 100644 index 0000000..419ad9e --- /dev/null +++ b/src/gds_idea_app_kit/update.py @@ -0,0 +1,330 @@ +"""Implementation of the update command. + +Compares gds-idea-app-kit managed files against the manifest hashes to detect +local modifications, then overwrites unchanged files with the latest templates. +Locally modified files get a .new file written alongside for manual review, +unless --force is used to overwrite everything. + +The update is structured as plan -> apply -> report: + +1. _plan_updates() classifies each tracked file into an Action (CREATE, UPDATE, + SKIP, or FORCE) without touching the filesystem. +2. _apply_updates() writes files to disk based on the plan. +3. 
_report_updates() prints results to the user. +""" + +import sys +from dataclasses import dataclass +from enum import Enum +from pathlib import Path + +import click + +from gds_idea_app_kit import __version__ +from gds_idea_app_kit.init import _apply_template_vars, _get_templates_dir +from gds_idea_app_kit.manifest import ( + build_manifest, + get_tracked_files, + hash_file, + read_manifest, + write_manifest, +) + + +class Action(Enum): + """What to do with a tracked file during update.""" + + CREATE = "create" # file missing from project + UPDATE = "update" # file unchanged from manifest, overwrite with latest + SKIP = "skip" # locally modified, write .new alongside + FORCE = "force" # locally modified, overwrite anyway (--force) + + +@dataclass +class FileUpdate: + """A planned update action for a single tracked file. + + Attributes: + dest_path: Relative path in the project (e.g. "app_src/Dockerfile"). + dest_full: Absolute path to the file. + new_content: Rendered template content to write. + action: What to do with this file. + """ + + dest_path: str + dest_full: Path + new_content: str + action: Action + + +def _parse_version(version: str) -> tuple[int, ...]: + """Parse a dotted version string into a tuple of integers for comparison. + + Args: + version: A dotted version string (e.g. "0.1.0"). + + Returns: + Tuple of integers (e.g. (0, 1, 0)). + """ + return tuple(int(x) for x in version.split(".")) + + +def _check_version(manifest: dict) -> None: + """Warn if the installed tool is older than the version that created the project. + + Args: + manifest: The manifest dict from pyproject.toml. 
+ """ + manifest_version = manifest.get("tool_version", "0.0.0") + try: + if _parse_version(__version__) < _parse_version(manifest_version): + click.echo( + f"Warning: This project was created with gds-idea-app-kit {manifest_version}, " + f"but you have {__version__} installed.", + err=True, + ) + click.echo( + " Consider upgrading: uv tool upgrade gds-idea-app-kit", + err=True, + ) + click.echo() + except (ValueError, TypeError): + # Don't crash on malformed version strings + pass + + +def _render_template(template_path: Path, template_vars: dict[str, str]) -> str: + """Read a template file and apply variable substitution. + + Args: + template_path: Path to the template file. + template_vars: Mapping of placeholder names to values. + + Returns: + The rendered template content. + """ + content = template_path.read_text() + return _apply_template_vars(content, template_vars) + + +def _classify_file( + dest_full: Path, + manifest_hashes: dict[str, str], + dest_path: str, + force: bool, +) -> Action: + """Determine the action for a single tracked file. + + Args: + dest_full: Absolute path to the file in the project. + manifest_hashes: Mapping of relative paths to their manifest hashes. + dest_path: Relative path of the file (used as key into manifest_hashes). + force: Whether --force was specified. + + Returns: + The Action to take for this file. + """ + if not dest_full.exists(): + return Action.CREATE + + current_hash = hash_file(dest_full) + manifest_hash = manifest_hashes.get(dest_path) + is_modified = manifest_hash is not None and current_hash != manifest_hash + + if not is_modified: + return Action.UPDATE + + return Action.FORCE if force else Action.SKIP + + +def _plan_updates( + project_dir: Path, + tracked: dict[str, str], + templates_dir: Path, + template_vars: dict[str, str], + manifest_hashes: dict[str, str], + force: bool, +) -> list[FileUpdate]: + """Build the update plan by classifying each tracked file. 
+ + This is a pure planning step -- no filesystem writes, no output. + + Args: + project_dir: The project root directory. + tracked: Mapping of template source paths to destination paths. + templates_dir: Root directory containing template files. + template_vars: Template variable substitutions. + manifest_hashes: Current manifest file hashes. + force: Whether --force was specified. + + Returns: + List of FileUpdate objects describing what to do with each file. + """ + plan = [] + for template_src, dest_path in sorted(tracked.items()): + template_full = templates_dir / template_src + + if not template_full.exists(): + continue + + new_content = _render_template(template_full, template_vars) + dest_full = project_dir / dest_path + action = _classify_file(dest_full, manifest_hashes, dest_path, force) + + plan.append(FileUpdate(dest_path, dest_full, new_content, action)) + + return plan + + +def _apply_updates(plan: list[FileUpdate]) -> None: + """Write files to disk based on the update plan. + + CREATE, UPDATE, and FORCE actions write to the destination path. + SKIP actions write a .new file alongside for manual review. + + Args: + plan: List of FileUpdate objects from _plan_updates(). + """ + for item in plan: + if item.action == Action.SKIP: + new_path = Path(f"{item.dest_full}.new") + new_path.write_text(item.new_content) + else: + item.dest_full.parent.mkdir(parents=True, exist_ok=True) + item.dest_full.write_text(item.new_content) + + +def _report_updates(plan: list[FileUpdate], dry_run: bool) -> None: + """Print the results of the update to the user. + + Args: + plan: List of FileUpdate objects from _plan_updates(). + dry_run: Whether this was a dry run. 
+ """ + created = [item for item in plan if item.action == Action.CREATE] + updated = [item for item in plan if item.action in (Action.UPDATE, Action.FORCE)] + skipped = [item for item in plan if item.action == Action.SKIP] + + for item in created: + click.echo(f" Created: {item.dest_path}") + + for item in updated: + click.echo(f" Updated: {item.dest_path}") + + if skipped: + click.echo() + for item in skipped: + click.echo(f" Skipped: {item.dest_path} (locally modified)") + click.echo(f" New version written to: {item.dest_path}.new") + click.echo(f" Review changes: diff {item.dest_path} {item.dest_path}.new") + + if not created and not updated and not skipped: + click.echo(" Nothing to update.") + + if skipped and not dry_run: + click.echo() + count = len(skipped) + click.echo( + f"{count} file(s) were locally modified and skipped. Review the .new files above," + ) + click.echo("then rename or delete them when done.") + + if dry_run: + click.echo() + click.echo("No changes made (dry run).") + + +def _update_manifest( + project_dir: Path, + framework: str, + app_name: str, + python_version: str, +) -> None: + """Rebuild and write the manifest after applying updates. + + Args: + project_dir: The project root directory. + framework: The framework name (e.g. "streamlit"). + app_name: The application name. + python_version: The Python version string. + """ + click.echo() + click.echo("Updating manifest...") + new_manifest = build_manifest( + framework=framework, + app_name=app_name, + tool_version=__version__, + project_dir=project_dir, + ) + new_manifest["python_version"] = python_version + write_manifest(project_dir, new_manifest) + + +def run_update(dry_run: bool, force: bool = False) -> None: + """Update gds-idea-app-kit managed files in an existing project. + + Reads the manifest from pyproject.toml, plans updates by classifying each + tracked file, then applies the plan (unless --dry-run). + + Args: + dry_run: If True, show what would change without applying. 
+ force: If True, overwrite locally modified files without creating .new files. + """ + project_dir = Path.cwd() + pyproject_path = project_dir / "pyproject.toml" + + if not pyproject_path.exists(): + click.echo("Error: No pyproject.toml found. Are you in a project root?", err=True) + sys.exit(1) + + # -- Read manifest -- + manifest = read_manifest(project_dir) + if not manifest: + click.echo( + "Error: No [tool.gds-idea-app-kit] section found in pyproject.toml.", + err=True, + ) + click.echo(" This doesn't look like a project created by idea-app.", err=True) + sys.exit(1) + + framework = manifest.get("framework") + app_name = manifest.get("app_name") + if not framework or not app_name: + click.echo("Error: Manifest is missing framework or app_name.", err=True) + sys.exit(1) + + # -- Check version -- + _check_version(manifest) + + if dry_run: + click.echo("Dry run: showing what would change...") + elif force: + click.echo("Updating gds-idea-app-kit managed files (force)...") + else: + click.echo("Updating gds-idea-app-kit managed files...") + click.echo() + + # -- Prepare template variables -- + python_version = manifest.get("python_version", "3.13") + python_version_nodot = python_version.replace(".", "") + template_vars = { + "app_name": app_name, + "python_version": python_version, + "python_version_nodot": python_version_nodot, + } + + templates_dir = _get_templates_dir() + tracked = get_tracked_files(framework) + manifest_hashes = manifest.get("files", {}) + + # -- Plan -> Apply -> Report -- + plan = _plan_updates(project_dir, tracked, templates_dir, template_vars, manifest_hashes, force) + + if not dry_run: + _apply_updates(plan) + + _report_updates(plan, dry_run) + + has_writes = any(item.action in (Action.CREATE, Action.UPDATE, Action.FORCE) for item in plan) + if not dry_run and has_writes: + _update_manifest(project_dir, framework, app_name, python_version) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff 
--git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..6013f67 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,37 @@ +"""Shared test fixtures.""" + +import pytest +from click.testing import CliRunner + +from gds_idea_app_kit.manifest import MANIFEST_KEY + + +@pytest.fixture() +def cli_runner(): + """Click CLI test runner.""" + return CliRunner() + + +@pytest.fixture() +def project_dir(tmp_path): + """Create a minimal project directory with a pyproject.toml.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test-app"\nversion = "0.1.0"\n\n[tool]\n') + return tmp_path + + +@pytest.fixture() +def project_with_manifest(project_dir): + """Create a project directory with an existing manifest in pyproject.toml.""" + pyproject = project_dir / "pyproject.toml" + pyproject.write_text( + f'[project]\nname = "test-app"\nversion = "0.1.0"\n\n' + f"[tool.{MANIFEST_KEY}]\n" + f'framework = "streamlit"\n' + f'app_name = "test-app"\n' + f'tool_version = "0.1.0"\n\n' + f"[tool.{MANIFEST_KEY}.files]\n" + f'"app_src/Dockerfile" = "sha256:abc123"\n' + f'".devcontainer/devcontainer.json" = "sha256:def456"\n' + ) + return project_dir diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..2ee8499 --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,224 @@ +"""Tests for the CLI interface.""" + +from unittest.mock import patch + +import pytest + +from gds_idea_app_kit import DEFAULT_PYTHON_VERSION, __version__ +from gds_idea_app_kit.cli import cli + +# ---- version and help ---- + + +def test_version(cli_runner): + """--version prints the package version.""" + result = cli_runner.invoke(cli, ["--version"]) + assert result.exit_code == 0 + assert __version__ in result.output + + +def test_help_lists_all_commands(cli_runner): + """--help lists all four commands.""" + result = cli_runner.invoke(cli, ["--help"]) + assert result.exit_code == 0 + assert "init" in result.output + assert "update" in 
result.output + assert "smoke-test" in result.output + assert "provide-role" in result.output + + +# ---- init command ---- + + +def test_init_help_shows_options(cli_runner): + """init --help shows framework choices and --python option.""" + result = cli_runner.invoke(cli, ["init", "--help"]) + assert result.exit_code == 0 + assert "streamlit" in result.output + assert "dash" in result.output + assert "fastapi" in result.output + assert "--python" in result.output + assert DEFAULT_PYTHON_VERSION in result.output + + +@pytest.mark.parametrize("framework", ["streamlit", "dash", "fastapi"]) +def test_init_valid_framework(cli_runner, framework): + """init accepts valid framework and passes correct args to run_init.""" + with patch("gds_idea_app_kit.init.run_init") as mock: + result = cli_runner.invoke(cli, ["init", framework, "my-app"]) + assert result.exit_code == 0 + mock.assert_called_once_with( + framework=framework, app_name="my-app", python_version=DEFAULT_PYTHON_VERSION + ) + + +def test_init_custom_python_version(cli_runner): + """init --python passes the custom version to run_init.""" + with patch("gds_idea_app_kit.init.run_init") as mock: + result = cli_runner.invoke(cli, ["init", "streamlit", "my-app", "--python", "3.12"]) + assert result.exit_code == 0 + mock.assert_called_once_with(framework="streamlit", app_name="my-app", python_version="3.12") + + +def test_init_default_python_version(cli_runner): + """init uses DEFAULT_PYTHON_VERSION when --python is not given.""" + with patch("gds_idea_app_kit.init.run_init") as mock: + result = cli_runner.invoke(cli, ["init", "streamlit", "my-app"]) + assert result.exit_code == 0 + mock.assert_called_once_with( + framework="streamlit", app_name="my-app", python_version=DEFAULT_PYTHON_VERSION + ) + + +def test_init_invalid_framework(cli_runner): + result = cli_runner.invoke(cli, ["init", "flask", "my-app"]) + assert result.exit_code != 0 + assert "Invalid value" in result.output + + +def 
def test_init_missing_all_args(cli_runner):
    """Invoking ``init`` with no arguments exits non-zero."""
    result = cli_runner.invoke(cli, ["init"])
    assert result.exit_code != 0


# ---- update command ----


def test_update_help_shows_options(cli_runner):
    """``update --help`` advertises both --dry-run and --force."""
    result = cli_runner.invoke(cli, ["update", "--help"])
    assert result.exit_code == 0
    assert "--dry-run" in result.output
    assert "--force" in result.output


def test_update_runs(cli_runner):
    """Plain ``update`` forwards dry_run=False, force=False to run_update."""
    with patch("gds_idea_app_kit.update.run_update") as mocked:
        result = cli_runner.invoke(cli, ["update"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(dry_run=False, force=False)


def test_update_dry_run(cli_runner):
    """``update --dry-run`` forwards dry_run=True to run_update."""
    with patch("gds_idea_app_kit.update.run_update") as mocked:
        result = cli_runner.invoke(cli, ["update", "--dry-run"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(dry_run=True, force=False)


def test_update_force(cli_runner):
    """``update --force`` forwards force=True to run_update."""
    with patch("gds_idea_app_kit.update.run_update") as mocked:
        result = cli_runner.invoke(cli, ["update", "--force"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(dry_run=False, force=True)


# ---- smoke-test command ----


def test_smoke_test_help_shows_options(cli_runner):
    """``smoke-test --help`` advertises --build-only and --wait."""
    result = cli_runner.invoke(cli, ["smoke-test", "--help"])
    assert result.exit_code == 0
    assert "--build-only" in result.output
    assert "--wait" in result.output


def test_smoke_test_runs(cli_runner):
    """Plain ``smoke-test`` forwards build_only=False, wait=False."""
    with patch("gds_idea_app_kit.smoke_test.run_smoke_test") as mocked:
        result = cli_runner.invoke(cli, ["smoke-test"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(build_only=False, wait=False)


def test_smoke_test_build_only(cli_runner):
    """``smoke-test --build-only`` forwards build_only=True."""
    with patch("gds_idea_app_kit.smoke_test.run_smoke_test") as mocked:
        result = cli_runner.invoke(cli, ["smoke-test", "--build-only"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(build_only=True, wait=False)


def test_smoke_test_wait(cli_runner):
    """``smoke-test --wait`` forwards wait=True."""
    with patch("gds_idea_app_kit.smoke_test.run_smoke_test") as mocked:
        result = cli_runner.invoke(cli, ["smoke-test", "--wait"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(build_only=False, wait=True)


# ---- provide-role command ----


def test_provide_role_help_shows_options(cli_runner):
    """``provide-role --help`` shows the flags and the default duration."""
    result = cli_runner.invoke(cli, ["provide-role", "--help"])
    assert result.exit_code == 0
    assert "--use-profile" in result.output
    assert "--duration" in result.output
    # The default duration (3600s) should be rendered in the help text.
    assert "3600" in result.output


def test_provide_role_runs(cli_runner):
    """Plain ``provide-role`` forwards use_profile=False and the default duration."""
    with patch("gds_idea_app_kit.provide_role.run_provide_role") as mocked:
        result = cli_runner.invoke(cli, ["provide-role"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(use_profile=False, duration=3600)


def test_provide_role_default_duration(cli_runner):
    """The default duration passed through is exactly 3600 seconds."""
    with patch("gds_idea_app_kit.provide_role.run_provide_role") as mocked:
        result = cli_runner.invoke(cli, ["provide-role"])
        assert result.exit_code == 0
        assert mocked.call_args == ((), {"use_profile": False, "duration": 3600})


def test_provide_role_custom_duration(cli_runner):
    """``provide-role --duration 7200`` forwards the custom duration."""
    with patch("gds_idea_app_kit.provide_role.run_provide_role") as mocked:
        result = cli_runner.invoke(cli, ["provide-role", "--duration", "7200"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(use_profile=False, duration=7200)


def test_provide_role_use_profile(cli_runner):
    """``provide-role --use-profile`` forwards use_profile=True."""
    with patch("gds_idea_app_kit.provide_role.run_provide_role") as mocked:
        result = cli_runner.invoke(cli, ["provide-role", "--use-profile"])
        assert result.exit_code == 0
        mocked.assert_called_once_with(use_profile=True, duration=3600)


# ---- migrate command ----


def test_migrate_help_shows_description(cli_runner):
    """``migrate --help`` includes the command's description text."""
    result = cli_runner.invoke(cli, ["migrate", "--help"])
    assert result.exit_code == 0
    assert "Migrate" in result.output


def test_migrate_runs(cli_runner):
    """``migrate`` dispatches to run_migrate with no arguments."""
    with patch("gds_idea_app_kit.migrate.run_migrate") as mocked:
        result = cli_runner.invoke(cli, ["migrate"])
        assert result.exit_code == 0
        mocked.assert_called_once_with()


# ---- unknown command ----


def test_unknown_command(cli_runner):
    """An unrecognised subcommand exits non-zero."""
    result = cli_runner.invoke(cli, ["nonexistent"])
    assert result.exit_code != 0


"""Tests for init module helper functions."""

import click
import pytest

from gds_idea_app_kit import REPO_PREFIX
from gds_idea_app_kit.init import (
    _apply_template_vars,
    _copy_template,
    _get_templates_dir,
    _run_command,
    _sanitize_app_name,
)

# ---- _sanitize_app_name ----
# Validates that app names are safe for use as DNS subdomain labels
# (the name becomes {name}.gds-idea.click).
def test_sanitize_valid_name():
    """A typical hyphenated name is returned unchanged."""
    assert _sanitize_app_name("my-dashboard") == "my-dashboard"


def test_sanitize_simple_name():
    """A single-word name is returned unchanged."""
    assert _sanitize_app_name("myapp") == "myapp"


def test_sanitize_name_with_numbers():
    """Digits are permitted inside a name."""
    assert _sanitize_app_name("app-123") == "app-123"


def test_sanitize_strips_repo_prefix():
    """An accidental repo prefix on the name is stripped off."""
    assert _sanitize_app_name(f"{REPO_PREFIX}-my-dashboard") == "my-dashboard"


def test_sanitize_lowercases():
    """Mixed-case input is normalised to lowercase."""
    assert _sanitize_app_name("My-Dashboard") == "my-dashboard"


def test_sanitize_rejects_empty():
    """An empty string raises BadParameter."""
    with pytest.raises(click.BadParameter, match="cannot be empty"):
        _sanitize_app_name("")


def test_sanitize_rejects_empty_after_prefix_strip():
    """The bare prefix with nothing after it raises BadParameter."""
    with pytest.raises(click.BadParameter, match="cannot be empty"):
        _sanitize_app_name(f"{REPO_PREFIX}-")


def test_sanitize_rejects_leading_hyphen():
    """A DNS label may not begin with a hyphen."""
    with pytest.raises(click.BadParameter, match="start and end"):
        _sanitize_app_name("-my-app")


def test_sanitize_rejects_trailing_hyphen():
    """A DNS label may not end with a hyphen."""
    with pytest.raises(click.BadParameter, match="start and end"):
        _sanitize_app_name("my-app-")


def test_sanitize_rejects_consecutive_hyphens():
    """Back-to-back hyphens are rejected for subdomain labels."""
    with pytest.raises(click.BadParameter, match="consecutive hyphens"):
        _sanitize_app_name("my--app")


def test_sanitize_rejects_purely_numeric():
    """All-digit names are rejected (could be confused with IP addresses)."""
    with pytest.raises(click.BadParameter, match="purely numeric"):
        _sanitize_app_name("12345")


def test_sanitize_rejects_underscores():
    """Underscores are not legal in DNS labels."""
    with pytest.raises(click.BadParameter, match="lowercase letters"):
        _sanitize_app_name("my_app")


def test_sanitize_rejects_spaces():
    """Spaces are not legal in DNS labels."""
    with pytest.raises(click.BadParameter, match="lowercase letters"):
        _sanitize_app_name("my app")


def test_sanitize_rejects_too_long():
    """Names over the 63-character DNS label limit are rejected."""
    with pytest.raises(click.BadParameter, match="63 characters"):
        _sanitize_app_name("a" * 64)


def test_sanitize_accepts_max_length():
    """A name of exactly 63 characters is accepted."""
    label = "a" * 63
    assert _sanitize_app_name(label) == label


def test_sanitize_single_char():
    """One lone letter is a valid name."""
    assert _sanitize_app_name("a") == "a"


def test_sanitize_single_digit_rejected():
    """One lone digit counts as purely numeric and is rejected."""
    with pytest.raises(click.BadParameter, match="purely numeric"):
        _sanitize_app_name("1")


# ---- _get_templates_dir ----
# Verifies that bundled template files are accessible via importlib.resources.


def test_get_templates_dir_exists():
    """The templates directory ships inside the package."""
    templates_dir = _get_templates_dir()
    assert templates_dir.exists()
    assert templates_dir.is_dir()


def test_get_templates_dir_has_common():
    """Shared template files live under common/."""
    templates_dir = _get_templates_dir()
    assert (templates_dir / "common").is_dir()


def test_get_templates_dir_has_frameworks():
    """Every supported framework has its own template subdirectory."""
    templates_dir = _get_templates_dir()
    for framework in ("streamlit", "dash", "fastapi"):
        assert (templates_dir / framework).is_dir()


# ---- _apply_template_vars ----
# Tests the simple {{placeholder}} substitution used in templates.
def test_apply_vars_replaces_placeholders():
    """One placeholder is substituted with its mapped value."""
    rendered = _apply_template_vars(
        "FROM python:{{python_version}}-slim", {"python_version": "3.13"}
    )
    assert rendered == "FROM python:3.13-slim"


def test_apply_vars_multiple_placeholders():
    """Several distinct placeholders are all substituted."""
    template = 'name = "{{app_name}}"\ntarget = "py{{python_version_nodot}}"'
    rendered = _apply_template_vars(
        template, {"app_name": "my-app", "python_version_nodot": "313"}
    )
    assert rendered == 'name = "my-app"\ntarget = "py313"'


def test_apply_vars_no_placeholders():
    """Placeholder-free content comes back untouched."""
    rendered = _apply_template_vars("no placeholders here", {"app_name": "my-app"})
    assert rendered == "no placeholders here"


def test_apply_vars_empty_variables():
    """With an empty variables dict, placeholders are left intact."""
    rendered = _apply_template_vars("{{app_name}} stays", {})
    assert rendered == "{{app_name}} stays"


def test_apply_vars_repeated_placeholder():
    """Every occurrence of a repeated placeholder is substituted."""
    rendered = _apply_template_vars("{{app_name}} and {{app_name}}", {"app_name": "foo"})
    assert rendered == "foo and foo"


# ---- _copy_template ----
# Tests the file copy helper that reads a template, optionally applies
# variable substitution, and writes to a destination path.
def test_copy_template_simple(tmp_path):
    """Without variables, the file is copied verbatim."""
    source = tmp_path / "src" / "template.txt"
    source.parent.mkdir()
    source.write_text("hello world")

    target = tmp_path / "dest" / "output.txt"
    _copy_template(source, target)

    assert target.read_text() == "hello world"


def test_copy_template_creates_parent_dirs(tmp_path):
    """Destination parent directories that do not exist are created."""
    source = tmp_path / "template.txt"
    source.write_text("content")

    target = tmp_path / "deep" / "nested" / "dir" / "output.txt"
    _copy_template(source, target)

    assert target.exists()
    assert target.read_text() == "content"


def test_copy_template_with_variables(tmp_path):
    """Supplying variables substitutes placeholders during the copy."""
    source = tmp_path / "template.txt"
    source.write_text("FROM python:{{python_version}}-slim")

    target = tmp_path / "output.txt"
    _copy_template(source, target, variables={"python_version": "3.12"})

    assert target.read_text() == "FROM python:3.12-slim"


def test_copy_template_without_variables(tmp_path):
    """variables=None performs no substitution at all."""
    source = tmp_path / "template.txt"
    source.write_text("no {{placeholders}} replaced")

    target = tmp_path / "output.txt"
    _copy_template(source, target, variables=None)

    assert target.read_text() == "no {{placeholders}} replaced"


# ---- _run_command ----
# Wraps subprocess.run with error handling: catches missing commands
# (especially cdk with install instructions) and prints a cleanup
# command on failure.
def test_run_command_success(tmp_path):
    """A successful command returns the CompletedProcess result."""
    result = _run_command(["echo", "hello"], cwd=tmp_path)
    assert result.returncode == 0
    assert "hello" in result.stdout


def test_run_command_failed_prints_cleanup(tmp_path, capsys):
    """A failing command prints stderr and a cleanup rm -rf suggestion."""
    with pytest.raises(SystemExit):
        _run_command(["false"], cwd=tmp_path, project_dir=tmp_path)

    captured = capsys.readouterr()
    assert "rm -rf" in captured.err
    assert str(tmp_path) in captured.err


def test_run_command_missing_cdk_prints_install_instructions(tmp_path, capsys):
    """When a cdk-named binary is not found, a 'not installed' error is printed."""
    with pytest.raises(SystemExit):
        _run_command(["cdk-nonexistent-binary"], cwd=tmp_path)

    captured = capsys.readouterr()
    assert "not installed" in captured.err


def test_run_command_missing_cdk_specific_message(tmp_path, capsys):
    """The cdk-specific error message includes npm and brew install options.

    Bug fix: the original version asserted SystemExit unconditionally while
    its own comment admitted cdk might actually be installed -- on such a
    machine the command would succeed, no SystemExit would be raised, and the
    test would fail spuriously (and it made no assertions at all). Skip when
    cdk is present so the FileNotFoundError path is exercised deterministically.
    """
    # Local import: only needed here to probe the environment for cdk.
    import shutil

    if shutil.which("cdk") is not None:
        pytest.skip("cdk is installed; the missing-binary path cannot be exercised")

    with pytest.raises(SystemExit):
        _run_command(["cdk"], cwd=tmp_path)

    # Per the module contract above, the cdk-specific message offers both
    # npm and brew installation routes -- assert both are mentioned.
    captured = capsys.readouterr()
    assert "npm" in captured.err
    assert "brew" in captured.err


def test_run_command_missing_arbitrary_binary(tmp_path, capsys):
    """A missing non-cdk binary prints a generic 'not installed' error."""
    with pytest.raises(SystemExit):
        _run_command(["totally-nonexistent-command-xyz"], cwd=tmp_path)

    captured = capsys.readouterr()
    assert "totally-nonexistent-command-xyz" in captured.err
    assert "not installed" in captured.err


"""Integration tests that run real commands (cdk, uv, git).

These tests require:
- cdk installed (npm install -g aws-cdk)
- uv installed
- git installed
- Network access to GitHub (for private deps)

Run with: uv run pytest -m integration
Skip with: uv run pytest -m "not integration"
"""

import os
import subprocess
import tomllib

import pytest

from gds_idea_app_kit.init import run_init
from gds_idea_app_kit.manifest import read_manifest


@pytest.mark.integration
def test_init_streamlit_end_to_end(tmp_path, monkeypatch):
    """Full init creates a working streamlit project with all deps resolved.

    Bug fix: use monkeypatch.chdir instead of a bare os.chdir so the working
    directory is restored when the test finishes -- a raw os.chdir leaks the
    CWD into every subsequent test in the session.
    """
    monkeypatch.chdir(tmp_path)
    run_init(framework="streamlit", app_name="integ-test", python_version="3.13")

    project = tmp_path / "gds-idea-app-integ-test"

    # ---- Project structure ----

    assert project.is_dir(), "Project directory should be created"

    # CDK files
    assert (project / "app.py").exists(), "CDK entry point should exist"
    assert (project / "cdk.json").exists(), "cdk.json should exist"

    # App source
    assert (project / "app_src" / "Dockerfile").exists()
    assert (project / "app_src" / "streamlit_app.py").exists()
    assert (project / "app_src" / "pyproject.toml").exists()

    # Dev container
    assert (project / ".devcontainer" / "devcontainer.json").exists()
    assert (project / ".devcontainer" / "docker-compose.yml").exists()

    # Dev mocks
    assert (project / "dev_mocks" / "dev_mock_authoriser.json").exists()
    assert (project / "dev_mocks" / "dev_mock_user.json").exists()

    # ---- Template variables substituted ----

    dockerfile = (project / "app_src" / "Dockerfile").read_text()
    assert "python:3.13" in dockerfile, "Python version should be substituted in Dockerfile"
    assert "{{python_version}}" not in dockerfile, "Template placeholder should not remain"

    app_pyproject = (project / "app_src" / "pyproject.toml").read_text()
    assert "integ-test" in app_pyproject, "App name should be substituted in pyproject.toml"
    assert "{{app_name}}" not in app_pyproject, "Template placeholder should not remain"

    # ---- Root pyproject.toml config ----

    with open(project / "pyproject.toml", "rb") as f:
        root_config = tomllib.load(f)

    webapp = root_config.get("tool", {}).get("webapp", {})
    assert webapp.get("framework") == "streamlit"
    assert webapp.get("app_name") == "integ-test"

    # ---- Manifest ----

    manifest = read_manifest(project)
    assert manifest, "Manifest should be written"
    assert manifest["framework"] == "streamlit"
    assert manifest["app_name"] == "integ-test"
    assert "files" in manifest, "Manifest should contain file hashes"
    assert len(manifest["files"]) > 0, "Manifest should track at least one file"

    # ---- Dependencies resolved ----

    assert (project / "uv.lock").exists(), "uv.lock should be created by uv sync"
    assert (project / ".venv").is_dir(), ".venv should be created by uv sync"

    # ---- Git history ----

    result = subprocess.run(
        ["git", "log", "--oneline"],
        cwd=project,
        capture_output=True,
        text=True,
        check=True,
    )
    assert "Initial scaffold" in result.stdout, "Initial commit should exist"
    assert "streamlit" in result.stdout, "Commit message should mention the framework"

    # ---- .gitignore ----

    gitignore = (project / ".gitignore").read_text()
    assert ".aws-dev" in gitignore, ".aws-dev should be in gitignore"
    assert "*.new" in gitignore, "*.new files should be in gitignore"
@pytest.fixture()
def tracked_project(project_dir):
    """A project directory with all tool-owned files present."""
    (project_dir / "app_src").mkdir()
    (project_dir / "app_src" / "Dockerfile").write_text("FROM python:3.13-slim")
    (project_dir / ".devcontainer").mkdir()
    (project_dir / ".devcontainer" / "devcontainer.json").write_text("{}")
    (project_dir / ".devcontainer" / "docker-compose.yml").write_text("services:")
    (project_dir / "dev_mocks").mkdir()
    (project_dir / "dev_mocks" / "dev_mock_authoriser.json").write_text("{}")
    (project_dir / "dev_mocks" / "dev_mock_user.json").write_text("{}")
    return project_dir


# ---- hash_file ----


def test_hash_file_returns_sha256_prefix(sample_file):
    """hash_file produces 'sha256:' followed by the hex digest."""
    expected = f"sha256:{hashlib.sha256(b'hello world').hexdigest()}"
    assert hash_file(sample_file) == expected


def test_hash_file_different_content_gives_different_hash(tmp_path):
    """Distinct contents hash to distinct values."""
    first = tmp_path / "a.txt"
    first.write_text("content a")
    second = tmp_path / "b.txt"
    second.write_text("content b")
    assert hash_file(first) != hash_file(second)


def test_hash_file_same_content_gives_same_hash(tmp_path):
    """Identical contents hash identically regardless of path."""
    first = tmp_path / "a.txt"
    first.write_text("same")
    second = tmp_path / "b.txt"
    second.write_text("same")
    assert hash_file(first) == hash_file(second)


def test_hash_file_empty(tmp_path):
    """An empty file hashes to the SHA-256 of the empty byte string."""
    empty = tmp_path / "empty.txt"
    empty.write_text("")
    expected = f"sha256:{hashlib.sha256(b'').hexdigest()}"
    assert hash_file(empty) == expected


def test_hash_file_binary(tmp_path):
    """Binary (non-UTF-8) content is hashed byte-for-byte."""
    blob = tmp_path / "bin.dat"
    payload = b"\x00\x01\x02\xff"
    blob.write_bytes(payload)
    expected = f"sha256:{hashlib.sha256(payload).hexdigest()}"
    assert hash_file(blob) == expected


# ---- get_tracked_files ----


@pytest.mark.parametrize("framework", ["streamlit", "dash", "fastapi"])
def test_tracked_files_include_common_destinations(framework):
    """All frameworks track the shared devcontainer/mock/Dockerfile destinations."""
    destinations = set(get_tracked_files(framework).values())
    assert ".devcontainer/devcontainer.json" in destinations
    assert ".devcontainer/docker-compose.yml" in destinations
    assert "dev_mocks/dev_mock_authoriser.json" in destinations
    assert "dev_mocks/dev_mock_user.json" in destinations
    assert "app_src/Dockerfile" in destinations


@pytest.mark.parametrize("framework", ["streamlit", "dash", "fastapi"])
def test_tracked_files_dockerfile_source_matches_framework(framework):
    """The Dockerfile source path is namespaced under the framework."""
    assert f"{framework}/Dockerfile" in get_tracked_files(framework)


def test_tracked_files_differ_across_frameworks():
    """Framework-specific sources do not bleed across frameworks."""
    streamlit_files = get_tracked_files("streamlit")
    fastapi_files = get_tracked_files("fastapi")
    assert "streamlit/Dockerfile" in streamlit_files
    assert "streamlit/Dockerfile" not in fastapi_files
    assert "fastapi/Dockerfile" in fastapi_files


# ---- read_manifest ----


def test_read_manifest_missing_pyproject(tmp_path):
    """A directory without pyproject.toml yields an empty manifest."""
    assert read_manifest(tmp_path) == {}


def test_read_manifest_no_manifest_section(project_dir):
    """A pyproject.toml lacking the manifest section yields an empty manifest."""
    assert read_manifest(project_dir) == {}


def test_read_manifest_existing(project_with_manifest):
    """An existing manifest round-trips its metadata and file hashes."""
    manifest = read_manifest(project_with_manifest)
    assert manifest["framework"] == "streamlit"
    assert manifest["app_name"] == "test-app"
    assert manifest["tool_version"] == "0.1.0"
    assert manifest["files"]["app_src/Dockerfile"] == "sha256:abc123"
    assert manifest["files"][".devcontainer/devcontainer.json"] == "sha256:def456"


# ---- write_manifest ----


@pytest.fixture()
def sample_manifest():
    """A minimal manifest dict for write/overwrite tests."""
    return {
        "framework": "fastapi",
        "app_name": "my-api",
        "tool_version": "0.2.0",
        "files": {"app_src/Dockerfile": "sha256:aaa111"},
    }


def test_write_manifest_creates_section(project_dir, sample_manifest):
    """Writing a manifest makes it readable back from the project."""
    write_manifest(project_dir, sample_manifest)
    manifest = read_manifest(project_dir)
    assert manifest["framework"] == "fastapi"
    assert manifest["app_name"] == "my-api"
    assert manifest["files"]["app_src/Dockerfile"] == "sha256:aaa111"


def test_write_manifest_preserves_existing_content(project_dir, sample_manifest):
    """Writing the manifest leaves unrelated pyproject.toml content intact."""
    write_manifest(project_dir, sample_manifest)
    content = (project_dir / "pyproject.toml").read_text()
    assert 'name = "test-app"' in content
    assert 'version = "0.1.0"' in content


def test_write_manifest_overwrites_previous(project_with_manifest, sample_manifest):
    """A second write fully replaces the previous manifest section."""
    write_manifest(project_with_manifest, sample_manifest)
    manifest = read_manifest(project_with_manifest)
    assert manifest["framework"] == "fastapi"
    assert manifest["app_name"] == "my-api"
    assert ".devcontainer/devcontainer.json" not in manifest["files"]


# ---- build_manifest ----


def test_build_manifest_hashes_all_tracked_files(tracked_project):
    """Every tracked file that exists is hashed into the manifest."""
    manifest = build_manifest(
        framework="streamlit",
        app_name="test-app",
        tool_version="0.1.0",
        project_dir=tracked_project,
    )
    assert manifest["framework"] == "streamlit"
    assert manifest["app_name"] == "test-app"
    assert manifest["tool_version"] == "0.1.0"
    assert len(manifest["files"]) == 5
    for digest in manifest["files"].values():
        assert digest.startswith("sha256:")


def test_build_manifest_skips_missing_files(project_dir):
    """Tracked files that are absent on disk are omitted from the manifest."""
    (project_dir / "app_src").mkdir()
    (project_dir / "app_src" / "Dockerfile").write_text("FROM python:3.13-slim")

    manifest = build_manifest(
        framework="streamlit",
        app_name="test-app",
        tool_version="0.1.0",
        project_dir=project_dir,
    )
    assert "app_src/Dockerfile" in manifest["files"]
    assert ".devcontainer/devcontainer.json" not in manifest["files"]


def test_build_manifest_hash_matches_content(project_dir):
    """The stored digest equals the SHA-256 of the file's bytes."""
    content = "FROM python:3.13-slim AS base"
    (project_dir / "app_src").mkdir()
    (project_dir / "app_src" / "Dockerfile").write_text(content)

    manifest = build_manifest(
        framework="streamlit",
        app_name="test-app",
        tool_version="0.1.0",
        project_dir=project_dir,
    )
    expected = f"sha256:{hashlib.sha256(content.encode()).hexdigest()}"
    assert manifest["files"]["app_src/Dockerfile"] == expected


# ---- round-trip ----


def test_build_write_read_roundtrip(tracked_project):
    """build_manifest -> write_manifest -> read_manifest gives consistent data."""
    built = build_manifest(
        framework="streamlit",
        app_name="roundtrip-app",
        tool_version="0.1.0",
        project_dir=tracked_project,
    )
    write_manifest(tracked_project, built)
    loaded = read_manifest(tracked_project)

    assert loaded["framework"] == built["framework"]
    assert loaded["app_name"] == built["app_name"]
    assert loaded["tool_version"] == built["tool_version"]
    assert dict(loaded["files"]) == built["files"]


"""Tests for the migrate command."""

import os
from unittest.mock import patch

import pytest
import tomlkit

from gds_idea_app_kit.manifest import read_manifest
from gds_idea_app_kit.migrate import (
    _detect_python_version,
    _read_webapp_config,
    _remove_old_config,
    _remove_template_dir,
    run_migrate,
)

# ---- fixtures ----


OLD_PYPROJECT = """\
[build-system]
requires = ["uv_build>=0.9.2,<0.10.0"]
build-backend = "uv_build"

[project]
name = "gds-idea-templates"
version = "0.1.2"
description = "Test project"
requires-python = ">=3.11"
dependencies = [
    "aws-cdk-lib>=2.180.0",
    "constructs>=10.0.0,<11.0.0",
]

[project.scripts]
configure = "template.configure:main"
smoke_test = "template.smoke_test:main"
provide_role = "template.provide_role:main"

[tool.uv]
package = true
dev-dependencies = [
    "pytest>=6.2.5",
]

[tool.uv.build-backend]
module-name = "template"
module-root = ""

[tool.webapp]
app_name = "test-app"
framework = "streamlit"

[tool.webapp.dev]
# aws_role_arn = "arn:aws:iam::123456:role/my-role"
# aws_region = "eu-west-2"
"""
@pytest.fixture()
def old_project(tmp_path):
    """Create a mock old-style project with template/ directory and old pyproject.toml.

    Returns the project directory path.
    """
    # Write old-style pyproject.toml
    (tmp_path / "pyproject.toml").write_text(OLD_PYPROJECT)

    # Create template/ directory with scripts
    template_dir = tmp_path / "template"
    template_dir.mkdir()
    (template_dir / "__init__.py").write_text("")
    (template_dir / "configure.py").write_text("def main(): pass")
    (template_dir / "smoke_test.py").write_text("def main(): pass")
    (template_dir / "provide_role.py").write_text("def main(): pass")

    # Create app_src/ with Dockerfile
    app_src = tmp_path / "app_src"
    app_src.mkdir()
    (app_src / "Dockerfile").write_text("FROM python:3.13-slim AS base\nWORKDIR /app\n")
    (app_src / "pyproject.toml").write_text(
        '[project]\nname = "test-app"\nrequires-python = ">=3.13"\n'
    )
    (app_src / "streamlit_app.py").write_text("import streamlit as st\n")

    # Create .devcontainer/
    devcontainer = tmp_path / ".devcontainer"
    devcontainer.mkdir()
    (devcontainer / "devcontainer.json").write_text('{"name": "test"}')
    (devcontainer / "docker-compose.yml").write_text("services:\n  app:\n")

    # Create dev_mocks/
    dev_mocks = tmp_path / "dev_mocks"
    dev_mocks.mkdir()
    (dev_mocks / "dev_mock_authoriser.json").write_text("{}")
    (dev_mocks / "dev_mock_user.json").write_text("{}")

    return tmp_path


# ---- _detect_python_version ----


def test_detect_python_version_from_dockerfile(tmp_path):
    """Parses Python version from Dockerfile FROM line."""
    app_src = tmp_path / "app_src"
    app_src.mkdir()
    (app_src / "Dockerfile").write_text("FROM python:3.13-slim AS base\n")

    assert _detect_python_version(tmp_path) == "3.13"


def test_detect_python_version_312_from_dockerfile(tmp_path):
    """Parses a different Python version from Dockerfile."""
    app_src = tmp_path / "app_src"
    app_src.mkdir()
    (app_src / "Dockerfile").write_text("FROM python:3.12-slim AS base\n")

    assert _detect_python_version(tmp_path) == "3.12"


def test_detect_python_version_falls_back_to_pyproject(tmp_path):
    """Falls back to app_src/pyproject.toml when Dockerfile has no version."""
    app_src = tmp_path / "app_src"
    app_src.mkdir()
    (app_src / "Dockerfile").write_text("FROM ubuntu:latest\n")
    (app_src / "pyproject.toml").write_text('[project]\nrequires-python = ">=3.12"\n')

    assert _detect_python_version(tmp_path) == "3.12"


def test_detect_python_version_defaults_when_no_files(tmp_path):
    """Defaults to 3.13 when no files can be parsed."""
    assert _detect_python_version(tmp_path) == "3.13"


# ---- _read_webapp_config ----


def test_read_webapp_config_reads_framework_and_name(old_project):
    """Reads framework and app_name from [tool.webapp]."""
    config = _read_webapp_config(old_project)
    assert config["framework"] == "streamlit"
    assert config["app_name"] == "test-app"


def test_read_webapp_config_exits_when_no_pyproject(tmp_path):
    """Exits with error when pyproject.toml doesn't exist."""
    with pytest.raises(SystemExit):
        _read_webapp_config(tmp_path)


def test_read_webapp_config_exits_when_no_webapp(tmp_path):
    """Exits with error when [tool.webapp] section is missing."""
    (tmp_path / "pyproject.toml").write_text('[project]\nname = "test"\n')
    with pytest.raises(SystemExit):
        _read_webapp_config(tmp_path)


# ---- _remove_old_config ----


def test_remove_old_config_removes_scripts_and_build(old_project):
    """Removes [project.scripts], [build-system], [tool.uv.build-backend]."""
    _remove_old_config(old_project)

    with open(old_project / "pyproject.toml") as f:
        config = tomlkit.load(f)

    assert "build-system" not in config
    assert "scripts" not in config.get("project", {})
    assert "build-backend" not in config.get("tool", {}).get("uv", {})


def test_remove_old_config_sets_package_false(old_project):
    """Sets package = false in [tool.uv]."""
    _remove_old_config(old_project)

    with open(old_project / "pyproject.toml") as f:
        config = tomlkit.load(f)

    assert config["tool"]["uv"]["package"] is False


def test_remove_old_config_preserves_other_content(old_project):
    """Preserves [project], [tool.webapp], [tool.webapp.dev], [tool.uv] dev-dependencies."""
    _remove_old_config(old_project)

    with open(old_project / "pyproject.toml") as f:
        config = tomlkit.load(f)

    # Project metadata preserved
    assert config["project"]["name"] == "gds-idea-templates"
    assert config["project"]["version"] == "0.1.2"

    # Webapp config preserved
    assert config["tool"]["webapp"]["app_name"] == "test-app"
    assert config["tool"]["webapp"]["framework"] == "streamlit"

    # UV dev-dependencies preserved
    assert "pytest>=6.2.5" in config["tool"]["uv"]["dev-dependencies"]


def test_remove_old_config_handles_missing_sections(tmp_path):
    """Does not error when sections to remove don't exist."""
    (tmp_path / "pyproject.toml").write_text(
        '[project]\nname = "test"\n\n[tool.uv]\ndev-dependencies = []\n'
    )
    # Should not raise
    _remove_old_config(tmp_path)

    with open(tmp_path / "pyproject.toml") as f:
        config = tomlkit.load(f)

    assert config["project"]["name"] == "test"


# ---- _remove_template_dir ----


def test_remove_template_dir_deletes_directory(old_project):
    """Removes the template/ directory and all its contents."""
    assert (old_project / "template").is_dir()
    _remove_template_dir(old_project)
    assert not (old_project / "template").exists()


def test_remove_template_dir_no_error_when_missing(tmp_path):
    """Does not error when template/ directory doesn't exist."""
    _remove_template_dir(tmp_path)  # Should not raise


# ---- run_migrate integration ----


def click_echo_noop(*args, **kwargs):
    """No-op replacement for click.echo in tests."""


def test_migrate_full_flow(old_project, monkeypatch):
    """Full migration creates manifest, removes old config, removes template/.

    Bug fix: use monkeypatch.chdir instead of a bare os.chdir so the working
    directory is restored after the test rather than leaking into later tests.
    """
    monkeypatch.chdir(old_project)

    # Simulate user confirming migration but declining update
    with patch("gds_idea_app_kit.migrate.click") as mock_click:
        mock_click.confirm = lambda msg, **kwargs: msg.startswith("Continue")
        mock_click.echo = click_echo_noop

        run_migrate()

    # Manifest should be written
    manifest = read_manifest(old_project)
    assert manifest is not None
    assert manifest["framework"] == "streamlit"
    assert manifest["app_name"] == "test-app"
    assert manifest["python_version"] == "3.13"

    # template/ should be gone
    assert not (old_project / "template").exists()

    # Old config should be cleaned up
    with open(old_project / "pyproject.toml") as f:
        config = tomlkit.load(f)

    assert "build-system" not in config
    assert "scripts" not in config.get("project", {})
    assert config["tool"]["uv"]["package"] is False


def test_migrate_exits_when_already_migrated(old_project, monkeypatch, capsys):
    """Exits with message when manifest already exists.

    Bug fix: monkeypatch.chdir replaces the leaking os.chdir (see above).
    """
    monkeypatch.chdir(old_project)

    # Write a manifest to simulate already-migrated project
    from gds_idea_app_kit.manifest import build_manifest, write_manifest

    manifest = build_manifest(
        framework="streamlit",
        app_name="test-app",
        tool_version="0.1.0",
        project_dir=old_project,
    )
    write_manifest(old_project, manifest)

    with pytest.raises(SystemExit):
        run_migrate()

    captured = capsys.readouterr()
    assert "already been migrated" in captured.err


def test_migrate_aborts_on_decline(old_project, monkeypatch):
    """No changes are made when user declines the confirmation.

    Bug fix: monkeypatch.chdir replaces the leaking os.chdir (see above).
    """
    monkeypatch.chdir(old_project)

    # Read pyproject.toml before
    original_content = (old_project / "pyproject.toml").read_text()

    with patch("gds_idea_app_kit.migrate.click") as mock_click:
        mock_click.confirm = lambda msg, **kwargs: False
        mock_click.echo = click_echo_noop

        run_migrate()

    # pyproject.toml should be unchanged
    assert (old_project / "pyproject.toml").read_text() == original_content

    # template/ should still exist
    assert (old_project / "template").is_dir()

    # No manifest should exist
    assert read_manifest(old_project) == {}
a/tests/test_provide_role.py b/tests/test_provide_role.py new file mode 100644 index 0000000..7ba7efb --- /dev/null +++ b/tests/test_provide_role.py @@ -0,0 +1,203 @@ +"""Tests for the provide-role command.""" + +from datetime import UTC, datetime +from unittest.mock import patch + +import pytest + +from gds_idea_app_kit.provide_role import ( + _check_aws_profile, + _format_expiration, + _get_role_config, + _select_mode, + _write_credentials, +) + +# ---- _check_aws_profile ---- + + +def test_check_aws_profile_returns_profile_name(): + """Returns the profile name when AWS_PROFILE is set.""" + with patch.dict("os.environ", {"AWS_PROFILE": "aws-dev"}): + assert _check_aws_profile() == "aws-dev" + + +def test_check_aws_profile_exits_when_not_set(): + """Exits with error when AWS_PROFILE is not set.""" + with patch.dict("os.environ", {}, clear=True), pytest.raises(SystemExit): + _check_aws_profile() + + +# ---- _get_role_config ---- + + +def test_get_role_config_reads_role_and_region(tmp_path): + """Reads aws_role_arn and aws_region from [tool.webapp.dev].""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text( + '[project]\nname = "test"\n\n' + "[tool.webapp.dev]\n" + 'aws_role_arn = "arn:aws:iam::123456:role/my-role"\n' + 'aws_region = "us-east-1"\n' + ) + + config = _get_role_config(tmp_path) + assert config["role_arn"] == "arn:aws:iam::123456:role/my-role" + assert config["region"] == "us-east-1" + + +def test_get_role_config_empty_role_when_not_configured(tmp_path): + """Returns empty role_arn when [tool.webapp.dev] has no aws_role_arn.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test"\n\n[tool.webapp]\nframework = "streamlit"\n') + + config = _get_role_config(tmp_path) + assert config["role_arn"] == "" + + +def test_get_role_config_default_region(tmp_path): + """Uses eu-west-2 as default region when aws_region is not specified.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text( + 
'[project]\nname = "test"\n\n' + "[tool.webapp.dev]\n" + 'aws_role_arn = "arn:aws:iam::123456:role/my-role"\n' + ) + + config = _get_role_config(tmp_path) + assert config["region"] == "eu-west-2" + + +def test_get_role_config_exits_when_no_pyproject(tmp_path): + """Exits with error when pyproject.toml doesn't exist.""" + with pytest.raises(SystemExit): + _get_role_config(tmp_path) + + +# ---- _select_mode ---- + + +def test_select_mode_role_assumption_when_arn_configured(): + """Uses role assumption when role_arn is configured and no --use-profile flag.""" + use_pass_through, reason = _select_mode("arn:aws:iam::123:role/r", use_profile=False) + assert use_pass_through is False + assert "aws_role_arn configured" in reason + + +def test_select_mode_pass_through_with_use_profile_flag(): + """Uses pass-through when --use-profile flag is given, even with role_arn configured.""" + use_pass_through, reason = _select_mode("arn:aws:iam::123:role/r", use_profile=True) + assert use_pass_through is True + assert "--use-profile" in reason + + +def test_select_mode_pass_through_when_no_role_arn(): + """Uses pass-through when no role_arn is configured, regardless of flag.""" + use_pass_through, reason = _select_mode("", use_profile=False) + assert use_pass_through is True + assert "no aws_role_arn" in reason + + +# ---- _write_credentials ---- + + +def test_write_credentials_creates_both_files(tmp_path): + """Writes both credentials and config files to .aws-dev/.""" + creds = { + "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", + "SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY", + "SessionToken": "FwoGZXIvYXdzEBY", + "Expiration": datetime(2026, 2, 16, 15, 30, 0, tzinfo=UTC), + } + + _write_credentials(tmp_path, creds, "eu-west-2", "Role: arn:aws:iam::123:role/r") + + aws_dev = tmp_path / ".aws-dev" + assert (aws_dev / "credentials").exists() + assert (aws_dev / "config").exists() + + +def test_write_credentials_file_content(tmp_path): + """Credentials file contains the access 
key, secret key, and session token.""" + creds = { + "AccessKeyId": "AKIAIOSFODNN7EXAMPLE", + "SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY", + "SessionToken": "FwoGZXIvYXdzEBY", + "Expiration": datetime(2026, 2, 16, 15, 30, 0, tzinfo=UTC), + } + + _write_credentials(tmp_path, creds, "eu-west-2", "Role: arn:aws:iam::123:role/r") + + content = (tmp_path / ".aws-dev" / "credentials").read_text() + assert "AKIAIOSFODNN7EXAMPLE" in content + assert "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" in content + assert "FwoGZXIvYXdzEBY" in content + assert "[default]" in content + assert "Role: arn:aws:iam::123:role/r" in content + + +def test_write_credentials_config_content(tmp_path): + """Config file contains the region and output format.""" + creds = { + "AccessKeyId": "AKIA", + "SecretAccessKey": "secret", + "SessionToken": "token", + } + + _write_credentials(tmp_path, creds, "us-east-1", "test") + + content = (tmp_path / ".aws-dev" / "config").read_text() + assert "region = us-east-1" in content + assert "output = json" in content + assert "[default]" in content + + +def test_write_credentials_creates_directory(tmp_path): + """Creates the .aws-dev directory if it doesn't exist.""" + creds = { + "AccessKeyId": "AKIA", + "SecretAccessKey": "secret", + "SessionToken": "token", + } + + aws_dev = tmp_path / ".aws-dev" + assert not aws_dev.exists() + + _write_credentials(tmp_path, creds, "eu-west-2", "test") + + assert aws_dev.is_dir() + + +def test_write_credentials_handles_no_expiration(tmp_path): + """Writes 'unknown' when no Expiration is present in credentials.""" + creds = { + "AccessKeyId": "AKIA", + "SecretAccessKey": "secret", + "SessionToken": "token", + } + + _write_credentials(tmp_path, creds, "eu-west-2", "test") + + content = (tmp_path / ".aws-dev" / "credentials").read_text() + assert "Expires: unknown" in content + + +# ---- _format_expiration ---- + + +def test_format_expiration_with_datetime(): + """Formats a datetime expiration as a string.""" 
+ creds = {"Expiration": datetime(2026, 2, 16, 15, 30, 0, tzinfo=UTC)} + result = _format_expiration(creds) + assert "2026" in result + assert "15:30" in result + + +def test_format_expiration_without_expiration(): + """Returns 'unknown' when no Expiration key is present.""" + assert _format_expiration({}) == "unknown" + + +def test_format_expiration_with_none(): + """Returns 'unknown' when Expiration is None.""" + assert _format_expiration({"Expiration": None}) == "unknown" diff --git a/tests/test_smoke_test.py b/tests/test_smoke_test.py new file mode 100644 index 0000000..9529463 --- /dev/null +++ b/tests/test_smoke_test.py @@ -0,0 +1,130 @@ +"""Tests for the smoke-test command.""" + +import os +from unittest.mock import MagicMock, patch + +import pytest + +from gds_idea_app_kit.smoke_test import _get_framework, _get_health_path, run_smoke_test + +# ---- _get_health_path ---- + + +def test_health_path_streamlit(): + """Streamlit uses the _stcore health endpoint.""" + assert _get_health_path("streamlit") == "/_stcore/health" + + +def test_health_path_dash(): + """Dash uses /health.""" + assert _get_health_path("dash") == "/health" + + +def test_health_path_fastapi(): + """FastAPI uses /health.""" + assert _get_health_path("fastapi") == "/health" + + +def test_health_path_unknown_falls_back(): + """Unknown frameworks fall back to /health.""" + assert _get_health_path("unknown-framework") == "/health" + + +# ---- _get_framework ---- + + +def test_get_framework_reads_from_pyproject(tmp_path): + """Reads the framework from [tool.webapp] in pyproject.toml.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test"\n\n[tool.webapp]\nframework = "streamlit"\n') + + result = _get_framework(tmp_path) + assert result == "streamlit" + + +def test_get_framework_exits_when_no_pyproject(tmp_path): + """Exits with error when pyproject.toml doesn't exist.""" + with pytest.raises(SystemExit): + _get_framework(tmp_path) + + +def 
test_get_framework_exits_when_no_webapp_section(tmp_path): + """Exits with error when [tool.webapp] section is missing.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test"\n') + + with pytest.raises(SystemExit): + _get_framework(tmp_path) + + +def test_get_framework_exits_when_framework_empty(tmp_path): + """Exits with error when framework key exists but is empty.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test"\n\n[tool.webapp]\nframework = ""\n') + + with pytest.raises(SystemExit): + _get_framework(tmp_path) + + +# ---- run_smoke_test build_only ---- + + +def test_build_only_does_not_start_container(tmp_path): + """Build-only mode calls build but never starts a container or runs cleanup.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test"\n\n[tool.webapp]\nframework = "streamlit"\n') + compose_file = tmp_path / ".devcontainer" / "docker-compose.yml" + compose_file.parent.mkdir(parents=True) + compose_file.write_text("services:\n app:\n") + + os.chdir(tmp_path) + + with patch("gds_idea_app_kit.smoke_test._compose") as mock_compose: + run_smoke_test(build_only=True) + + # Build should have been called (via stream=True) + calls = [str(c) for c in mock_compose.call_args_list] + build_called = any("build" in c for c in calls) + assert build_called + + # up and down should NOT have been called + up_called = any("up" in c for c in calls) + down_called = any("down" in c for c in calls) + assert not up_called + assert not down_called + + +# ---- run_smoke_test cleanup on failure ---- + + +def test_cleanup_runs_on_failure(tmp_path): + """Cleanup runs even when an error occurs after the container starts.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test"\n\n[tool.webapp]\nframework = "streamlit"\n') + compose_file = tmp_path / ".devcontainer" / "docker-compose.yml" + 
compose_file.parent.mkdir(parents=True) + compose_file.write_text("services:\n app:\n") + + os.chdir(tmp_path) + + compose_calls = [] + + def fake_compose(*args, stream=False, check=True): + compose_calls.append(args) + if args and args[0] == "up": + # Simulate successful up, then _get_host_port will fail + return MagicMock(stdout="0.0.0.0:8080\n") + if args and args[0] == "port": + # After up succeeds, port query raises to simulate failure + raise RuntimeError("simulated failure") + return MagicMock(stdout="") + + with ( + patch("gds_idea_app_kit.smoke_test._compose", side_effect=fake_compose), + pytest.raises(RuntimeError, match="simulated failure"), + ): + run_smoke_test(build_only=False) + + # Cleanup (down) should have been called despite the error + down_calls = [c for c in compose_calls if c and c[0] == "down"] + assert len(down_calls) == 1 diff --git a/tests/test_update.py b/tests/test_update.py new file mode 100644 index 0000000..5faa6dd --- /dev/null +++ b/tests/test_update.py @@ -0,0 +1,670 @@ +"""Tests for the update command.""" + +import os +from pathlib import Path +from unittest.mock import patch + +import pytest + +from gds_idea_app_kit.init import _apply_template_vars, _get_templates_dir +from gds_idea_app_kit.manifest import ( + build_manifest, + get_tracked_files, + hash_file, + read_manifest, + write_manifest, +) +from gds_idea_app_kit.update import ( + Action, + FileUpdate, + _apply_updates, + _check_version, + _classify_file, + _parse_version, + _plan_updates, + _report_updates, + run_update, +) + +# ---- fixtures ---- + + +@pytest.fixture() +def update_project(tmp_path): + """Create a project directory with manifest and all tracked files for streamlit. + + Returns the project directory path. 
+ """ + framework = "streamlit" + app_name = "test-app" + templates_dir = _get_templates_dir() + tracked = get_tracked_files(framework) + template_vars = { + "app_name": app_name, + "python_version": "3.13", + "python_version_nodot": "313", + } + + # Copy all tracked template files into the project + for template_src, dest_path in tracked.items(): + template_full = templates_dir / template_src + dest_full = tmp_path / dest_path + dest_full.parent.mkdir(parents=True, exist_ok=True) + content = template_full.read_text() + content = _apply_template_vars(content, template_vars) + dest_full.write_text(content) + + # Write pyproject.toml with manifest + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test-app"\nversion = "0.1.0"\n\n[tool]\n') + + manifest = build_manifest( + framework=framework, + app_name=app_name, + tool_version="0.1.0", + project_dir=tmp_path, + ) + manifest["python_version"] = "3.13" + write_manifest(tmp_path, manifest) + + return tmp_path + + +# ---- _parse_version ---- + + +def test_parse_version_three_part(): + """Parses a standard three-part version string.""" + assert _parse_version("0.1.0") == (0, 1, 0) + + +def test_parse_version_two_part(): + """Parses a two-part version string.""" + assert _parse_version("1.0") == (1, 0) + + +def test_parse_version_single(): + """Parses a single-number version string.""" + assert _parse_version("3") == (3,) + + +def test_parse_version_comparison_minor(): + """Minor version bump compares correctly.""" + assert _parse_version("0.1.0") < _parse_version("0.2.0") + + +def test_parse_version_comparison_patch(): + """Patch version bump compares correctly.""" + assert _parse_version("0.1.0") < _parse_version("0.1.1") + + +def test_parse_version_comparison_equal(): + """Equal versions compare as equal.""" + assert _parse_version("1.0.0") == _parse_version("1.0.0") + + +def test_parse_version_comparison_major_wins(): + """Major version takes precedence over minor and patch.""" + 
assert _parse_version("2.0.0") > _parse_version("1.9.9") + + +# ---- _check_version ---- + + +def test_check_version_older_tool_warns(capsys): + """When installed tool is older than manifest version, prints a warning.""" + with patch("gds_idea_app_kit.update.__version__", "0.1.0"): + _check_version({"tool_version": "0.2.0"}) + + captured = capsys.readouterr() + assert "0.2.0" in captured.err + assert "upgrade" in captured.err + + +def test_check_version_same_version_no_warning(capsys): + """When versions match, no warning is printed.""" + with patch("gds_idea_app_kit.update.__version__", "0.1.0"): + _check_version({"tool_version": "0.1.0"}) + + captured = capsys.readouterr() + assert captured.err == "" + + +def test_check_version_newer_tool_no_warning(capsys): + """When installed tool is newer, no warning is printed.""" + with patch("gds_idea_app_kit.update.__version__", "0.2.0"): + _check_version({"tool_version": "0.1.0"}) + + captured = capsys.readouterr() + assert captured.err == "" + + +# ---- run_update error cases ---- + + +def test_update_no_pyproject(tmp_path, capsys): + """Exit 1 when no pyproject.toml exists.""" + with pytest.raises(SystemExit): + os.chdir(tmp_path) + run_update(dry_run=False) + + captured = capsys.readouterr() + assert "No pyproject.toml" in captured.err + + +def test_update_no_manifest(tmp_path, capsys): + """Exit 1 when pyproject.toml has no manifest section.""" + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text('[project]\nname = "test-app"\n') + + with pytest.raises(SystemExit): + os.chdir(tmp_path) + run_update(dry_run=False) + + captured = capsys.readouterr() + assert "No [tool.gds-idea-app-kit]" in captured.err + + +# ---- run_update file handling ---- + + +def test_update_unchanged_files_get_overwritten(update_project, capsys): + """Files whose hash matches the manifest are overwritten with latest template.""" + os.chdir(update_project) + run_update(dry_run=False) + + captured = capsys.readouterr() + # All files 
match their manifest hashes, so all should be updated + # The key thing is no files show as skipped + assert "Skipped:" not in captured.out + + +def test_update_modified_file_writes_new(update_project, capsys): + """Locally modified files get a .new file written alongside with review instructions.""" + # Modify one tracked file + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.write_text("# User modified this file\n") + + os.chdir(update_project) + run_update(dry_run=False) + + captured = capsys.readouterr() + assert "Skipped: app_src/Dockerfile (locally modified)" in captured.out + assert "app_src/Dockerfile.new" in captured.out + assert "diff app_src/Dockerfile app_src/Dockerfile.new" in captured.out + + +def test_update_modified_file_new_has_template_content(update_project): + """The .new file contains the latest template content, not the user's version.""" + # Modify one tracked file + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.write_text("# User modified this file\n") + + os.chdir(update_project) + run_update(dry_run=False) + + new_file = update_project / "app_src" / "Dockerfile.new" + assert new_file.exists() + content = new_file.read_text() + # Should have template content (Dockerfile starts with a comment header) + assert "# User modified" not in content + assert "FROM python:" in content + + +def test_update_modified_file_original_unchanged(update_project): + """The original modified file is not overwritten.""" + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.write_text("# User modified this file\n") + + os.chdir(update_project) + run_update(dry_run=False) + + assert dockerfile.read_text() == "# User modified this file\n" + + +def test_update_modified_summary_count(update_project, capsys): + """Summary line reports the count of locally modified files.""" + # Modify two tracked files + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.write_text("# modified\n") + 
devcontainer = update_project / ".devcontainer" / "devcontainer.json" + devcontainer.write_text("// modified\n") + + os.chdir(update_project) + run_update(dry_run=False) + + captured = capsys.readouterr() + assert "2 file(s) were locally modified and skipped" in captured.out + + +def test_update_missing_file_is_created(update_project, capsys): + """Files missing from the project are created fresh.""" + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.unlink() + + os.chdir(update_project) + run_update(dry_run=False) + + captured = capsys.readouterr() + assert "Created: app_src/Dockerfile" in captured.out + assert dockerfile.exists() + + +def test_update_dry_run_makes_no_changes(update_project, capsys): + """Dry run reports what would change but doesn't modify files or write .new files.""" + # Delete a file and modify another + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.unlink() + devcontainer = update_project / ".devcontainer" / "devcontainer.json" + devcontainer.write_text("// modified\n") + + manifest_before = read_manifest(update_project) + + os.chdir(update_project) + run_update(dry_run=True) + + # Deleted file should still be missing + assert not dockerfile.exists() + # No .new file should have been created + assert not (update_project / ".devcontainer" / "devcontainer.json.new").exists() + # Manifest should be unchanged + manifest_after = read_manifest(update_project) + assert manifest_before == manifest_after + + captured = capsys.readouterr() + assert "No changes made (dry run)" in captured.out + + +def test_update_manifest_is_refreshed_after_changes(update_project): + """After updating files, the manifest hashes are refreshed.""" + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.unlink() + + os.chdir(update_project) + run_update(dry_run=False) + + manifest = read_manifest(update_project) + assert "app_src/Dockerfile" in manifest.get("files", {}) + assert hash_file(dockerfile) == 
manifest["files"]["app_src/Dockerfile"] + + +# ---- run_update --force ---- + + +def test_update_force_overwrites_modified_file(update_project, capsys): + """Force mode overwrites locally modified files instead of writing .new.""" + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.write_text("# User modified this file\n") + + os.chdir(update_project) + run_update(dry_run=False, force=True) + + captured = capsys.readouterr() + # Should show as updated, not skipped + assert "Updated: app_src/Dockerfile" in captured.out + assert "Skipped:" not in captured.out + # No .new file should be created + assert not (update_project / "app_src" / "Dockerfile.new").exists() + # Original should be overwritten with template content + assert "# User modified" not in dockerfile.read_text() + assert "FROM python:" in dockerfile.read_text() + + +def test_update_force_no_new_files_created(update_project): + """Force mode never creates .new files.""" + # Modify all tracked files + for dest_path in get_tracked_files("streamlit").values(): + dest_full = update_project / dest_path + if dest_full.exists(): + dest_full.write_text("# modified\n") + + os.chdir(update_project) + run_update(dry_run=False, force=True) + + # No .new files anywhere + new_files = list(update_project.rglob("*.new")) + assert new_files == [] + + +def test_update_force_updates_manifest(update_project): + """Force mode updates the manifest with new hashes after overwriting.""" + dockerfile = update_project / "app_src" / "Dockerfile" + dockerfile.write_text("# User modified this file\n") + + os.chdir(update_project) + run_update(dry_run=False, force=True) + + manifest = read_manifest(update_project) + assert hash_file(dockerfile) == manifest["files"]["app_src/Dockerfile"] + + +# ---- _classify_file ---- + + +def test_classify_missing_file_returns_create(tmp_path): + """A file that doesn't exist on disk is classified as CREATE.""" + dest_full = tmp_path / "missing.txt" + action = 
_classify_file(dest_full, {}, "missing.txt", force=False) + assert action == Action.CREATE + + +def test_classify_unchanged_file_returns_update(tmp_path): + """A file whose hash matches the manifest is classified as UPDATE.""" + dest_full = tmp_path / "file.txt" + dest_full.write_text("original content") + file_hash = hash_file(dest_full) + + action = _classify_file(dest_full, {"file.txt": file_hash}, "file.txt", force=False) + assert action == Action.UPDATE + + +def test_classify_modified_file_returns_skip(tmp_path): + """A file whose hash differs from the manifest is classified as SKIP.""" + dest_full = tmp_path / "file.txt" + dest_full.write_text("modified content") + + action = _classify_file(dest_full, {"file.txt": "old-hash"}, "file.txt", force=False) + assert action == Action.SKIP + + +def test_classify_modified_file_with_force_returns_force(tmp_path): + """A modified file with --force is classified as FORCE, not SKIP.""" + dest_full = tmp_path / "file.txt" + dest_full.write_text("modified content") + + action = _classify_file(dest_full, {"file.txt": "old-hash"}, "file.txt", force=True) + assert action == Action.FORCE + + +def test_classify_file_not_in_manifest_returns_update(tmp_path): + """A file that exists but has no manifest entry is classified as UPDATE.""" + dest_full = tmp_path / "file.txt" + dest_full.write_text("some content") + + action = _classify_file(dest_full, {}, "file.txt", force=False) + assert action == Action.UPDATE + + +# ---- _plan_updates ---- + + +@pytest.fixture() +def plan_project(tmp_path): + """Create a minimal project with templates for plan testing. + + Returns a dict with project_dir, templates_dir, tracked, template_vars, + and manifest_hashes for use in _plan_updates calls. 
+ """ + framework = "streamlit" + templates_dir = _get_templates_dir() + tracked = get_tracked_files(framework) + template_vars = { + "app_name": "test-app", + "python_version": "3.13", + "python_version_nodot": "313", + } + + # Copy all tracked files into the project + for template_src, dest_path in tracked.items(): + template_full = templates_dir / template_src + dest_full = tmp_path / dest_path + dest_full.parent.mkdir(parents=True, exist_ok=True) + content = template_full.read_text() + content = _apply_template_vars(content, template_vars) + dest_full.write_text(content) + + # Build manifest hashes from the files we just wrote + manifest_hashes = {} + for _, dest_path in tracked.items(): + dest_full = tmp_path / dest_path + if dest_full.exists(): + manifest_hashes[dest_path] = hash_file(dest_full) + + return { + "project_dir": tmp_path, + "tracked": tracked, + "templates_dir": templates_dir, + "template_vars": template_vars, + "manifest_hashes": manifest_hashes, + } + + +def test_plan_all_unchanged_returns_all_update(plan_project): + """When all files match manifest hashes, every item is ACTION.UPDATE.""" + plan = _plan_updates(**plan_project, force=False) + + assert len(plan) > 0 + assert all(item.action == Action.UPDATE for item in plan) + + +def test_plan_missing_file_returns_create(plan_project): + """A file deleted from the project is planned as CREATE.""" + dockerfile = plan_project["project_dir"] / "app_src" / "Dockerfile" + dockerfile.unlink() + + plan = _plan_updates(**plan_project, force=False) + + create_items = [item for item in plan if item.action == Action.CREATE] + assert any(item.dest_path == "app_src/Dockerfile" for item in create_items) + + +def test_plan_modified_file_returns_skip(plan_project): + """A locally modified file is planned as SKIP.""" + dockerfile = plan_project["project_dir"] / "app_src" / "Dockerfile" + dockerfile.write_text("# user modified\n") + + plan = _plan_updates(**plan_project, force=False) + + skipped = [item for item 
in plan if item.action == Action.SKIP] + assert any(item.dest_path == "app_src/Dockerfile" for item in skipped) + + +def test_plan_modified_file_with_force_returns_force(plan_project): + """A locally modified file with force=True is planned as FORCE.""" + dockerfile = plan_project["project_dir"] / "app_src" / "Dockerfile" + dockerfile.write_text("# user modified\n") + + plan = _plan_updates(**plan_project, force=True) + + forced = [item for item in plan if item.action == Action.FORCE] + assert any(item.dest_path == "app_src/Dockerfile" for item in forced) + # No SKIP items when force is used + assert not any(item.action == Action.SKIP for item in plan) + + +def test_plan_contains_rendered_content(plan_project): + """Plan items contain the rendered template content with variables substituted.""" + plan = _plan_updates(**plan_project, force=False) + + dockerfile_item = next(item for item in plan if item.dest_path == "app_src/Dockerfile") + # Template variables should be substituted in the content + assert "{{app_name}}" not in dockerfile_item.new_content + assert "{{python_version}}" not in dockerfile_item.new_content + + +def test_plan_skips_missing_templates(plan_project): + """Template files that don't exist in the package are silently excluded.""" + # Add a fake entry to tracked that has no template file + plan_project["tracked"]["nonexistent/template.txt"] = "nonexistent/output.txt" + + plan = _plan_updates(**plan_project, force=False) + + assert not any(item.dest_path == "nonexistent/output.txt" for item in plan) + + +def test_plan_items_have_correct_dest_full(plan_project): + """Each plan item has dest_full pointing to the absolute path in the project.""" + plan = _plan_updates(**plan_project, force=False) + + for item in plan: + expected = plan_project["project_dir"] / item.dest_path + assert item.dest_full == expected + + +# ---- _apply_updates ---- + + +def test_apply_create_writes_file(tmp_path): + """CREATE action writes the file to disk.""" + dest_full = 
tmp_path / "new_file.txt" + plan = [FileUpdate("new_file.txt", dest_full, "hello world", Action.CREATE)] + + _apply_updates(plan) + + assert dest_full.exists() + assert dest_full.read_text() == "hello world" + + +def test_apply_create_makes_parent_dirs(tmp_path): + """CREATE action creates parent directories if needed.""" + dest_full = tmp_path / "deep" / "nested" / "file.txt" + plan = [FileUpdate("deep/nested/file.txt", dest_full, "content", Action.CREATE)] + + _apply_updates(plan) + + assert dest_full.exists() + assert dest_full.read_text() == "content" + + +def test_apply_update_overwrites_file(tmp_path): + """UPDATE action overwrites an existing file.""" + dest_full = tmp_path / "file.txt" + dest_full.write_text("old content") + plan = [FileUpdate("file.txt", dest_full, "new content", Action.UPDATE)] + + _apply_updates(plan) + + assert dest_full.read_text() == "new content" + + +def test_apply_force_overwrites_file(tmp_path): + """FORCE action overwrites an existing file.""" + dest_full = tmp_path / "file.txt" + dest_full.write_text("user modified") + plan = [FileUpdate("file.txt", dest_full, "template content", Action.FORCE)] + + _apply_updates(plan) + + assert dest_full.read_text() == "template content" + + +def test_apply_skip_writes_new_file(tmp_path): + """SKIP action writes a .new file alongside, leaving the original untouched.""" + dest_full = tmp_path / "file.txt" + dest_full.write_text("user modified") + plan = [FileUpdate("file.txt", dest_full, "template content", Action.SKIP)] + + _apply_updates(plan) + + assert dest_full.read_text() == "user modified" + new_file = tmp_path / "file.txt.new" + assert new_file.exists() + assert new_file.read_text() == "template content" + + +def test_apply_mixed_actions(tmp_path): + """Multiple actions in a single plan are all applied correctly.""" + create_file = tmp_path / "created.txt" + update_file = tmp_path / "updated.txt" + update_file.write_text("old") + skip_file = tmp_path / "skipped.txt" + 
skip_file.write_text("user version") + + plan = [ + FileUpdate("created.txt", create_file, "new file", Action.CREATE), + FileUpdate("updated.txt", update_file, "new version", Action.UPDATE), + FileUpdate("skipped.txt", skip_file, "template version", Action.SKIP), + ] + + _apply_updates(plan) + + assert create_file.read_text() == "new file" + assert update_file.read_text() == "new version" + assert skip_file.read_text() == "user version" + assert (tmp_path / "skipped.txt.new").read_text() == "template version" + + +# ---- _report_updates ---- + + +def test_report_created_files(capsys): + """Created files are reported with 'Created:' prefix.""" + plan = [FileUpdate("app_src/Dockerfile", Path("/fake"), "", Action.CREATE)] + + _report_updates(plan, dry_run=False) + + captured = capsys.readouterr() + assert "Created: app_src/Dockerfile" in captured.out + + +def test_report_updated_files(capsys): + """Updated files are reported with 'Updated:' prefix.""" + plan = [FileUpdate("app_src/Dockerfile", Path("/fake"), "", Action.UPDATE)] + + _report_updates(plan, dry_run=False) + + captured = capsys.readouterr() + assert "Updated: app_src/Dockerfile" in captured.out + + +def test_report_forced_files_show_as_updated(capsys): + """FORCE actions are reported as 'Updated:', not 'Forced:'.""" + plan = [FileUpdate("app_src/Dockerfile", Path("/fake"), "", Action.FORCE)] + + _report_updates(plan, dry_run=False) + + captured = capsys.readouterr() + assert "Updated: app_src/Dockerfile" in captured.out + + +def test_report_skipped_files_with_review_instructions(capsys): + """Skipped files show the path, .new path, and diff command.""" + plan = [FileUpdate("app_src/Dockerfile", Path("/fake"), "", Action.SKIP)] + + _report_updates(plan, dry_run=False) + + captured = capsys.readouterr() + assert "Skipped: app_src/Dockerfile (locally modified)" in captured.out + assert "app_src/Dockerfile.new" in captured.out + assert "diff app_src/Dockerfile app_src/Dockerfile.new" in captured.out + + +def 
test_report_skipped_summary_count(capsys): + """Summary reports the count of skipped files.""" + plan = [ + FileUpdate("file1.txt", Path("/fake1"), "", Action.SKIP), + FileUpdate("file2.txt", Path("/fake2"), "", Action.SKIP), + ] + + _report_updates(plan, dry_run=False) + + captured = capsys.readouterr() + assert "2 file(s) were locally modified and skipped" in captured.out + + +def test_report_skipped_summary_not_shown_in_dry_run(capsys): + """Dry run does not show the 'locally modified and skipped' summary.""" + plan = [FileUpdate("file.txt", Path("/fake"), "", Action.SKIP)] + + _report_updates(plan, dry_run=True) + + captured = capsys.readouterr() + assert "locally modified and skipped" not in captured.out + + +def test_report_empty_plan(capsys): + """Empty plan reports 'Nothing to update.'.""" + _report_updates([], dry_run=False) + + captured = capsys.readouterr() + assert "Nothing to update." in captured.out + + +def test_report_dry_run_footer(capsys): + """Dry run shows 'No changes made (dry run).' footer.""" + plan = [FileUpdate("file.txt", Path("/fake"), "", Action.UPDATE)] + + _report_updates(plan, dry_run=True) + + captured = capsys.readouterr() + assert "No changes made (dry run)." 
in captured.out diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..23dc41f --- /dev/null +++ b/uv.lock @@ -0,0 +1,217 @@ +version = 1 +requires-python = ">=3.11" + +[[package]] +name = "boto3" +version = "1.42.47" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/fe/3363024b6dda5968401f45d8b345ed95ce4fd536d58f799988b4b28184ad/boto3-1.42.47.tar.gz", hash = "sha256:74812a2e29de7c2bd19e446d765cb887394f20f1517388484b51891a410f33b2", size = 112884 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/7b/884e30adab2339ce5cce7b800f5fa619254d36e89e50a8cf39a5524edc35/boto3-1.42.47-py3-none-any.whl", hash = "sha256:ed881ed246027028af566acbb80f008aa619be4d3fdbcc4ad3c75dbe8c34bfaf", size = 140608 }, +] + +[[package]] +name = "botocore" +version = "1.42.47" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/a6/d15f5dfe990abd76dbdb2105a7697e0d948e04c41dfd97c058bc76c7cebd/botocore-1.42.47.tar.gz", hash = "sha256:c26e190c1b4d863ba7b44dc68cc574d8eb862ddae5f0fe3472801daee12a0378", size = 14952255 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/5e/50e3a59b243894088eeb949a654fb21d9ab7d0d703034470de016828d85a/botocore-1.42.47-py3-none-any.whl", hash = "sha256:c60f5feaf189423e17755aca3f1d672b7466620dd2032440b32aaac64ae8cac8", size = 14625351 }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = 
"sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "gds-idea-app-kit" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "boto3" }, + { name = "click" }, + { name = "tomlkit" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "boto3", specifier = ">=1.26.0" }, + { name = "click", specifier = ">=8.0" }, + { name = "tomlkit", specifier = ">=0.12.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pytest", specifier = ">=8.0" }, + { name = "ruff", specifier = ">=0.8.0" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484 }, +] + +[[package]] +name = "jmespath" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/59/322338183ecda247fb5d1763a6cbe46eff7222eaeebafd9fa65d4bf5cb11/jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d", size = 27377 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419 }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = 
"sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366 }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "ruff" +version = "0.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/39/5cee96809fbca590abea6b46c6d1c586b49663d1d2830a751cc8fc42c666/ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a", size = 4524893 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/88/3fd1b0aa4b6330d6aaa63a285bc96c9f71970351579152d231ed90914586/ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455", size = 10354332 }, + { url = "https://files.pythonhosted.org/packages/72/f6/62e173fbb7eb75cc29fe2576a1e20f0a46f671a2587b5f604bfb0eaf5f6f/ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d", size = 10767189 }, + { url = "https://files.pythonhosted.org/packages/99/e4/968ae17b676d1d2ff101d56dc69cf333e3a4c985e1ec23803df84fc7bf9e/ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce", size = 10075384 }, + { url = "https://files.pythonhosted.org/packages/a2/bf/9843c6044ab9e20af879c751487e61333ca79a2c8c3058b15722386b8cae/ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621", size = 10481363 }, + { url = "https://files.pythonhosted.org/packages/55/d9/4ada5ccf4cd1f532db1c8d44b6f664f2208d3d93acbeec18f82315e15193/ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9", size = 10187736 }, + { url = "https://files.pythonhosted.org/packages/86/e2/f25eaecd446af7bb132af0a1d5b135a62971a41f5366ff41d06d25e77a91/ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179", size = 10968415 }, + { url = "https://files.pythonhosted.org/packages/e7/dc/f06a8558d06333bf79b497d29a50c3a673d9251214e0d7ec78f90b30aa79/ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d", size = 11809643 }, + { url = "https://files.pythonhosted.org/packages/dd/45/0ece8db2c474ad7df13af3a6d50f76e22a09d078af63078f005057ca59eb/ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78", size = 11234787 }, + { url = "https://files.pythonhosted.org/packages/8a/d9/0e3a81467a120fd265658d127db648e4d3acfe3e4f6f5d4ea79fac47e587/ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4", size = 11112797 }, + { url = "https://files.pythonhosted.org/packages/b2/cb/8c0b3b0c692683f8ff31351dfb6241047fa873a4481a76df4335a8bff716/ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e", size = 11033133 }, + { url = "https://files.pythonhosted.org/packages/f8/5e/23b87370cf0f9081a8c89a753e69a4e8778805b8802ccfe175cc410e50b9/ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662", size = 10442646 }, + { url = "https://files.pythonhosted.org/packages/e1/9a/3c94de5ce642830167e6d00b5c75aacd73e6347b4c7fc6828699b150a5ee/ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1", size = 10195750 }, + { url = "https://files.pythonhosted.org/packages/30/15/e396325080d600b436acc970848d69df9c13977942fb62bb8722d729bee8/ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16", size = 10676120 }, + { url = "https://files.pythonhosted.org/packages/8d/c9/229a23d52a2983de1ad0fb0ee37d36e0257e6f28bfd6b498ee2c76361874/ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3", size = 11201636 }, + { url = "https://files.pythonhosted.org/packages/6f/b0/69adf22f4e24f3677208adb715c578266842e6e6a3cc77483f48dd999ede/ruff-0.15.0-py3-none-win32.whl", hash = "sha256:238a717ef803e501b6d51e0bdd0d2c6e8513fe9eec14002445134d3907cd46c3", size = 10465945 }, + { url = "https://files.pythonhosted.org/packages/51/ad/f813b6e2c97e9b4598be25e94a9147b9af7e60523b0cb5d94d307c15229d/ruff-0.15.0-py3-none-win_amd64.whl", hash = "sha256:dd5e4d3301dc01de614da3cdffc33d4b1b96fb89e45721f1598e5532ccf78b18", size = 11564657 }, + { url = 
"https://files.pythonhosted.org/packages/f6/b0/2d823f6e77ebe560f4e397d078487e8d52c1516b331e3521bc75db4272ca/ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a", size = 10865753 }, +] + +[[package]] +name = "s3transfer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830 }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "tomlkit" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/af/14b24e41977adb296d6bd1fb59402cf7d60ce364f90c890bd2ec65c43b5a/tomlkit-0.14.0.tar.gz", hash = "sha256:cf00efca415dbd57575befb1f6634c4f42d2d87dbba376128adb42c121b87064", size = 187167 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl", hash = "sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680", size = 39310 }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584 }, +]