diff --git a/.github/workflows/publish_binaries.yml b/.github/workflows/publish_binaries.yml index 44df40b5..cda93455 100644 --- a/.github/workflows/publish_binaries.yml +++ b/.github/workflows/publish_binaries.yml @@ -37,7 +37,7 @@ jobs: - name: Add target run: rustup target add ${{ matrix.target }} - name: Build - run: cargo build --release --locked --target ${{ matrix.target }} + run: cargo build --release --locked --target ${{ matrix.target }} --all-features - name: Upload binaries to release uses: svenstaro/upload-release-action@v2 with: @@ -70,7 +70,7 @@ jobs: - name: Create package id: create-deb run: | - cargo deb --deb-revision="" # --deb-revision="" removes the -1 post tag + cargo deb --locked --all-features --deb-revision="" # --deb-revision="" removes the -1 post tag cd target/debian echo "DEB_FILE=$(find python-project-generator*_*.deb)" >> "$GITHUB_OUTPUT" - name: Upload deb package to release diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 3c3c2d47..2ee0b386 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -18,21 +18,36 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable + with: + components: clippy - name: Cache dependencies uses: Swatinem/rust-cache@v2.8.1 - name: Run cargo clippy run: cargo clippy --all-targets -- --deny warnings + clippy-fastapi: + name: clippy-fastapi + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: install Rust + uses: dtolnay/rust-toolchain@stable + with: + components: clippy + - name: Cache dependencies + uses: Swatinem/rust-cache@v2.8.1 + - name: Run cargo clippy + run: cargo clippy --all-targets -F fastapi -- --deny warnings fmt: name: Rustfmt runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt - name: Cache dependencies uses: Swatinem/rust-cache@v2.8.1 - name: Run cargo fmt @@ -42,17 +57,31 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache dependencies uses: Swatinem/rust-cache@v2.8.1 - name: Run cargo test run: cargo test --locked + test-fastapi: + name: test-fastapi + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v5 + - name: install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2.8.1 + - name: Run cargo test + run: cargo test --locked -F fastapi uv-linting: strategy: fail-fast: false @@ -61,12 +90,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - - name: Install uv on Linux and Mac + - name: Install uv on Linux if: runner.os != 'Windows' run: curl -LsSf
https://astral.sh/uv/install.sh | sh - name: Install uv on Windows @@ -99,16 +127,15 @@ jobs: fail-fast: false matrix: project_type: ["application", "lib"] - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - - name: Install uv on Linux and Mac + - name: Install uv on Linux if: runner.os != 'Windows' run: curl -LsSf https://astral.sh/uv/install.sh | sh - name: Install uv on Windows @@ -146,9 +173,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - name: Install Poetry @@ -182,13 +208,12 @@ jobs: fail-fast: false matrix: project_type: ["application", "lib"] - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - name: Install Poetry @@ -228,12 +253,13 @@ jobs: - uses: actions/checkout@v5 - name: Install Just uses: taiki-e/install-action@just - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable + with: + components: clippy, rustfmt - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - - name: Install uv on Linux and Mac + - name: Install uv on Linux if: runner.os != 'Windows' run: curl -LsSf https://astral.sh/uv/install.sh | sh - name: Install uv on Windows @@ -273,16 +299,15 @@ jobs: fail-fast: false matrix: project_type: ["application", "lib"] - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - - name: Install uv on Linux and Mac + - name: Install uv on Linux if: runner.os != 'Windows' run: curl -LsSf https://astral.sh/uv/install.sh | sh - name: Install uv on Windows @@ -320,9 +345,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - name: Set up Python @@ -353,13 +377,12 @@ jobs: fail-fast: false matrix: project_type: ["application", "lib"] - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: 
Swatinem/rust-cache@v2.8.1 - name: Set up Python @@ -394,9 +417,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - name: Build package @@ -426,13 +448,12 @@ jobs: fail-fast: false matrix: project_type: ["application", "lib"] - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v5 - - name: Install Rust - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + - name: install Rust + uses: dtolnay/rust-toolchain@stable - name: Cache Rust dependencies uses: Swatinem/rust-cache@v2.8.1 - name: Build package diff --git a/.github/workflows/testing_fastapi.yml b/.github/workflows/testing_fastapi.yml new file mode 100644 index 00000000..7b93fee8 --- /dev/null +++ b/.github/workflows/testing_fastapi.yml @@ -0,0 +1,111 @@ +name: Testing FastAPI +on: + push: + branches: + - main + pull_request: +env: + CARGO_TERM_COLOR: always + RUST_BACKTRACE: 1 + RUSTFLAGS: "-D warnings" + WORKING_DIR: "my-project" + MIN_PYTHON_VERSION: "3.11" + SECRET_KEY: someKey + FIRST_SUPERUSER_EMAIL: some@email.com + FIRST_SUPERUSER_PASSWORD: My_password1 + FIRST_SUPERUSER_NAME: "Wade Watts" + POSTGRES_HOST: 127.0.0.1 + POSTGRES_PORT: 5432 + POSTGRES_USER: postgres + POSTGRES_PASSWORD: test_password + POSTGRES_DB: test_db + VALKEY_HOST: 127.0.0.1 + VALKEY_PASSWORD: test_password + STACK_NAME: test-stack + DOMAIN: 127.0.0.1 + PRODUCTION_MODE: false + DATABASE_URL: postgresql://postgres:test_password@127.0.0.1:5432/test_db +jobs: + test-uv-fastapi-project: + name: test-fastapi-uv-setup-fastapi + strategy: + fail-fast: false + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2.8.1 + - name: Install sqlx-cli + run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres + - name: Install uv + run: curl -LsSf https://astral.sh/uv/install.sh | sh + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: ${{ env.MIN_PYTHON_VERSION }} + - name: Build package + run: cargo build --release -F fastapi + - name: Run creation + run: ./scripts/ci_run_fastapi.sh "fastapi" 1 + shell: bash + - name: Install Dependencies + working-directory: ${{ env.WORKING_DIR }} + run: | + uv lock + uv sync --frozen + - name: Pre-commit check + working-directory: ${{ env.WORKING_DIR }} + run: | + uv run pre-commit install + git add . 
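+ # pre-commit's --all-files run only covers files git knows about, so the generated project files are staged first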
+ uv run pre-commit run --all-files + - name: make .env + working-directory: ${{ env.WORKING_DIR }} + run: touch .env + - name: Build and start Docker containers + working-directory: ${{ env.WORKING_DIR }} + run: docker compose up -d + - name: Test with pytest + working-directory: ${{ env.WORKING_DIR }} + run: uv run pytest -n auto + test-uv-non-fastapi-project: + name: test-fastapi-uv-setup-non-fastapi + strategy: + fail-fast: false + matrix: + project_type: ["application", "lib"] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2.8.1 + - name: Install uv + run: curl -LsSf https://astral.sh/uv/install.sh | sh + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: ${{ env.MIN_PYTHON_VERSION }} + - name: Build package + run: cargo build --release -F fastapi + - name: Run creation + run: ./scripts/ci_run_fastapi.sh ${{ matrix.project_type }} 1 + shell: bash + - name: Install Dependencies + working-directory: ${{ env.WORKING_DIR }} + run: | + uv lock + uv sync --frozen + - name: Pre-commit check + working-directory: ${{ env.WORKING_DIR }} + run: | + uv run pre-commit install + git add . + uv run pre-commit run --all-files + - name: Test with pytest + working-directory: ${{ env.WORKING_DIR }} + if: matrix.project_type == 'application' + run: uv run pytest diff --git a/Cargo.toml b/Cargo.toml index 89503009..d444250d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,9 @@ license = "MIT" name = "python-project" path = "src/main.rs" +[features] +fastapi = [] + [dependencies] anyhow = "1.0.100" clap = { version = "4.5.48", features = ["color", "suggestions", "derive"] } diff --git a/justfile b/justfile index 7e55bae3..94edb036 100644 --- a/justfile +++ b/justfile @@ -17,14 +17,24 @@ just --justfile {{justfile()}} fmt echo check just --justfile {{justfile()}} check + echo check fastapi features + just --justfile {{justfile()}} check-fastapi echo clippy just --justfile {{justfile()}} clippy + echo clippy fastapi features + just --justfile {{justfile()}} clippy-fastapi @clippy: - cargo clippy --all-targets + cargo clippy + +@clippy-fastapi: + cargo clippy -F fastapi @check: - cargo check --all-targets + cargo check + +@check-fastapi: + cargo check -F fastapi @fmt: cargo fmt --all @@ -32,5 @@ @test: cargo insta test +@test-fastapi: + cargo insta test -F fastapi + +@test-all: + echo testing no fastapi feature + just --justfile {{justfile()}} test + echo testing with fastapi feature + just --justfile {{justfile()}} test-fastapi + @test-review: cargo insta test --review + +@test-review-fastapi: + cargo insta test --review -F fastapi diff --git a/scripts/ci_run_fastapi.sh b/scripts/ci_run_fastapi.sh new file mode 100755 index 00000000..51c26212 --- /dev/null +++ b/scripts/ci_run_fastapi.sh @@ -0,0 +1,125 @@ +#!/bin/bash + +project_name="My Project" +project_slug="" +source_dir="" +project_description="Test Project" +creator="Arthur Dent" +creator_email="arthur@heartofgold.com" +license="1" +fastapi_project="1" +copyright_year="" +version="" +python_version="" +min_python_version="" +gha_versions="" +application="" +project_manager="1" + +# Check for user provided project manager input +if [ $# -gt 1 ]; then + if [ "$2" -lt 1 ] || [ "$2" -gt 5 ]; then + echo "Invalid project_manager value" + exit 1 + else + project_manager="$2" + fi +fi + +# Check for user provided application input +if [ $# -gt 0 ]; then + if [ "$1" = "fastapi" ] ; then
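+ # The here-doc blocks below answer the generator's interactive prompts in order; menu answers are positional, so "1" selects the first listed option and "2" the second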
fastapi_project="1" + application="1" + elif [ $1 = "application" ]; then + fastapi_project="2" + application="1" + elif [ $1 = "lib" ]; then + fastapi_project="2" + application="2" + else + echo "Invalid application value" + exit 1 + fi +fi + +# database_manager="" +max_line_length="" +use_dependabot="" +use_continuous_deployment="" +use_release_drafter="" +pyo3_python_manager="" + +if [[ project_manager -eq 3 ]]; then + ./target/release/python-project create -s << EOF +$project_name +$project_slug +$source_dir +$project_description +$creator +$creator_email +$license +$copyright_year +$version +$python_version +$min_python_version +$gha_versions +$project_manager +$pyo3_python_manager +$application +$max_line_length +$use_dependabot +$use_continuous_deployment +$use_release_drafter +EOF +else + if [ "$fastapi_project" = "1" ]; then + ./target/release/python-project create -s << EOF +$project_name +$project_slug +$source_dir +$project_description +$creator +$creator_email +$license +$copyright_year +$version +$fastapi_project +$python_version +$min_python_version +$gha_versions +$project_manager +$max_line_length +$use_dependabot +$use_continuous_deployment +$use_release_drafter +EOF + else + ./target/release/python-project create -s << EOF +$project_name +$project_slug +$source_dir +$project_description +$creator +$creator_email +$license +$copyright_year +$version +$fastapi_project +$python_version +$min_python_version +$gha_versions +$project_manager +$application +$max_line_length +$use_dependabot +$use_continuous_deployment +$use_release_drafter +EOF + fi +fi + +if [ ! -d $project_slug ]; then + echo "Directory not created" + exit 1 +fi diff --git a/src/cli.rs b/src/cli.rs index 4949f597..43177d4a 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -4,6 +4,9 @@ use crate::project_info::{ Day, DependabotSchedule, LicenseType, ProjectManager, Pyo3PythonManager, }; +#[cfg(feature = "fastapi")] +use crate::project_info::{Database, DatabaseManager}; + #[derive(Clone, Debug, ValueEnum)] pub enum ApplicationOrLib { Application, @@ -167,6 +170,30 @@ pub enum Param { /// Remove the save download latest packages value ResetDownloadLatestPackages, + #[cfg(feature = "fastapi")] + /// Save a default value for is FastAPI project + IsFastapiProject { value: BooleanChoice }, + + #[cfg(feature = "fastapi")] + /// Remove the is FastAPI project value + ResetIsFastapiProject, + + #[cfg(feature = "fastapi")] + /// Save a default database value + Database { value: Database }, + + #[cfg(feature = "fastapi")] + /// Remove the database value + ResetDatabase, + + #[cfg(feature = "fastapi")] + /// Save a default database manager + DatabaseManager { value: DatabaseManager }, + + #[cfg(feature = "fastapi")] + /// Remove the database manager value + ResetDatabaseManager, + /// Rerset the config to the default values Reset, diff --git a/src/config.rs b/src/config.rs index 42d6f4e0..3769eaf7 100644 --- a/src/config.rs +++ b/src/config.rs @@ -14,6 +14,9 @@ use crate::project_info::{ Pyo3PythonManager, }; +#[cfg(feature = "fastapi")] +use crate::project_info::{Database, DatabaseManager}; + #[derive(Debug, Deserialize, Serialize, PartialEq, Eq)] pub struct Config { pub creator: Option, @@ -36,6 +39,15 @@ pub struct Config { pub include_docs: Option, pub download_latest_packages: Option, + #[cfg(feature = "fastapi")] + pub is_fastapi_project: Option, + + #[cfg(feature = "fastapi")] + pub database: Option, + + #[cfg(feature = "fastapi")] + pub database_manager: Option, + #[serde(skip)] config_dir: Rc>, #[serde(skip)] @@ -66,6 
+78,15 @@ impl Default for Config { download_latest_packages: None, config_dir: config_dir(), config_file_path: config_file_path(), + + #[cfg(feature = "fastapi")] + is_fastapi_project: None, + + #[cfg(feature = "fastapi")] + database: None, + + #[cfg(feature = "fastapi")] + database_manager: None, } } } @@ -99,6 +120,15 @@ impl Config { download_latest_packages: config.download_latest_packages, config_dir: self.config_dir.clone(), config_file_path: self.config_file_path.clone(), + + #[cfg(feature = "fastapi")] + is_fastapi_project: config.is_fastapi_project, + + #[cfg(feature = "fastapi")] + database: config.database, + + #[cfg(feature = "fastapi")] + database_manager: config.database_manager, }; } } @@ -368,6 +398,42 @@ impl Config { Ok(()) } + #[cfg(feature = "fastapi")] + pub fn save_is_fastapi_project(&self, value: bool) -> Result<()> { + self.handle_save_config(|config| &mut config.is_fastapi_project, Some(value))?; + Ok(()) + } + + #[cfg(feature = "fastapi")] + pub fn reset_is_fastapi_project(&self) -> Result<()> { + self.handle_save_config(|config| &mut config.is_fastapi_project, None)?; + Ok(()) + } + + #[cfg(feature = "fastapi")] + pub fn save_database(&self, value: Database) -> Result<()> { + self.handle_save_config(|config| &mut config.database, Some(value))?; + Ok(()) + } + + #[cfg(feature = "fastapi")] + pub fn reset_database(&self) -> Result<()> { + self.handle_save_config(|config| &mut config.database, None)?; + Ok(()) + } + + #[cfg(feature = "fastapi")] + pub fn save_database_manager(&self, value: DatabaseManager) -> Result<()> { + self.handle_save_config(|config| &mut config.database_manager, Some(value))?; + Ok(()) + } + + #[cfg(feature = "fastapi")] + pub fn reset_database_manager(&self) -> Result<()> { + self.handle_save_config(|config| &mut config.database_manager, None)?; + Ok(()) + } + pub fn show(&self) { let config = self.load_config(); print_config_value("Creator", &config.creator); @@ -410,6 +476,12 @@ impl Config { print_config_value("Use Multi OS CI", &config.use_multi_os_ci); print_config_value("Include Docs", &config.include_docs); print_config_value("Download Latest Packages", &config.download_latest_packages); + + #[cfg(feature = "fastapi")] + print_config_value("FastAPI Project", &config.is_fastapi_project); + + #[cfg(feature = "fastapi")] + print_config_value("Database", &config.database); + + #[cfg(feature = "fastapi")] + print_config_value("Database Manager", &config.database_manager); } } @@ -895,4 +967,72 @@ mod tests { assert_eq!(result.download_latest_packages, None); } + + #[cfg(feature = "fastapi")] + #[test] + fn test_save_is_fastapi_project() { + let config = mock_config(); + let expected = true; + config.save_is_fastapi_project(expected).unwrap(); + let result = config.load_config(); + + assert_eq!(result.is_fastapi_project, Some(expected)); + } + + #[cfg(feature = "fastapi")] + #[test] + fn test_reset_is_fastapi_project() { + let config = mock_config(); + config.save_is_fastapi_project(true).unwrap(); + config.reset_is_fastapi_project().unwrap(); + let result = config.load_config(); + + assert_eq!(result.is_fastapi_project, None); + } + + #[cfg(feature = "fastapi")] + #[test] + fn test_save_database() { + let config = mock_config(); + let expected = Database::Postgresql; + config.save_database(expected.clone()).unwrap(); + let result = config.load_config(); + + assert_eq!(result.database, Some(expected)); + } + + #[cfg(feature = "fastapi")] + #[test] + fn test_reset_database() { + let config = mock_config(); + config.save_database(Database::Postgresql).unwrap(); + config.reset_database().unwrap(); + let result =
config.load_config(); + + assert_eq!(result.database, None); + } + + #[cfg(feature = "fastapi")] + #[test] + fn test_save_database_manager() { + let config = mock_config(); + let expected = DatabaseManager::AsyncPg; + config.save_database_manager(expected.clone()).unwrap(); + let result = config.load_config(); + + assert_eq!(result.database_manager, Some(expected)); + } + + #[cfg(feature = "fastapi")] + #[test] + fn test_reset_database_manager() { + let config = mock_config(); + config + .save_database_manager(DatabaseManager::SqlAlchemy) + .unwrap(); + config.reset_database_manager().unwrap(); + let result = config.load_config(); + + assert_eq!(result.database_manager, None); + } } diff --git a/src/fastapi/core_files.rs b/src/fastapi/core_files.rs new file mode 100644 index 00000000..210b7398 --- /dev/null +++ b/src/fastapi/core_files.rs @@ -0,0 +1,378 @@ +use anyhow::Result; + +use crate::{file_manager::save_file_with_content, project_info::ProjectInfo}; + +fn create_cache_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from __future__ import annotations + +import valkey.asyncio as valkey + +from {module}.core.config import settings + + +class Cache: + def __init__(self) -> None: + self._pool: valkey.ConnectionPool | None = None + self.client: valkey.Valkey | None = None + + async def create_client(self, *, db: int = 0) -> None: + self._pool = await self._create_pool(db) + self.client = valkey.Valkey.from_pool(self._pool) + + async def close_client(self) -> None: + if self.client: + await self.client.aclose() + + if self._pool: + await self._pool.aclose() + + async def _create_pool(self, db: int = 0) -> valkey.ConnectionPool: + return valkey.ConnectionPool( + host=settings.VALKEY_HOST, + port=settings.VALKEY_PORT, + password=settings.VALKEY_PASSWORD.get_secret_value(), + db=db, + ) + + +cache = Cache() +"# + ) +} + +pub fn save_cache_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("core/cache.py"); + let file_content = create_cache_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_config_file(project_info: &ProjectInfo) -> String { + let project_name = &project_info.project_name; + + format!( + r#"from __future__ import annotations + +import warnings +from typing import Annotated, Any, Literal, Self + +from dotenv import find_dotenv, load_dotenv +from pydantic import ( + AnyUrl, + BeforeValidator, + EmailStr, + SecretStr, + computed_field, + model_validator, +) +from pydantic_settings import BaseSettings, SettingsConfigDict + +load_dotenv(find_dotenv(".env")) + + +def _parse_cors(v: Any) -> list[str] | str: + if isinstance(v, str) and not v.startswith("["): + return [i.strip() for i in v.split(",")] + elif isinstance(v, list | str): + return v + raise ValueError(v) + + +class Settings(BaseSettings): + model_config = SettingsConfigDict(env_file_encoding="utf-8", extra="ignore") + + API_V1_PREFIX: str = "/api/v1" + TITLE: str = "{project_name}" + PRODUCTION_MODE: bool = True + SECRET_KEY: SecretStr + # 60 minutes * 24 hours * 8 days = 8 days + ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 8 + ENVIRONMENT: Literal["local", "testing", "production"] = "local" + DOMAIN: str = "127.0.0.1" + FIRST_SUPERUSER_EMAIL: EmailStr + FIRST_SUPERUSER_PASSWORD: SecretStr + FIRST_SUPERUSER_NAME: str + BACKEND_CORS_ORIGINS: Annotated[list[AnyUrl] | str, BeforeValidator(_parse_cors)] = [] + LOG_LEVEL: Literal["DEBUG", "INFO", 
"WARNING", "ERROR"] = "INFO" + POSTGRES_HOST: str = "127.0.0.1" + POSTGRES_PORT: int = 5432 + POSTGRES_USER: str + POSTGRES_PASSWORD: SecretStr + POSTGRES_DB: str + POSTGRES_POOL_MIN_SIZE: int = 10 + POSTGRES_POOL_MAX_SIZE: int = 50 + POSTGRES_POOL_ACQUIRE_TIMEOUT: int = 30 + POSTGRES_POOL_MAX_LIFETIME: int = 3600 + VALKEY_HOST: str = "127.0.0.1" + VALKEY_PASSWORD: SecretStr + VALKEY_PORT: int = 6379 + + @computed_field # type: ignore[prop-decorator] + @property + def all_cors_origins(self) -> list[str]: + return [str(origin).rstrip("/") for origin in self.BACKEND_CORS_ORIGINS] + + @computed_field # type: ignore[prop-decorator] + @property + def server_host(self) -> str: + # Use HTTPS for anything other than local development + if self.ENVIRONMENT == "local": + return f"http://{{self.DOMAIN}}" + return f"https://{{self.DOMAIN}}" + + def _check_default_secret(self, var_name: str, value: str | None) -> None: + if value == "changethis": + message = ( + f'The value of {{var_name}} is "changethis", ' + "for security, please change it, at least for deployments." + ) + if self.ENVIRONMENT == "local": + warnings.warn(message, stacklevel=1) + else: + raise ValueError(message) + + @model_validator(mode="after") + def _enforce_non_default_secrets(self) -> Self: + self._check_default_secret("SECRET_KEY", self.SECRET_KEY.get_secret_value()) + self._check_default_secret( + "FIRST_SUPERUSER_PASSWORD", self.FIRST_SUPERUSER_PASSWORD.get_secret_value() + ) + self._check_default_secret("POSTGRES_PASSWORD", self.POSTGRES_PASSWORD.get_secret_value()) + + return self + + +settings = Settings() # type: ignore +"# + ) +} + +pub fn save_config_file(project_info: &ProjectInfo) -> Result<()> { + let base = project_info.source_dir_path(); + let file_path = base.join("core/config.py"); + let file_content = create_config_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_core_utils_file() -> String { + r#"from __future__ import annotations + +from collections.abc import Callable +from typing import Any +from uuid import uuid4 + +from fastapi import APIRouter as FastAPIRouter +from fastapi.types import DecoratedCallable + + +class APIRouter(FastAPIRouter): + """This resolves both paths that end in a / slash and those that don't. + + For example https://my_site and https://my_site/ will be routed to the same place. + """ + + def api_route( + self, path: str, *, include_in_schema: bool = True, **kwargs: Any + ) -> Callable[[DecoratedCallable], DecoratedCallable]: + """Updated api_route function that automatically configures routes to have 2 versions. + + One without a trailing slash and another with it. 
+ """ + if path.endswith("/"): + path = path[:-1] + + add_path = super().api_route(path, include_in_schema=include_in_schema, **kwargs) + + alternate_path = f"{path}/" + add_alternate_path = super().api_route(alternate_path, include_in_schema=False, **kwargs) + + def decorator(func: DecoratedCallable) -> DecoratedCallable: + add_alternate_path(func) + return add_path(func) + + return decorator + + +def create_db_primary_key() -> str: + return str(uuid4()) +"# + .to_string() +} + +pub fn save_core_utils_file(project_info: &ProjectInfo) -> Result<()> { + let base = project_info.source_dir_path(); + let file_path = base.join("core/utils.py"); + let file_content = create_core_utils_file(); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_db_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from __future__ import annotations + +import asyncpg +from loguru import logger + +from {module}.core.config import settings +from {module}.core.security import get_password_hash +from {module}.core.utils import create_db_primary_key +from {module}.exceptions import NoDbPoolError +from {module}.services.db.user_services import get_user_by_email + + +class Database: + def __init__(self, db_name: str | None = None) -> None: + self.db_name = db_name or settings.POSTGRES_DB + self.db_pool: asyncpg.Pool | None = None + + async def create_pool(self, min_size: int | None = None, max_size: int | None = None) -> None: + min_size = min_size or settings.POSTGRES_POOL_MIN_SIZE + max_size = max_size or settings.POSTGRES_POOL_MAX_SIZE + + self.db_pool = await asyncpg.create_pool( + user=settings.POSTGRES_USER, + password=settings.POSTGRES_PASSWORD.get_secret_value(), + database=self.db_name, + host=settings.POSTGRES_HOST, + port=settings.POSTGRES_PORT, + min_size=min_size, + max_size=max_size, + max_inactive_connection_lifetime=settings.POSTGRES_POOL_MAX_LIFETIME, + ) + + async def close_pool(self) -> None: + if self.db_pool: + await self.db_pool.close() + + async def create_first_superuser(self) -> None: + if self.db_pool is None: # pragma: no cover + logger.error("No db pool created") + raise NoDbPoolError("No db pool created") + + db_user = await get_user_by_email(pool=self.db_pool, email=settings.FIRST_SUPERUSER_EMAIL) + + if db_user: # pragma: no cover + if db_user.is_active and db_user.is_superuser: + logger.debug("First super user already exists, skipping.") + return None + else: + logger.info( + f"User with email {{settings.FIRST_SUPERUSER_EMAIL}} found, but is not active or is not a superuser, updating." 
+ ) + update_query = """ + UPDATE users + SET is_active = true, is_superuser = true + WHERE email = $1 + """ + + async with self.db_pool.acquire() as conn: + try: + await conn.execute(update_query, settings.FIRST_SUPERUSER_EMAIL) + except asyncpg.exceptions.UniqueViolationError: + logger.info("first superuser already added, skipping") + + return None + + logger.debug(f"User with email {{settings.FIRST_SUPERUSER_EMAIL}} not found, adding") + query = """ + INSERT INTO users ( + id, email, full_name, hashed_password, is_active, is_superuser + ) + VALUES ($1, $2, $3, $4, $5, $6) + """ + + hashed_password = get_password_hash(settings.FIRST_SUPERUSER_PASSWORD.get_secret_value()) + async with self.db_pool.acquire() as conn: + try: + await conn.execute( + query, + create_db_primary_key(), + settings.FIRST_SUPERUSER_EMAIL, + settings.FIRST_SUPERUSER_NAME, + hashed_password, + True, + True, + ) + # Check this because there could be a race condition between workers where the user wasn't + # found by multiple workers and they all try to add it at the same time + except asyncpg.exceptions.UniqueViolationError: # pragma: no cover + logger.info("First superuser already added, skipping") + + +db = Database() +"# + ) +} + +pub fn save_db_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("core/db.py"); + let file_content = create_db_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_security_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from __future__ import annotations + +from datetime import UTC, datetime, timedelta + +import jwt +from pwdlib import PasswordHash +from pwdlib.hashers.argon2 import Argon2Hasher + +from {module}.core.config import settings + +password_hash = PasswordHash((Argon2Hasher(),)) + + +ALGORITHM = "HS256" + + +def create_access_token(subject: str, is_superuser: bool, expires_delta: timedelta) -> str: + expire = datetime.now(UTC) + expires_delta + to_encode = {{"exp": expire, "sub": subject, "is_superuser": is_superuser}} + encoded_jwt = jwt.encode( + to_encode, key=settings.SECRET_KEY.get_secret_value(), algorithm=ALGORITHM + ) + return encoded_jwt + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + return password_hash.verify(plain_password, hashed_password) + + +def get_password_hash(password: str) -> str: + return password_hash.hash(password) +"# + ) +} + +pub fn save_security_file(project_info: &ProjectInfo) -> Result<()> { + let base = project_info.source_dir_path(); + let file_path = base.join("core/security.py"); + let file_content = create_security_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} diff --git a/src/fastapi/docker_files.rs b/src/fastapi/docker_files.rs new file mode 100644 index 00000000..f2d92599 --- /dev/null +++ b/src/fastapi/docker_files.rs @@ -0,0 +1,542 @@ +use anyhow::Result; + +use crate::{ + file_manager::save_file_with_content, + project_info::{ProjectInfo, ProjectManager}, +}; + +fn create_dockercompose_file(project_info: &ProjectInfo) -> String { + let base_name = &project_info.project_slug; + + format!( + r#"services: + backend: + image: {base_name}-backend:latest + restart: unless-stopped + networks: + - traefik-public-{base_name} + - default + build: + context: . 
+ container_name: {base_name}-backend + healthcheck: + test: ["CMD", "curl", "-f", "http://127.0.0.1:8000/api/v1/health"] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s + depends_on: + db: + condition: service_healthy + restart: true + valkey: + condition: service_healthy + restart: true + migrations: + condition: service_completed_successfully + env_file: + - .env + environment: + - POSTGRES_HOST=db + - VALKEY_HOST=valkey + labels: + - traefik.enable=true + - traefik.docker.network=traefik-public-{base_name} + - traefik.constraint-label=traefik-public + + - traefik.http.services.${{STACK_NAME?Variable not set}}-backend.loadbalancer.server.port=8000 + + # Rate limiting middleware + - traefik.http.middlewares.${{STACK_NAME?Variable not set}}-api-rate-limit.ratelimit.burst=50 + - traefik.http.middlewares.${{STACK_NAME?Variable not set}}-api-rate-limit.ratelimit.average=25 + + # Security headers middleware (backend-specific) + - traefik.http.middlewares.${{STACK_NAME?Variable not set}}-security-headers.headers.contenttypenosniff=true + - traefik.http.middlewares.${{STACK_NAME?Variable not set}}-security-headers.headers.referrerpolicy=strict-origin-when-cross-origin + - traefik.http.middlewares.${{STACK_NAME?Variable not set}}-security-headers.headers.forcestsheader=true + - traefik.http.middlewares.${{STACK_NAME?Variable not set}}-security-headers.headers.stsincludesubdomains=true + - traefik.http.middlewares.${{STACK_NAME?Variable not set}}-security-headers.headers.stsseconds=31536000 + + - traefik.http.routers.${{STACK_NAME?Variable not set}}-backend-http.rule=Host(`api.${{DOMAIN?Variable not set}}`) + - traefik.http.routers.${{STACK_NAME?Variable not set}}-backend-http.entrypoints=http + + - traefik.http.routers.${{STACK_NAME?Variable not set}}-backend-https.rule=Host(`api.${{DOMAIN?Variable not set}}`) - traefik.http.routers.${{STACK_NAME?Variable not set}}-backend-https.entrypoints=https - traefik.http.routers.${{STACK_NAME?Variable not set}}-backend-https.tls=true + - traefik.http.routers.${{STACK_NAME?Variable not set}}-backend-https.tls.certresolver=le + + # Enable redirection for HTTP and HTTPS + - traefik.http.routers.${{STACK_NAME?Variable not set}}-backend-http.middlewares=https-redirect + + # Enable rate limiting and security headers + - traefik.http.routers.${{STACK_NAME?Variable not set}}-backend-https.middlewares=${{STACK_NAME?Variable not set}}-api-rate-limit,${{STACK_NAME?Variable not set}}-security-headers + + db: + image: postgres:17-alpine + restart: unless-stopped + container_name: {base_name}-db + healthcheck: + test: ["CMD-SHELL", "pg_isready -U $POSTGRES_USER -d $POSTGRES_DB"] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s + expose: + - 5432 + env_file: + - .env + environment: + - PGDATA=/var/lib/postgresql/data/pgdata + - POSTGRES_PASSWORD=${{POSTGRES_PASSWORD?Variable not set}} + - POSTGRES_USER=${{POSTGRES_USER?Variable not set}} + - POSTGRES_DB=${{POSTGRES_DB?Variable not set}} + volumes: + - {base_name}-db-data:/var/lib/postgresql/data + + valkey: + image: valkey/valkey:8-alpine + restart: unless-stopped + container_name: {base_name}-valkey + healthcheck: + test: + [ + "CMD", + "valkey-cli", + "--no-auth-warning", + "-a", + "${{VALKEY_PASSWORD?Variable not set}}", + "ping", + ] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s + expose: + - 6379 + env_file: + - .env + command: valkey-server --requirepass ${{VALKEY_PASSWORD?Variable not set}} + volumes: + - {base_name}-valkey-data:/var/lib/valkey/data + + migrations: 
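+ # One-shot job: applies the SQL files mounted from ./migrations against the db service, then exits, satisfying the service_completed_successfully condition above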
+ image: ghcr.io/sanders41/sqlx-migration-runner:1 + container_name: {base_name}-migrations + env_file: + - .env + environment: + - POSTGRES_HOST=db + - DATABASE_URL=postgresql://${{POSTGRES_USER}}:${{POSTGRES_PASSWORD}}@db:5432/${{POSTGRES_DB}} + depends_on: + db: + condition: service_healthy + restart: true + volumes: + - ./migrations:/migrations + +volumes: + {base_name}-db-data: + {base_name}-valkey-data: + +networks: + traefik-public-{base_name}: + name: traefik-public-{base_name} + # Allow setting it to false for testing + external: true +"# + ) +} + +pub fn save_dockercompose_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("docker-compose.yml"); + let file_content = create_dockercompose_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_dockercompose_override_file(project_info: &ProjectInfo) -> String { + let base_name = &project_info.project_slug; + + format!( + r#"services: + proxy: + image: traefik:3 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + ports: + - "80:80" + - "8090:8080" + # Duplicate the command from docker-compose.yml to add --api.insecure=true + command: + # Enable Docker in Traefik, so that it reads labels from Docker services + - --providers.docker + # Add a constraint to only use services with the label for this stack + - --providers.docker.constraints=Label(`traefik.constraint-label`, `traefik-public`) + # Do not expose all Docker services, only the ones explicitly exposed + - --providers.docker.exposedbydefault=false + # Create an entrypoint "http" listening on port 80 + - --entrypoints.http.address=:80 + # Enable the access log, with HTTP requests + - --accesslog + # Enable the Traefik log, for configurations and errors + - --log + # Enable debug logging for local development + - --log.level=DEBUG + # Enable the Dashboard and API + - --api + # Enable the Dashboard and API in insecure mode for local development + - --api.insecure=true + labels: + # Enable Traefik for this service, to make it available in the public network + - traefik.enable=true + - traefik.constraint-label=traefik-public + networks: + - traefik-public-{base_name} + - default + + backend: + image: {base_name}-backend + restart: "no" + healthcheck: + test: ["CMD", "curl", "-f", "http://127.0.0.1:8000/api/v1/health"] + interval: 10s + retries: 5 + start_period: 10s + timeout: 5s + ports: + - "8000:8000" + networks: + - traefik-public-{base_name} + - default + build: + context: .
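+ # Local-dev override: the HTTPS/TLS router labels are blanked out below so the API is served over plain HTTP through the local Traefik proxy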
+ container_name: {base_name}-backend + depends_on: + db: + condition: service_healthy + restart: true + valkey: + condition: service_healthy + restart: true + env_file: + - .env + environment: + - SECRET_KEY=someKey + - POSTGRES_HOST=db + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=test_password + - VALKEY_HOST=valkey + - VALKEY_PASSWORD=test_password + - ENVIRONMENT=local + labels: + - traefik.enable=true + - traefik.docker.network=traefik-public-{base_name} + - traefik.constraint-label=traefik-public + - traefik.http.services.${{STACK_NAME:-{base_name}}}-backend.loadbalancer.server.port=8000 + - traefik.http.routers.${{STACK_NAME:-{base_name}}}-backend-http.rule=Host(`api.127.0.0.1`) + - traefik.http.routers.${{STACK_NAME:-{base_name}}}-backend-http.entrypoints=http + - traefik.http.routers.${{STACK_NAME:-{base_name}}}-backend-https.rule= + - traefik.http.routers.${{STACK_NAME:-{base_name}}}-backend-https.entrypoints= + - traefik.http.routers.${{STACK_NAME:-{base_name}}}-backend-https.tls= + - traefik.http.routers.${{STACK_NAME:-{base_name}}}-backend-https.tls.certresolver= + - traefik.http.routers.${{STACK_NAME:-{base_name}}}-backend-http.middlewares= + + db: + restart: "no" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U $POSTGRES_USER -d $POSTGRES_DB"] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s + ports: + - "5432:5432" + + valkey: + restart: "no" + # By default only 16 databases are allowed. Bumping this just for testing so that tests can + # run in parallel without impacting each other + command: valkey-server --requirepass test_password --databases 100 + healthcheck: + test: + [ + "CMD", + "valkey-cli", + "--no-auth-warning", + "-a", + "${{VALKEY_PASSWORD?Variable not set}}", + "ping", + ] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s + ports: + - 6379:6379 + +networks: + traefik-public-{base_name}: + # For local dev, don't expect an external Traefik network + external: false +"# + ) +} + +pub fn save_dockercompose_override_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("docker-compose.override.yml"); + let file_content = create_dockercompose_override_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_dockercompose_traefik_file(project_info: &ProjectInfo) -> String { + let base_name = &project_info.project_slug; + + format!( + r#"services: + traefik: + image: traefik:3 + container_name: {base_name}-traefik + ports: + # Listen on port 80, default for HTTP, necessary to redirect to HTTPS + - 80:80 + # Listen on port 443, default for HTTPS + - 443:443 + restart: unless-stopped + env_file: + - .env + labels: + # Enable Traefik for this service, to make it available in the public network + - traefik.enable=true + # Use the traefik-public network (declared below) + - traefik.docker.network=traefik-public + # Define the port inside of the Docker service to use + - traefik.http.services.traefik-dashboard.loadbalancer.server.port=8080 + # Make Traefik use this domain (from an environment variable) in HTTP + - traefik.http.routers.traefik-dashboard-http.entrypoints=http + - traefik.http.routers.traefik-dashboard-http.rule=Host(`traefik.${{DOMAIN?Variable not set}}`) + # traefik-https the actual router using HTTPS + - traefik.http.routers.traefik-dashboard-https.entrypoints=https + - traefik.http.routers.traefik-dashboard-https.rule=Host(`traefik.${{DOMAIN?Variable not set}}`) + - 
traefik.http.routers.traefik-dashboard-https.tls=true + # Use the "le" (Let's Encrypt) resolver created below + - traefik.http.routers.traefik-dashboard-https.tls.certresolver=le + # Use the special Traefik service api@internal with the web UI/Dashboard + - traefik.http.routers.traefik-dashboard-https.service=api@internal + # https-redirect middleware to redirect HTTP to HTTPS + - traefik.http.middlewares.https-redirect.redirectscheme.scheme=https + - traefik.http.middlewares.https-redirect.redirectscheme.permanent=true + # traefik-http set up only to use the middleware to redirect to https + - traefik.http.routers.traefik-dashboard-http.middlewares=https-redirect + # admin-auth middleware with HTTP Basic auth + # Using the environment variables USERNAME and HASHED_PASSWORD + - traefik.http.middlewares.admin-auth.basicauth.users=${{USERNAME?Variable not set}}:${{HASHED_PASSWORD?Variable not set}} + # Enable HTTP Basic auth, using the middleware created above + - traefik.http.routers.traefik-dashboard-https.middlewares=admin-auth + volumes: + # Add Docker as a mounted volume, so that Traefik can read the labels of other services + - /var/run/docker.sock:/var/run/docker.sock:ro + # Mount the volume to store the certificates + - {base_name}-traefik-public-certificates:/certificates + command: + # Enable Docker in Traefik, so that it reads labels from Docker services + - --providers.docker + # Do not expose all Docker services, only the ones explicitly exposed + - --providers.docker.exposedbydefault=false + # Create an entrypoint "http" listening on port 80 + - --entrypoints.http.address=:80 + # Create an entrypoint "https" listening on port 443 + - --entrypoints.https.address=:443 + # Create the certificate resolver "le" for Let's Encrypt, uses the environment variable EMAIL + - --certificatesresolvers.le.acme.email=${{EMAIL?Variable not set}} + # Store the Let's Encrypt certificates in the mounted volume + - --certificatesresolvers.le.acme.storage=/certificates/acme.json + # Use the TLS Challenge for Let's Encrypt + - --certificatesresolvers.le.acme.tlschallenge=true + # Enable the access log, with HTTP requests + - --accesslog + # Enable the Traefik log, for configurations and errors + - --log + # Enable the Dashboard and API + - --api + networks: + # Use the public network created to be shared between Traefik and + # any other service that needs to be publicly available with HTTPS + - traefik-public + +volumes: + # Create a volume to store the certificates, even if the container is recreated + {base_name}-traefik-public-certificates: + +networks: + # Use the previously created public network "traefik-public", shared with other + # services that need to be publicly available via this Traefik + traefik-public: + name: traefik-public + external: true +"# + ) +} + +pub fn save_dockercompose_traefik_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("docker-compose.traefik.yml"); + let file_content = create_dockercompose_traefik_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_dockerfile(project_info: &ProjectInfo) -> String { + let python_version = &project_info.python_version; + let source_dir = &project_info.source_dir; + match project_info.project_manager { + ProjectManager::Uv => format!( + r#"# syntax=docker/dockerfile:1 + +FROM ubuntu:24.04 AS builder + +WORKDIR /app + +ENV \ + PYTHONUNBUFFERED=true \ + UV_PYTHON_INSTALL_DIR=/opt/uv/python \ + UV_LINK_MODE=copy + +RUN :
\ + && apt-get update \ + && apt-get install -y --no-install-recommends \ + curl \ + ca-certificates \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Install uv +ADD https://astral.sh/uv/install.sh /uv-installer.sh + +RUN sh /uv-installer.sh && rm /uv-installer.sh + +ENV PATH="/root/.local/bin:$PATH" + +COPY pyproject.toml uv.lock ./ + +RUN --mount=type=cache,target=/root/.cache/uv \ + uv venv -p {python_version} \ + && uv sync --locked --no-dev --no-install-project --no-editable + +COPY . /app + +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --locked --no-dev --no-editable + + +# Build production stage +FROM ubuntu:24.04 AS prod + +RUN useradd appuser + +WORKDIR /app + +RUN chown appuser:appuser /app + +ENV \ + PYTHONUNBUFFERED=true \ + PATH="/app/.venv/bin:$PATH" \ + PORT="8000" + +COPY --from=builder /app/.venv /app/.venv +COPY --from=builder /app/{source_dir} /app/{source_dir} +COPY --from=builder /opt/uv/python /opt/uv/python +COPY ./scripts/entrypoint.sh /app + +RUN chmod +x /app/entrypoint.sh + +EXPOSE 8000 + +USER appuser + +ENTRYPOINT ["./entrypoint.sh"] +"#, + ), + _ => todo!("Implement this"), + } +} + +pub fn save_dockerfile(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("Dockerfile"); + let file_content = create_dockerfile(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_dockerignore(project_info: &ProjectInfo) -> String { + let mut info = r#"__pycache__ +app.egg-info +*.pyc +.mypy_cache +.pytest_cache +.ruff_cache +.coverage +htmlcov +.cache +.venv +.env* +*.log +Dockerfile +.dockerignore +.git +tests +tests-results +"# + .to_string(); + + if project_info.project_manager == ProjectManager::Maturin { + info.push_str("target\n"); + } + + info +} + +pub fn save_dockerfileignore(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join(".dockerignore"); + let file_content = create_dockerignore(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_entrypoint_script(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"#!/bin/bash + +CORES=$(nproc --all) +WORKERS=$((($CORES * 2 + 1) > 8 ? 
8 : ($CORES * 2 + 1))) + +echo Starting Granian with $WORKERS workers + +.venv/bin/granian ./{module}/main:app --host 0.0.0.0 --port 8000 --interface asgi --no-ws --workers ${{WORKERS}} --runtime-mode st --loop uvloop --log-level info --log --workers-lifetime 10800 --respawn-interval 30 --process-name granian-{module} +"# + ) +} + +pub fn save_entrypoint_script(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("scripts/entrypoint.sh"); + let file_content = create_entrypoint_script(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} diff --git a/src/fastapi/fastapi_files.rs b/src/fastapi/fastapi_files.rs new file mode 100644 index 00000000..ae0e00d0 --- /dev/null +++ b/src/fastapi/fastapi_files.rs @@ -0,0 +1,408 @@ +use std::{ + fs::{create_dir_all, File}, + path::Path, +}; + +use anyhow::Result; +use rayon::prelude::*; + +use crate::{ + fastapi::{ + core_files::{ + save_cache_file, save_config_file, save_core_utils_file, save_db_file, + save_security_file, + }, + docker_files::{ + save_dockercompose_file, save_dockercompose_override_file, + save_dockercompose_traefik_file, save_dockerfile, save_dockerfileignore, + save_entrypoint_script, + }, + migration_files::save_initial_migrations, + model_files::{save_message_model_file, save_token_models_file, save_user_models_file}, + route_files::{ + save_deps_file, save_health_route, save_login_route, save_router_file, + save_users_route, save_version_route, + }, + service_files::{save_cache_user_services_file, save_db_user_services_file}, + test_files::{ + save_config_test_file, save_conftest_file, save_health_route_test_file, + save_login_route_test_file, save_main_test_file, save_test_deps_file, + save_test_utils_file, save_user_model_test_file, save_user_routes_test_file, + save_user_services_cache_test_file, save_user_services_db_test_file, + save_version_route_test_file, + }, + }, + file_manager::save_file_with_content, + project_info::{DatabaseManager, ProjectInfo}, +}; + +pub fn generate_fastapi(project_info: &ProjectInfo) -> Result<()> { + create_directories(project_info)?; + + [ + save_cache_file, + save_cache_user_services_file, + save_config_test_file, + save_conftest_file, + save_db_file, + save_db_user_services_file, + save_dockercompose_file, + save_dockercompose_override_file, + save_dockercompose_traefik_file, + save_dockerfileignore, + save_dockerfile, + save_entrypoint_script, + save_example_env_file, + save_exceptions_file, + save_initial_migrations, + save_main_file, + save_main_test_file, + save_message_model_file, + save_config_file, + save_core_utils_file, + save_deps_file, + save_health_route, + save_health_route_test_file, + save_login_route, + save_login_route_test_file, + save_router_file, + save_security_file, + save_test_deps_file, + save_test_utils_file, + save_token_models_file, + save_types_file, + save_user_models_file, + save_user_model_test_file, + save_users_route, + save_user_routes_test_file, + save_user_services_cache_test_file, + save_user_services_db_test_file, + save_version_route, + save_version_route_test_file, + ] + .into_par_iter() + .map(|f| f(project_info)) + .collect::<Result<Vec<_>, _>>()?; + + Ok(()) +} + +fn create_example_env_file(project_info: &ProjectInfo) -> String { + let mut info = r#"SECRET_KEY=someKey +FIRST_SUPERUSER_EMAIL=some@email.com +FIRST_SUPERUSER_PASSWORD=changethis +FIRST_SUPERUSER_NAME="Wade Watts" +POSTGRES_HOST=127.0.0.1 +POSTGRES_PORT=5432 +POSTGRES_USER=postgres
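+# NOTE: values left as "changethis" are rejected by the generated Settings class outside the local environment (see _check_default_secret in core/config.py)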
+POSTGRES_PASSWORD=some_password +POSTGRES_DB=changethis +VALKEY_HOST=127.0.0.1 +VALKEY_PASSWORD=test_password +STACK_NAME=changethis +DOMAIN=127.0.0.1 +PRODUCTION_MODE=false +"# + .to_string(); + + if let Some(database_manager) = &project_info.database_manager { + if database_manager == &DatabaseManager::AsyncPg { + info.push_str("DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}\n"); + } + } + + info +} + +fn save_example_env_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join(".env-example"); + let file_content = create_example_env_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_exceptions_file() -> String { + r#"class DbInsertError(Exception): + pass + + +class DbUpdateError(Exception): + pass + + +class NoDbPoolError(Exception): + pass + + +class UserNotFoundError(Exception): + pass +"# + .to_string() +} + +fn save_exceptions_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("exceptions.py"); + let file_content = create_exceptions_file(); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_main_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + format!( + r#"from __future__ import annotations + +import sys +import traceback +from collections.abc import AsyncGenerator +from contextlib import asynccontextmanager + +from fastapi import FastAPI, Request, Response +from fastapi.responses import ORJSONResponse +from loguru import logger +from starlette.exceptions import HTTPException as StarletteHTTPException +from starlette.middleware.cors import CORSMiddleware +from starlette.responses import JSONResponse + +from {module}.api.router import api_router +from {module}.core.cache import cache +from {module}.core.config import settings +from {module}.core.db import db + +logger.remove()  # Remove the default logger so log level can be set +logger.add(sys.stderr, level=settings.LOG_LEVEL) + + +@asynccontextmanager +async def lifespan(_: FastAPI) -> AsyncGenerator:  # pragma: no cover + logger.info("Initializing database connection pool") + try: + await db.create_pool() + except Exception as e: + logger.error(f"Error creating db connection pool: {{e}}") + raise + + logger.info("Initializing cache client") + try: + await cache.create_client() + except Exception as e: + logger.error(f"Error creating cache client: {{e}}") + raise + + logger.info("Saving first superuser") + try: + await db.create_first_superuser() + except Exception as e: + logger.error(f"Error creating first superuser: {{e}}") + raise + + yield + + logger.info("Closing database connection pool") + try: + await db.close_pool() + except Exception as e: + logger.error(f"Error closing db connection pool: {{e}}") + raise + + logger.info("Closing cache client") + try: + await cache.close_client() + except Exception as e: + logger.error(f"Error closing cache client: {{e}}") + raise + + +openapi_url = f"{{settings.API_V1_PREFIX}}/openapi.json" + +app = FastAPI( + title=settings.TITLE, + lifespan=lifespan, + openapi_url=openapi_url, + default_response_class=ORJSONResponse, +) + + +@app.exception_handler(StarletteHTTPException) +async def http_exception_handler(request: Request, exc: StarletteHTTPException) -> Response: + if exc.status_code >= 500: + stack_trace = ( + "".join( + traceback.format_exception(
type(exc.__cause__), exc.__cause__, exc.__cause__.__traceback__ + ) + ) + if exc.__cause__ + else traceback.format_exc() + ) + + original_exc_type = type(exc.__cause__).__name__ if exc.__cause__ else "HTTPException" + original_exc_msg = str(exc.__cause__) if exc.__cause__ else str(exc.detail) + + msg = f"""HTTP {{exc.status_code}} error in {{request.method}} {{request.url.path}}\n + Original exception: {{original_exc_type}}: {{original_exc_msg}}\n + HTTP detail: {{exc.detail}}\n + Stack trace:\n{{stack_trace}}""" + + logger.error(msg) + + return JSONResponse(status_code=exc.status_code, content={{"detail": exc.detail}}) + + +if settings.all_cors_origins: + app.add_middleware( + CORSMiddleware, + allow_origins=settings.all_cors_origins, + allow_credentials=True, + allow_methods=["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"], + allow_headers=["Authorization", "Content-Type"], + ) + +app.include_router(api_router) +"# + ) +} + +fn save_main_file(project_info: &ProjectInfo) -> Result<()> { + let base = project_info.source_dir_path(); + let file_path = base.join("main.py"); + let file_content = create_main_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_types_file() -> String { + r#"from typing import Any, Literal, TypeAlias + +ActiveFilter: TypeAlias = Literal["all", "active", "inactive"] +Json: TypeAlias = dict[str, Any] +"# + .to_string() +} + +fn save_types_file(project_info: &ProjectInfo) -> Result<()> { + let base = project_info.source_dir_path(); + let file_path = base.join("types.py"); + let file_content = create_types_file(); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_directories(project_info: &ProjectInfo) -> Result<()> { + [ + create_api_dir, + create_core_dir, + create_migrations_dir, + create_models_dir, + create_scripts_dir, + create_services_dir, + create_test_dir, + ] + .into_par_iter() + .map(|f| f(project_info)) + .collect::<Result<Vec<_>, _>>()?; + + Ok(()) +} + +fn create_api_dir(project_info: &ProjectInfo) -> Result<()> { + let src = &project_info.source_dir_path(); + let api_dir = src.join("api"); + let routes_dir = api_dir.join("routes"); + create_dir_all(&routes_dir)?; + save_init_file(&api_dir)?; + save_init_file(&routes_dir)?; + + Ok(()) +} + +fn create_core_dir(project_info: &ProjectInfo) -> Result<()> { + let src = &project_info.source_dir_path(); + let core_dir = src.join("core"); + create_dir_all(&core_dir)?; + save_init_file(&core_dir)?; + + Ok(()) +} + +fn create_migrations_dir(project_info: &ProjectInfo) -> Result<()> { + let base = project_info.base_dir(); + let migrations_dir = base.join("migrations"); + create_dir_all(migrations_dir)?; + + Ok(()) +} + +fn create_models_dir(project_info: &ProjectInfo) -> Result<()> { + let src = &project_info.source_dir_path(); + let models_dir = src.join("models"); + create_dir_all(&models_dir)?; + save_init_file(&models_dir)?; + + Ok(()) +} + +fn create_scripts_dir(project_info: &ProjectInfo) -> Result<()> { + let src = &project_info.base_dir(); + let scripts_dir = src.join("scripts"); + create_dir_all(&scripts_dir)?; + + Ok(()) +} + +fn create_services_dir(project_info: &ProjectInfo) -> Result<()> { + let src = &project_info.source_dir_path(); + let services_dir = src.join("services"); + let services_db_dir = services_dir.join("db"); + let services_cache_dir = services_dir.join("cache"); + create_dir_all(&services_db_dir)?; + create_dir_all(&services_cache_dir)?; + save_init_file(&services_dir)?; + save_init_file(&services_db_dir)?;
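+ // the empty __init__.py files mark each generated directory as a Python package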
save_init_file(&services_cache_dir)?; + + Ok(()) +} + +fn create_test_dir(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let test_dir = base.join("tests"); + let api_dir = test_dir.join("api"); + let routes_dir = api_dir.join("routes"); + let core_dir = test_dir.join("core"); + let models_dir = test_dir.join("models"); + let services_dir = test_dir.join("services"); + let services_db_dir = services_dir.join("db"); + let services_cache_dir = services_dir.join("cache"); + create_dir_all(&routes_dir)?; + create_dir_all(&core_dir)?; + create_dir_all(&models_dir)?; + create_dir_all(&services_cache_dir)?; + create_dir_all(&services_db_dir)?; + save_init_file(&api_dir)?; + save_init_file(&core_dir)?; + save_init_file(&models_dir)?; + save_init_file(&routes_dir)?; + save_init_file(&services_dir)?; + save_init_file(&services_db_dir)?; + save_init_file(&services_cache_dir)?; + + Ok(()) +} + +fn save_init_file(path: &Path) -> Result<()> { + let file_path = path.join("__init__.py"); + File::create(file_path)?; + + Ok(()) +} diff --git a/src/fastapi/fastapi_installer.rs b/src/fastapi/fastapi_installer.rs new file mode 100644 index 00000000..696f9e4d --- /dev/null +++ b/src/fastapi/fastapi_installer.rs @@ -0,0 +1,148 @@ +use anyhow::{bail, Result}; + +use crate::project_info::{DatabaseManager, ProjectInfo, ProjectManager}; + +const FASTAPI_BASE_DEPENDENCIES: &[&str] = &[ + "asyncpg", + "camel-converter[pydantic]", + "fastapi", + "granian[pname,reload]", + "httptools", + "loguru", + "orjson", + "pwdlib[argon2]", + "pydantic[email]", + "pydantic-settings", + "pyjwt", + "python-multipart", + "uvloop; sys_platform != 'win32'", + "valkey", +]; + +const FASTAPI_BASE_DEV_DEPENDENCIES: &[&str] = &["httpx", "pytest-xdist"]; + +pub fn install_fastapi_dependencies(project_info: &ProjectInfo) -> Result<()> { + match project_info.project_manager { + ProjectManager::Uv => uv_fastapi_dependency_installer(project_info)?, + ProjectManager::Poetry => poetry_fastapi_dependency_installer(project_info)?, + ProjectManager::Setuptools => setuptools_fastapi_dependency_installer(project_info)?, + ProjectManager::Pixi => bail!("Pixi is not currently supported for FastAPI projects"), + ProjectManager::Maturin => maturin_fastapi_dependency_installer(project_info)?, + }; + + Ok(()) +} + +fn uv_fastapi_dependency_installer(project_info: &ProjectInfo) -> Result<()> { + let mut dependencies = FASTAPI_BASE_DEPENDENCIES.to_vec(); + if project_info.database_manager == Some(DatabaseManager::SqlAlchemy) { + dependencies.push("sqlalchemy"); + dependencies.push("alembic"); + } + let mut args = vec!["add"]; + args.extend(dependencies); + let output = std::process::Command::new("uv") + .args(args) + .current_dir(project_info.base_dir()) + .output()?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + bail!("Failed to install FastAPI dependencies: {stderr}"); + } + + let dev_dependencies = FASTAPI_BASE_DEV_DEPENDENCIES.to_vec(); + let mut dev_args = vec!["add", "--group=dev"]; + dev_args.extend(dev_dependencies); + let output = std::process::Command::new("uv") + .args(dev_args) + .current_dir(project_info.base_dir()) + .output()?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + bail!("Failed to install FastAPI dev dependencies: {stderr}"); + } + + Ok(()) +} + +fn poetry_fastapi_dependency_installer(project_info: &ProjectInfo) -> Result<()> { + let mut dependencies = FASTAPI_BASE_DEPENDENCIES.to_vec(); + if project_info.database_manager == Some(DatabaseManager::SqlAlchemy) { + dependencies.push("sqlalchemy"); + dependencies.push("alembic"); + } + let mut args = vec!["add"]; + args.extend(dependencies); + let output = std::process::Command::new("poetry") + .args(args) + .current_dir(project_info.base_dir()) + .output()?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + bail!("Failed to install FastAPI dependencies: {stderr}"); + } + + let dev_dependencies = FASTAPI_BASE_DEV_DEPENDENCIES.to_vec(); + let mut dev_args = vec!["add", "--group=dev"]; + dev_args.extend(dev_dependencies); + let output = std::process::Command::new("poetry") + .args(dev_args) + .current_dir(project_info.base_dir()) + .output()?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + bail!("Failed to install FastAPI dev dependencies: {stderr}"); + } + + Ok(()) +} + +fn setuptools_fastapi_dependency_installer(project_info: &ProjectInfo) -> Result<()> { + let venv_output = std::process::Command::new("python") + .args(["-m", "venv", ".venv"]) + .current_dir(project_info.base_dir()) + .output()?; + + if !venv_output.status.success() { + let stderr = String::from_utf8_lossy(&venv_output.stderr); + bail!("Failed to create virtual environment: {stderr}"); + } + + let mut dependencies = FASTAPI_BASE_DEPENDENCIES.to_vec(); + let dev_dependencies = FASTAPI_BASE_DEV_DEPENDENCIES.to_vec(); + if project_info.database_manager == Some(DatabaseManager::SqlAlchemy) { + dependencies.push("sqlalchemy"); + dependencies.push("alembic"); + } + let mut args = vec!["-m", "pip", "install"]; + args.extend(dependencies); + args.extend(dev_dependencies); + let output = std::process::Command::new(".venv/bin/python") + .args(args) + .current_dir(project_info.base_dir()) + .output()?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + bail!("Failed to install FastAPI dependencies: {stderr}"); + } + + Ok(()) +} + +fn maturin_fastapi_dependency_installer(project_info: &ProjectInfo) -> Result<()> { + use crate::project_info::Pyo3PythonManager; + + if let Some(pyo3_python_manager) = &project_info.pyo3_python_manager { + match pyo3_python_manager { + Pyo3PythonManager::Uv => uv_fastapi_dependency_installer(project_info), + Pyo3PythonManager::Setuptools => setuptools_fastapi_dependency_installer(project_info), + } + } else { + bail!("No Python project manager provided for PyO3 project"); + } +} diff --git a/src/fastapi/migration_files.rs b/src/fastapi/migration_files.rs new file mode 100644 index 00000000..d79b1fe4 --- /dev/null +++ b/src/fastapi/migration_files.rs @@ -0,0 +1,52 @@ +use anyhow::Result; +use time::OffsetDateTime; + +use crate::{file_manager::save_file_with_content, project_info::ProjectInfo}; + +fn create_initial_up_migration() -> String { + r#"CREATE TABLE IF NOT EXISTS users ( + id TEXT PRIMARY KEY, + email TEXT NOT NULL UNIQUE, + full_name TEXT NOT NULL, + hashed_password TEXT NOT NULL, + is_active BOOLEAN NOT NULL DEFAULT true, + is_superuser BOOLEAN NOT NULL DEFAULT false, + last_login TIMESTAMP NOT NULL DEFAULT NOW() +); +"# + .to_string() +} + +fn create_initial_down_migration() -> String { + r#"DROP TABLE IF EXISTS users; +"# + .to_string() +} + +pub fn save_initial_migrations(project_info: &ProjectInfo) -> Result<()> { + let now = OffsetDateTime::now_utc(); + 
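// The prefix appears to follow sqlx's timestamp naming convention (YYYYMMDDHHMMSS, UTC), which "sqlx migrate run" sorts on. + let migration_prefix = format!( + "{:04}{:02}{:02}{:02}{:02}{:02}", + now.year(), + now.month() as u8, + now.day(), + now.hour(), + now.minute(), + now.second() 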
+ ); + let up_file_name = format!("{migration_prefix}_init.up.sql"); + let down_file_name = format!("{migration_prefix}_init.down.sql"); + + let base = project_info.base_dir(); + let up_file_path = base.join(format!("migrations/{up_file_name}")); + let up_file_content = create_initial_up_migration(); + + save_file_with_content(&up_file_path, &up_file_content)?; + + let down_file_path = base.join(format!("migrations/{down_file_name}")); + let down_file_content = create_initial_down_migration(); + + save_file_with_content(&down_file_path, &down_file_content)?; + + Ok(()) +} diff --git a/src/fastapi/mod.rs b/src/fastapi/mod.rs new file mode 100644 index 00000000..ac29082e --- /dev/null +++ b/src/fastapi/mod.rs @@ -0,0 +1,10 @@ +pub mod fastapi_files; +pub mod fastapi_installer; + +mod core_files; +mod docker_files; +mod migration_files; +mod model_files; +mod route_files; +mod service_files; +mod test_files; diff --git a/src/fastapi/model_files.rs b/src/fastapi/model_files.rs new file mode 100644 index 00000000..91e5b3ee --- /dev/null +++ b/src/fastapi/model_files.rs @@ -0,0 +1,159 @@ +use anyhow::Result; + +use crate::{file_manager::save_file_with_content, project_info::ProjectInfo}; + +fn create_message_model_file() -> String { + r#"from __future__ import annotations + +from camel_converter.pydantic_base import CamelBase + + +class Message(CamelBase): + """Used for generic messages.""" + + message: str +"# + .to_string() +} + +pub fn save_message_model_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("models/message.py"); + let file_content = create_message_model_file(); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_token_models_file() -> String { + r#"from pydantic import BaseModel + + +class Token(BaseModel): + """Don't use CamelBase here because FastAPI requires snake case variables for the token.""" + + access_token: str + token_type: str = "bearer" + + +class TokenPayload(BaseModel): + """Contents of the JWT token.""" + + sub: str | None = None + is_superuser: bool = False +"# + .to_string() +} + +pub fn save_token_models_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("models/token.py"); + let file_content = create_token_models_file(); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_user_models_file() -> String { + r#"from __future__ import annotations + +import re +from datetime import datetime + +from camel_converter.pydantic_base import CamelBase +from pydantic import EmailStr, Field, field_validator + + +class _UserBase(CamelBase): + email: EmailStr = Field(max_length=255) + is_active: bool = True + is_superuser: bool = False + full_name: str = Field(max_length=255) + + +class UserCreate(_UserBase): + password: str = Field(min_length=8, max_length=255) + + @field_validator("password") + @classmethod + def validate_password_requirements(cls, v: str) -> str: + return _validate_password(v) + + +class UserUpdate(CamelBase): + email: EmailStr | None = Field(default=None, max_length=255) + is_active: bool | None = None + is_superuser: bool | None = None + password: str | None = Field(default=None, min_length=8, max_length=255) + full_name: str | None = Field(default=None, max_length=255) + + @field_validator("password") + @classmethod + def validate_password_requirements(cls, v: str) -> str: + return _validate_password(v) + + +class UserUpdateMe(CamelBase): + email: 
EmailStr | None = Field(default=None, max_length=255) + full_name: str | None = Field(default=None, max_length=255) + + +class UpdatePassword(CamelBase): + current_password: str = Field(min_length=8, max_length=255) + new_password: str = Field(min_length=8, max_length=255) + + +class User(_UserBase): + id: str + hashed_password: str + + +class UserPublic(_UserBase): + id: str + + +class UsersPublic(CamelBase): + data: list[UserPublic] + count: int + total_users: int + + +class UserInDb(_UserBase): + id: str + hashed_password: str + last_login: datetime + + +def _validate_password(password: str) -> str: + """Makes sure the password meets the minimum requirements. + + Passwords must contain at least 1 uppercase letter, 1 lowercase letter, a number, and a + special character. They must be a minimum of 8 characters. + """ + if ( + not ( + re.search(r"[A-Z]", password) + and re.search(r"[a-z]", password) + and re.search(r"\d", password) + and re.search(r"[!@#$%^&*()_+\-=\[\]{};':\"\\|,.<>\/?]", password) + ) + or len(password) < 8 + ): + raise ValueError( + "Password must contain at least one uppercase letter, one lowercase letter, one number, and one special character, and must be at least 8 characters long." + ) + return password +"#.to_string() +} + +pub fn save_user_models_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("models/users.py"); + let file_content = create_user_models_file(); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} diff --git a/src/fastapi/route_files.rs b/src/fastapi/route_files.rs new file mode 100644 index 00000000..60be8a29 --- /dev/null +++ b/src/fastapi/route_files.rs @@ -0,0 +1,864 @@ +use anyhow::Result; + +use crate::{file_manager::save_file_with_content, project_info::ProjectInfo}; + +fn create_deps_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from collections.abc import AsyncGenerator +from typing import Annotated, Any, cast + +import asyncpg +import jwt +import valkey.asyncio as valkey +from fastapi import Depends, HTTPException, Request +from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel +from fastapi.security import OAuth2 +from fastapi.security.utils import get_authorization_scheme_param +from jwt.exceptions import InvalidTokenError +from loguru import logger +from pydantic import ValidationError +from starlette.status import ( + HTTP_401_UNAUTHORIZED, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_503_SERVICE_UNAVAILABLE, +) + +from {module}.core.cache import cache +from {module}.core.config import settings +from {module}.core.db import db +from {module}.core.security import ALGORITHM +from {module}.models.token import TokenPayload +from {module}.models.users import UserInDb +from {module}.services.db.user_services import get_user_by_id + + +class OAuth2PasswordBearerWithCookie(OAuth2): + def __init__( + self, + tokenUrl: str, + scheme_name: str | None = None, + scopes: dict[str, str] | None = None, + description: str | None = None, + auto_error: bool = True, + ): + if not scopes: + scopes = {{}} + flows = OAuthFlowsModel(password=cast(Any, {{"tokenUrl": tokenUrl, "scopes": scopes}})) + super().__init__( + flows=flows, + scheme_name=scheme_name, + description=description, + auto_error=auto_error, + ) + + async def __call__(self, request: Request) -> str | None: + authorization = request.cookies.get( # changed to accept access token from httpOnly Cookie + "access_token" + ) + + 
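# Prefer the httpOnly cookie; fall back to the Authorization header when it is absent. + if 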
authorization: + scheme, param = get_authorization_scheme_param(authorization) + else: # Cookie not found, check headers. + auth_header = request.headers.get("Authorization") + if not auth_header: + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={{"WWW-Authenticate": "Bearer"}}, + ) + + scheme, param = get_authorization_scheme_param(auth_header) + + if scheme.lower() != "bearer": + if self.auto_error: + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + headers={{"WWW-Authenticate": "Bearer"}}, + ) + else: # pragma: no cover + return None + return param + + +reusable_oauth2 = OAuth2PasswordBearerWithCookie( + tokenUrl=f"{{settings.API_V1_PREFIX}}/login/access-token" +) +TokenDep = Annotated[str, Depends(reusable_oauth2)] + + +async def get_cache_client() -> AsyncGenerator[valkey.Valkey]: + if cache.client is None: # pragma: no cover + logger.error("No cache client created") + raise HTTPException( + status_code=HTTP_503_SERVICE_UNAVAILABLE, detail="The cache is currently unavailable" + ) + + yield cache.client + + +CacheClient = Annotated[valkey.Valkey, Depends(get_cache_client)] + + +async def get_db_pool() -> AsyncGenerator[asyncpg.Pool]: + if db.db_pool is None: # pragma: no cover + logger.error("No database pool created") + raise HTTPException( + status_code=HTTP_503_SERVICE_UNAVAILABLE, detail="The database is currently unavailable" + ) + + yield db.db_pool + + +DbPool = Annotated[asyncpg.Pool, Depends(get_db_pool)] + + +async def get_current_user(pool: DbPool, cache_client: CacheClient, token: TokenDep) -> UserInDb: + try: + logger.debug("Decoding JWT token") + payload = jwt.decode( + token, key=settings.SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM] + ) + token_data = TokenPayload(**payload) + except (InvalidTokenError, ValidationError) as e: + logger.debug(f"Error decoding token: {{e}}") + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, + detail="Could not validate credentials", + ) from e + if token_data.sub is None: # pragma: no cover + logger.debug("Token does not contain sub data") + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, detail="Could not validate credentials" + ) + user_id = token_data.sub + user = await get_user_by_id(pool=pool, cache_client=cache_client, user_id=user_id) + if not user: # pragma: no cover + logger.debug("User not found") + raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="User not found") + if not user.is_active: + logger.debug("User is inactive") + raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail="Inactive user") + + return user + + +CurrentUser = Annotated[UserInDb, Depends(get_current_user)] + + +def get_current_active_superuser(current_user: CurrentUser) -> UserInDb: + if not current_user.is_superuser: + logger.debug("The current user is not a super user") + raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail="The user doesn't have enough privileges") + return current_user +"# + ) +} + +pub fn save_deps_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("api/deps.py"); + let file_content = create_deps_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_health_route(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from __future__ import annotations + +from loguru import logger + +from {module}.api.deps import CacheClient, DbPool 
+from {module}.core.config import settings +from {module}.core.utils import APIRouter + +router = APIRouter(tags=["Health"], prefix=f"{{settings.API_V1_PREFIX}}/health") + + +@router.get("/") +async def health(*, cache_client: CacheClient, pool: DbPool) -> dict[str, str]: + """Check the health of the server.""" + + logger.debug("Checking health") + health = {{"server": "healthy"}} + + logger.debug("Checking db health") + try: + async with pool.acquire() as conn: + await conn.execute("SELECT 1") + health["db"] = "healthy" + except Exception as e: + logger.error(f"Unable to ping the database: {{e}}") + health["db"] = "unhealthy" + + logger.debug("Checking cache health") + try: + await cache_client.ping() + health["cache"] = "healthy" + except Exception as e: + logger.error(f"Unable to ping the cache server: {{e}}") + health["cache"] = "unhealthy" + + return health +"# + ) +} + +pub fn save_health_route(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("api/routes/health.py"); + let file_content = create_health_route(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_login_route(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from __future__ import annotations + +from datetime import timedelta +from typing import Annotated + +from fastapi import Depends, HTTPException, Response +from fastapi.security import OAuth2PasswordRequestForm +from loguru import logger +from starlette.status import ( + HTTP_400_BAD_REQUEST, + HTTP_401_UNAUTHORIZED, + HTTP_500_INTERNAL_SERVER_ERROR, +) + +from {module}.api.deps import CacheClient, CurrentUser, DbPool +from {module}.core import security +from {module}.core.config import settings +from {module}.core.utils import APIRouter +from {module}.models.token import Token +from {module}.models.users import UserPublic +from {module}.services.db import user_services + +router = APIRouter(tags=["Login"], prefix=f"{{settings.API_V1_PREFIX}}") + + +@router.post("/login/access-token") +async def login_access_token( + *, response: Response, pool: DbPool, form_data: Annotated[OAuth2PasswordRequestForm, Depends()] +) -> Token: + """OAuth2 compatible token login, get an access token for future requests.""" + + logger.debug("Authenticating user") + user = await user_services.authenticate( + pool=pool, email=form_data.username, password=form_data.password + ) + + if not user: + logger.debug("Incorrect email or password") + raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Incorrect email or password") + elif not user.is_active: + logger.debug("Inactive user") + raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Inactive user") + access_token_expires = timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) + + access_token = security.create_access_token( + str(user.id), user.is_superuser, expires_delta=access_token_expires + ) + + response.set_cookie( + key="access_token", + value=f"Bearer {{access_token}}", + httponly=True, + secure=settings.PRODUCTION_MODE, + ) + + return Token(access_token=access_token) + + +@router.post("/login/test-token") +async def test_token( + *, db_pool: DbPool, cache_client: CacheClient, current_user: CurrentUser +) -> UserPublic: + """Test access token.""" + + try: + user_public = await user_services.get_user_public_by_id( + pool=db_pool, cache_client=cache_client, user_id=current_user.id + ) + except Exception as e: # pragma: no cover + logger.error(f"An 
error occurred while testing the user token: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while testing the user token", + ) from e + + if user_public is None: # pragma: no cover + raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Not authorized") + + return user_public +"# + ) +} + +pub fn save_login_route(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("api/routes/login.py"); + let file_content = create_login_route(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_router_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from {module}.api.routes import health, login, users, version +from {module}.core.utils import APIRouter + +api_router = APIRouter() +api_router.include_router(health.router) +api_router.include_router(login.router) +api_router.include_router(users.router) +api_router.include_router(version.router) +"# + ) +} + +pub fn save_router_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("api/router.py"); + let file_content = create_router_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_users_route(project_info: &ProjectInfo) -> String { + let module = project_info.module_name(); + + format!( + r#"from __future__ import annotations + +from fastapi import Depends, HTTPException +from loguru import logger +from starlette.status import ( + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_409_CONFLICT, + HTTP_500_INTERNAL_SERVER_ERROR, +) + +from {module}.api.deps import ( + CacheClient, + CurrentUser, + DbPool, + get_current_active_superuser, +) +from {module}.core.config import settings +from {module}.core.security import verify_password +from {module}.core.utils import APIRouter +from {module}.models.message import Message +from {module}.models.users import ( + UpdatePassword, + UserCreate, + UserPublic, + UsersPublic, + UserUpdate, + UserUpdateMe, +) +from {module}.services.db import user_services +from {module}.types import ActiveFilter + +router = APIRouter(tags=["Users"], prefix=f"{{settings.API_V1_PREFIX}}/users") + + +@router.get("/", dependencies=[Depends(get_current_active_superuser)]) +async def read_users( + *, + db_pool: DbPool, + cache_client: CacheClient, + offset: int = 0, + limit: int = 100, +) -> UsersPublic: + """Retrieve users. + + Administrator rights required. 
+ """ + + logger.debug(f"Getting users with offset {{offset}} and limit {{limit}}") + try: + users_public = await user_services.get_users_public( + pool=db_pool, + cache_client=cache_client, + offset=offset, + limit=limit, + ) + except* Exception as eg: # pragma: no cover + for e in eg.exceptions: + logger.error(f"An error occurred while retrieving users: {{e}}") + + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while retrieving users", + ) from eg + + return users_public + + +@router.post("/") +async def create_user( + *, + db_pool: DbPool, + cache_client: CacheClient, + user_in: UserCreate, +) -> UserPublic: + """Create a new user.""" + + logger.debug("Creating new user") + try: + user = await user_services.get_user_by_email(pool=db_pool, email=user_in.email) + except Exception as e: # pragma: no cover + logger.error( + f"An error occurred while checking if the email {{user_in.email}} already exists for creating a user: {{e}}" + ) + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while creating the user.", + ) from e + + if user: + logger.debug(f"User with email address {{user_in.email}} already exists") + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail="A user with this email address already exists in the system", + ) + + try: + created_user = await user_services.create_user( + pool=db_pool, cache_client=cache_client, user=user_in + ) + except Exception as e: # pragma: no cover + logger.error( + f"An error occurred while creating the user with email address {{user_in.email}}: {{e}}" + ) + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while creating the user", + ) from e + + try: + user_public = await user_services.get_user_public_by_id( + pool=db_pool, + cache_client=cache_client, + user_id=created_user.id, + ) + except Exception as e: # pragma: no cover + logger.error(f"An error occurred while creating the user: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while creating the user", + ) from e + + if user_public is None: # pragma: no cover + logger.error(f"User with id {{created_user.id}} not found after creation") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while creating the user", + ) + + return user_public + + +@router.patch("/me") +async def update_user_me( + *, + db_pool: DbPool, + cache_client: CacheClient, + user_in: UserUpdateMe, + current_user: CurrentUser, +) -> UserPublic: + """Update own user.""" + + logger.debug("Updating current user") + if user_in.email: + try: + existing_user = await user_services.get_user_by_email(pool=db_pool, email=user_in.email) + except Exception as e: # pragma: no cover + logger.error( + f"An error occurred while updating me, checking if the email already exists: {{e}}" + ) + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while updating the user", + ) from e + + if existing_user and existing_user.id != current_user.id: + logger.debug(f"User with email address {{user_in.email}} already exists") + raise HTTPException( + status_code=HTTP_409_CONFLICT, + detail="A user with this email address already exists", + ) + + try: + updated_user = await user_services.update_user( + pool=db_pool, cache_client=cache_client, db_user=current_user, user_in=user_in + ) + except Exception as e: # pragma: no cover + logger.error(f"An error occurred 
while updating me: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while updating the user", + ) from e + + try: + user_public = await user_services.get_user_public_by_id( + pool=db_pool, + cache_client=cache_client, + user_id=updated_user.id, + ) + except Exception as e: # pragma: no cover + logger.error(f"Error updating user: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while updating the user", + ) from e + + if user_public is None: # pragma: no cover + logger.error(f"User with id {{updated_user.id}} not found after update") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while updating the user", + ) + + return user_public + + +@router.patch("/me/password", status_code=HTTP_204_NO_CONTENT) +async def update_password_me( + *, + db_pool: DbPool, + cache_client: CacheClient, + user_in: UpdatePassword, + current_user: CurrentUser, +) -> None: + """Update own password.""" + + if not verify_password(user_in.current_password, current_user.hashed_password): + logger.debug("Passwords do not match") + raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Incorrect password") + if user_in.current_password == user_in.new_password: + logger.debug("Password not changed") + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail="New password cannot be the same as the current one", + ) + + try: + logger.debug("Updating password") + await user_services.update_user( + pool=db_pool, cache_client=cache_client, db_user=current_user, user_in=user_in + ) + except Exception as e: # pragma: no cover + logger.error(f"An error occurred updating the password: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while updating the password", + ) from e + + +@router.get("/me") +async def read_user_me( + *, + db_pool: DbPool, + cache_client: CacheClient, + current_user: CurrentUser, + active_filter: ActiveFilter = "active", +) -> UserPublic: + """Get current user.""" + + try: + user_public = await user_services.get_user_public_by_id( + pool=db_pool, + cache_client=cache_client, + user_id=current_user.id, + active_filter=active_filter, + ) + except Exception as e: # pragma: no cover + logger.error(f"Error reading user me: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while getting the user", + ) from e + + # Fail safe, shouldn't be possible to get here + if user_public is None: # pragma: no cover + logger.debug("User not found") + raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="User not found") + + return user_public + + +@router.delete("/me", status_code=HTTP_204_NO_CONTENT) +async def delete_user_me( + *, db_pool: DbPool, cache_client: CacheClient, current_user: CurrentUser +) -> None: + """Delete own user.""" + + logger.debug("Deleting current user") + if current_user.is_superuser: + logger.debug("Super users are not allowed to delete themselves") + raise HTTPException( + status_code=HTTP_400_BAD_REQUEST, + detail="Super users are not allowed to delete themselves", + ) + + try: + await user_services.delete_user( + pool=db_pool, cache_client=cache_client, user_id=current_user.id + ) + except* Exception as eg: # pragma: no cover + for ex in eg.exceptions: # type: ignore[assignment] + logger.error(f"An error occurred while deleting the user: {{ex}}") + + raise HTTPException( + 
status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while deleting the user", + ) from eg + + +@router.get("/{{user_id}}") +async def read_user_by_id( + *, + db_pool: DbPool, + cache_client: CacheClient, + user_id: str, + current_user: CurrentUser, + active_filter: ActiveFilter = "active", +) -> UserPublic: + """Get a specific user by id.""" + + stripped_user_id = user_id.strip() + logger.debug(f"Getting user with id {{stripped_user_id}}") + try: + user = await user_services.get_user_public_by_id( + pool=db_pool, + cache_client=cache_client, + user_id=stripped_user_id, + active_filter=active_filter, + ) + except Exception as e: # pragma: no cover + logger.error(f"An error occurred while retrieving user with id {{stripped_user_id}}: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while retrieving the user", + ) from e + + if user is None: + logger.debug(f"User with id {{stripped_user_id}} not found") + raise HTTPException( + status_code=HTTP_404_NOT_FOUND, + detail="The user with this id does not exist in the system", + ) + + if user.id == current_user.id: + return user + if not current_user.is_superuser: + logger.debug("Current user is not an admin and does not have enough privileges to get user") + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, + detail="The user doesn't have enough privileges", + ) + return user + + +@router.patch( + "/{{user_id}}", + dependencies=[Depends(get_current_active_superuser)], +) +async def update_user( + *, + db_pool: DbPool, + cache_client: CacheClient, + user_id: str, + user_in: UserUpdate, + active_filter: ActiveFilter = "active", +) -> UserPublic: + """Update a user. + + Administrator rights required. + """ + + stripped_user_id = user_id.strip() + logger.debug(f"Updating user {{stripped_user_id}}") + try: + db_user = await user_services.get_user_by_id( + pool=db_pool, cache_client=cache_client, user_id=stripped_user_id + ) + except Exception as e: # pragma: no cover + logger.error(f"An error occurred while retrieving user {{stripped_user_id}} for updating: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while retrieving the user for updating", + ) from e + + if not db_user: + logger.debug(f"User with id {{stripped_user_id}} not found") + raise HTTPException( + status_code=HTTP_404_NOT_FOUND, + detail="The user with this id does not exist in the system", + ) + if user_in.email: + existing_user = await user_services.get_user_by_email(pool=db_pool, email=user_in.email) + if existing_user and existing_user.id != stripped_user_id: + logger.debug(f"A user with email {{user_in.email}} already exists") + raise HTTPException( + status_code=HTTP_409_CONFLICT, detail="User with this email already exists" + ) + + try: + # update_user hashes the password itself when one is provided, so a single call covers both cases. + db_user = await user_services.update_user( + pool=db_pool, cache_client=cache_client, db_user=db_user, user_in=user_in + ) + except* Exception as eg: # pragma: no cover + for ex in eg.exceptions: + logger.error(f"An error occurred while updating user {{stripped_user_id}}: {{ex}}") + + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while updating the user", + ) from eg + + try: + user_public = await user_services.get_user_public_by_id( + pool=db_pool, cache_client=cache_client, user_id=db_user.id, active_filter=active_filter + ) + 
except Exception as e: # pragma: no cover + logger.error(f"Error updating the user: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while updating the user", + ) from e + + if user_public is None: # pragma: no cover + logger.error(f"User with id {{db_user.id}} not found after updating") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while updating the user", + ) + + return user_public + + +@router.delete("/{{user_id}}", dependencies=[Depends(get_current_active_superuser)]) +async def delete_user( + *, db_pool: DbPool, cache_client: CacheClient, current_user: CurrentUser, user_id: str +) -> Message: + """Delete a user. + + Administrator rights required. + """ + + stripped_user_id = user_id.strip() + logger.debug(f"Deleting user with id {{stripped_user_id}}") + try: + user = await user_services.get_user_by_id( + pool=db_pool, cache_client=cache_client, user_id=stripped_user_id + ) + except Exception as e: # pragma: no cover + logger.error(f"An error occurred while retrieving user {{stripped_user_id}} for deleting: {{e}}") + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while retrieving the user for deleting", + ) from e + + if not user: + logger.debug(f"User with id {{stripped_user_id}} not found") + raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="User not found") + if user == current_user: + logger.debug("Super users are not allowed to delete themselves") + raise HTTPException( + status_code=HTTP_403_FORBIDDEN, + detail="Super users are not allowed to delete themselves", + ) + try: + await user_services.delete_user( + pool=db_pool, cache_client=cache_client, user_id=stripped_user_id + ) + except* Exception as eg: # pragma: no cover + for ex in eg.exceptions: + logger.error(f"An error occurred while deleting the user: {{ex}}") + + raise HTTPException( + status_code=HTTP_500_INTERNAL_SERVER_ERROR, + detail="An error occurred while deleting the user", + ) from eg + + return Message(message="User deleted successfully") +"# + ) +} + +pub fn save_users_route(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("api/routes/users.py"); + let file_content = create_users_route(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_version_route(project_info: &ProjectInfo) -> String { + let module = project_info.module_name(); + + format!( + r#"from __future__ import annotations + +from {module} import __version__ +from {module}.core.config import settings +from {module}.core.utils import APIRouter + +router = APIRouter(tags=["Version"], prefix=f"{{settings.API_V1_PREFIX}}/version") + + +@router.get("/") +async def read_version() -> dict[str, str]: + """Get the current API software version.""" + + return {{"version": __version__}} +"# + ) +} + +pub fn save_version_route(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("api/routes/version.py"); + let file_content = create_version_route(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} diff --git a/src/fastapi/service_files.rs b/src/fastapi/service_files.rs new file mode 100644 index 00000000..96328f4e --- /dev/null +++ b/src/fastapi/service_files.rs @@ -0,0 +1,414 @@ +use anyhow::Result; + +use crate::{file_manager::save_file_with_content, project_info::ProjectInfo}; + 
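+// Renders services/cache/user_cache_services.py: the generated project's Valkey-backed user cache helpers. +fn 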
create_cache_user_services_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from __future__ import annotations + +from typing import TYPE_CHECKING + +import orjson + +from {module}.models.users import UserInDb, UserPublic, UsersPublic + +if TYPE_CHECKING: # pragma: no cover + from valkey.asyncio import Valkey + + +async def delete_all_users_public(*, cache_client: Valkey) -> None: + keys = [key async for key in cache_client.scan_iter("users:public:*")] + + if not keys: + return None + + await cache_client.unlink(*keys) + + +async def get_users_public(*, cache_client: Valkey, offset: int, limit: int) -> UsersPublic | None: + users = await cache_client.get(name=f"users:public:{{offset}}:{{limit}}") # type: ignore[misc] + if not users: + return None + + json_data = orjson.loads(users) + + return UsersPublic( + data=[UserPublic(**user) for user in json_data["data"]], + count=json_data["count"], + total_users=json_data["total_users"], + ) + + +async def cache_users_public( + *, cache_client: Valkey, users_public: UsersPublic, offset: int, limit: int +) -> None: + """Cache users by page, expire cache after 1 minute.""" + + await cache_client.setex( + name=f"users:public:{{offset}}:{{limit}}", + time=60, + value=orjson.dumps(users_public.model_dump()), + ) + + +async def cache_user(*, cache_client: Valkey, user: UserInDb) -> None: + """Cache user, expire cache after 1 minute.""" + + await cache_client.setex(name=f"user:{{user.id}}", time=60, value=orjson.dumps(user.model_dump())) + + +async def delete_cached_user(*, cache_client: Valkey, user_id: str) -> None: + await cache_client.unlink(f"user:{{user_id}}") + + +async def get_cached_user(*, cache_client: Valkey, user_id: str) -> UserInDb | None: + user = await cache_client.get(name=f"user:{{user_id}}") # type: ignore[misc] + + if not user: + return None + + return UserInDb(**orjson.loads(user)) +"# + ) +} + +pub fn save_cache_user_services_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("services/cache/user_cache_services.py"); + let file_content = create_cache_user_services_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + Ok(()) +} + +fn create_db_user_services_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from __future__ import annotations + +import asyncio +from typing import TYPE_CHECKING + +from loguru import logger + +from {module}.core.security import get_password_hash, verify_password +from {module}.core.utils import create_db_primary_key +from {module}.exceptions import DbInsertError, DbUpdateError, UserNotFoundError +from {module}.models.users import ( + UpdatePassword, + UserCreate, + UserInDb, + UserPublic, + UsersPublic, + UserUpdate, + UserUpdateMe, +) +from {module}.services.cache import user_cache_services + +if TYPE_CHECKING: # pragma: no cover + from asyncpg import Pool + from valkey.asyncio import Valkey + + from {module}.types import ActiveFilter + + +async def authenticate(*, pool: Pool, email: str, password: str) -> UserInDb | None: + db_user = await get_user_by_email(pool=pool, email=email) + + if not db_user or not verify_password(password, db_user.hashed_password): + return None + + return db_user + + +async def create_user(*, pool: Pool, cache_client: Valkey, user: UserCreate) -> UserInDb: + 
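# INSERT ... RETURNING hands the new row back in one round trip, so no follow-up SELECT is needed. + query = """ + INSERT INTO users ( + id, + email, + full_name, + hashed_password, + is_active, + is_superuser 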
+ ) + VALUES ($1, $2, $3, $4, $5, $6) + RETURNING + id, + email, + full_name, + hashed_password, + is_active, + is_superuser, + last_login + """ + + async with pool.acquire() as conn: + result = await conn.fetchrow( + query, + create_db_primary_key(), + user.email, + user.full_name, + get_password_hash(user.password), + user.is_active, + user.is_superuser, + ) + + # failsafe: this shouldn't happen + if not result: # pragma: no cover + raise DbInsertError("Unable to find user after inserting") + + logger.debug("Deleting cached users public") + await user_cache_services.delete_all_users_public(cache_client=cache_client) + + return UserInDb(**dict(result)) + + +async def delete_user(*, pool: Pool, cache_client: Valkey, user_id: str) -> None: + query = "DELETE FROM users WHERE id = $1" + async with pool.acquire() as conn: + async with asyncio.TaskGroup() as tg: + db_task = tg.create_task(conn.execute(query, user_id)) + tg.create_task( + user_cache_services.delete_cached_user(cache_client=cache_client, user_id=user_id) + ) + + result = await db_task + + if result == "DELETE 0": # pragma: no cover + raise UserNotFoundError(f"User with id {{user_id}} not found") + + +async def get_users(*, pool: Pool, offset: int = 0, limit: int = 100) -> list[UserInDb] | None: + query = """ + SELECT id, + email, + full_name, + hashed_password, + is_active, + is_superuser, + last_login + FROM users + OFFSET $1 + LIMIT $2 + """ + + async with pool.acquire() as conn: + results = await conn.fetch(query, offset, limit) + + # Failsafe: this shouldn't happen because the first superuser always gets added at startup + if not results: # pragma: no cover + return None + + return [UserInDb(**x) for x in results] + + +async def get_users_public( + *, + pool: Pool, + cache_client: Valkey, + offset: int = 0, + limit: int = 100, +) -> UsersPublic: + cached_users = await user_cache_services.get_users_public( + cache_client=cache_client, + offset=offset, + limit=limit, + ) + + if cached_users: + logger.debug("Users page found in cache, returning") + return cached_users + + async with asyncio.TaskGroup() as tg: + users_task = tg.create_task(get_users(pool=pool, offset=offset, limit=limit)) + total_task = tg.create_task(get_total_user_count(pool=pool)) + + db_users = await users_task + total = await total_task + data = [UserPublic(**user.model_dump()) for user in db_users] if db_users else [] + users_public = UsersPublic(data=data, count=len(data), total_users=total) + + logger.debug("Caching users public") + await user_cache_services.cache_users_public( + cache_client=cache_client, users_public=users_public, offset=offset, limit=limit + ) + + return users_public + + +async def get_user_by_email(*, pool: Pool, email: str) -> UserInDb | None: + query = """ + SELECT id, + email, + full_name, + hashed_password, + is_active, + is_superuser, + last_login + FROM users + WHERE email = $1 + """ + async with pool.acquire() as conn: + db_user = await conn.fetchrow(query, email) + + if not db_user: + return None + + return UserInDb(**dict(db_user)) + + +async def get_user_public_by_email(*, pool: Pool, email: str) -> UserPublic | None: + user = await get_user_by_email(pool=pool, email=email) + if not user: + return None + + return UserPublic(**user.model_dump()) + + +async def get_user_by_id(*, pool: Pool, cache_client: Valkey, user_id: str) -> UserInDb | None: + cached_user = await user_cache_services.get_cached_user( + cache_client=cache_client, user_id=user_id + ) + + if cached_user: + logger.debug("User found in cache, returning") + 
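# Cache hit; skip the database query entirely. + 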
return cached_user + + query = """ + SELECT id, + email, + full_name, + hashed_password, + is_active, + is_superuser, + last_login + FROM users + WHERE id = $1 + """ + + async with pool.acquire() as conn: + db_user = await conn.fetchrow(query, user_id) + + if not db_user: + return None + + user = UserInDb(**db_user) + + logger.debug("Caching user") + await user_cache_services.cache_user(cache_client=cache_client, user=user) + + return user + + +async def get_user_public_by_id( + *, + pool: Pool, + cache_client: Valkey, + user_id: str, + active_filter: ActiveFilter = "all", +) -> UserPublic | None: + user = await get_user_by_id(pool=pool, cache_client=cache_client, user_id=user_id) + + if not user: + return None + + return UserPublic(**user.model_dump()) + + +async def get_total_user_count(*, pool: Pool) -> int: + query = """ + SELECT COUNT(*) as total_count + FROM users + """ + + async with pool.acquire() as conn: + result = await conn.fetchrow(query) + + return result["total_count"] + + +async def update_user( + *, + pool: Pool, + cache_client: Valkey, + db_user: UserInDb, + user_in: UserUpdate | UserUpdateMe | UpdatePassword, +) -> UserInDb: + if isinstance(user_in, UpdatePassword): + query = """ + UPDATE users + SET hashed_password=$1 + WHERE id = $2 + RETURNING + id, + email, + full_name, + hashed_password, + is_active, + is_superuser, + last_login + """ + + async with pool.acquire() as conn: + async with asyncio.TaskGroup() as tg: + db_task = tg.create_task( + conn.fetchrow(query, get_password_hash(user_in.new_password), db_user.id) + ) + tg.create_task( + user_cache_services.delete_cached_user( + cache_client=cache_client, user_id=db_user.id + ) + ) + + result = await db_task + + else: + user_data = user_in.model_dump(exclude_unset=True) + if "password" in user_data: + user_data["hashed_password"] = get_password_hash(user_data.pop("password")) + set_clause = ", ".join([f"{{key}} = ${{i + 2}}" for i, key in enumerate(user_data.keys())]) + query = f""" + UPDATE users + SET {{set_clause}} + WHERE id = $1 + RETURNING + id, + email, + full_name, + hashed_password, + is_active, + is_superuser, + last_login + """ + + async with pool.acquire() as conn: + async with asyncio.TaskGroup() as tg: + db_task = tg.create_task(conn.fetchrow(query, db_user.id, *user_data.values())) + tg.create_task( + user_cache_services.delete_cached_user( + cache_client=cache_client, user_id=db_user.id + ) + ) + + result = await db_task + + if not result or result == "UPDATE 0": # pragma: no cover + raise DbUpdateError("Error updating user") + + return UserInDb(**dict(result)) +"# + ) +} + +pub fn save_db_user_services_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.source_dir_path(); + let file_path = base.join("services/db/user_services.py"); + let file_content = create_db_user_services_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} diff --git a/src/fastapi/test_files.rs b/src/fastapi/test_files.rs new file mode 100644 index 00000000..216412fd --- /dev/null +++ b/src/fastapi/test_files.rs @@ -0,0 +1,1442 @@ +use anyhow::Result; + +use crate::{file_manager::save_file_with_content, project_info::ProjectInfo}; + +fn create_config_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"import pytest +from pydantic import AnyUrl, SecretStr + +from {module}.core.config import Settings + + +def test_check_default_secret_production(): + with pytest.raises(ValueError): + Settings( + 
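# SECRET_KEY left at the insecure default ("changethis") must be rejected when ENVIRONMENT is "production". + 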
FIRST_SUPERUSER_EMAIL="user@email.com", + FIRST_SUPERUSER_PASSWORD=SecretStr("Abc$123be"), + FIRST_SUPERUSER_NAME="Some Name", + POSTGRES_HOST="http://localhost", + POSTGRES_USER="postgres", + POSTGRES_PASSWORD=SecretStr("Somepassword!"), + POSTGRES_DB="test_db", + VALKEY_HOST="http://localhost", + VALKEY_PASSWORD=SecretStr("Somepassword!"), + ENVIRONMENT="production", + SECRET_KEY=SecretStr("changethis"), + ) + + +def test_check_default_secret_testing(): + with pytest.raises(ValueError): + Settings( + FIRST_SUPERUSER_EMAIL="user@email.com", + FIRST_SUPERUSER_PASSWORD=SecretStr("Abc$123be"), + FIRST_SUPERUSER_NAME="Some Name", + POSTGRES_HOST="http://localhost", + POSTGRES_USER="postgres", + POSTGRES_PASSWORD=SecretStr("Somepassword!"), + POSTGRES_DB="test_db", + VALKEY_HOST="http://localhost", + VALKEY_PASSWORD=SecretStr("Somepassword!"), + ENVIRONMENT="testing", + SECRET_KEY=SecretStr("changethis"), + ) + + +def test_check_default_secret_local(): + with pytest.warns( + UserWarning, + match='The value of SECRET_KEY is "changethis", for security, please change it, at least for deployments.', + ): + Settings( + FIRST_SUPERUSER_EMAIL="user@email.com", + FIRST_SUPERUSER_PASSWORD=SecretStr("Abc$123be"), + FIRST_SUPERUSER_NAME="Some Name", + POSTGRES_HOST="http://localhost", + POSTGRES_USER="postgres", + POSTGRES_PASSWORD=SecretStr("Somepassword!"), + POSTGRES_DB="test_db", + VALKEY_HOST="http://localhost", + VALKEY_PASSWORD=SecretStr("Somepassword!"), + ENVIRONMENT="local", + SECRET_KEY=SecretStr("changethis"), + ) + + +def test_server_host_production(): + settings = Settings( + FIRST_SUPERUSER_EMAIL="user@email.com", + FIRST_SUPERUSER_PASSWORD=SecretStr("Abc$123be"), + FIRST_SUPERUSER_NAME="Some Name", + POSTGRES_HOST="http://localhost", + POSTGRES_USER="postgres", + POSTGRES_PASSWORD=SecretStr("Somepassword!"), + POSTGRES_DB="test_db", + VALKEY_HOST="http://localhost", + VALKEY_PASSWORD=SecretStr("Somepassword!"), + SECRET_KEY=SecretStr("Somesecretkey"), + ENVIRONMENT="production", + ) + + assert settings.server_host == f"https://{{settings.DOMAIN}}" + + +def test_server_host_testing(): + settings = Settings( + FIRST_SUPERUSER_EMAIL="user@email.com", + FIRST_SUPERUSER_PASSWORD=SecretStr("Abc$123be"), + FIRST_SUPERUSER_NAME="Some Name", + POSTGRES_HOST="http://localhost", + POSTGRES_USER="postgres", + POSTGRES_PASSWORD=SecretStr("Somepassword!"), + POSTGRES_DB="test_db", + VALKEY_HOST="http://localhost", + VALKEY_PASSWORD=SecretStr("Somepassword!"), + SECRET_KEY=SecretStr("Somesecretkey"), + ENVIRONMENT="testing", + ) + + assert settings.server_host == f"https://{{settings.DOMAIN}}" + + +def test_server_host_local(): + settings = Settings( + FIRST_SUPERUSER_EMAIL="user@email.com", + FIRST_SUPERUSER_PASSWORD=SecretStr("Abc$123be"), + FIRST_SUPERUSER_NAME="Some Name", + POSTGRES_HOST="http://localhost", + POSTGRES_USER="postgres", + POSTGRES_PASSWORD=SecretStr("Somepassword!"), + POSTGRES_DB="test_db", + VALKEY_HOST="http://localhost", + VALKEY_PASSWORD=SecretStr("Somepassword!"), + SECRET_KEY=SecretStr("Somesecretkey"), + ENVIRONMENT="local", + ) + + assert settings.server_host == f"http://{{settings.DOMAIN}}" + + +def test_parse_cors_error(): + with pytest.raises(ValueError): + Settings( + FIRST_SUPERUSER_EMAIL="user@email.com", + FIRST_SUPERUSER_PASSWORD=SecretStr("Abc$123be"), + FIRST_SUPERUSER_NAME="Some Name", + POSTGRES_HOST="http://localhost", + POSTGRES_USER="postgres", + POSTGRES_PASSWORD=SecretStr("Somepassword!"), + POSTGRES_DB="test_db", + VALKEY_HOST="http://localhost", + 
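# BACKEND_CORS_ORIGINS=1 below is not a valid origins value, so validation should raise. + 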
VALKEY_PASSWORD=SecretStr("Somepassword!"), + SECRET_KEY=SecretStr("Somesecretkey"), + BACKEND_CORS_ORIGINS=1, # type: ignore + ) + + +def test_parse_cors_string(): + settings = Settings( + FIRST_SUPERUSER_EMAIL="user@email.com", + FIRST_SUPERUSER_PASSWORD=SecretStr("Abc$123be"), + FIRST_SUPERUSER_NAME="Some Name", + POSTGRES_HOST="http://localhost", + POSTGRES_USER="postgres", + POSTGRES_PASSWORD=SecretStr("Somepassword!"), + POSTGRES_DB="test_db", + VALKEY_HOST="http://localhost", + VALKEY_PASSWORD=SecretStr("Somepassword!"), + SECRET_KEY=SecretStr("Somesecretkey"), + BACKEND_CORS_ORIGINS="http://localhost, http://127.0.0.1", + ) + + assert settings.BACKEND_CORS_ORIGINS == [AnyUrl("http://localhost"), AnyUrl("http://127.0.0.1")] +"# + ) +} + +pub fn save_config_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/core/test_config.py"); + let file_content = create_config_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_conftest_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from __future__ import annotations + +import itertools +import os +import subprocess +from pathlib import Path +from unittest.mock import patch +from uuid import uuid4 + +import asyncpg +import pytest +from httpx import ASGITransport, AsyncClient + +from {module}.api.deps import get_cache_client, get_db_pool +from {module}.core.cache import cache +from {module}.core.config import settings +from {module}.core.db import Database +from {module}.main import app +from {module}.models.users import UserCreate +from {module}.services.db import user_services +from tests.utils import ( + get_superuser_token_headers, + random_email, + random_lower_string, + random_password, +) + +ROOT_PATH = Path().absolute() +ASSETS_DIR = ROOT_PATH / "tests" / "assets" + + +async def user_authentication_headers(test_client, email, password): + data = {{"username": email, "password": password}} + + result = await test_client.post("/login/access-token", data=data) + response = result.json() + auth_token = response["access_token"] + return {{"Authorization": f"Bearer {{auth_token}}"}} + + +@pytest.fixture(scope="session") +def valkey_db_index(worker_id): + if worker_id == "master": + return 0 + else: + return int(worker_id.lstrip("gw")) + 1 + + +DBS_PER_WORKER = 5 +MAX_DB_INDEX = 99 +MAX_WORKERS = MAX_DB_INDEX // DBS_PER_WORKER +_db_counters: dict[str, itertools.count[int]] = {{}} + + +@pytest.fixture +def next_db(worker_id): + """Calculate db number per worker so data doesn't clash in parallel tests.""" + if worker_id == "master": + return 1 + + worker_num = int(worker_id.lstrip("gw") or "0") + + if worker_num >= MAX_WORKERS: + raise RuntimeError( + f"Worker {{worker_id}} exceeds DB allocation limit (max {{MAX_WORKERS}} workers). " + "Either reduce number of workers or decrease DBS_PER_WORKER." 
+ ) + + base = 1 + (worker_num * DBS_PER_WORKER) # skip db=0 + if base + DBS_PER_WORKER - 1 > MAX_DB_INDEX: + raise RuntimeError(f"Worker {{worker_id}} would exceed MAX_DB_INDEX with base {{base}}") + + if worker_id not in _db_counters: + _db_counters[worker_id] = itertools.count(0) + + offset = next(_db_counters[worker_id]) % DBS_PER_WORKER + db_index = base + offset + + return db_index + + +@pytest.fixture +def db_name(worker_id): + base_name = "{module}_test" + unique_suffix = str(uuid4()).replace("-", "")[:8] + if worker_id == "master": + return f"{{base_name}}_{{unique_suffix}}" + return f"{{base_name}}_{{worker_id}}_{{unique_suffix}}" + + +@pytest.fixture(autouse=True) +async def test_cache(next_db): + await cache.create_client(db=next_db) + yield cache + await cache.client.flushdb() # type: ignore + await cache.close_client() + + +@pytest.fixture +def apply_migrations(db_name): + test_db_url = f"postgresql://{{settings.POSTGRES_USER}}:{{settings.POSTGRES_PASSWORD.get_secret_value()}}@{{settings.POSTGRES_HOST}}:5432/{{db_name}}" + migration_dir = ROOT_PATH + + with patch.dict(os.environ, {{"DATABASE_URL": test_db_url}}): + subprocess.run(["sqlx", "database", "create"], cwd=migration_dir) + subprocess.run(["sqlx", "migrate", "run"], cwd=migration_dir) + yield + + +@pytest.fixture +async def test_db(db_name, apply_migrations): + test_db = Database(db_name=db_name) + await test_db.create_pool(min_size=1, max_size=2) + await test_db.create_first_superuser() + yield test_db + await test_db.close_pool() + + # Need to connect to "postgres" db instead of the db being dropped + conn = await asyncpg.connect( + database="postgres", + user=settings.POSTGRES_USER, + password=settings.POSTGRES_PASSWORD.get_secret_value(), + host=settings.POSTGRES_HOST, + ) + + # Terminate any remaining connections to the test database + await conn.execute( + """ + SELECT pg_terminate_backend(pid) + FROM pg_stat_activity + WHERE datname = $1 AND pid <> pg_backend_pid() + """, + db_name, + ) + + await conn.execute(f'DROP DATABASE "{{db_name}}"') + await conn.close() + + +@pytest.fixture +async def test_client(test_db, test_cache): + app.dependency_overrides[get_cache_client] = lambda: test_cache.client + app.dependency_overrides[get_db_pool] = lambda: test_db.db_pool + async with AsyncClient( + transport=ASGITransport(app=app), base_url=f"http://127.0.0.1{{settings.API_V1_PREFIX}}" + ) as client: + yield client + app.dependency_overrides.clear() + + +@pytest.fixture +async def superuser_token_headers(test_client): + return await get_superuser_token_headers(test_client) + + +@pytest.fixture +def normal_user_credentials(): + return {{ + "password": random_password(), + "full_name": random_lower_string(), + "email": random_email(), + }} + + +@pytest.fixture +async def normal_user_token_headers(test_db, test_client, test_cache, normal_user_credentials): + user = await user_services.get_user_by_email( + pool=test_db.db_pool, email=normal_user_credentials["email"] + ) + if not user: + user = await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=normal_user_credentials["email"], + password=normal_user_credentials["password"], + full_name=normal_user_credentials["full_name"], + ), + ) + + return await user_authentication_headers( + test_client=test_client, + email=normal_user_credentials["email"], + password=normal_user_credentials["password"], + ) + + +@pytest.fixture +async def test_user(test_db, test_cache): + email = random_email() + password = 
random_password() + full_name = random_lower_string() + user = await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=email, + password=password, + full_name=full_name, + ), + ) + + return user +"# + ) +} + +pub fn save_conftest_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/conftest.py"); + let file_content = create_conftest_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_health_route_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from unittest.mock import AsyncMock, MagicMock + +import pytest +from httpx import ASGITransport, AsyncClient + +from {module}.api.deps import get_cache_client, get_db_pool +from {module}.core.config import settings +from {module}.main import app + + +@pytest.fixture +def failing_db_pool(): + mock_pool = MagicMock() + mock_acquire = AsyncMock() + mock_acquire.__aenter__.side_effect = Exception("DB down") + mock_pool.acquire.return_value = mock_acquire + + return mock_pool + + +@pytest.fixture +async def test_client_bad_db(failing_db_pool, test_cache): + app.dependency_overrides[get_cache_client] = lambda: test_cache.client + app.dependency_overrides[get_db_pool] = lambda: failing_db_pool + async with AsyncClient( + transport=ASGITransport(app=app), base_url=f"http://127.0.0.1{{settings.API_V1_PREFIX}}" + ) as client: + yield client + app.dependency_overrides.clear() + + +@pytest.fixture +def failing_cache_client(): + mock_client = MagicMock() + mock_acquire = AsyncMock() + mock_acquire.__aenter__.side_effect = Exception("Cache down") + mock_client.acquire.return_value = mock_acquire + return mock_client + + +@pytest.fixture +async def test_client_bad_cache(failing_cache_client, test_db): + app.dependency_overrides[get_cache_client] = lambda: failing_cache_client + app.dependency_overrides[get_db_pool] = lambda: test_db.db_pool + async with AsyncClient( + transport=ASGITransport(app=app), base_url=f"http://127.0.0.1{{settings.API_V1_PREFIX}}" + ) as client: + yield client + app.dependency_overrides.clear() + + +async def test_health(test_client): + result = await test_client.get("health") + + assert result.status_code == 200 + assert result.json()["server"] == "healthy" + assert result.json()["db"] == "healthy" + assert result.json()["cache"] == "healthy" + + +async def test_health_no_db(test_client_bad_db): + result = await test_client_bad_db.get("health") + + assert result.status_code == 200 + assert result.json()["server"] == "healthy" + assert result.json()["db"] == "unhealthy" + assert result.json()["cache"] == "healthy" + + +async def test_health_no_cache(test_client_bad_cache): + result = await test_client_bad_cache.get("health") + + assert result.status_code == 200 + assert result.json()["server"] == "healthy" + assert result.json()["db"] == "healthy" + assert result.json()["cache"] == "unhealthy" +"# + ) +} + +pub fn save_health_route_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/api/routes/test_health_route.py"); + let file_content = create_health_route_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_login_route_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from unittest.mock import Mock + +from 
fastapi import Request + +from {module}.api.deps import get_current_user +from {module}.core.config import settings +from tests.utils import random_password + + +async def test_get_access_token(test_client): + login_data = {{ + "username": settings.FIRST_SUPERUSER_EMAIL, + "password": settings.FIRST_SUPERUSER_PASSWORD.get_secret_value(), + }} + response = await test_client.post("/login/access-token", data=login_data) + tokens = response.json() + assert response.status_code == 200 + assert "access_token" in tokens + assert tokens["access_token"] + + +async def test_get_access_token_incorrect_password(test_client): + login_data = {{ + "username": settings.FIRST_SUPERUSER_EMAIL, + "password": random_password(), + }} + response = await test_client.post("/login/access-token", data=login_data) + assert response.status_code == 400 + + +async def test_use_access_token(test_client, superuser_token_headers): + response = await test_client.post( + "/login/test-token", + headers=superuser_token_headers, + ) + result = response.json() + assert response.status_code == 200 + assert "email" in result + + +async def test_access_token_inactive_user( + test_client, + superuser_token_headers, + normal_user_token_headers, + normal_user_credentials, + test_db, + test_cache, +): + mock_request = Mock(spec=Request) + mock_request.url.path = "/api/v1/users/me" + + user = await get_current_user( + test_db.db_pool, + test_cache.client, + normal_user_token_headers["Authorization"].split(" ", 1)[1], + ) + + test_client.cookies.clear() + response = await test_client.patch( + f"/users/{{user.id}}", + headers=superuser_token_headers, + json={{"fullName": user.full_name, "isActive": False}}, + ) + + assert response.status_code == 200 + + login_data = {{ + "username": user.email, + "password": normal_user_credentials["password"], + }} + test_client.cookies.clear() + response = await test_client.post("/login/access-token", data=login_data) + + assert response.status_code == 401 +"# + ) +} + +pub fn save_login_route_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/api/routes/test_login_routes.py"); + let file_content = create_login_route_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_test_utils_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"import random +import string + +from {module}.core.config import settings + + +def random_email() -> str: + return f"{{random_lower_string()}}@{{random_lower_string()}}.com" + + +def random_lower_string() -> str: + return "".join(random.choices(string.ascii_lowercase, k=32)) + + +def random_password() -> str: + password = "".join(random.choices(string.ascii_lowercase, k=32)) + return f"A{{password}}1_" + + +async def get_superuser_token_headers(test_client): + login_data = {{ + "username": settings.FIRST_SUPERUSER_EMAIL, + "password": settings.FIRST_SUPERUSER_PASSWORD.get_secret_value(), + }} + response = await test_client.post("/login/access-token", data=login_data) + tokens = response.json() + a_token = tokens["access_token"] + headers = {{"Authorization": f"Bearer {{a_token}}"}} + return headers +"# + ) +} + +pub fn save_test_utils_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/utils.py"); + let file_content = create_test_utils_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) 
+} + +fn create_test_deps_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"import pytest + +from fastapi import HTTPException + +from {module}.api.deps import get_cache_client, get_current_user, get_db_pool +from {module}.core.cache import cache +from {module}.core.db import db + + +async def test_auth_no_authorization_in_header(test_client, normal_user_token_headers): + del normal_user_token_headers["Authorization"] + test_client.cookies.clear() + response = await test_client.get( + "/users/me", + headers=normal_user_token_headers, + ) + + assert response.status_code == 401 + + +async def test_auth_no_bearer(test_client, normal_user_token_headers): + normal_user_token_headers["Authorization"] = normal_user_token_headers[ + "Authorization" + ].removeprefix("Bearer ") + test_client.cookies.clear() + response = await test_client.get( + "/users/me", + headers=normal_user_token_headers, + ) + + assert response.status_code == 401 + + +async def test_get_current_user_invalid_token(test_db, test_cache): + with pytest.raises(HTTPException) as ex: + await get_current_user( + test_db.db_pool, + test_cache.client, + "e", + ) + + assert ex.value.status_code == 403 + + +async def test_get_current_user_inactive( + test_client, test_cache, normal_user_token_headers, superuser_token_headers, test_db +): + user = await get_current_user( + test_db.db_pool, + test_cache.client, + normal_user_token_headers["Authorization"].split(" ", 1)[1], + ) + + test_client.cookies.clear() + response = await test_client.patch( + f"/users/{{user.id}}", + headers=superuser_token_headers, + json={{"fullName": user.full_name, "isActive": False}}, + ) + + assert response.status_code == 200 + + with pytest.raises(HTTPException) as ex: + await get_current_user( + test_db.db_pool, + test_cache.client, + normal_user_token_headers["Authorization"].split(" ", 1)[1], + ) + + assert ex.value.status_code == 403 + + +@pytest.fixture +async def temp_db_pool(): + await db.create_pool() + yield + await db.close_pool() + + +@pytest.mark.usefixtures("temp_db_pool") +async def test_get_db_pool_success(): + async for pool in get_db_pool(): + assert pool is not None + + +@pytest.fixture +async def temp_cache_client(): + await cache.create_client() + yield + await cache.close_client() + + +@pytest.mark.usefixtures("temp_cache_client") +async def test_get_cache_client_success(): + async for client in get_cache_client(): + assert client is not None +"# + ) +} + +pub fn save_test_deps_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/api/test_deps.py"); + let file_content = create_test_deps_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_user_model_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"import pytest + +from {module}.models.users import UserCreate, UserUpdate +from tests.utils import random_email, random_lower_string + + +@pytest.mark.parametrize("password", ("loweronly1.", "UPPER1*ONLY", "no@Number", "nospEcial4")) +def test_user_create_invalid_password(password): + with pytest.raises(ValueError) as e: + UserCreate( + email=random_email(), + full_name=random_lower_string(), + 
password=password, + ) + + assert ( + "Password must contain at least one uppercase letter, one lowercase letter, one number, and one special character" + in (str(e.value)) + ) + + +def test_user_create_short_password(): + with pytest.raises(ValueError) as e: + UserCreate( + email=random_email(), + full_name=random_lower_string(), + password="Short1_", + ) + + assert "at least 8 characters" in (str(e.value)) + + +@pytest.mark.parametrize("password", ("loweronly1.", "UPPER1*ONLY", "no@Number", "nospEcial4")) +def test_user_update_invalid_password(password): + with pytest.raises(ValueError) as e: + UserUpdate( + email=random_email(), + full_name=random_lower_string(), + password=password, + ) + + assert ( + "Password must contain at least one uppercase letter, one lowercase letter, one number, and one special character" + in (str(e.value)) + ) + + +def test_user_update_short_password(): + with pytest.raises(ValueError) as e: + UserUpdate( + email=random_email(), + full_name=random_lower_string(), + password="Short1_", + ) + + assert "at least 8 characters" in (str(e.value)) +"# + ) +} + +pub fn save_user_model_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/models/test_users.py"); + let file_content = create_user_model_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_user_routes_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from uuid import uuid4 + +from {module}.core.config import settings +from {module}.core.security import verify_password +from {module}.models.users import UserCreate +from {module}.services.db import user_services +from tests.utils import random_email, random_lower_string, random_password + + +async def test_get_users_superuser_me(test_client, superuser_token_headers): + response = await test_client.get("/users/me", headers=superuser_token_headers) + current_user = response.json() + assert current_user is not None + assert current_user["isActive"] is True + assert current_user["isSuperuser"] + assert current_user["email"] == settings.FIRST_SUPERUSER_EMAIL + assert current_user["fullName"] == settings.FIRST_SUPERUSER_NAME + + +async def test_get_users_normal_user_me(test_client, normal_user_token_headers): + response = await test_client.get("/users/me", headers=normal_user_token_headers) + current_user = response.json() + assert current_user is not None + assert current_user["isActive"] is True + assert current_user["isSuperuser"] is False + assert current_user["email"] is not None + + +async def test_get_existing_user(test_db, test_client, superuser_token_headers, test_user): + user_id = test_user.id + response = await test_client.get( + f"/users/{{user_id}}", + headers=superuser_token_headers, + ) + assert 200 <= response.status_code < 300 + api_user = response.json() + existing_user = await user_services.get_user_by_email( + pool=test_db.db_pool, email=test_user.email + ) + assert existing_user + assert existing_user.email == api_user["email"] + + +async def test_get_user_not_found(test_client, superuser_token_headers): + response = await test_client.get( + "/users/bad", + headers=superuser_token_headers, + ) + assert response.status_code == 404 + + +async def test_get_existing_user_current_user(test_client, test_db, test_cache): + email = random_email() + password = random_password() + full_name = random_lower_string() + user = await user_services.create_user( + 
pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=email, + password=password, + full_name=full_name, + ), + ) + + user_id = user.id + login_data = {{ + "username": email, + "password": password, + }} + response = await test_client.post("/login/access-token", data=login_data) + tokens = response.json() + access_token = tokens["access_token"] + headers = {{"Authorization": f"Bearer {{access_token}}"}} + + response = await test_client.get( + f"/users/{{user_id}}", + headers=headers, + ) + assert 200 <= response.status_code < 300 + api_user = response.json() + existing_user = await user_services.get_user_by_email(pool=test_db.db_pool, email=email) + assert existing_user + assert existing_user.email == api_user["email"] + + +async def test_get_existing_user_permissions_error( + test_client, normal_user_token_headers, test_user +): + response = await test_client.get( + f"/users/{{test_user.id}}", + headers=normal_user_token_headers, + ) + assert response.status_code == 403 + assert response.json() == {{"detail": "The user doesn't have enough privileges"}} + + +async def test_create_user(test_client): + username = random_email() + password = random_password() + full_name = random_lower_string() + data = {{ + "email": username, + "password": password, + "fullName": full_name, + }} + response = await test_client.post( + "/users/", + json=data, + ) + assert response.status_code == 200 + + +async def test_create_user_existing_username(test_client, test_db, test_cache): + username = random_email() + password = random_password() + full_name = random_lower_string() + await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=username, + password=password, + full_name=full_name, + ), + ) + data = {{ + "email": username, + "password": password, + "fullName": full_name, + }} + response = await test_client.post( + "/users/", + json=data, + ) + created_user = response.json() + assert response.status_code == 400 + assert "A user with this email address already exists" in created_user["detail"] + + +async def test_read_users(test_client, superuser_token_headers, test_db, test_cache): + username = random_email() + password = random_password() + full_name = random_lower_string() + username2 = random_email() + password2 = random_password() + full_name2 = random_lower_string() + await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=username, + password=password, + full_name=full_name, + ), + ) + + await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=username2, + password=password2, + full_name=full_name2, + ), + ) + + response = await test_client.get("/users/", headers=superuser_token_headers) + all_users = response.json() + + assert len(all_users["data"]) > 1 + assert "count" in all_users + for item in all_users["data"]: + assert "email" in item + assert all_users["totalUsers"] >= 2 + + +async def test_update_user_me(test_client, normal_user_token_headers, test_db): + full_name = "Updated" + email = random_email() + data = {{"fullName": full_name, "email": email}} + response = await test_client.patch( + "/users/me", + headers=normal_user_token_headers, + json=data, + ) + assert response.status_code == 200 + updated_user = response.json() + assert updated_user["email"] == email + assert updated_user["fullName"] == full_name + + user_db = await user_services.get_user_by_email(pool=test_db.db_pool, 
email=email) + assert user_db + assert user_db.email == email + assert user_db.full_name == full_name + + +async def test_update_password_me(test_client, superuser_token_headers, test_db): + new_password = random_password() + data = {{ + "currentPassword": settings.FIRST_SUPERUSER_PASSWORD.get_secret_value(), + "newPassword": new_password, + }} + response = await test_client.patch( + "/users/me/password", + headers=superuser_token_headers, + json=data, + ) + assert response.status_code == 204 + + user_db = await user_services.get_user_by_email( + pool=test_db.db_pool, email=settings.FIRST_SUPERUSER_EMAIL + ) + assert user_db + assert user_db.email == settings.FIRST_SUPERUSER_EMAIL + assert verify_password(new_password, user_db.hashed_password) + + +async def test_update_password_me_incorrect_password(test_client, superuser_token_headers): + bad_password = random_password() + new_password = random_password() + data = {{"currentPassword": bad_password, "newPassword": new_password}} + response = await test_client.patch( + "/users/me/password", + headers=superuser_token_headers, + json=data, + ) + assert response.status_code == 400 + updated_user = response.json() + assert updated_user["detail"] == "Incorrect password" + + +async def test_update_user_me_email_exists( + test_client, test_db, normal_user_token_headers, test_cache +): + email = random_email() + password = random_password() + full_name = random_lower_string() + await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=email, + password=password, + full_name=full_name, + ), + ) + data = {{"email": email}} + response = await test_client.patch( + "/users/me", + headers=normal_user_token_headers, + json=data, + ) + assert response.status_code == 409 + assert response.json()["detail"] == "A user with this email address already exists" + + +async def test_update_password_me_same_password_error(test_client, superuser_token_headers): + data = {{ + "currentPassword": settings.FIRST_SUPERUSER_PASSWORD.get_secret_value(), + "newPassword": settings.FIRST_SUPERUSER_PASSWORD.get_secret_value(), + }} + response = await test_client.patch( + "/users/me/password", + headers=superuser_token_headers, + json=data, + ) + assert response.status_code == 400 + updated_user = response.json() + assert updated_user["detail"] == "New password cannot be the same as the current one" + + +async def test_update_user(test_client, superuser_token_headers, test_db, test_user): + data = {{"fullName": "Updated_full_name"}} + response = await test_client.patch( + f"/users/{{test_user.id}}", + headers=superuser_token_headers, + json=data, + ) + assert response.status_code == 200 + updated_user = response.json() + + assert updated_user["fullName"] == "Updated_full_name" + + user_db = await user_services.get_user_by_email(pool=test_db.db_pool, email=test_user.email) + assert user_db + assert user_db.full_name == "Updated_full_name" + + +async def test_update_user_password(test_client, superuser_token_headers, test_user): + data = {{"password": "Test_password1"}} + response = await test_client.patch( + f"/users/{{test_user.id}}", + headers=superuser_token_headers, + json=data, + ) + + assert response.status_code == 200 + + +async def test_update_user_not_exists(test_client, superuser_token_headers): + data = {{"fullName": "Updated_full_name"}} + response = await test_client.patch( + f"/users/{{str(uuid4())}}", + headers=superuser_token_headers, + json=data, + ) + assert response.status_code == 404 + assert 
response.json()["detail"] == "The user with this id does not exist in the system" + + +async def test_update_user_email_exists(test_client, superuser_token_headers, test_db, test_cache): + username = random_email() + password = random_password() + full_name = random_lower_string() + username2 = random_email() + password2 = random_password() + full_name_2 = random_lower_string() + user = await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=username, + password=password, + full_name=full_name, + ), + ) + + user2 = await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=username2, + password=password2, + full_name=full_name_2, + ), + ) + + data = {{"email": user2.email}} + response = await test_client.patch( + f"/users/{{user.id}}", + headers=superuser_token_headers, + json=data, + ) + assert response.status_code == 409 + assert response.json()["detail"] == "User with this email already exists" + + +async def test_delete_user_me(test_client, test_db, test_cache): + username = random_email() + password = random_password() + full_name = random_lower_string() + user = await user_services.create_user( + pool=test_db.db_pool, + cache_client=test_cache.client, + user=UserCreate( + email=username, + password=password, + full_name=full_name, + ), + ) + user_id = user.id + + login_data = {{ + "username": username, + "password": password, + }} + response = await test_client.post("/login/access-token", data=login_data) + tokens = response.json() + access_token = tokens["access_token"] + headers = {{"Authorization": f"Bearer {{access_token}}"}} + + response = await test_client.delete( + "/users/me", + headers=headers, + ) + assert response.status_code == 204 + result = await user_services.get_user_by_id( + pool=test_db.db_pool, cache_client=test_cache.client, user_id=user_id + ) + assert result is None + + +async def test_delete_user_me_as_superuser(test_client, superuser_token_headers): + response = await test_client.delete( + "/users/me", + headers=superuser_token_headers, + ) + assert response.status_code == 400 + response = response.json() + assert response["detail"] == "Super users are not allowed to delete themselves" + + +async def test_delete_user_super_user( + test_client, superuser_token_headers, test_db, test_user, test_cache +): + user_id = test_user.id + response = await test_client.delete( + f"/users/{{user_id}}", + headers=superuser_token_headers, + ) + assert response.status_code == 200 + deleted_user = response.json() + assert deleted_user["message"] == "User deleted successfully" + result = await user_services.get_user_by_id( + pool=test_db.db_pool, cache_client=test_cache.client, user_id=user_id + ) + assert result is None + + +async def test_delete_user_not_found(test_client, superuser_token_headers): + response = await test_client.delete( + f"/users/{{str(uuid4())}}", + headers=superuser_token_headers, + ) + assert response.status_code == 404 + assert response.json()["detail"] == "User not found" + + +async def test_delete_user_current_super_user_error(test_client, superuser_token_headers, test_db): + super_user = await user_services.get_user_by_email( + pool=test_db.db_pool, email=settings.FIRST_SUPERUSER_EMAIL + ) + assert super_user + user_id = super_user.id + + response = await test_client.delete( + f"/users/{{user_id}}", + headers=superuser_token_headers, + ) + assert response.status_code == 403 + assert response.json()["detail"] == "Super users are not 
allowed to delete themselves" + + +async def test_delete_user_without_privileges(test_client, normal_user_token_headers, test_user): + response = await test_client.delete( + f"/users/{{test_user.id}}", + headers=normal_user_token_headers, + ) + assert response.status_code == 403 + assert response.json()["detail"] == "The user doesn't have enough privileges" +"# + ) +} + +pub fn save_user_routes_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/api/routes/test_users.py"); + let file_content = create_user_routes_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_version_route_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"from {module} import __version__ + + +async def test_read_version(test_client): + response = await test_client.get("version") + assert response.status_code == 200 + assert response.json()["version"] == __version__ +"# + ) +} + +pub fn save_version_route_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/api/routes/test_version.py"); + let file_content = create_version_route_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_user_services_cache_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"import pytest + +from {module}.services.cache.user_cache_services import delete_all_users_public +from {module}.services.db import user_services + + +@pytest.mark.usefixtures("test_user") +async def test_delete_all_users_public(test_db, test_cache): + await user_services.get_users_public(pool=test_db.db_pool, cache_client=test_cache.client) + + keys_before = [key async for key in test_cache.client.scan_iter("users:public:*")] + assert len(keys_before) > 0 + + await delete_all_users_public(cache_client=test_cache.client) + + keys_after = [key async for key in test_cache.client.scan_iter("users:public:*")] + assert len(keys_after) == 0 +"# + ) +} + +pub fn save_user_services_cache_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/services/cache/test_user_services.py"); + let file_content = create_user_services_cache_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_user_services_db_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"import pytest + +from {module}.services.db.user_services import get_user_public_by_email, get_users_public +from tests.utils import random_email + + +@pytest.mark.usefixtures("test_user") +async def test_get_users_public_cache(test_db, test_cache): + result = await get_users_public(pool=test_db.db_pool, cache_client=test_cache.client) + # retrieve again to hit cache + result_cache = await get_users_public(pool=test_db.db_pool, cache_client=test_cache.client) + + assert result == result_cache + + +async def test_get_user_public_by_email(test_db, test_user): + result = await get_user_public_by_email(pool=test_db.db_pool, email=test_user.email) + + assert result is not None + assert result.email == test_user.email + + +async def test_get_user_public_by_email_not_found(test_db): + result = await get_user_public_by_email(pool=test_db.db_pool, email=random_email()) + + assert result 
is None +"# + ) +} + +pub fn save_user_services_db_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/services/db/test_user_services.py"); + let file_content = create_user_services_db_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} + +fn create_main_test_file(project_info: &ProjectInfo) -> String { + let module = &project_info.module_name(); + + format!( + r#"import importlib +from unittest.mock import patch + +from fastapi.testclient import TestClient +from loguru import logger + +from {module} import main +from {module}.core.config import settings + + +async def test_http_exception_handler(test_client, normal_user_token_headers, caplog): + logger.add(caplog.handler, level="ERROR", format="{{message}}") + + with patch( + "{module}.services.db.user_services.get_user_by_id", + side_effect=Exception("Server crashed"), + ): + response = await test_client.get("users/me", headers=normal_user_token_headers) + + assert response.status_code == 500 + assert "Server crashed" in caplog.text + + +def test_cors_middleware_added(test_client): + with patch.object( + type(settings), + "all_cors_origins", + new=property(lambda _: ["https://example.com"]), + ): + importlib.reload(main) + app = main.app + client = TestClient(app) + + resp = client.options( + "/", + headers={{ + "Origin": "https://example.com", + "Access-Control-Request-Method": "GET", + "Access-Control-Request-Headers": "Authorization, Content-Type", + }}, + ) + + assert resp.status_code == 200 + assert resp.headers.get("access-control-allow-origin") == "https://example.com" +"# + ) +} + +pub fn save_main_test_file(project_info: &ProjectInfo) -> Result<()> { + let base = &project_info.base_dir(); + let file_path = base.join("tests/test_main.py"); + let file_content = create_main_test_file(project_info); + + save_file_with_content(&file_path, &file_content)?; + + Ok(()) +} diff --git a/src/file_manager.rs b/src/file_manager.rs index 57226e1d..a8872924 100644 --- a/src/file_manager.rs +++ b/src/file_manager.rs @@ -1,6 +1,4 @@ -use std::fs::File; -use std::io::prelude::*; -use std::path::PathBuf; +use std::{fs::File, io::prelude::*, path::PathBuf}; use anyhow::Result; @@ -14,7 +12,7 @@ pub fn save_file_with_content(file_path: &PathBuf, file_content: &str) -> Result } pub fn save_empty_src_file(project_info: &ProjectInfo, file_name: &str) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); + let module = project_info.module_name(); let file_path = project_info .base_dir() .join(format!("{}/{}", &module, file_name)); diff --git a/src/github_actions.rs b/src/github_actions.rs index f1c7efec..9993bebf 100644 --- a/src/github_actions.rs +++ b/src/github_actions.rs @@ -1,8 +1,8 @@ use anyhow::{bail, Result}; -use crate::file_manager::save_file_with_content; -use crate::project_info::{ - Day, DependabotSchedule, ProjectInfo, ProjectManager, Pyo3PythonManager, +use crate::{ + file_manager::save_file_with_content, + project_info::{Day, DependabotSchedule, ProjectInfo, ProjectManager, Pyo3PythonManager}, }; fn build_actions_python_test_versions(github_action_python_test_versions: &[String]) -> String { @@ -81,6 +81,101 @@ jobs: ) } +#[cfg(feature = "fastapi")] +fn create_poetry_ci_testing_fastapi_file( + source_dir: &str, + min_python_version: &str, + github_action_python_test_versions: &[String], +) -> String { + let python_versions = 
build_actions_python_test_versions(github_action_python_test_versions); + + format!( + r#"name: Testing + +on: + push: + branches: + - main + pull_request: +env: + PYTHON_VERSION: "{min_python_version}" + SECRET_KEY: "someKey" + PRODUCTION_MODE: false + FIRST_SUPERUSER_EMAIL: "some@email.com" + FIRST_SUPERUSER_PASSWORD: "somePassword1!" + FIRST_SUPERUSER_NAME: "Super User" + POSTGRES_HOST: "127.0.0.1" + POSTGRES_USER: "postgres" + POSTGRES_PASSWORD: "test_password" + POSTGRES_DB: "test_db" + VALKEY_HOST: "127.0.0.1" + VALKEY_PASSWORD: "test_password" + MEILISEARCH_HOST: http://127.0.0.1 + STACK_NAME: "test-stack" + DOMAIN: "127.0.0.1" + LOG_LEVEL: "DEBUG" + CI: true +jobs: + linting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Poetry + run: pipx install poetry + - name: Configure poetry + run: | + poetry config virtualenvs.create true + poetry config virtualenvs.in-project true + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: ${{{{ env.PYTHON_VERSION }}}} + cache: "poetry" + - name: Install Dependencies + run: poetry install + - name: Ruff format check + run: poetry run ruff format {source_dir} tests --check + - name: Lint with ruff + run: poetry run ruff check . + - name: mypy check + run: poetry run mypy . + testing: + strategy: + fail-fast: false + matrix: + python-version: [{python_versions}] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + - name: Install sqlx-cli + run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres + - name: Install Poetry + run: pipx install poetry + - name: Configure poetry + run: | + poetry config virtualenvs.create true + poetry config virtualenvs.in-project true + - name: Set up Python ${{{{ matrix.python-version }}}} + uses: actions/setup-python@v6 + with: + python-version: ${{{{ matrix.python-version }}}} + cache: "poetry" + - name: Install Dependencies + run: poetry install + - name: make .env + run: touch .env + - name: Start docker containers + run: docker compose up db valkey migrations -d + - name: Test with pytest + run: poetry run pytest -n auto +"# + ) +} + fn create_setuptools_ci_testing_linux_only_file( source_dir: &str, min_python_version: &str, github_action_python_test_versions: &[String], ) -> String { let python_versions = build_actions_python_test_versions(github_action_python_test_versions); @@ -141,6 +236,93 @@ jobs: ) } +#[cfg(feature = "fastapi")] +fn create_setuptools_ci_testing_fastapi_file( + source_dir: &str, + min_python_version: &str, + github_action_python_test_versions: &[String], +) -> String { + let python_versions = build_actions_python_test_versions(github_action_python_test_versions); + + format!( + r#"name: Testing + +on: + push: + branches: + - main + pull_request: +env: + PYTHON_VERSION: "{min_python_version}" + SECRET_KEY: "someKey" + PRODUCTION_MODE: false + FIRST_SUPERUSER_EMAIL: "some@email.com" + FIRST_SUPERUSER_PASSWORD: "somePassword1!" 
+ FIRST_SUPERUSER_NAME: "Super User" + POSTGRES_HOST: "127.0.0.1" + POSTGRES_USER: "postgres" + POSTGRES_PASSWORD: "test_password" + POSTGRES_DB: "test_db" + VALKEY_HOST: "127.0.0.1" + VALKEY_PASSWORD: "test_password" + MEILISEARCH_HOST: http://127.0.0.1 + STACK_NAME: "test-stack" + DOMAIN: "127.0.0.1" + LOG_LEVEL: "DEBUG" + CI: true +jobs: + linting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: ${{{{ env.PYTHON_VERSION }}}} + cache: "pip" + - name: Install Dependencies + run: | + python -m pip install -U pip + python -m pip install -r requirements-dev.txt + - name: Ruff format check + run: ruff format {source_dir} tests --check + - name: Lint with ruff + run: ruff check . + - name: mypy check + run: mypy . + testing: + strategy: + fail-fast: false + matrix: + python-version: [{python_versions}] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + - name: Install sqlx-cli + run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres + - name: Set up Python ${{{{ matrix.python-version }}}} + uses: actions/setup-python@v6 + with: + python-version: ${{{{ matrix.python-version }}}} + cache: "pip" + - name: Install Dependencies + run: | + python -m pip install -U pip + python -m pip install -r requirements-dev.txt + - name: make .env + run: touch .env + - name: Start docker containers + run: docker compose up db valkey migrations -d + - name: Test with pytest + run: python -m pytest -n auto +"# + ) +} + fn create_uv_ci_testing_linux_only_file( source_dir: &str, min_python_version: &str, @@ -172,13 +354,245 @@ jobs: with: python-version: ${{{{ env.PYTHON_VERSION }}}} - name: Install Dependencies - run: uv sync --frozen + run: uv sync --frozen + - name: Ruff format check + run: uv run ruff format {source_dir} tests --check + - name: Lint with ruff + run: uv run ruff check . + - name: mypy check + run: uv run mypy . + testing: + strategy: + fail-fast: false + matrix: + python-version: [{python_versions}] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + - name: Set up Python ${{{{ matrix.python-version }}}} + uses: actions/setup-python@v6 + with: + python-version: ${{{{ matrix.python-version }}}} + - name: Install Dependencies + run: uv sync --frozen + - name: Test with pytest + run: uv run pytest +"# + ) +} + +#[cfg(feature = "fastapi")] +fn create_uv_ci_testing_fastapi_file( + source_dir: &str, + min_python_version: &str, + github_action_python_test_versions: &[String], +) -> String { + let python_versions = build_actions_python_test_versions(github_action_python_test_versions); + + format!( + r#"name: Testing + +on: + push: + branches: + - main + pull_request: +env: + PYTHON_VERSION: "{min_python_version}" + SECRET_KEY: "someKey" + PRODUCTION_MODE: false + FIRST_SUPERUSER_EMAIL: "some@email.com" + FIRST_SUPERUSER_PASSWORD: "somePassword1!" 
+ FIRST_SUPERUSER_NAME: "Super User" + POSTGRES_HOST: "127.0.0.1" + POSTGRES_USER: "postgres" + POSTGRES_PASSWORD: "test_password" + POSTGRES_DB: "test_db" + VALKEY_HOST: "127.0.0.1" + VALKEY_PASSWORD: "test_password" + MEILISEARCH_HOST: http://127.0.0.1 + STACK_NAME: "test-stack" + DOMAIN: "127.0.0.1" + LOG_LEVEL: "DEBUG" + CI: true +jobs: + linting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: ${{{{ env.PYTHON_VERSION }}}} + - name: Install Dependencies + run: uv sync --frozen + - name: Ruff format check + run: uv run ruff format {source_dir} tests --check + - name: Lint with ruff + run: uv run ruff check . + - name: mypy check + run: uv run mypy . + testing: + strategy: + fail-fast: false + matrix: + python-version: [{python_versions}] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + - name: Install sqlx-cli + run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + - name: Set up Python ${{{{ matrix.python-version }}}} + uses: actions/setup-python@v6 + with: + python-version: ${{{{ matrix.python-version }}}} + - name: Install Dependencies + run: uv sync --frozen + - name: make .env + run: touch .env + - name: Start docker containers + run: docker compose up db valkey migrations -d + - name: Test with pytest + run: uv run pytest -n auto +"# + ) +} + +fn create_pixi_ci_testing_linux_only_file( + min_python_version: &str, + github_action_python_test_versions: &[String], +) -> String { + let python_versions = build_actions_python_test_versions(github_action_python_test_versions); + + format!( + r#"name: Testing + +on: + push: + branches: + - main + pull_request: +env: + PYTHON_VERSION: "{min_python_version}" +jobs: + linting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.30.0 + - name: Set up Python + run: pixi add python=="${{{{ env.PYTHON_VERSION }}}}.*" + - name: Ruff format check + run: pixi run run-ruff-format + - name: Lint with ruff + run: pixi run run-ruff-check + - name: mypy check + run: pixi run run-mypy + testing: + strategy: + fail-fast: false + matrix: + python-version: [{python_versions}] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.30.0 + - name: Set up Python ${{{{ matrix.python-version }}}} + run: pixi add python=="${{{{ matrix.python-version }}}}.*" + - name: Test with pytest + run: pixi run run-pytest +"# + ) +} + +fn create_ci_testing_linux_only_file_pyo3( + source_dir: &str, + min_python_version: &str, + github_action_python_test_versions: &[String], + pyo3_python_manager: &Pyo3PythonManager, +) -> String { + let python_versions = build_actions_python_test_versions(github_action_python_test_versions); + match pyo3_python_manager { + Pyo3PythonManager::Uv => format!( + r#"name: Testing + +on: + push: + branches: + - main + pull_request: +env: + CARGO_TERM_COLOR: always + RUST_BACKTRACE: 1 + RUSTFLAGS: "-D warnings" + PYTHON_VERSION: "{min_python_version}" +jobs: + clippy: + name: Clippy + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 
+ - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + - name: Run cargo clippy + run: cargo clippy --all-targets -- --deny warnings + fmt: + name: Rustfmt + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + - name: Run cargo fmt + run: cargo fmt --all -- --check + python-linting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 + with: + enable-cache: true + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: ${{{{ env.PYTHON_VERSION }}}} + - name: Install Dependencies + run: | + uv sync --frozen + uv run maturin build - name: Ruff format check run: uv run ruff format {source_dir} tests --check - name: Lint with ruff run: uv run ruff check . - name: mypy check - run: uv run mypy . + run: uv run mypy {source_dir} tests testing: strategy: fail-fast: false @@ -196,21 +610,15 @@ jobs: with: python-version: ${{{{ matrix.python-version }}}} - name: Install Dependencies - run: uv sync --frozen + run: | + uv sync --frozen + uv run maturin build - name: Test with pytest run: uv run pytest "# - ) -} - -fn create_pixi_ci_testing_linux_only_file( - min_python_version: &str, - github_action_python_test_versions: &[String], -) -> String { - let python_versions = build_actions_python_test_versions(github_action_python_test_versions); - - format!( - r#"name: Testing + ), + Pyo3PythonManager::Setuptools => format!( + r#"name: Testing on: push: @@ -218,24 +626,54 @@ on: - main pull_request: env: + CARGO_TERM_COLOR: always + RUST_BACKTRACE: 1 + RUSTFLAGS: "-D warnings" PYTHON_VERSION: "{min_python_version}" jobs: - linting: + clippy: + name: Clippy + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + - name: Run cargo clippy + run: cargo clippy --all-targets -- --deny warnings + fmt: + name: Rustfmt + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + - name: Run cargo fmt + run: cargo fmt --all -- --check + python-linting: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Install Pixi - uses: prefix-dev/setup-pixi@v0.8.1 - with: - pixi-version: v0.30.0 - name: Set up Python - run: pixi add python=="${{{{ env.PYTHON_VERSION }}}}.*" + uses: actions/setup-python@v6 + with: + python-version: ${{{{ env.PYTHON_VERSION }}}} + cache: "pip" + - name: Install Dependencies + run: | + python -m pip install -U pip + python -m pip install -r requirements-dev.txt + python -m pip install -e . + maturin build --out dist - name: Ruff format check - run: pixi run run-ruff-format + run: ruff format {source_dir} tests --check - name: Lint with ruff - run: pixi run run-ruff-check + run: ruff check . - name: mypy check - run: pixi run run-mypy + run: mypy . 
testing: strategy: fail-fast: false @@ -244,19 +682,26 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Install Pixi - uses: prefix-dev/setup-pixi@v0.8.1 - with: - pixi-version: v0.30.0 - name: Set up Python ${{{{ matrix.python-version }}}} - run: pixi add python=="${{{{ matrix.python-version }}}}.*" + uses: actions/setup-python@v6 + with: + python-version: ${{{{ matrix.python-version }}}} + cache: "pip" + - name: Install Dependencies + run: | + python -m pip install -U pip + python -m pip install -r requirements-dev.txt + python -m pip install -e . + maturin build --out dist - name: Test with pytest - run: pixi run run-pytest + run: pytest "# - ) + ), + } } -fn create_ci_testing_linux_only_file_pyo3( +#[cfg(feature = "fastapi")] +fn create_ci_testing_fastapi_file_pyo3( source_dir: &str, min_python_version: &str, github_action_python_test_versions: &[String], @@ -277,6 +722,22 @@ env: RUST_BACKTRACE: 1 RUSTFLAGS: "-D warnings" PYTHON_VERSION: "{min_python_version}" + SECRET_KEY: "someKey" + PRODUCTION_MODE: false + FIRST_SUPERUSER_EMAIL: "some@email.com" + FIRST_SUPERUSER_PASSWORD: "somePassword1!" + FIRST_SUPERUSER_NAME: "Super User" + POSTGRES_HOST: "127.0.0.1" + POSTGRES_USER: "postgres" + POSTGRES_PASSWORD: "test_password" + POSTGRES_DB: "test_db" + VALKEY_HOST: "127.0.0.1" + VALKEY_PASSWORD: "test_password" + MEILISEARCH_HOST: http://127.0.0.1 + STACK_NAME: "test-stack" + DOMAIN: "127.0.0.1" + LOG_LEVEL: "DEBUG" + CI: true jobs: clippy: name: Clippy @@ -299,7 +760,7 @@ jobs: - name: Cache dependencies uses: Swatinem/rust-cache@v2 - name: Run cargo fmt - run: cargo fmt --all -- --check + run: cargo fmt --all -- --check python-linting: runs-on: ubuntu-latest steps: @@ -330,6 +791,12 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 + - name: Install sqlx-cli + run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres - name: Install uv uses: astral-sh/setup-uv@v6 with: @@ -342,8 +809,12 @@ jobs: run: | uv sync --frozen uv run maturin build + - name: make .env + run: touch .env + - name: Start docker containers + run: docker compose up db valkey migrations -d - name: Test with pytest - run: uv run pytest + run: uv run pytest -n auto "# ), Pyo3PythonManager::Setuptools => format!( @@ -359,6 +830,22 @@ env: RUST_BACKTRACE: 1 RUSTFLAGS: "-D warnings" PYTHON_VERSION: "{min_python_version}" + SECRET_KEY: "someKey" + PRODUCTION_MODE: false + FIRST_SUPERUSER_EMAIL: "some@email.com" + FIRST_SUPERUSER_PASSWORD: "somePassword1!" 
+ FIRST_SUPERUSER_NAME: "Super User" + POSTGRES_HOST: "127.0.0.1" + POSTGRES_USER: "postgres" + POSTGRES_PASSWORD: "test_password" + POSTGRES_DB: "test_db" + VALKEY_HOST: "127.0.0.1" + VALKEY_PASSWORD: "test_password" + MEILISEARCH_HOST: http://127.0.0.1 + STACK_NAME: "test-stack" + DOMAIN: "127.0.0.1" + LOG_LEVEL: "DEBUG" + CI: true jobs: clippy: name: Clippy @@ -411,19 +898,29 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + - name: Cache dependencies + uses: Swatinem/rust-cache@v2 - name: Set up Python ${{{{ matrix.python-version }}}} uses: actions/setup-python@v6 with: python-version: ${{{{ matrix.python-version }}}} cache: "pip" + - name: Install sqlx-cli + run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres - name: Install Dependencies run: | python -m pip install -U pip python -m pip install -r requirements-dev.txt python -m pip install -e . maturin build --out dist + - name: make .env + run: touch .env + - name: Start docker containers + run: docker compose up db valkey migrations -d - name: Test with pytest - run: pytest + run: python -m pytest -n auto "# ), } @@ -436,6 +933,24 @@ pub fn save_ci_testing_linux_only_file(project_info: &ProjectInfo) -> Result<()> let content = match &project_info.project_manager { ProjectManager::Maturin => { if let Some(pyo3_python_manager) = &project_info.pyo3_python_manager { + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + create_ci_testing_fastapi_file_pyo3( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + pyo3_python_manager, + ) + } else { + create_ci_testing_linux_only_file_pyo3( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + pyo3_python_manager, + ) + } + + #[cfg(not(feature = "fastapi"))] create_ci_testing_linux_only_file_pyo3( &project_info.source_dir, &project_info.min_python_version, @@ -446,21 +961,75 @@ pub fn save_ci_testing_linux_only_file(project_info: &ProjectInfo) -> Result<()> bail!("A PyO3 Python manager is required for maturin"); } } - ProjectManager::Poetry => create_poetry_ci_testing_linux_only_file( - &project_info.source_dir, - &project_info.min_python_version, - &project_info.github_actions_python_test_versions, - ), - ProjectManager::Setuptools => create_setuptools_ci_testing_linux_only_file( - &project_info.source_dir, - &project_info.min_python_version, - &project_info.github_actions_python_test_versions, - ), - ProjectManager::Uv => create_uv_ci_testing_linux_only_file( - &project_info.source_dir, - &project_info.min_python_version, - &project_info.github_actions_python_test_versions, - ), + ProjectManager::Poetry => { + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + create_poetry_ci_testing_fastapi_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } else { + create_poetry_ci_testing_linux_only_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } + + #[cfg(not(feature = "fastapi"))] + create_poetry_ci_testing_linux_only_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } + ProjectManager::Setuptools => { + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + 
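+ // FastAPI projects get the workflow variant that also starts the Postgres and Valkey containers and installs sqlx-cli for the generated test suite's migrations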
create_setuptools_ci_testing_fastapi_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } else { + create_setuptools_ci_testing_linux_only_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } + + #[cfg(not(feature = "fastapi"))] + create_setuptools_ci_testing_linux_only_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } + ProjectManager::Uv => { + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + create_uv_ci_testing_fastapi_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } else { + create_uv_ci_testing_linux_only_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } + + #[cfg(not(feature = "fastapi"))] + create_uv_ci_testing_linux_only_file( + &project_info.source_dir, + &project_info.min_python_version, + &project_info.github_actions_python_test_versions, + ) + } ProjectManager::Pixi => create_pixi_ci_testing_linux_only_file( &project_info.min_python_version, &project_info.github_actions_python_test_versions, @@ -1328,6 +1897,99 @@ jobs: ) } +#[cfg(feature = "fastapi")] +fn create_testing_deploy_file() -> String { + r#"name: Deploy to Testing +on: + push: + branches: + - main + workflow_dispatch: +jobs: + deploy: + runs-on: + - self-hosted + - testing + env: + ENVIRONMENT: testing + DOMAIN: ${{ secrets.DOMAIN_TESTING }} + STACK_NAME: ${{ secrets.STACK_NAME_TESTING }} + SECRET_KEY: ${{ secrets.SECRET_KEY }} + FIRST_SUPERUSER_EMAIL: ${{ secrets.FIRST_SUPERUSER_EMAIL }} + FIRST_SUPERUSER_PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }} + FIRST_SUPERUSER_NAME: ${{ secrets.FIRST_SUPERUSER_NAME }} + POSTGRES_HOST: ${{ secrets.POSTGRES_HOST }} + POSTGRES_USER: ${{ secrets.POSTGRES_USER }} + POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }} + POSTGRES_DB: ${{ secrets.POSTGRES_DB }} + VALKEY_HOST: ${{ secrets.VALKEY_HOST }} + VALKEY_PASSWORD: ${{ secrets.VALKEY_PASSWORD }} + USERNAME: ${{ secrets.FIRST_SUPERUSER_EMAIL }} + PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }} + EMAIL: ${{ secrets.FIRST_SUPERUSER_EMAIL }} + ERROR_NOTIFICATION_URL: ${{ secrets.ERROR_NOTIFICATION_URL_TESTING }} + LOG_LEVEL: "DEBUG" + steps: + - name: Fix permissions + run: | + if [ -d "./data" ]; then + sudo chown -R $USER:$USER ./data + fi + - name: Checkout + uses: actions/checkout@v5 + - name: Create .env file + run: | + HASHED_PASSWORD=$(openssl passwd -apr1 "${PASSWORD}" | sed 's/\$/\$\$/g') + cat > .env << EOF + ENVIRONMENT=${ENVIRONMENT} + DOMAIN=${DOMAIN} + STACK_NAME=${STACK_NAME} + SECRET_KEY=${SECRET_KEY} + FIRST_SUPERUSER_EMAIL=${FIRST_SUPERUSER_EMAIL} + FIRST_SUPERUSER_PASSWORD=${FIRST_SUPERUSER_PASSWORD} + FIRST_SUPERUSER_NAME=${FIRST_SUPERUSER_NAME} + POSTGRES_HOST=${POSTGRES_HOST} + POSTGRES_USER=${POSTGRES_USER} + POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + POSTGRES_DB=${POSTGRES_DB} + VALKEY_HOST=${VALKEY_HOST} + VALKEY_PASSWORD=${VALKEY_PASSWORD} + USERNAME=${FIRST_SUPERUSER_EMAIL} + PASSWORD=${FIRST_SUPERUSER_PASSWORD} + HASHED_PASSWORD=${HASHED_PASSWORD} + EMAIL=${FIRST_SUPERUSER_EMAIL} + ERROR_NOTIFICATION_URL=${ERROR_NOTIFICATION_URL} + LOG_LEVEL=${LOG_LEVEL} + EOF + - name: Build 
and restart containers + timeout-minutes: 15 + run: | + docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_TESTING }} build + docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_TESTING }} up -d +"#.to_string() +} + +#[cfg(feature = "fastapi")] +pub fn save_deploy_files(project_info: &ProjectInfo) -> Result<()> { + let testing_file_path = project_info + .base_dir() + .join(".github/workflows/deploy_testing.yml"); + let testing_content = create_testing_deploy_file(); + + save_file_with_content(&testing_file_path, &testing_content)?; + + let production_file_path = project_info + .base_dir() + .join(".github/workflows/deploy_production.yml"); + let production_content = create_testing_deploy_file(); + + save_file_with_content(&production_file_path, &production_content)?; + + Ok(()) +} + pub fn save_pypi_publish_file(project_info: &ProjectInfo) -> Result<()> { let file_path = project_info .base_dir() @@ -1562,6 +2224,9 @@ mod tests { use std::fs::create_dir_all; use tmp_path::tmp_path; + #[cfg(feature = "fastapi")] + use crate::project_info::DatabaseManager; + #[tmp_path] fn project_info_dummy() -> ProjectInfo { ProjectInfo { @@ -1597,6 +2262,12 @@ mod tests { docs_info: None, download_latest_packages: false, project_root_dir: Some(tmp_path), + + #[cfg(feature = "fastapi")] + is_fastapi_project: false, + + #[cfg(feature = "fastapi")] + database_manager: None, } } @@ -1641,6 +2312,25 @@ mod tests { assert_yaml_snapshot!(content); } + #[cfg(feature = "fastapi")] + #[test] + fn test_save_poetry_ci_testing_fastapi_file() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Poetry; + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + create_dir_all(base.join(".github/workflows")).unwrap(); + let expected_file = base.join(".github/workflows/testing.yml"); + save_ci_testing_linux_only_file(&project_info).unwrap(); + + assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + #[test] fn test_save_ci_testing_linux_only_file_pyo3() { let mut project_info = project_info_dummy(); @@ -1657,6 +2347,25 @@ mod tests { assert_yaml_snapshot!(content); } + #[cfg(feature = "fastapi")] + #[test] + fn test_save_ci_testing_fastapi_file_pyo3() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Maturin; + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + create_dir_all(base.join(".github/workflows")).unwrap(); + let expected_file = base.join(".github/workflows/testing.yml"); + save_ci_testing_linux_only_file(&project_info).unwrap(); + + assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + #[test] fn test_save_setuptools_ci_testing_linux_only_file() { let mut project_info = project_info_dummy(); @@ -1674,6 +2383,25 @@ mod tests { assert_yaml_snapshot!(content); } + #[cfg(feature = "fastapi")] + #[test] + fn test_save_setuptools_ci_fastapi_file() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Setuptools; + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + 
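+ // The workflows directory must exist before save_ci_testing_linux_only_file can write testing.yml into it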
create_dir_all(base.join(".github/workflows")).unwrap(); + let expected_file = base.join(".github/workflows/testing.yml"); + save_ci_testing_linux_only_file(&project_info).unwrap(); + + assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + #[test] fn test_save_uv_ci_testing_linux_only_file() { let mut project_info = project_info_dummy(); @@ -1691,6 +2419,25 @@ mod tests { assert_yaml_snapshot!(content); } + #[cfg(feature = "fastapi")] + #[test] + fn test_save_uv_ci_testing_fastapi_file() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Uv; + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + create_dir_all(base.join(".github/workflows")).unwrap(); + let expected_file = base.join(".github/workflows/testing.yml"); + save_ci_testing_linux_only_file(&project_info).unwrap(); + + assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + #[test] fn test_save_pixi_ci_testing_linux_only_file() { let mut project_info = project_info_dummy(); @@ -2140,6 +2887,28 @@ mod tests { assert_yaml_snapshot!(content); } + #[cfg(feature = "fastapi")] + #[test] + fn test_save_deploy_files() { + let mut project_info = project_info_dummy(); + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + create_dir_all(base.join(".github/workflows")).unwrap(); + let expected_test_file = base.join(".github/workflows/deploy_testing.yml"); + let expected_production_file = base.join(".github/workflows/deploy_production.yml"); + save_deploy_files(&project_info).unwrap(); + + assert!(expected_test_file.is_file()); + assert!(expected_production_file.is_file()); + + let test_content = std::fs::read_to_string(expected_test_file).unwrap(); + let production_content = std::fs::read_to_string(expected_production_file).unwrap(); + + assert_yaml_snapshot!(test_content); + assert_yaml_snapshot!(production_content); + } + #[test] fn test_save_docs_publish_file_pixi() { let mut project_info = project_info_dummy(); diff --git a/src/licenses.rs b/src/licenses.rs index 6a3c883c..ad7942a4 100644 --- a/src/licenses.rs +++ b/src/licenses.rs @@ -1,7 +1,9 @@ use anyhow::{bail, Result}; -use crate::file_manager::save_file_with_content; -use crate::project_info::{LicenseType, ProjectInfo}; +use crate::{ + file_manager::save_file_with_content, + project_info::{LicenseType, ProjectInfo}, +}; fn create_apache_license() -> String { r#" Apache License @@ -292,6 +294,12 @@ mod tests { docs_info: None, download_latest_packages: false, project_root_dir: Some(tmp_path), + + #[cfg(feature = "fastapi")] + is_fastapi_project: false, + + #[cfg(feature = "fastapi")] + database_manager: None, } } diff --git a/src/main.rs b/src/main.rs index a0d9b4f3..562e7a21 100644 --- a/src/main.rs +++ b/src/main.rs @@ -10,9 +10,10 @@ mod python_files; mod rust_files; mod utils; -use std::fs::remove_dir_all; -use std::process::exit; -use std::time::Duration; +#[cfg(feature = "fastapi")] +mod fastapi; + +use std::{fs::remove_dir_all, process::exit, time::Duration}; use anyhow::{Error, Result}; use clap::Parser; @@ -20,10 +21,17 @@ use cli::ApplicationOrLib; use colored::*; use indicatif::{ProgressBar, ProgressStyle}; -use crate::cli::{Args, BooleanChoice, Command, Param}; -use 
crate::config::Config; -use crate::project_generator::generate_project; -use crate::project_info::{get_project_info, ProjectInfo}; +use crate::{ + cli::{Args, BooleanChoice, Command, Param}, + config::Config, + project_generator::generate_project, + project_info::{get_project_info, ProjectInfo}, +}; + +#[cfg(feature = "fastapi")] +use crate::fastapi::{ + fastapi_files::generate_fastapi, fastapi_installer::install_fastapi_dependencies, +}; fn create(project_info: &ProjectInfo) -> Result<()> { generate_project(project_info)?; @@ -32,6 +40,12 @@ fn create(project_info: &ProjectInfo) -> Result<()> { .output() .expect("Failed to initialize git"); + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + install_fastapi_dependencies(project_info)?; + generate_fastapi(project_info)?; + } + Ok(()) } @@ -389,6 +403,63 @@ fn main() { exit(1); } } + + #[cfg(feature = "fastapi")] + Param::IsFastapiProject { value } => match value { + BooleanChoice::True => { + if let Err(e) = Config::default().save_is_fastapi_project(true) { + print_error(e); + exit(1); + } + } + BooleanChoice::False => { + if let Err(e) = Config::default().save_is_fastapi_project(false) { + print_error(e); + exit(1); + } + } + }, + + #[cfg(feature = "fastapi")] + Param::ResetIsFastapiProject => { + if let Err(e) = Config::default().reset_is_fastapi_project() { + print_error(e); + exit(1); + } + } + + #[cfg(feature = "fastapi")] + Param::Database { value } => { + if let Err(e) = Config::default().save_database(value) { + print_error(e); + exit(1); + } + } + + #[cfg(feature = "fastapi")] + Param::ResetDatabase => { + if let Err(e) = Config::default().reset_database() { + print_error(e); + exit(1); + } + } + + #[cfg(feature = "fastapi")] + Param::DatabaseManager { value } => { + if let Err(e) = Config::default().save_database_manager(value) { + print_error(e); + exit(1); + } + } + + #[cfg(feature = "fastapi")] + Param::ResetDatabaseManager => { + if let Err(e) = Config::default().reset_database_manager() { + print_error(e); + exit(1); + } + } + Param::Reset => { if Config::reset().is_err() { let message = "Error resetting config."; @@ -446,6 +517,12 @@ mod tests { docs_info: None, download_latest_packages: false, project_root_dir: Some(tmp_path), + + #[cfg(feature = "fastapi")] + is_fastapi_project: false, + + #[cfg(feature = "fastapi")] + database_manager: None, }; create_dir_all(&slug_dir).unwrap(); assert!(slug_dir.exists()); diff --git a/src/project_generator.rs b/src/project_generator.rs index 1c289248..cbc1bd71 100644 --- a/src/project_generator.rs +++ b/src/project_generator.rs @@ -2,27 +2,30 @@ use std::fs::create_dir_all; use anyhow::{bail, Result}; use colored::*; -use minijinja::render; use rayon::prelude::*; -use crate::file_manager::{save_empty_src_file, save_file_with_content}; -use crate::github_actions::{ - save_ci_testing_linux_only_file, save_ci_testing_multi_os_file, save_dependabot_file, - save_docs_publish_file, save_pypi_publish_file, save_release_drafter_file, +use crate::{ + file_manager::{save_empty_src_file, save_file_with_content}, + github_actions::{ + save_ci_testing_linux_only_file, save_ci_testing_multi_os_file, save_dependabot_file, + save_docs_publish_file, save_pypi_publish_file, save_release_drafter_file, + }, + licenses::{generate_license, license_str}, + package_version::{ + LatestVersion, PreCommitHook, PreCommitHookVersion, PythonPackage, PythonPackageVersion, + }, + project_info::{LicenseType, ProjectInfo, ProjectManager, Pyo3PythonManager}, + python_files::generate_python_files, + 
rust_files::{save_cargo_toml_file, save_lib_file}, + utils::is_python_312_or_greater, }; -use crate::licenses::{generate_license, license_str}; -use crate::package_version::{ - LatestVersion, PreCommitHook, PreCommitHookVersion, PythonPackage, PythonPackageVersion, -}; -use crate::project_info::{ProjectInfo, ProjectManager, Pyo3PythonManager}; -use crate::python_files::generate_python_files; -use crate::rust_files::{save_cargo_toml_file, save_lib_file}; -use crate::utils::is_python_312_or_greater; + +#[cfg(feature = "fastapi")] +use crate::github_actions::save_deploy_files; fn create_directories(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); let base = project_info.base_dir(); - let src = base.join(module); + let src = project_info.source_dir_path(); create_dir_all(src)?; let github_dir = base.join(".github/workflows"); @@ -254,7 +257,7 @@ fn create_pre_commit_file(download_latest_packages: bool) -> String { } PreCommitHook::Ruff => { let info = format!( - "\n  - repo: {}\n    rev: {}\n    hooks:\n    - id: ruff\n      args: [--fix, --exit-non-zero-on-fix]\n    - id: ruff-format", + "\n  - repo: {}\n    rev: {}\n    hooks:\n    - id: ruff-check\n      args: [--fix, --exit-non-zero-on-fix]\n    - id: ruff-format", hook.repo, hook.rev ); pre_commit_str.push_str(&info); @@ -466,160 +469,240 @@ fn build_latest_dev_dependencies(project_info: &ProjectInfo) -> Result<String> { } fn create_pyproject_toml(project_info: &ProjectInfo) -> Result<String> { - let module = project_info.source_dir.replace([' ', '-'], "_"); + let module = project_info.module_name(); + let min_python_version = &project_info.min_python_version; let pyupgrade_version = &project_info.min_python_version.replace(['.', '^'], ""); + let project_name = &module.replace('_', "-"); + let project_description = &project_info.project_description; + let creator = &project_info.creator; + let creator_email = &project_info.creator_email; + let version = &project_info.version; + let license = &project_info.license; let license_text = license_str(&project_info.license); + let dev_dependencies = build_latest_dev_dependencies(project_info)?; + let max_line_length = &project_info.max_line_length; + let include_docs = project_info.include_docs; + let mut pyproject = match &project_info.project_manager { ProjectManager::Maturin => { if let Some(pyo3_python_manager) = &project_info.pyo3_python_manager { match pyo3_python_manager { - Pyo3PythonManager::Uv => r#"[build-system] + Pyo3PythonManager::Uv => { + let mut pyproject = format!( + r#"[build-system] requires = ["maturin>=1.5,<2.0"] build-backend = "maturin" [project] -name = "{{ project_name }}" -description = "{{ project_description }}" +name = "{project_name}" +description = "{project_description}" authors = [ - { name = "{{ creator }}", email = "{{ creator_email }}" }, -] -{% if license != "NoLicense" -%} -license = { file = "LICENSE" } -{% endif -%} + {{ name = "{creator}", email = "{creator_email}" }}, +]"#, + ); + + if license != &LicenseType::NoLicense { + pyproject.push_str( + r#" +license = { file = "LICENSE" }"#, + ); + } + + pyproject.push_str(&format!( + r#" readme = "README.md" dynamic = ["version"] -requires-python = ">={{ min_python_version }}" +requires-python = ">={min_python_version}" dependencies = [] [dependency-groups] -dev = {{ dev_dependencies }} +dev = {dev_dependencies} [tool.maturin] -module-name = "{{ module }}._{{ module }}" +module-name = "{module}._{module}" binding = "pyo3" features = ["pyo3/extension-module"] -"# - .to_string(), 
Pyo3PythonManager::Setuptools => r#"[build-system] +"#, + )); + pyproject + } + Pyo3PythonManager::Setuptools => { + let mut pyproject = format!( + r#"[build-system] requires = ["maturin>=1.5,<2.0"] build-backend = "maturin" [project] -name = "{{ project_name }}" -description = "{{ project_description }}" -authors = [{name = "{{ creator }}", email = "{{ creator_email }}"}] -{% if license != "NoLicense" -%} -license = "{{ license }}" -{% endif -%} +name = "{project_name}" +description = "{project_description}" +authors = [{{name = "{creator}", email = "{creator_email}"}}]"#, + ); + if license != &LicenseType::NoLicense { + pyproject.push_str(&format!( + r#" +license = "{license_text}""#, + )); + } + + pyproject.push_str(&format!( + r#" readme = "README.md" dynamic = ["version"] dependencies = [] [tool.maturin] -module-name = "{{ module }}._{{ module }}" +module-name = "{module}._{module}" binding = "pyo3" features = ["pyo3/extension-module"] -"# - .to_string(), +"#, + )); + pyproject + } } } else { bail!("A PyO3 Python manager is required for maturin projects"); } } - ProjectManager::Poetry => r#"[tool.poetry] -name = "{{ project_name }}" -version = "{{ version }}" -description = "{{ project_description }}" -authors = ["{{ creator }} <{{ creator_email }}>"] -{% if license != "NoLicense" -%} -license = "{{ license }}" -{% endif -%} -readme = "README.md" + ProjectManager::Poetry => { + let mut pyproject = format!( + r#"[tool.poetry] +name = "{project_name}" +version = "{version}" +description = "{project_description}" +authors = ["{creator} <{creator_email}>"] +"# + ); + + if license != &LicenseType::NoLicense { + pyproject.push_str(&format!( + "license = \"{license_text}\" +" + )); + } + + pyproject.push_str(&format!( + r#"readme = "README.md" [tool.poetry.dependencies] -python = "^{{ min_python_version }}" +python = "^{min_python_version}" [tool.poetry.group.dev.dependencies] -{{ dev_dependencies }} +{dev_dependencies} [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" "# - .to_string(), - ProjectManager::Setuptools => r#"[build-system] + )); + pyproject + } + ProjectManager::Setuptools => { + let mut pyproject = format!( + r#"[build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" [project] -name = "{{ project_name }}" -description = "{{ project_description }}" +name = "{project_name}" +description = "{project_description}" authors = [ - { name = "{{ creator }}", email = "{{ creator_email }}" } + {{ name = "{creator}", email = "{creator_email}" }} ] -{% if license != "NoLicense" -%} -license = { text = "{{ license }}" } -{% endif -%} -requires-python = ">={{ min_python_version }}" +"# + ); + + if license != &LicenseType::NoLicense { + pyproject.push_str(&format!( + "license = {{ text = \"{license_text}\" }} +" + )); + } + + pyproject.push_str(&format!( + r#"requires-python = ">={min_python_version}" dynamic = ["version", "readme"] dependencies = [] [tool.setuptools.dynamic] -version = {attr = "{{ module }}.__version__"} -readme = {file = ["README.md"]} +version = {{attr = "{module}.__version__"}} +readme = {{file = ["README.md"]}} [tool.setuptools.packages.find] -include = ["{{ module }}*"] +include = ["{module}*"] [tool.setuptools.package-data] -{{ module }} = ["py.typed"] +{module} = ["py.typed"] -"# - .to_string(), - ProjectManager::Uv => r#"[build-system] +"#, + )); + pyproject + } + ProjectManager::Uv => { + let mut pyproject = format!( + r#"[build-system] requires = ["hatchling"] build-backend = 
"hatchling.build" [project] -name = "{{ project_name }}" -description = "{{ project_description }}" +name = "{project_name}" +description = "{project_description}" authors = [ - { name = "{{ creator }}", email = "{{ creator_email }}" } + {{ name = "{creator}", email = "{creator_email}" }} ] -{% if license != "NoLicense" -%} -license = { file = "LICENSE" } -{% endif -%} -readme = "README.md" -requires-python = ">={{ min_python_version }}" +"# + ); + + if license != &LicenseType::NoLicense { + pyproject.push_str( + "license = { file = \"LICENSE\" } +", + ); + } + + pyproject.push_str(&format!( + r#"readme = "README.md" +requires-python = ">={min_python_version}" dynamic = ["version"] dependencies = [] [dependency-groups] -dev = {{ dev_dependencies }} +dev = {dev_dependencies} [tool.hatch.version] -path = "{{ module }}/_version.py" +path = "{module}/_version.py" -"# - .to_string(), - ProjectManager::Pixi => r#"[build-system] +"#, + )); + pyproject + } + ProjectManager::Pixi => { + let mut pyproject = format!( + r#"[build-system] requires = ["hatchling"] build-backend = "hatchling.build" [project] -name = "{{ project_name }}" -description = "{{ project_description }}" +name = "{project_name}" +description = "{project_description}" authors = [ - { name = "{{ creator }}", email = "{{ creator_email }}" } + {{ name = "{creator}", email = "{creator_email}" }} ] -{% if license != "NoLicense" -%} -license = { file = "LICENSE" } -{% endif -%} -readme = "README.md" -requires-python = ">={{ min_python_version }}" +"# + ); + + if license != &LicenseType::NoLicense { + pyproject.push_str( + "license = { file = \"LICENSE\" } +", + ); + } + + pyproject.push_str(&format!( + r#"readme = "README.md" +requires-python = ">={min_python_version}" dynamic = ["version"] dependencies = [] @@ -628,26 +711,36 @@ channels = ["conda-forge", "bioconda"] platforms = ["linux-64", "osx-arm64", "osx-64", "win-64"] [tool.pixi.feature.dev.tasks] -run-mypy = "mypy {{ module }} tests" -run-ruff-check = "ruff check {{ module }} tests" -run-ruff-format = "ruff format {{ module }} tests" +run-mypy = "mypy {module} tests" +run-ruff-check = "ruff check {module} tests" +run-ruff-format = "ruff format {module} tests" run-pytest = "pytest -x" -{% if include_docs -%} -run-deploy-docs = "mkdocs gh-deploy --force" -{%- endif %} +"#, + )); + + if include_docs { + pyproject.push_str( + "run-deploy-docs = \"mkdocs gh-deploy --force\" +", + ); + } + pyproject.push_str(&format!( + r#" [project.optional-dependencies] -dev = {{ dev_dependencies }} +dev = {dev_dependencies} [tool.pixi.environments] -default = {features = [], solve-group = "default"} -dev = {features = ["dev"], solve-group = "default"} +default = {{features = [], solve-group = "default"}} +dev = {{features = ["dev"], solve-group = "default"}} [tool.hatch.version] -path = "{{ module }}/_version.py" +path = "{module}/_version.py" -"# - .to_string(), +"#, + )); + pyproject + } }; pyproject.push_str( @@ -658,22 +751,45 @@ disallow_untyped_defs = true [[tool.mypy.overrides]] module = ["tests.*"] disallow_untyped_defs = false +"#, + ); + + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + pyproject.push_str( + r#" +[[tool.mypy.overrides]] +module = ["asyncpg.*"] +ignore_missing_imports = true +"#, + ); + } + pyproject.push_str(&format!( + r#" [tool.pytest.ini_options] minversion = "6.0" -addopts = "--cov={{ module }} --cov-report term-missing --no-cov-on-fail" -{%- if is_async_project %} -asyncio_mode = "auto" +addopts = "--cov={module} --cov-report term-missing 
--no-cov-on-fail" +"#, + )); + + if project_info.is_async_project { + pyproject.push_str( + r#"asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" asyncio_default_test_loop_scope = "function" -{%- endif %} +"#, + ); + } + pyproject.push_str(&format!( + r#" [tool.coverage.report] exclude_lines = ["if __name__ == .__main__.:", "pragma: no cover"] [tool.ruff] -line-length = {{ max_line_length }} -target-version = "py{{ pyupgrade_version }}" +line-length = {max_line_length} +target-version = "py{pyupgrade_version}" fix = true [tool.ruff.lint] @@ -688,10 +804,17 @@ select = [ "T203", # pprint found "RUF022", # Unsorted __all__ "RUF023", # Unforted __slots__ - {%- if is_async_project %} - "ASYNC", # flake8-async - {% endif %} -] +"#, + )); + + if project_info.is_async_project { + pyproject.push_str( + r#" "ASYNC", # flake8-async +"#, + ); + } + pyproject.push_str( + r#"] ignore=[ # Recommended ignores by ruff when using formatter "E501", @@ -710,27 +833,19 @@ ignore=[ "ISC001", "ISC002", ] - "#, ); - Ok(render!( - &pyproject, - project_name => module.replace('_', "-"), - version => project_info.version, - project_description => project_info.project_description, - creator => project_info.creator, - creator_email => project_info.creator_email, - license => license_text, - min_python_version => project_info.min_python_version, - dev_dependencies => build_latest_dev_dependencies(project_info)?, - max_line_length => project_info.max_line_length, - module => module, - is_application => project_info.is_application, - is_async_project => project_info.is_async_project, - include_docs => project_info.include_docs, - pyupgrade_version => pyupgrade_version, - )) + if project_info.project_manager == ProjectManager::Uv && project_info.is_application { + pyproject.push_str( + r#" +[tool.uv] +add-bounds = "exact" +"#, + ); + } + + Ok(pyproject) } fn save_pyproject_toml_file(project_info: &ProjectInfo) -> Result<()> { @@ -864,8 +979,11 @@ fn save_docs_css(project_info: &ProjectInfo) -> Result<()> { Ok(()) } -fn create_poetry_justfile(module: &str) -> String { - format!( +fn create_poetry_justfile(project_info: &ProjectInfo) -> String { + let module = project_info.module_name(); + + #[cfg_attr(not(feature = "fastapi"), allow(unused_mut))] + let mut justfile = format!( r#"@_default: just --list @@ -886,22 +1004,88 @@ fn create_poetry_justfile(module: &str) -> String { @ruff-format: poetry run ruff format {module} tests -@test *args="": - -poetry run pytest {{{{args}}}} - @install: poetry install + +@test *args="": + poetry run pytest {{{{args}}}} "# - ) + ); + + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + justfile.push_str( + r#" +granian_cmd := if os() != "windows" { + "poetry run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --loop uvloop --reload" +} else { + "poetry run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload" +} + +@backend-server: + {{granian_cmd}} + +@test-parallel *args="": + poetry run pytest -n auto {{args}} + +@docker-up: + docker compose up --build + +@docker-up-detached: + docker compose up --build -d + +@docker-up-services: + docker compose up db valkey migrations + +@docker-up-services-detached: + docker compose up db valkey migrations -d + +@docker-down: + docker compose down + +@docker-down-volumes: + docker compose down --volumes + +@docker-pull: + docker compose pull db valkey migrations + +@docker-build: + docker compose build +"#, + ) + } + + 
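+    // The granian_cmd recipe above is split on justfile's os() check because
+    // uvloop does not support Windows, so the `--loop uvloop` flag is only
+    // emitted in the non-Windows variant of the command.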
justfile } -fn create_pyo3_justfile(module: &str, pyo3_python_manager: &Pyo3PythonManager) -> String { - match pyo3_python_manager { +fn create_pyo3_justfile(project_info: &ProjectInfo) -> Result<String> { + let module = project_info.module_name(); + let pyo3_python_manager = if let Some(manager) = &project_info.pyo3_python_manager { + manager + } else { + bail!("A PyO3 Python manager is required for maturin"); + }; + #[cfg_attr(not(feature = "fastapi"), allow(unused_mut))] + let mut justfile = match pyo3_python_manager { Pyo3PythonManager::Uv => { - format!( + let mut file = format!( r#"@_default: just --list +@lint: + echo cargo check + just --justfile {{{{justfile()}}}} check + echo cargo clippy + just --justfile {{{{justfile()}}}} clippy + echo cargo fmt + just --justfile {{{{justfile()}}}} fmt + echo mypy + just --justfile {{{{justfile()}}}} mypy + echo ruff check + just --justfile {{{{justfile()}}}} ruff-check + echo ruff formatting + just --justfile {{{{justfile()}}}} ruff-format + @lock: uv lock @@ -920,20 +1104,6 @@ fn create_pyo3_justfile(module: &str, pyo3_python_manager: &Pyo3PythonManager) - @install-release: && develop-release uv sync --frozen --all-extras -@lint: - echo cargo check - just --justfile {{{{justfile()}}}} check - echo cargo clippy - just --justfile {{{{justfile()}}}} clippy - echo cargo fmt - just --justfile {{{{justfile()}}}} fmt - echo mypy - just --justfile {{{{justfile()}}}} mypy - echo ruff check - just --justfile {{{{justfile()}}}} ruff-check - echo ruff formatting - just --justfile {{{{justfile()}}}} ruff-format - @check: cargo check @@ -955,25 +1125,34 @@ fn create_pyo3_justfile(module: &str, pyo3_python_manager: &Pyo3PythonManager) - @test *args="": uv run pytest {{{{args}}}} "# - ) + ); + + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + file.push_str( + r#" +@test-parallel *args="": + uv run pytest -n auto {{args}} + +granian_cmd := if os() != "windows" { + "uv run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --loop uvloop --reload" +} else { + "uv run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload" +} + +@backend-server: + {{granian_cmd}} +"#, + ); + } + + file } Pyo3PythonManager::Setuptools => { - format!( + let mut file = format!( r#"@_default: just --list -@develop: - maturin develop - -@develop-release: - maturin develop -r - -@install: && develop - python -m pip install -r requirements-dev.txt - -@install-release: && develop-release - python -m pip install -r requirements-dev.txt - @lint: echo cargo check just --justfile {{{{justfile()}}}} check @@ -988,6 +1167,18 @@ fn create_pyo3_justfile(module: &str, pyo3_python_manager: &Pyo3PythonManager) - echo ruff formatting just --justfile {{{{justfile()}}}} ruff-format +@develop: + python -m maturin develop + +@develop-release: + python -m maturin develop -r + +@install: && develop + python -m pip install -r requirements-dev.txt + +@install-release: && develop-release + python -m pip install -r requirements-dev.txt + @check: cargo check @@ -998,24 +1189,82 @@ fn create_pyo3_justfile(module: &str, pyo3_python_manager: &Pyo3PythonManager) - cargo fmt --all -- --check @mypy: - mypy {module} tests + python -m mypy {module} tests @ruff-check: - ruff check {module} tests --fix + python -m ruff check {module} tests --fix @ruff-format: - ruff format {module} tests + python -m ruff format {module} tests @test *arg="": - pytest {{{{args}}}} + python -m pytest {{{{args}}}} "# - ) + ); + + 
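+    // Brace escaping differs between the two template styles in this function:
+    // inside a `format!` raw string a justfile interpolation has to be written
+    // `{{{{args}}}}` (format! halves doubled braces, emitting `{{args}}`), while
+    // strings passed to `push_str` are copied verbatim and take `{{args}}` as-is.
+    // A quick sanity check: assert_eq!(format!("{{{{args}}}}"), "{{args}}");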
#[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + file.push_str( + r#" +@test-parallel *args="": + python -m pytest -n auto {{args}} + +granian_cmd := if os() != "windows" { + "python -m granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --loop uvloop --reload" +} else { + "python -m granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload" +} + +@backend-server: + {{granian_cmd}} +"#, + ); + } + + file } + }; + + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + justfile.push_str( + r#" +@docker-up: + docker compose up --build + +@docker-up-detached: + docker compose up --build -d + +@docker-up-services: + docker compose up db valkey migrations + +@docker-up-services-detached: + docker compose up db valkey migrations -d + +@docker-down: + docker compose down + +@docker-down-volumes: + docker compose down --volumes + +@docker-pull: + docker compose pull db valkey migrations + +@docker-build: + docker compose build +"#, + ) } + + Ok(justfile) } -fn create_setuptools_justfile(module: &str) -> String { - format!( +fn create_setuptools_justfile(project_info: &ProjectInfo) -> String { + let module = project_info.module_name(); + + #[cfg_attr(not(feature = "fastapi"), allow(unused_mut))] + let mut justfile = format!( r#"@_default: just --list @@ -1036,17 +1285,65 @@ fn create_setuptools_justfile(module: &str) -> String { @ruff-format: python -m ruff format {module} tests -@test *args="": - -python -m pytest {{{{args}}}} - @install: python -m pip install -r requirements-dev.txt + +@test *args="": + python -m pytest {{{{args}}}} "# - ) + ); + + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + justfile.push_str( + r#" +@test-parallel *args="": + python -m pytest -n auto {{args}} + +granian_cmd := if os() != "windows" { + "python -m granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --loop uvloop --reload" +} else { + "python -m granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload" } -fn create_uv_justfile(module: &str) -> String { - format!( +@backend-server: + {{granian_cmd}} + +@docker-up: + docker compose up --build + +@docker-up-detached: + docker compose up --build -d + +@docker-up-services: + docker compose up db valkey migrations + +@docker-up-services-detached: + docker compose up db valkey migrations -d + +@docker-down: + docker compose down + +@docker-down-volumes: + docker compose down --volumes + +@docker-pull: + docker compose pull db valkey migrations + +@docker-build: + docker compose build +"#, + ) + } + + justfile +} + +fn create_uv_justfile(project_info: &ProjectInfo) -> String { + let module = project_info.module_name(); + + #[cfg_attr(not(feature = "fastapi"), allow(unused_mut))] + let mut justfile = format!( r#"@_default: just --list @@ -1067,9 +1364,6 @@ fn create_uv_justfile(module: &str) -> String { @ruff-format: uv run ruff format {module} tests -@test *args="": - -uv run pytest {{{{args}}}} - @lock: uv lock @@ -1078,8 +1372,56 @@ fn create_uv_justfile(module: &str) -> String { @install: uv sync --frozen --all-extras + +@test *args="": + uv run pytest {{{{args}}}} "# - ) + ); + + #[cfg(feature = "fastapi")] + if project_info.is_fastapi_project { + justfile.push_str( + r#" +@test-parallel *args="": + uv run pytest -n auto {{args}} + +granian_cmd := if os() != "windows" { + "uv run granian app.main:app --host 127.0.0.1 --port 8000 
--interface asgi --no-ws --runtime-mode st --loop uvloop --reload" +} else { + "uv run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload" +} + +@backend-server: + {{granian_cmd}} + +@docker-up: + docker compose up --build + +@docker-up-detached: + docker compose up --build -d + +@docker-up-services: + docker compose up db valkey migrations + +@docker-up-services-detached: + docker compose up db valkey migrations -d + +@docker-down: + docker compose down + +@docker-down-volumes: + docker compose down --volumes + +@docker-pull: + docker compose pull db valkey migrations + +@docker-build: + docker compose build +"#, + ) + } + + justfile } fn create_pixi_justfile() -> String { @@ -1104,7 +1446,7 @@ fn create_pixi_justfile() -> String { pixi run run-ruff-format @test: - -pixi run run-pytest + pixi run run-pytest @install: pixi install @@ -1113,19 +1455,12 @@ fn create_pixi_justfile() -> String { } fn save_justfile(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); let file_path = project_info.base_dir().join("justfile"); let content = match &project_info.project_manager { - ProjectManager::Poetry => create_poetry_justfile(&module), - ProjectManager::Maturin => { - if let Some(pyo3_python_manager) = &project_info.pyo3_python_manager { - create_pyo3_justfile(&module, pyo3_python_manager) - } else { - bail!("A PyO3 Python manager is required for maturin"); - } - } - ProjectManager::Setuptools => create_setuptools_justfile(&module), - ProjectManager::Uv => create_uv_justfile(&module), + ProjectManager::Poetry => create_poetry_justfile(project_info), + ProjectManager::Maturin => create_pyo3_justfile(project_info)?, + ProjectManager::Setuptools => create_setuptools_justfile(project_info), + ProjectManager::Uv => create_uv_justfile(project_info), ProjectManager::Pixi => create_pixi_justfile(), }; @@ -1215,6 +1550,16 @@ pub fn generate_project(project_info: &ProjectInfo) -> Result<()> { _ => (), } + #[cfg(feature = "fastapi")] + if project_info.use_continuous_deployment { + if project_info.is_fastapi_project && save_deploy_files(project_info).is_err() { + bail!("Error creating deploy files"); + } else if save_pypi_publish_file(project_info).is_err() { + bail!("Error creating PyPI publish file"); + } + } + + #[cfg(not(feature = "fastapi"))] if project_info.use_continuous_deployment && save_pypi_publish_file(project_info).is_err() { bail!("Error creating PyPI publish file"); } @@ -1267,6 +1612,9 @@ mod tests { use insta::assert_yaml_snapshot; use tmp_path::tmp_path; + #[cfg(feature = "fastapi")] + use crate::project_info::DatabaseManager; + #[tmp_path] fn project_info_dummy() -> ProjectInfo { ProjectInfo { @@ -1302,6 +1650,12 @@ mod tests { docs_info: None, download_latest_packages: false, project_root_dir: Some(tmp_path), + + #[cfg(feature = "fastapi")] + is_fastapi_project: false, + + #[cfg(feature = "fastapi")] + database_manager: None, } } @@ -1943,6 +2297,60 @@ mod tests { assert_yaml_snapshot!(content); } + #[cfg(feature = "fastapi")] + #[test] + fn test_save_justfile_poetry_fastapi_project() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Poetry; + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + create_dir_all(&base).unwrap(); + let expected_file = base.join("justfile"); + save_justfile(&project_info).unwrap(); + + 
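+        // The snapshot assertions below use insta's assert_yaml_snapshot!; when a
+        // justfile template changes intentionally, the stored .snap files can be
+        // refreshed with `cargo insta review` (or `cargo insta accept`).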
assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + + #[test] + fn test_save_justfile_uv() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Uv; + let base = project_info.base_dir(); + create_dir_all(&base).unwrap(); + let expected_file = base.join("justfile"); + save_justfile(&project_info).unwrap(); + + assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + + #[cfg(feature = "fastapi")] + #[test] + fn test_save_justfile_uv_fastapi_project() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Uv; + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + create_dir_all(&base).unwrap(); + let expected_file = base.join("justfile"); + save_justfile(&project_info).unwrap(); + + assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + #[test] fn test_save_justfile_setuptools() { let mut project_info = project_info_dummy(); @@ -1959,6 +2367,25 @@ mod tests { assert_yaml_snapshot!(content); } + #[cfg(feature = "fastapi")] + #[test] + fn test_save_justfile_setuptools_fastapi_project() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Setuptools; + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + create_dir_all(&base).unwrap(); + let expected_file = base.join("justfile"); + save_justfile(&project_info).unwrap(); + + assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + #[test] fn test_save_justfile_maturin() { let mut project_info = project_info_dummy(); @@ -1976,6 +2403,25 @@ mod tests { assert_yaml_snapshot!(content); } + #[cfg(feature = "fastapi")] + #[test] + fn test_save_justfile_maturin_fastapi_project() { + let mut project_info = project_info_dummy(); + project_info.project_manager = ProjectManager::Maturin; + project_info.is_fastapi_project = true; + project_info.database_manager = Some(DatabaseManager::AsyncPg); + let base = project_info.base_dir(); + create_dir_all(&base).unwrap(); + let expected_file = base.join("justfile"); + save_justfile(&project_info).unwrap(); + + assert!(expected_file.is_file()); + + let content = std::fs::read_to_string(expected_file).unwrap(); + + assert_yaml_snapshot!(content); + } + #[test] fn test_save_readme_file() { let project_info = project_info_dummy(); diff --git a/src/project_info.rs b/src/project_info.rs index 08296aae..c367f103 100644 --- a/src/project_info.rs +++ b/src/project_info.rs @@ -11,6 +11,9 @@ use time::OffsetDateTime; use crate::config::Config; +#[cfg(feature = "fastapi")] +use crate::utils::is_allowed_fastapi_python_version; + #[derive(Clone, Debug, Default, Deserialize, Serialize, ValueEnum, PartialEq, Eq)] pub enum DependabotSchedule { #[default] @@ -111,6 +114,40 @@ impl fmt::Display for ProjectManager { } } +#[cfg(feature = "fastapi")] +#[derive(Clone, Debug, Default, Deserialize, Serialize, ValueEnum, PartialEq, Eq)] +pub enum Database { + #[default] + Postgresql, +} + +#[cfg(feature = "fastapi")] +impl fmt::Display for Database { + fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result { + match self { + Self::Postgresql => write!(f, "PostgreSQL"), + } + } +} + +#[cfg(feature = "fastapi")] +#[derive(Clone, Debug, Default, Deserialize, Serialize, ValueEnum, PartialEq, Eq)] +pub enum DatabaseManager { + #[default] + AsyncPg, + SqlAlchemy, +} + +#[cfg(feature = "fastapi")] +impl fmt::Display for DatabaseManager { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::AsyncPg => write!(f, "asyncpg"), + Self::SqlAlchemy => write!(f, "SQLAlchemy"), + } + } +} + struct Prompt { prompt_text: String, default: Option, @@ -182,6 +219,15 @@ pub struct ProjectInfo { pub docs_info: Option, pub download_latest_packages: bool, pub project_root_dir: Option, + + #[cfg(feature = "fastapi")] + pub is_fastapi_project: bool, + + // Note: For future use when other databases are supported + /* #[cfg(feature = "fastapi")] + pub database: Option, */ + #[cfg(feature = "fastapi")] + pub database_manager: Option, } impl ProjectInfo { @@ -191,6 +237,15 @@ impl ProjectInfo { None => PathBuf::from(&self.project_slug), } } + + pub fn module_name(&self) -> String { + self.source_dir.replace([' ', '-'], "_") + } + + pub fn source_dir_path(&self) -> PathBuf { + let base = self.base_dir(); + base.join(&self.source_dir) + } } /// `selected_default` is the value passed from the saved `default` values. default is used if @@ -284,7 +339,7 @@ fn dependabot_day_prompt(default: Option) -> Result> { None => "1".to_string(), }; let prompt_text = - "Dependabot Day\n 1 - Monday\n 2 - Tuesday\n 3 - Wednesday\n 4 - Thursday\n 5 - Friday\n 6 - Saturday\n 7 - Sunday\n Choose from[1, 2, 3, 4, 5, 6, 7]" + "Dependabot Day\n 1 - Monday\n 2 - Tuesday\n 3 - Wednesday\n 4 - Thursday\n 5 - Friday\n 6 - Saturday\n 7 - Sunday\n Choose from [1, 2, 3, 4, 5, 6, 7]" .to_string(); let prompt = Prompt { prompt_text, @@ -323,7 +378,7 @@ fn dependabot_schedule_prompt( None => "1".to_string(), }; let prompt_text = - "Dependabot Schedule\n 1 - Daily\n 2 - Weekly\n 3 - Monthly\n Choose from[1, 2, 3]" + "Dependabot Schedule\n 1 - Daily\n 2 - Weekly\n 3 - Monthly\n Choose from [1, 2, 3]" .to_string(); let prompt = Prompt { prompt_text, @@ -354,7 +409,7 @@ fn project_manager_prompt(default: Option) -> Result "poetry".to_string(), }; let prompt_text = - "Project Manager\n 1 - uv\n 2 - Poetry\n 3 - Maturin\n 4 - setuptools\n 5 - Pixi\n Choose from[1, 2, 3, 4, 5]" + "Project Manager\n 1 - uv\n 2 - Poetry\n 3 - Maturin\n 4 - setuptools\n 5 - Pixi\n Choose from [1, 2, 3, 4, 5]" .to_string(); let prompt = Prompt { prompt_text, @@ -386,7 +441,7 @@ fn pyo3_python_manager_prompt(default: Option) -> Result "Uv".to_string(), }; let prompt_text = - "PyO3 Python Manager\n 1 - uv\n 2 - setuptools\n Choose from[1, 2]".to_string(); + "PyO3 Python Manager\n 1 - uv\n 2 - setuptools\n Choose from [1, 2]".to_string(); let prompt = Prompt { prompt_text, default: Some(default_str), @@ -453,6 +508,32 @@ fn copyright_year_prompt(license: &LicenseType, default: Option) -> Resu Ok(input) } +/* #[cfg(feature = "fastapi")] +fn database_manager_prompt(default: Option) -> Result { + let default_str = match default { + Some(d) => match d { + DatabaseManager::AsyncPg => "1".to_string(), + DatabaseManager::SqlAlchemy => "2".to_string(), + }, + None => "AsyncPg".to_string(), + }; + let prompt_text = + "Database Manager\n 1 - asyncpg\n 2 - SQLAlchemy Choose from [1, 2]".to_string(); + let prompt = Prompt { + prompt_text, + default: Some(default_str), + }; + let input = prompt.show_prompt()?; + + if input == "1" { + 
Ok(DatabaseManager::AsyncPg) + } else if input == "2" || input.is_empty() { + Ok(DatabaseManager::SqlAlchemy) + } else { + bail!("Invalid selection"); + } +} */ + pub fn get_project_info(use_defaults: bool) -> Result { let config = Config::default().load_config(); let project_name = string_prompt("Project Name".to_string(), None)?; @@ -503,6 +584,15 @@ pub fn get_project_info(use_defaults: bool) -> Result { let default_version = "0.1.0".to_string(); let version = default_or_prompt_string("Version".to_string(), Some(default_version), use_defaults)?; + + #[cfg(feature = "fastapi")] + let is_fastapi_project = default_or_prompt_bool( + "FastAPI Project\n 1 - Yes\n 2 - No\n Choose from [1, 2]".to_string(), + config.is_fastapi_project, + false, + use_defaults, + )?; + let python_version_default = match config.python_version { Some(python) => python, None => "3.13".to_string(), @@ -513,16 +603,46 @@ pub fn get_project_info(use_defaults: bool) -> Result { python_version_prompt(python_version_default)? }; - let min_python_version_default = match config.min_python_version { - Some(python) => python, - None => "3.9".to_string(), + #[cfg(feature = "fastapi")] + if is_fastapi_project && !is_allowed_fastapi_python_version(&python_version)? { + bail!("The minimum supported Python version for FastAPI projects is 3.11"); + } + + let min_python_version_default = { + #[cfg(feature = "fastapi")] + { + if is_fastapi_project { + match config.min_python_version { + Some(python) => python, + None => "3.11".to_string(), + } + } else { + match config.min_python_version { + Some(python) => python, + None => "3.9".to_string(), + } + } + } + #[cfg(not(feature = "fastapi"))] + { + match config.min_python_version { + Some(python) => python, + None => "3.9".to_string(), + } + } }; + let min_python_version = if use_defaults { min_python_version_default } else { python_min_version_prompt(min_python_version_default)? }; + #[cfg(feature = "fastapi")] + if is_fastapi_project && !is_allowed_fastapi_python_version(&min_python_version)? { + bail!("The minimum supported Python version for FastAPI projects is 3.11"); + } + let github_actions_python_test_version_default = match config.github_actions_python_test_versions { Some(versions) => versions, @@ -543,13 +663,29 @@ pub fn get_project_info(use_defaults: bool) -> Result { versions } } else { - vec![ - "3.9".to_string(), - "3.10".to_string(), - "3.11".to_string(), - "3.12".to_string(), - "3.13".to_string(), - ] + #[cfg(feature = "fastapi")] + if is_fastapi_project { + vec!["3.11".to_string(), "3.12".to_string(), "3.13".to_string()] + } else { + vec![ + "3.9".to_string(), + "3.10".to_string(), + "3.11".to_string(), + "3.12".to_string(), + "3.13".to_string(), + ] + } + + #[cfg(not(feature = "fastapi"))] + { + vec![ + "3.9".to_string(), + "3.10".to_string(), + "3.11".to_string(), + "3.12".to_string(), + "3.13".to_string(), + ] + } } } }; @@ -566,6 +702,11 @@ pub fn get_project_info(use_defaults: bool) -> Result { project_manager_prompt(Some(default))? 
}; + #[cfg(feature = "fastapi")] + if is_fastapi_project && project_manager == ProjectManager::Pixi { + bail!("Pixi is not currently supported for FastAPI projects"); + } + let pyo3_python_manager = if project_manager == ProjectManager::Maturin { if use_defaults { if let Some(default) = config.pyo3_python_manager { @@ -582,6 +723,7 @@ pub fn get_project_info(use_defaults: bool) -> Result { None }; + #[cfg(not(feature = "fastapi"))] let is_application = default_or_prompt_bool( "Application or Library\n 1 - Application\n 2 - Library\n Choose from [1, 2]" .to_string(), @@ -589,12 +731,57 @@ pub fn get_project_info(use_defaults: bool) -> Result { true, use_defaults, )?; - let is_async_project = default_or_prompt_bool( - "Async Project\n 1 - Yes\n 2 - No\n Choose from [1, 2]".to_string(), - config.is_async_project, - false, - use_defaults, - )?; + + #[cfg(feature = "fastapi")] + let is_application = if is_fastapi_project { + true + } else { + default_or_prompt_bool( + "Application or Library\n 1 - Application\n 2 - Library\n Choose from [1, 2]" + .to_string(), + config.is_application, + true, + use_defaults, + )? + }; + + #[cfg(feature = "fastapi")] + let database_manager = Some(DatabaseManager::AsyncPg); + /* let database_manager = if is_fastapi_project { + if use_defaults { + Some(config.database_manager.unwrap_or_default()) + } else { + let default = config.database_manager.unwrap_or_default(); + Some(database_manager_prompt(Some(default))?) + } + } else { + None + }; */ + + let is_async_project = { + #[cfg(feature = "fastapi")] + { + if is_fastapi_project { + true + } else { + default_or_prompt_bool( + "Async Project\n 1 - Yes\n 2 - No\n Choose from [1, 2]".to_string(), + config.is_async_project, + false, + use_defaults, + )? + } + } + #[cfg(not(feature = "fastapi"))] + { + default_or_prompt_bool( + "Async Project\n 1 - Yes\n 2 - No\n Choose from [1, 2]".to_string(), + config.is_async_project, + false, + use_defaults, + )? + } + }; let max_line_length = if use_defaults { config.max_line_length.unwrap_or(100) @@ -641,6 +828,20 @@ pub fn get_project_info(use_defaults: bool) -> Result { true, use_defaults, )?; + + #[cfg(feature = "fastapi")] + let use_multi_os_ci = if is_fastapi_project { + false + } else { + default_or_prompt_bool( + "Use Multi OS CI\n 1 - Yes\n 2 - No\n Choose from [1, 2]".to_string(), + config.use_multi_os_ci, + true, + use_defaults, + )? 
+ }; + + #[cfg(not(feature = "fastapi"))] let use_multi_os_ci = default_or_prompt_bool( "Use Multi OS CI\n 1 - Yes\n 2 - No\n Choose from [1, 2]".to_string(), config.use_multi_os_ci, @@ -702,6 +903,15 @@ pub fn get_project_info(use_defaults: bool) -> Result { docs_info, download_latest_packages: false, project_root_dir: None, + + #[cfg(feature = "fastapi")] + is_fastapi_project, + + // Note: For future use when other databases are supported + /* #[cfg(feature = "fastapi")] + database: Some(Database::Postgresql), */ + #[cfg(feature = "fastapi")] + database_manager, }) } diff --git a/src/python_files.rs b/src/python_files.rs index ce0ad899..aebc1cbb 100644 --- a/src/python_files.rs +++ b/src/python_files.rs @@ -2,9 +2,11 @@ use std::fs::File; use anyhow::{bail, Result}; -use crate::file_manager::save_file_with_content; -use crate::project_info::{ProjectInfo, ProjectManager}; -use crate::utils::is_python_312_or_greater; +use crate::{ + file_manager::save_file_with_content, + project_info::{ProjectInfo, ProjectManager}, + utils::is_python_312_or_greater, +}; fn create_dunder_main_file(module: &str, is_async_project: bool) -> String { let mut file = "from __future__ import annotations # pragma: no cover\n\n".to_string(); @@ -66,8 +68,8 @@ if __name__ == "__main__": } fn save_main_files(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); - let src = project_info.base_dir().join(&module); + let module = project_info.module_name(); + let src = project_info.source_dir_path(); let main = src.join("main.py"); let main_content = create_main_file(project_info.is_async_project); @@ -104,7 +106,7 @@ def test_main(): } fn save_main_test_file(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); + let module = project_info.module_name(); let file_path = project_info.base_dir().join("tests/test_main.py"); let content = create_main_test_file(&module, project_info.is_async_project); @@ -125,7 +127,7 @@ def test_sum_as_string(): } fn save_pyo3_test_file(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); + let module = project_info.module_name(); let file_path = project_info .base_dir() .join(format!("tests/test_{}.py", &module)); @@ -165,7 +167,7 @@ fn save_test_init_file(project_info: &ProjectInfo) -> Result<()> { } fn save_project_init_file(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); + let module = project_info.module_name(); let file_path = project_info .base_dir() .join(format!("{}/__init__.py", &module)); @@ -187,7 +189,7 @@ def sum_as_string(a: int, b: int) -> str: ... 
} pub fn save_pyi_file(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); + let module = project_info.module_name(); let file_path = project_info .base_dir() .join(format!("{}/_{}.pyi", &module, &module)); @@ -203,7 +205,7 @@ fn create_version_file(version: &str) -> String { } fn save_version_file(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); + let module = project_info.module_name(); let file_path = project_info .base_dir() .join(format!("{}/_version.py", &module)); @@ -267,7 +269,7 @@ else: } fn save_version_test_file(project_info: &ProjectInfo) -> Result<()> { - let module = project_info.source_dir.replace([' ', '-'], "_"); + let module = project_info.module_name(); let file_path = project_info.base_dir().join("tests/test_version.py"); let content = create_version_test_file( &module, @@ -291,6 +293,7 @@ pub fn generate_python_files(project_info: &ProjectInfo) -> Result<()> { bail!("Error creating __init__.py file"); } + #[cfg(not(feature = "fastapi"))] if project_info.is_application { if save_main_files(project_info).is_err() { bail!("Error creating main files"); @@ -301,6 +304,17 @@ pub fn generate_python_files(project_info: &ProjectInfo) -> Result<()> { } } + #[cfg(feature = "fastapi")] + if project_info.is_application && !project_info.is_fastapi_project { + if save_main_files(project_info).is_err() { + bail!("Error creating main files"); + } + + if save_main_test_file(project_info).is_err() { + bail!("Error creating main test file"); + } + } + if project_info.project_manager != ProjectManager::Maturin && save_version_file(project_info).is_err() { @@ -367,6 +381,12 @@ mod tests { docs_info: None, download_latest_packages: false, project_root_dir: Some(tmp_path), + + #[cfg(feature = "fastapi")] + is_fastapi_project: false, + + #[cfg(feature = "fastapi")] + database_manager: None, } } diff --git a/src/rust_files.rs b/src/rust_files.rs index 7e7b187e..de078186 100644 --- a/src/rust_files.rs +++ b/src/rust_files.rs @@ -2,10 +2,12 @@ use anyhow::Result; use colored::*; use rayon::prelude::*; -use crate::file_manager::save_file_with_content; -use crate::licenses::license_str; -use crate::package_version::{LatestVersion, RustPackageVersion}; -use crate::project_info::{LicenseType, ProjectInfo}; +use crate::{ + file_manager::save_file_with_content, + licenses::license_str, + package_version::{LatestVersion, RustPackageVersion}, + project_info::{LicenseType, ProjectInfo}, +}; fn build_latest_dependencies(download_latest_packages: bool) -> String { let mut version_string = String::new(); @@ -97,8 +99,8 @@ pub fn save_cargo_toml_file(project_info: &ProjectInfo) -> Result<()> { Ok(()) } -fn create_lib_file(source_dir: &str) -> String { - let module = source_dir.replace([' ', '-'], "_"); +fn create_lib_file(project_info: &ProjectInfo) -> String { + let module = project_info.module_name(); format!( r#"use pyo3::prelude::*; @@ -119,7 +121,7 @@ fn _{module}(_py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> {{ pub fn save_lib_file(project_info: &ProjectInfo) -> Result<()> { let file_path = project_info.base_dir().join("src/lib.rs"); - let content = create_lib_file(&project_info.source_dir); + let content = create_lib_file(project_info); save_file_with_content(&file_path, &content)?; @@ -169,6 +171,12 @@ mod tests { docs_info: None, download_latest_packages: false, project_root_dir: Some(tmp_path), + + #[cfg(feature = "fastapi")] + is_fastapi_project: false, + + #[cfg(feature = 
"fastapi")] + database_manager: None, } } diff --git a/src/snapshots/python_project__github_actions__tests__save_ci_testing_fastapi_file_pyo3.snap b/src/snapshots/python_project__github_actions__tests__save_ci_testing_fastapi_file_pyo3.snap new file mode 100644 index 00000000..81a01a0f --- /dev/null +++ b/src/snapshots/python_project__github_actions__tests__save_ci_testing_fastapi_file_pyo3.snap @@ -0,0 +1,5 @@ +--- +source: src/github_actions.rs +expression: content +--- +"name: Testing\n\non:\n push:\n branches:\n - main\n pull_request:\nenv:\n CARGO_TERM_COLOR: always\n RUST_BACKTRACE: 1\n RUSTFLAGS: \"-D warnings\"\n PYTHON_VERSION: \"3.9\"\n SECRET_KEY: \"someKey\"\n PRODUCTION_MODE: false\n FIRST_SUPERUSER_EMAIL: \"some@email.com\"\n FIRST_SUPERUSER_PASSWORD: \"somePassword1!\"\n FIRST_SUPERUSER_NAME: \"Super User\"\n POSTGRES_HOST: \"127.0.0.1\"\n POSTGRES_USER: \"postgres\"\n POSTGRES_PASSWORD: \"test_password\"\n POSTGRES_DB: \"test_db\"\n VALKEY_HOST: \"127.0.0.1\"\n VALKEY_PASSWORD: \"test_password\"\n MEILISEARCH_HOST: http://127.0.0.1\n STACK_NAME: \"test-stack\"\n DOMAIN: \"127.0.0.1\"\n LOG_LEVEL: \"DEBUG\"\n CI: true\njobs:\n clippy:\n name: Clippy\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install Rust\n uses: dtolnay/rust-toolchain@stable\n - name: Cache dependencies\n uses: Swatinem/rust-cache@v2\n - name: Run cargo clippy\n run: cargo clippy --all-targets -- --deny warnings\n fmt:\n name: Rustfmt\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install Rust\n uses: dtolnay/rust-toolchain@stable\n - name: Cache dependencies\n uses: Swatinem/rust-cache@v2\n - name: Run cargo fmt\n run: cargo fmt --all -- --check\n python-linting:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install uv\n uses: astral-sh/setup-uv@v6\n with:\n enable-cache: true\n - name: Set up Python\n uses: actions/setup-python@v6\n with:\n python-version: ${{ env.PYTHON_VERSION }}\n - name: Install Dependencies\n run: |\n uv sync --frozen\n uv run maturin build\n - name: Ruff format check\n run: uv run ruff format my_project tests --check\n - name: Lint with ruff\n run: uv run ruff check .\n - name: mypy check\n run: uv run mypy my_project tests\n testing:\n strategy:\n fail-fast: false\n matrix:\n python-version: [\"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install Rust\n uses: dtolnay/rust-toolchain@stable\n - name: Cache dependencies\n uses: Swatinem/rust-cache@v2\n - name: Install sqlx-cli\n run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres\n - name: Install uv\n uses: astral-sh/setup-uv@v6\n with:\n enable-cache: true\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v6\n with:\n python-version: ${{ matrix.python-version }}\n - name: Install Dependencies\n run: |\n uv sync --frozen\n uv run maturin build\n - name: make .env\n run: touch .env\n - name: Start docker containers\n run: docker compose up db valkey migrations -d\n - name: Test with pytest\n run: uv run pytest -n auto\n" diff --git a/src/snapshots/python_project__github_actions__tests__save_deploy_files-2.snap b/src/snapshots/python_project__github_actions__tests__save_deploy_files-2.snap new file mode 100644 index 00000000..c3e16eb6 --- /dev/null +++ b/src/snapshots/python_project__github_actions__tests__save_deploy_files-2.snap @@ -0,0 +1,5 @@ +--- +source: src/github_actions.rs +expression: production_content +--- +"name: Deploy 
to Testing\non:\n push:\n branches:\n - main\n workflow_dispatch:\njobs:\n deploy:\n runs-on:\n - self-hosted\n - testing\n env:\n ENVIRONMENT: testing\n DOMAIN: ${{ secrets.DOMAIN_TESTING }}\n STACK_NAME: ${{ secrets.STACK_NAME_TESTING }}\n SECRET_KEY: ${{ secrets.SECRET_KEY }}\n FIRST_SUPERUSER_EMAIL: ${{ secrets.FIRST_SUPERUSER_EMAIL }}\n FIRST_SUPERUSER_PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }}\n FIRST_SUPERUSER_NAME: ${{ secrets.FIRST_SUPERUSER_NAME }}\n POSTGRES_HOST: ${{ secrets.POSTGRES_HOST }}\n POSTGRES_USER: ${{ secrets.POSTGRES_USER }}\n POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}\n POSTGRES_DB: ${{ secrets.POSTGRES_DB }}\n VALKEY_HOST: ${{ secrets.VALKEY_HOST }}\n VALKEY_PASSWORD: ${{ secrets.VALKEY_PASSWORD }}\n HANDWRITING_OCR_TOKEN: ${{ secrets.HANDWRITING_OCR_TOKEN }}\n USERNAME: ${{ secrets.FIRST_SUPERUSER_EMAIL }}\n PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }}\n EMAIL: ${{ secrets.FIRST_SUPERUSER_EMAIL }}\n ERROR_NOTIFICATION_URL: ${{ secrets.ERROR_NOTIFICATION_URL_TESTING }}\n LOG_LEVEL: \"DEBUG\"\n steps:\n - name: Fix permissions\n run: |\n if [ -d \"./data\" ]; then\n sudo chown -R $USER:$USER ./data\n fi\n - name: Checkout\n uses: actions/checkout@v5\n - name: Create .env file\n run: |\n HASHED_PASSWORD=$(openssl passwd -apr1 \"${PASSWORD}\" | sed 's/\\$/\\$\\$/g')\n cat > .env << EOF\n ENVIRONMENT=${ENVIRONMENT}\n DOMAIN=${DOMAIN}\n STACK_NAME=${STACK_NAME}\n SECRET_KEY=${SECRET_KEY}\n FIRST_SUPERUSER_EMAIL=${FIRST_SUPERUSER_EMAIL}\n FIRST_SUPERUSER_PASSWORD=${FIRST_SUPERUSER_PASSWORD}\n FIRST_SUPERUSER_NAME=${FIRST_SUPERUSER_NAME}\n POSTGRES_HOST=${POSTGRES_HOST}\n POSTGRES_USER=${POSTGRES_USER}\n POSTGRES_PASSWORD=${POSTGRES_PASSWORD}\n POSTGRES_DB=${POSTGRES_DB}\n VALKEY_HOST=${VALKEY_HOST}\n VALKEY_PASSWORD=${VALKEY_PASSWORD}\n HANDWRITING_OCR_TOKEN=${HANDWRITING_OCR_TOKEN}\n USERNAME=${FIRST_SUPERUSER_EMAIL}\n PASSWORD=${FIRST_SUPERUSER_PASSWORD}\n HASHED_PASSWORD=${HASHED_PASSWORD}\n EMAIL=${FIRST_SUPERUSER_EMAIL}\n ERROR_NOTIFICATION_URL=${ERROR_NOTIFICATION_URL}\n LOG_LEVEL=${LOG_LEVEL}\n EOF\n - name: Build and restart containers\n timeout-minutes: 15\n run: |\n docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_TESTING }} build\n docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_TESTING }} up -d\n" diff --git a/src/snapshots/python_project__github_actions__tests__save_deploy_files.snap b/src/snapshots/python_project__github_actions__tests__save_deploy_files.snap new file mode 100644 index 00000000..9df9332c --- /dev/null +++ b/src/snapshots/python_project__github_actions__tests__save_deploy_files.snap @@ -0,0 +1,5 @@ +--- +source: src/github_actions.rs +expression: test_content +--- +"name: Deploy to Testing\non:\n push:\n branches:\n - main\n workflow_dispatch:\njobs:\n deploy:\n runs-on:\n - self-hosted\n - testing\n env:\n ENVIRONMENT: testing\n DOMAIN: ${{ secrets.DOMAIN_TESTING }}\n STACK_NAME: ${{ secrets.STACK_NAME_TESTING }}\n SECRET_KEY: ${{ secrets.SECRET_KEY }}\n FIRST_SUPERUSER_EMAIL: ${{ secrets.FIRST_SUPERUSER_EMAIL }}\n FIRST_SUPERUSER_PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }}\n FIRST_SUPERUSER_NAME: ${{ secrets.FIRST_SUPERUSER_NAME }}\n POSTGRES_HOST: ${{ secrets.POSTGRES_HOST }}\n POSTGRES_USER: ${{ secrets.POSTGRES_USER }}\n POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}\n POSTGRES_DB: ${{ secrets.POSTGRES_DB }}\n VALKEY_HOST: ${{ secrets.VALKEY_HOST }}\n VALKEY_PASSWORD: ${{ secrets.VALKEY_PASSWORD }}\n HANDWRITING_OCR_TOKEN: ${{ 
secrets.HANDWRITING_OCR_TOKEN }}\n USERNAME: ${{ secrets.FIRST_SUPERUSER_EMAIL }}\n PASSWORD: ${{ secrets.FIRST_SUPERUSER_PASSWORD }}\n EMAIL: ${{ secrets.FIRST_SUPERUSER_EMAIL }}\n ERROR_NOTIFICATION_URL: ${{ secrets.ERROR_NOTIFICATION_URL_TESTING }}\n LOG_LEVEL: \"DEBUG\"\n steps:\n - name: Fix permissions\n run: |\n if [ -d \"./data\" ]; then\n sudo chown -R $USER:$USER ./data\n fi\n - name: Checkout\n uses: actions/checkout@v5\n - name: Create .env file\n run: |\n HASHED_PASSWORD=$(openssl passwd -apr1 \"${PASSWORD}\" | sed 's/\\$/\\$\\$/g')\n cat > .env << EOF\n ENVIRONMENT=${ENVIRONMENT}\n DOMAIN=${DOMAIN}\n STACK_NAME=${STACK_NAME}\n SECRET_KEY=${SECRET_KEY}\n FIRST_SUPERUSER_EMAIL=${FIRST_SUPERUSER_EMAIL}\n FIRST_SUPERUSER_PASSWORD=${FIRST_SUPERUSER_PASSWORD}\n FIRST_SUPERUSER_NAME=${FIRST_SUPERUSER_NAME}\n POSTGRES_HOST=${POSTGRES_HOST}\n POSTGRES_USER=${POSTGRES_USER}\n POSTGRES_PASSWORD=${POSTGRES_PASSWORD}\n POSTGRES_DB=${POSTGRES_DB}\n VALKEY_HOST=${VALKEY_HOST}\n VALKEY_PASSWORD=${VALKEY_PASSWORD}\n HANDWRITING_OCR_TOKEN=${HANDWRITING_OCR_TOKEN}\n USERNAME=${FIRST_SUPERUSER_EMAIL}\n PASSWORD=${FIRST_SUPERUSER_PASSWORD}\n HASHED_PASSWORD=${HASHED_PASSWORD}\n EMAIL=${FIRST_SUPERUSER_EMAIL}\n ERROR_NOTIFICATION_URL=${ERROR_NOTIFICATION_URL}\n LOG_LEVEL=${LOG_LEVEL}\n EOF\n - name: Build and restart containers\n timeout-minutes: 15\n run: |\n docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_TESTING }} build\n docker compose -f docker-compose.yml --project-name ${{ secrets.STACK_NAME_TESTING }} up -d\n" diff --git a/src/snapshots/python_project__github_actions__tests__save_poetry_ci_testing_fastapi_file.snap b/src/snapshots/python_project__github_actions__tests__save_poetry_ci_testing_fastapi_file.snap new file mode 100644 index 00000000..4d48557a --- /dev/null +++ b/src/snapshots/python_project__github_actions__tests__save_poetry_ci_testing_fastapi_file.snap @@ -0,0 +1,5 @@ +--- +source: src/github_actions.rs +expression: content +--- +"name: Testing\n\non:\n push:\n branches:\n - main\n pull_request:\nenv:\n PYTHON_VERSION: \"3.9\"\n SECRET_KEY: \"someKey\"\n PRODUCTION_MODE: false\n FIRST_SUPERUSER_EMAIL: \"some@email.com\"\n FIRST_SUPERUSER_PASSWORD: \"somePassword1!\"\n FIRST_SUPERUSER_NAME: \"Super User\"\n POSTGRES_HOST: \"127.0.0.1\"\n POSTGRES_USER: \"postgres\"\n POSTGRES_PASSWORD: \"test_password\"\n POSTGRES_DB: \"test_db\"\n VALKEY_HOST: \"127.0.0.1\"\n VALKEY_PASSWORD: \"test_password\"\n MEILISEARCH_HOST: http://127.0.0.1\n STACK_NAME: \"test-stack\"\n DOMAIN: \"127.0.0.1\"\n LOG_LEVEL: \"DEBUG\"\n CI: true\njobs:\n linting:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install Rust\n uses: dtolnay/rust-toolchain@stable\n - name: Cache dependencies\n uses: Swatinem/rust-cache@v2\n - name: Install sqlx-cli\n run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres\n - name: Install Poetry\n run: pipx install poetry\n - name: Configure poetry\n run: |\n poetry config virtualenvs.create true\n poetry config virtualenvs.in-project true\n - name: Set up Python\n uses: actions/setup-python@v6\n with:\n python-version: ${{ env.PYTHON_VERSION }}\n cache: \"poetry\"\n - name: Install Dependencies\n run: poetry install\n - name: Ruff format check\n run: poetry run ruff format my_project tests --check\n - name: Lint with ruff\n run: poetry run ruff check .\n - name: mypy check\n run: poetry run mypy .\n testing:\n strategy:\n fail-fast: false\n matrix:\n python-version: [\"3.9\", \"3.10\", \"3.11\", 
\"3.12\"]\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install Poetry\n run: pipx install poetry\n - name: Configure poetry\n run: |\n poetry config virtualenvs.create true\n poetry config virtualenvs.in-project true\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v6\n with:\n python-version: ${{ matrix.python-version }}\n cache: \"poetry\"\n - name: Install Dependencies\n run: poetry install\n - name: make .env\n run: touch .env\n - name: Start docker containers\n run: docker compose up db valkey migrations -d\n - name: Test with pytest\n run: poetry run pytest -n auto\n" diff --git a/src/snapshots/python_project__github_actions__tests__save_setuptools_ci_fastapi_file.snap b/src/snapshots/python_project__github_actions__tests__save_setuptools_ci_fastapi_file.snap new file mode 100644 index 00000000..8a913e2e --- /dev/null +++ b/src/snapshots/python_project__github_actions__tests__save_setuptools_ci_fastapi_file.snap @@ -0,0 +1,5 @@ +--- +source: src/github_actions.rs +expression: content +--- +"name: Testing\n\non:\n push:\n branches:\n - main\n pull_request:\nenv:\n PYTHON_VERSION: \"3.9\"\n SECRET_KEY: \"someKey\"\n PRODUCTION_MODE: false\n FIRST_SUPERUSER_EMAIL: \"some@email.com\"\n FIRST_SUPERUSER_PASSWORD: \"somePassword1!\"\n FIRST_SUPERUSER_NAME: \"Super User\"\n POSTGRES_HOST: \"127.0.0.1\"\n POSTGRES_USER: \"postgres\"\n POSTGRES_PASSWORD: \"test_password\"\n POSTGRES_DB: \"test_db\"\n VALKEY_HOST: \"127.0.0.1\"\n VALKEY_PASSWORD: \"test_password\"\n MEILISEARCH_HOST: http://127.0.0.1\n STACK_NAME: \"test-stack\"\n DOMAIN: \"127.0.0.1\"\n LOG_LEVEL: \"DEBUG\"\n CI: true\njobs:\n linting:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Set up Python\n uses: actions/setup-python@v6\n with:\n python-version: ${{ env.PYTHON_VERSION }}\n cache: \"pip\"\n - name: Install Dependencies\n run: |\n python -m pip install -U pip\n python -m pip install -r requirements-dev.txt\n - name: Ruff format check\n run: ruff format my_project tests --check\n - name: Lint with ruff\n run: ruff check .\n - name: mypy check\n run: mypy .\n testing:\n strategy:\n fail-fast: false\n matrix:\n python-version: [\"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install Rust\n uses: dtolnay/rust-toolchain@stable\n - name: Cache dependencies\n uses: Swatinem/rust-cache@v2\n - name: Install sqlx-cli\n run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v6\n with:\n python-version: ${{ matrix.python-version }}\n cache: \"pip\"\n - name: Install Dependencies\n run: |\n python -m pip install -U pip\n python -m pip install -r requirements-dev.txt\n - name: make .env\n run: touch .env\n - name: Start docker containers\n run: docker compose up db valkey migrations -d\n - name: Test with pytest\n run: python -m pytest -n auto\n" diff --git a/src/snapshots/python_project__github_actions__tests__save_uv_ci_testing_fastapi_file.snap b/src/snapshots/python_project__github_actions__tests__save_uv_ci_testing_fastapi_file.snap new file mode 100644 index 00000000..87965179 --- /dev/null +++ b/src/snapshots/python_project__github_actions__tests__save_uv_ci_testing_fastapi_file.snap @@ -0,0 +1,5 @@ +--- +source: src/github_actions.rs +expression: content +--- +"name: Testing\n\non:\n push:\n branches:\n - main\n pull_request:\nenv:\n PYTHON_VERSION: \"3.9\"\n SECRET_KEY: 
\"someKey\"\n PRODUCTION_MODE: false\n FIRST_SUPERUSER_EMAIL: \"some@email.com\"\n FIRST_SUPERUSER_PASSWORD: \"somePassword1!\"\n FIRST_SUPERUSER_NAME: \"Super User\"\n POSTGRES_HOST: \"127.0.0.1\"\n POSTGRES_USER: \"postgres\"\n POSTGRES_PASSWORD: \"test_password\"\n POSTGRES_DB: \"test_db\"\n VALKEY_HOST: \"127.0.0.1\"\n VALKEY_PASSWORD: \"test_password\"\n MEILISEARCH_HOST: http://127.0.0.1\n STACK_NAME: \"test-stack\"\n DOMAIN: \"127.0.0.1\"\n LOG_LEVEL: \"DEBUG\"\n CI: true\njobs:\n linting:\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install uv\n uses: astral-sh/setup-uv@v6\n with:\n enable-cache: true\n - name: Set up Python\n uses: actions/setup-python@v6\n with:\n python-version: ${{ env.PYTHON_VERSION }}\n - name: Install Dependencies\n run: uv sync --frozen\n - name: Ruff format check\n run: uv run ruff format my_project tests --check\n - name: Lint with ruff\n run: uv run ruff check .\n - name: mypy check\n run: uv run mypy .\n testing:\n strategy:\n fail-fast: false\n matrix:\n python-version: [\"3.9\", \"3.10\", \"3.11\", \"3.12\"]\n runs-on: ubuntu-latest\n steps:\n - uses: actions/checkout@v5\n - name: Install Rust\n uses: dtolnay/rust-toolchain@stable\n - name: Cache dependencies\n uses: Swatinem/rust-cache@v2\n - name: Install sqlx-cli\n run: cargo install sqlx-cli --no-default-features -F native-tls -F postgres\n - name: Install uv\n uses: astral-sh/setup-uv@v6\n with:\n enable-cache: true\n - name: Set up Python ${{ matrix.python-version }}\n uses: actions/setup-python@v6\n with:\n python-version: ${{ matrix.python-version }}\n - name: Install Dependencies\n run: uv sync --frozen\n - name: make .env\n run: touch .env\n - name: Start docker containers\n run: docker compose up db valkey migrations -d\n - name: Test with pytest\n run: uv run pytest -n auto\n" diff --git a/src/snapshots/python_project__project_generator__tests__create_pixi_pyproject_toml_mit_lib.snap b/src/snapshots/python_project__project_generator__tests__create_pixi_pyproject_toml_mit_lib.snap index 87f65d45..8972b74e 100644 --- a/src/snapshots/python_project__project_generator__tests__create_pixi_pyproject_toml_mit_lib.snap +++ b/src/snapshots/python_project__project_generator__tests__create_pixi_pyproject_toml_mit_lib.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- -"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[tool.pixi.project]\nchannels = [\"conda-forge\", \"bioconda\"]\nplatforms = [\"linux-64\", \"osx-arm64\", \"osx-64\", \"win-64\"]\n\n[tool.pixi.feature.dev.tasks]\nrun-mypy = \"mypy my_project tests\"\nrun-ruff-check = \"ruff check my_project tests\"\nrun-ruff-format = \"ruff format my_project tests\"\nrun-pytest = \"pytest -x\"\n\n\n[project.optional-dependencies]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.pixi.environments]\ndefault = {features = [], solve-group = \"default\"}\ndev = {features = [\"dev\"], solve-group = \"default\"}\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule 
= [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" +"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[tool.pixi.project]\nchannels = [\"conda-forge\", \"bioconda\"]\nplatforms = [\"linux-64\", \"osx-arm64\", \"osx-64\", \"win-64\"]\n\n[tool.pixi.feature.dev.tasks]\nrun-mypy = \"mypy my_project tests\"\nrun-ruff-check = \"ruff check my_project tests\"\nrun-ruff-format = \"ruff format my_project tests\"\nrun-pytest = \"pytest -x\"\n\n[project.optional-dependencies]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.pixi.environments]\ndefault = {features = [], solve-group = \"default\"}\ndev = {features = [\"dev\"], solve-group = \"default\"}\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__create_poetry_pyproject_toml_mit_lib.snap b/src/snapshots/python_project__project_generator__tests__create_poetry_pyproject_toml_mit_lib.snap index 8220d6e2..cc48d0b2 100644 --- a/src/snapshots/python_project__project_generator__tests__create_poetry_pyproject_toml_mit_lib.snap +++ b/src/snapshots/python_project__project_generator__tests__create_poetry_pyproject_toml_mit_lib.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[tool.poetry]\nname = 
\"my-project\"\nversion = \"1.0.0\"\ndescription = \"This is a test\"\nauthors = [\"Arthur Dent <authur@heartofgold.com>\"]\nlicense = \"MIT\"\nreadme = \"README.md\"\n\n[tool.poetry.dependencies]\npython = \"^3.9\"\n\n[tool.poetry.group.dev.dependencies]\nmypy = {version = \"1.0.0\", extras = [\"faster-cache\"]}\npre-commit = \"1.0.0\"\npytest = \"1.0.0\"\npytest-cov = \"1.0.0\"\nruff = \"1.0.0\"\ntomli = {version = \"1.0.0\", python = \"<3.11\"}\n\n[build-system]\nrequires = [\"poetry-core>=1.0.0\"]\nbuild-backend = \"poetry.core.masonry.api\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__create_pyproject_toml_async_project.snap b/src/snapshots/python_project__project_generator__tests__create_pyproject_toml_async_project.snap index b8c72a30..1a883717 100644 --- a/src/snapshots/python_project__project_generator__tests__create_pyproject_toml_async_project.snap +++ b/src/snapshots/python_project__project_generator__tests__create_pyproject_toml_async_project.snap @@ -2,4 +2,4 @@ source: src/project_generator.rs expression: content --- -"[tool.poetry]\nname = \"my-project\"\nversion = \"1.0.0\"\ndescription = \"This is a test\"\nauthors = [\"Arthur Dent <authur@heartofgold.com>\"]\nlicense = \"MIT\"\nreadme = \"README.md\"\n\n[tool.poetry.dependencies]\npython = \"^3.9\"\n\n[tool.poetry.group.dev.dependencies]\nmypy = {version = \"1.0.0\", extras = [\"faster-cache\"]}\npre-commit = \"1.0.0\"\npytest = \"1.0.0\"\npytest-asyncio = \"1.0.0\"\npytest-cov = \"1.0.0\"\nruff = \"1.0.0\"\ntomli = {version = \"1.0.0\", python = \"<3.11\"}\n\n[build-system]\nrequires = [\"poetry-core>=1.0.0\"]\nbuild-backend = \"poetry.core.masonry.api\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\nasyncio_mode = \"auto\"\nasyncio_default_fixture_loop_scope = \"function\"\nasyncio_default_test_loop_scope = \"function\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n \"ASYNC\", # flake8-async\n \n]\nignore=[\n #
Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" +"[tool.poetry]\nname = \"my-project\"\nversion = \"1.0.0\"\ndescription = \"This is a test\"\nauthors = [\"Arthur Dent <authur@heartofgold.com>\"]\nlicense = \"MIT\"\nreadme = \"README.md\"\n\n[tool.poetry.dependencies]\npython = \"^3.9\"\n\n[tool.poetry.group.dev.dependencies]\nmypy = {version = \"1.0.0\", extras = [\"faster-cache\"]}\npre-commit = \"1.0.0\"\npytest = \"1.0.0\"\npytest-asyncio = \"1.0.0\"\npytest-cov = \"1.0.0\"\nruff = \"1.0.0\"\ntomli = {version = \"1.0.0\", python = \"<3.11\"}\n\n[build-system]\nrequires = [\"poetry-core>=1.0.0\"]\nbuild-backend = \"poetry.core.masonry.api\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\nasyncio_mode = \"auto\"\nasyncio_default_fixture_loop_scope = \"function\"\nasyncio_default_test_loop_scope = \"function\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n \"ASYNC\", # flake8-async\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__create_setuptools_pyproject_toml_mit_lib.snap b/src/snapshots/python_project__project_generator__tests__create_setuptools_pyproject_toml_mit_lib.snap index f72345d1..718a79a6 100644 --- a/src/snapshots/python_project__project_generator__tests__create_setuptools_pyproject_toml_mit_lib.snap +++ b/src/snapshots/python_project__project_generator__tests__create_setuptools_pyproject_toml_mit_lib.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[build-system]\nrequires = [\"setuptools\", \"wheel\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { text = \"MIT\" }\nrequires-python = \">=3.9\"\ndynamic = [\"version\", \"readme\"]\ndependencies = []\n\n[tool.setuptools.dynamic]\nversion = {attr = \"my_project.__version__\"}\nreadme = {file = [\"README.md\"]}\n\n[tool.setuptools.packages.find]\ninclude = [\"my_project*\"]\n\n[tool.setuptools.package-data]\nmy_project = [\"py.typed\"]\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length
= 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__create_uv_pyproject_toml_mit_lib.snap b/src/snapshots/python_project__project_generator__tests__create_uv_pyproject_toml_mit_lib.snap index 40dbb799..7558bdf4 100644 --- a/src/snapshots/python_project__project_generator__tests__create_uv_pyproject_toml_mit_lib.snap +++ b/src/snapshots/python_project__project_generator__tests__create_uv_pyproject_toml_mit_lib.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_justfile_maturin.snap b/src/snapshots/python_project__project_generator__tests__save_justfile_maturin.snap index 5d49cad1..cbc2955d 100644 --- a/src/snapshots/python_project__project_generator__tests__save_justfile_maturin.snap +++ b/src/snapshots/python_project__project_generator__tests__save_justfile_maturin.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- -"@_default:\n just --list\n\n@lock:\n uv lock\n\n@lock-upgrade:\n uv lock --upgrade\n\n@develop:\n uv run maturin develop --uv\n\n@develop-release:\n uv run maturin develop -r --uv\n\n@install: && develop\n uv sync --frozen --all-extras\n\n@install-release: && develop-release\n uv sync --frozen --all-extras\n\n@lint:\n echo cargo check\n just 
--justfile {{justfile()}} check\n echo cargo clippy\n just --justfile {{justfile()}} clippy\n echo cargo fmt\n just --justfile {{justfile()}} fmt\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff check\n just --justfile {{justfile()}} ruff-check\n echo ruff formatting\n just --justfile {{justfile()}} ruff-format\n\n@check:\n cargo check\n\n@clippy:\n cargo clippy --all-targets\n\n@fmt:\n cargo fmt --all -- --check\n\n@mypy:\n uv run mypy my_project tests\n\n@ruff-check:\n uv run ruff check my_project tests --fix\n\n@ruff-format:\n uv run ruff format my_project tests\n\n@test *args=\"\":\n uv run pytest {{args}}\n" +"@_default:\n just --list\n\n@lint:\n echo cargo check\n just --justfile {{justfile()}} check\n echo cargo clippy\n just --justfile {{justfile()}} clippy\n echo cargo fmt\n just --justfile {{justfile()}} fmt\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff check\n just --justfile {{justfile()}} ruff-check\n echo ruff formatting\n just --justfile {{justfile()}} ruff-format\n\n@lock:\n uv lock\n\n@lock-upgrade:\n uv lock --upgrade\n\n@develop:\n uv run maturin develop --uv\n\n@develop-release:\n uv run maturin develop -r --uv\n\n@install: && develop\n uv sync --frozen --all-extras\n\n@install-release: && develop-release\n uv sync --frozen --all-extras\n\n@check:\n cargo check\n\n@clippy:\n cargo clippy --all-targets\n\n@fmt:\n cargo fmt --all -- --check\n\n@mypy:\n uv run mypy my_project tests\n\n@ruff-check:\n uv run ruff check my_project tests --fix\n\n@ruff-format:\n uv run ruff format my_project tests\n\n@test *args=\"\":\n uv run pytest {{args}}\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_justfile_maturin_fastapi_project.snap b/src/snapshots/python_project__project_generator__tests__save_justfile_maturin_fastapi_project.snap new file mode 100644 index 00000000..55b45165 --- /dev/null +++ b/src/snapshots/python_project__project_generator__tests__save_justfile_maturin_fastapi_project.snap @@ -0,0 +1,5 @@ +--- +source: src/project_generator.rs +expression: content +--- +"@_default:\n just --list\n\n@lint:\n echo cargo check\n just --justfile {{justfile()}} check\n echo cargo clippy\n just --justfile {{justfile()}} clippy\n echo cargo fmt\n just --justfile {{justfile()}} fmt\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff check\n just --justfile {{justfile()}} ruff-check\n echo ruff formatting\n just --justfile {{justfile()}} ruff-format\n\n@lock:\n uv lock\n\n@lock-upgrade:\n uv lock --upgrade\n\n@develop:\n uv run maturin develop --uv\n\n@develop-release:\n uv run maturin develop -r --uv\n\n@install: && develop\n uv sync --frozen --all-extras\n\n@install-release: && develop-release\n uv sync --frozen --all-extras\n\n@check:\n cargo check\n\n@clippy:\n cargo clippy --all-targets\n\n@fmt:\n cargo fmt --all -- --check\n\n@mypy:\n uv run mypy my_project tests\n\n@ruff-check:\n uv run ruff check my_project tests --fix\n\n@ruff-format:\n uv run ruff format my_project tests\n\n@test *args=\"\":\n uv run pytest {{args}}\n\n@test-parallel *args=\"\":\n uv run pytest -n auto {{args}}\n\ngranian_cmd := if os() != \"windows\" {\n \"uv run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --loop uvloop --reload\"\n} else {\n \"uv run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload\"\n}\n\n@backend-server:\n {{granian_cmd}}\n\n@docker-up:\n docker compose up --build\n\n@docker-up-detached:\n docker compose up --build 
-d\n\n@docker-up-services:\n docker compose up db valkey migrations\n\n@docker-up-services-detached:\n docker compose up db valkey migrations -d\n\n@docker-down:\n docker compose down\n\n@docker-down-volumes:\n docker compose down --volumes\n\n@docker-pull:\n docker compose pull db valkey migrations\n\n@docker-build:\n docker compose build\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_justfile_poetry.snap b/src/snapshots/python_project__project_generator__tests__save_justfile_poetry.snap index 139ad07c..277ca7fe 100644 --- a/src/snapshots/python_project__project_generator__tests__save_justfile_poetry.snap +++ b/src/snapshots/python_project__project_generator__tests__save_justfile_poetry.snap @@ -2,4 +2,4 @@ source: src/project_generator.rs expression: content --- -"@_default:\n just --list\n\n@lint:\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff-check\n just --justfile {{justfile()}} ruff-check\n echo ruff-format\n just --justfile {{justfile()}} ruff-format\n\n@mypy:\n poetry run mypy my_project tests\n\n@ruff-check:\n poetry run ruff check my_project tests\n\n@ruff-format:\n poetry run ruff format my_project tests\n\n@test *args=\"\":\n -poetry run pytest {{args}}\n\n@install:\n poetry install\n" +"@_default:\n just --list\n\n@lint:\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff-check\n just --justfile {{justfile()}} ruff-check\n echo ruff-format\n just --justfile {{justfile()}} ruff-format\n\n@mypy:\n poetry run mypy my_project tests\n\n@ruff-check:\n poetry run ruff check my_project tests\n\n@ruff-format:\n poetry run ruff format my_project tests\n\n@install:\n poetry install\n\n@test *args=\"\":\n poetry run pytest {{args}}\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_justfile_poetry_fastapi_project.snap b/src/snapshots/python_project__project_generator__tests__save_justfile_poetry_fastapi_project.snap new file mode 100644 index 00000000..4e3eb20c --- /dev/null +++ b/src/snapshots/python_project__project_generator__tests__save_justfile_poetry_fastapi_project.snap @@ -0,0 +1,5 @@ +--- +source: src/project_generator.rs +expression: content +--- +"@_default:\n just --list\n\n@lint:\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff-check\n just --justfile {{justfile()}} ruff-check\n echo ruff-format\n just --justfile {{justfile()}} ruff-format\n\n@mypy:\n poetry run mypy my_project tests\n\n@ruff-check:\n poetry run ruff check my_project tests\n\n@ruff-format:\n poetry run ruff format my_project tests\n\n@install:\n poetry install\n\n@test *args=\"\":\n poetry run pytest {{args}}\n\ngranian_cmd := if os() != \"windows\" {\n \"poetry run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --loop uvloop --reload\"\n} else {\n \"poetry run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload\"\n}\n\n@backend-server:\n {{granian_cmd}}\n\n@test-parallel *args=\"\":\n poetry run pytest -n auto {{args}}\n\n@docker-up:\n docker compose up --build\n\n@docker-up-detached:\n docker compose up --build -d\n\n@docker-up-services:\n docker compose up db valkey migrations\n\n@docker-up-services-detached:\n docker compose up db valkey migrations -d\n\n@docker-down:\n docker compose down\n\n@docker-down-volumes:\n docker compose down --volumes\n\n@docker-pull:\n docker compose pull db valkey migrations\n\n@docker-build:\n docker compose build\n" diff --git
a/src/snapshots/python_project__project_generator__tests__save_justfile_setuptools.snap b/src/snapshots/python_project__project_generator__tests__save_justfile_setuptools.snap index b813d9e2..3683d610 100644 --- a/src/snapshots/python_project__project_generator__tests__save_justfile_setuptools.snap +++ b/src/snapshots/python_project__project_generator__tests__save_justfile_setuptools.snap @@ -2,4 +2,4 @@ source: src/project_generator.rs expression: content --- -"@_default:\n just --list\n\n@lint:\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff-check\n just --justfile {{justfile()}} ruff-check\n echo ruff-format\n just --justfile {{justfile()}} ruff-format\n\n@mypy:\n python -m mypy my_project tests\n\n@ruff-check:\n python -m ruff check my_project tests\n\n@ruff-format:\n python -m ruff format my_project tests\n\n@test *args=\"\":\n -python -m pytest {{args}}\n\n@install:\n python -m pip install -r requirements-dev.txt\n" +"@_default:\n just --list\n\n@lint:\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff-check\n just --justfile {{justfile()}} ruff-check\n echo ruff-format\n just --justfile {{justfile()}} ruff-format\n\n@mypy:\n python -m mypy my_project tests\n\n@ruff-check:\n python -m ruff check my_project tests\n\n@ruff-format:\n python -m ruff format my_project tests\n\n@install:\n python -m pip install -r requirements-dev.txt\n\n@test *args=\"\":\n python -m pytest {{args}}\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_justfile_setuptools_fastapi_project.snap b/src/snapshots/python_project__project_generator__tests__save_justfile_setuptools_fastapi_project.snap new file mode 100644 index 00000000..a5b69c6a --- /dev/null +++ b/src/snapshots/python_project__project_generator__tests__save_justfile_setuptools_fastapi_project.snap @@ -0,0 +1,5 @@ +--- +source: src/project_generator.rs +expression: content +--- +"@_default:\n just --list\n\n@lint:\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff-check\n just --justfile {{justfile()}} ruff-check\n echo ruff-format\n just --justfile {{justfile()}} ruff-format\n\n@mypy:\n python -m mypy my_project tests\n\n@ruff-check:\n python -m ruff check my_project tests\n\n@ruff-format:\n python -m ruff format my_project tests\n\n@install:\n python -m pip install -r requirements-dev.txt\n\n@test *args=\"\":\n python -m pytest {{args}}\n\n@test-parallel *args=\"\":\n python -m pytest -n auto {{args}}\n\ngranian_cmd := if os() != \"windows\" {\n \"python -m granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --loop uvloop --reload\"\n} else {\n \"python -m granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload\"\n}\n\n@backend-server:\n {{granian_cmd}}\n\n@docker-up:\n docker compose up --build\n\n@docker-up-detached:\n docker compose up --build -d\n\n@docker-up-services:\n docker compose up db valkey migrations\n\n@docker-up-services-detached:\n docker compose up db valkey migrations -d\n\n@docker-down:\n docker compose down\n\n@docker-down-volumes:\n docker compose down --volumes\n\n@docker-pull:\n docker compose pull db valkey migrations\n\n@docker-build:\n docker compose build\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_justfile_uv.snap b/src/snapshots/python_project__project_generator__tests__save_justfile_uv.snap new file mode 100644 index 00000000..751f30c0 --- /dev/null +++ b/src/snapshots/python_project__project_generator__tests__save_justfile_uv.snap @@ 
-0,0 +1,5 @@ +--- +source: src/project_generator.rs +expression: content +--- +"@_default:\n just --list\n\n@lint:\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff-check\n just --justfile {{justfile()}} ruff-check\n echo ruff-format\n just --justfile {{justfile()}} ruff-format\n\n@mypy:\n uv run mypy my_project tests\n\n@ruff-check:\n uv run ruff check my_project tests\n\n@ruff-format:\n uv run ruff format my_project tests\n\n@lock:\n uv lock\n\n@lock-upgrade:\n uv lock --upgrade\n\n@install:\n uv sync --frozen --all-extras\n\n@test *args=\"\":\n uv run pytest {{args}}\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_justfile_uv_fastapi_project.snap b/src/snapshots/python_project__project_generator__tests__save_justfile_uv_fastapi_project.snap new file mode 100644 index 00000000..ee350017 --- /dev/null +++ b/src/snapshots/python_project__project_generator__tests__save_justfile_uv_fastapi_project.snap @@ -0,0 +1,5 @@ +--- +source: src/project_generator.rs +expression: content +--- +"@_default:\n just --list\n\n@lint:\n echo mypy\n just --justfile {{justfile()}} mypy\n echo ruff-check\n just --justfile {{justfile()}} ruff-check\n echo ruff-format\n just --justfile {{justfile()}} ruff-format\n\n@mypy:\n uv run mypy my_project tests\n\n@ruff-check:\n uv run ruff check my_project tests\n\n@ruff-format:\n uv run ruff format my_project tests\n\n@lock:\n uv lock\n\n@lock-upgrade:\n uv lock --upgrade\n\n@install:\n uv sync --frozen --all-extras\n\n@test *args=\"\":\n uv run pytest {{args}}\n\n@test-parallel *args=\"\":\n uv run pytest -n auto {{args}}\n\ngranian_cmd := if os() != \"windows\" {\n \"uv run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --loop uvloop --reload\"\n} else {\n \"uv run granian app.main:app --host 127.0.0.1 --port 8000 --interface asgi --no-ws --runtime-mode st --reload\"\n}\n\n@backend-server:\n {{granian_cmd}}\n\n@docker-up:\n docker compose up --build\n\n@docker-up-detached:\n docker compose up --build -d\n\n@docker-up-services:\n docker compose up db valkey migrations\n\n@docker-up-services-detached:\n docker compose up db valkey migrations -d\n\n@docker-down:\n docker compose down\n\n@docker-down-volumes:\n docker compose down --volumes\n\n@docker-pull:\n docker compose pull db valkey migrations\n\n@docker-build:\n docker compose build\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_apache_application.snap b/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_apache_application.snap index 87f65d45..8972b74e 100644 --- a/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_apache_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_apache_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- -"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[tool.pixi.project]\nchannels = [\"conda-forge\", \"bioconda\"]\nplatforms = [\"linux-64\", \"osx-arm64\", \"osx-64\", \"win-64\"]\n\n[tool.pixi.feature.dev.tasks]\nrun-mypy = \"mypy my_project 
tests\"\nrun-ruff-check = \"ruff check my_project tests\"\nrun-ruff-format = \"ruff format my_project tests\"\nrun-pytest = \"pytest -x\"\n\n\n[project.optional-dependencies]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.pixi.environments]\ndefault = {features = [], solve-group = \"default\"}\ndev = {features = [\"dev\"], solve-group = \"default\"}\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" +"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[tool.pixi.project]\nchannels = [\"conda-forge\", \"bioconda\"]\nplatforms = [\"linux-64\", \"osx-arm64\", \"osx-64\", \"win-64\"]\n\n[tool.pixi.feature.dev.tasks]\nrun-mypy = \"mypy my_project tests\"\nrun-ruff-check = \"ruff check my_project tests\"\nrun-ruff-format = \"ruff format my_project tests\"\nrun-pytest = \"pytest -x\"\n\n[project.optional-dependencies]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.pixi.environments]\ndefault = {features = [], solve-group = \"default\"}\ndev = {features = [\"dev\"], solve-group = \"default\"}\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n 
\"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_mit_application.snap b/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_mit_application.snap index 87f65d45..8972b74e 100644 --- a/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_mit_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_mit_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- -"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[tool.pixi.project]\nchannels = [\"conda-forge\", \"bioconda\"]\nplatforms = [\"linux-64\", \"osx-arm64\", \"osx-64\", \"win-64\"]\n\n[tool.pixi.feature.dev.tasks]\nrun-mypy = \"mypy my_project tests\"\nrun-ruff-check = \"ruff check my_project tests\"\nrun-ruff-format = \"ruff format my_project tests\"\nrun-pytest = \"pytest -x\"\n\n\n[project.optional-dependencies]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.pixi.environments]\ndefault = {features = [], solve-group = \"default\"}\ndev = {features = [\"dev\"], solve-group = \"default\"}\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" +"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[tool.pixi.project]\nchannels = [\"conda-forge\", \"bioconda\"]\nplatforms = [\"linux-64\", \"osx-arm64\", \"osx-64\", \"win-64\"]\n\n[tool.pixi.feature.dev.tasks]\nrun-mypy = \"mypy my_project tests\"\nrun-ruff-check = \"ruff check my_project tests\"\nrun-ruff-format = \"ruff format my_project tests\"\nrun-pytest = \"pytest -x\"\n\n[project.optional-dependencies]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n 
\"ruff==1.0.0\",\n]\n\n[tool.pixi.environments]\ndefault = {features = [], solve-group = \"default\"}\ndev = {features = [\"dev\"], solve-group = \"default\"}\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_no_license_application.snap b/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_no_license_application.snap index 07a943fa..005e73c0 100644 --- a/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_no_license_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_pixi_pyproject_toml_file_no_license_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- -"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[tool.pixi.project]\nchannels = [\"conda-forge\", \"bioconda\"]\nplatforms = [\"linux-64\", \"osx-arm64\", \"osx-64\", \"win-64\"]\n\n[tool.pixi.feature.dev.tasks]\nrun-mypy = \"mypy my_project tests\"\nrun-ruff-check = \"ruff check my_project tests\"\nrun-ruff-format = \"ruff format my_project tests\"\nrun-pytest = \"pytest -x\"\n\n\n[project.optional-dependencies]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.pixi.environments]\ndefault = {features = [], solve-group = \"default\"}\ndev = {features = [\"dev\"], solve-group = \"default\"}\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n 
\"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" +"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[tool.pixi.project]\nchannels = [\"conda-forge\", \"bioconda\"]\nplatforms = [\"linux-64\", \"osx-arm64\", \"osx-64\", \"win-64\"]\n\n[tool.pixi.feature.dev.tasks]\nrun-mypy = \"mypy my_project tests\"\nrun-ruff-check = \"ruff check my_project tests\"\nrun-ruff-format = \"ruff format my_project tests\"\nrun-pytest = \"pytest -x\"\n\n[project.optional-dependencies]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.pixi.environments]\ndefault = {features = [], solve-group = \"default\"}\ndev = {features = [\"dev\"], solve-group = \"default\"}\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_apache_application.snap b/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_apache_application.snap index 2dd687a1..4d4e2e7a 100644 --- a/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_apache_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_apache_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[tool.poetry]\nname = \"my-project\"\nversion = \"1.0.0\"\ndescription = \"This is a test\"\nauthors = [\"Arthur Dent <authur@heartofgold.com>\"]\nlicense = \"Apache-2.0\"\nreadme = \"README.md\"\n\n[tool.poetry.dependencies]\npython = \"^3.9\"\n\n[tool.poetry.group.dev.dependencies]\nmypy = {version = \"1.0.0\", extras = [\"faster-cache\"]}\npre-commit = \"1.0.0\"\npytest = \"1.0.0\"\npytest-cov = \"1.0.0\"\nruff = \"1.0.0\"\ntomli = {version = \"1.0.0\", python = \"<3.11\"}\n\n[build-system]\nrequires = [\"poetry-core>=1.0.0\"]\nbuild-backend =
\"poetry.core.masonry.api\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_mit_application.snap b/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_mit_application.snap index 8220d6e2..cc48d0b2 100644 --- a/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_mit_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_mit_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[tool.poetry]\nname = \"my-project\"\nversion = \"1.0.0\"\ndescription = \"This is a test\"\nauthors = [\"Arthur Dent <authur@heartofgold.com>\"]\nlicense = \"MIT\"\nreadme = \"README.md\"\n\n[tool.poetry.dependencies]\npython = \"^3.9\"\n\n[tool.poetry.group.dev.dependencies]\nmypy = {version = \"1.0.0\", extras = [\"faster-cache\"]}\npre-commit = \"1.0.0\"\npytest = \"1.0.0\"\npytest-cov = \"1.0.0\"\nruff = \"1.0.0\"\ntomli = {version = \"1.0.0\", python = \"<3.11\"}\n\n[build-system]\nrequires = [\"poetry-core>=1.0.0\"]\nbuild-backend = \"poetry.core.masonry.api\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_no_license_application.snap b/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_no_license_application.snap index 6725dc89..0e35dc6c ---
a/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_no_license_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_poetry_pyproject_toml_file_no_license_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[tool.poetry]\nname = \"my-project\"\nversion = \"1.0.0\"\ndescription = \"This is a test\"\nauthors = [\"Arthur Dent <authur@heartofgold.com>\"]\nreadme = \"README.md\"\n\n[tool.poetry.dependencies]\npython = \"^3.9\"\n\n[tool.poetry.group.dev.dependencies]\nmypy = {version = \"1.0.0\", extras = [\"faster-cache\"]}\npre-commit = \"1.0.0\"\npytest = \"1.0.0\"\npytest-cov = \"1.0.0\"\nruff = \"1.0.0\"\ntomli = {version = \"1.0.0\", python = \"<3.11\"}\n\n[build-system]\nrequires = [\"poetry-core>=1.0.0\"]\nbuild-backend = \"poetry.core.masonry.api\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_pre_commit_file.snap b/src/snapshots/python_project__project_generator__tests__save_pre_commit_file.snap index fea6547b..97fa053e 100644 --- a/src/snapshots/python_project__project_generator__tests__save_pre_commit_file.snap +++ b/src/snapshots/python_project__project_generator__tests__save_pre_commit_file.snap @@ -2,4 +2,4 @@ source: src/project_generator.rs expression: content --- -"repos:\n - repo: https://github.com/pre-commit/pre-commit-hooks\n rev: v1.0.0\n hooks:\n - id: check-added-large-files\n - id: check-toml\n - id: check-yaml\n - id: debug-statements\n - id: end-of-file-fixer\n - id: trailing-whitespace\n - repo: https://github.com/pre-commit/mirrors-mypy\n rev: v1.0.0\n hooks:\n - id: mypy\n - repo: https://github.com/astral-sh/ruff-pre-commit\n rev: v1.0.0\n hooks:\n - id: ruff\n args: [--fix, --exit-non-zero-on-fix]\n - id: ruff-format\n" +"repos:\n - repo: https://github.com/pre-commit/pre-commit-hooks\n rev: v1.0.0\n hooks:\n - id: check-added-large-files\n - id: check-toml\n - id: check-yaml\n - id: debug-statements\n - id: end-of-file-fixer\n - id: trailing-whitespace\n - repo: https://github.com/pre-commit/mirrors-mypy\n rev: v1.0.0\n hooks:\n - id: mypy\n - repo: https://github.com/astral-sh/ruff-pre-commit\n rev: v1.0.0\n hooks:\n - id: ruff-check\n args: [--fix, --exit-non-zero-on-fix]\n - id: ruff-format\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_apache_pyo3.snap b/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_apache_pyo3.snap index 8c57a3ac..e9ddda40 100644 ---
+++ b/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_apache_pyo3.snap
@@ -1,6 +1,5 @@
 ---
 source: src/project_generator.rs
 expression: content
-snapshot_kind: text
 ---
 "[build-system]\nrequires = [\"maturin>=1.5,<2.0\"]\nbuild-backend = \"maturin\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" },\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\ndynamic = [\"version\"]\nrequires-python = \">=3.9\"\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"maturin==1.0.0\",\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.maturin]\nmodule-name = \"my_project._my_project\"\nbinding = \"pyo3\"\nfeatures = [\"pyo3/extension-module\"]\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n"
diff --git a/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_mit_pyo3.snap b/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_mit_pyo3.snap
index 8c57a3ac..e9ddda40 100644
--- a/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_mit_pyo3.snap
+++ b/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_mit_pyo3.snap
@@ -1,6 +1,5 @@
 ---
 source: src/project_generator.rs
 expression: content
-snapshot_kind: text
 ---
 "[build-system]\nrequires = [\"maturin>=1.5,<2.0\"]\nbuild-backend = \"maturin\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" },\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\ndynamic = [\"version\"]\nrequires-python = \">=3.9\"\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"maturin==1.0.0\",\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.maturin]\nmodule-name = \"my_project._my_project\"\nbinding = \"pyo3\"\nfeatures = [\"pyo3/extension-module\"]\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n"
cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_no_license_pyo3.snap b/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_no_license_pyo3.snap index 1a9f169d..90aee5a6 100644 --- a/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_no_license_pyo3.snap +++ b/src/snapshots/python_project__project_generator__tests__save_pyproject_toml_file_no_license_pyo3.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[build-system]\nrequires = [\"maturin>=1.5,<2.0\"]\nbuild-backend = \"maturin\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" },\n]\nreadme = \"README.md\"\ndynamic = [\"version\"]\nrequires-python = \">=3.9\"\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"maturin==1.0.0\",\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.maturin]\nmodule-name = \"my_project._my_project\"\nbinding = \"pyo3\"\nfeatures = [\"pyo3/extension-module\"]\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_apache_application.snap b/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_apache_application.snap index afebf5f1..97f6fff5 100644 --- a/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_apache_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_apache_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[build-system]\nrequires = [\"setuptools\", \"wheel\"]\nbuild-backend = 
\"setuptools.build_meta\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { text = \"Apache-2.0\" }\nrequires-python = \">=3.9\"\ndynamic = [\"version\", \"readme\"]\ndependencies = []\n\n[tool.setuptools.dynamic]\nversion = {attr = \"my_project.__version__\"}\nreadme = {file = [\"README.md\"]}\n\n[tool.setuptools.packages.find]\ninclude = [\"my_project*\"]\n\n[tool.setuptools.package-data]\nmy_project = [\"py.typed\"]\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_mit_application.snap b/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_mit_application.snap index f72345d1..718a79a6 100644 --- a/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_mit_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_mit_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- "[build-system]\nrequires = [\"setuptools\", \"wheel\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { text = \"MIT\" }\nrequires-python = \">=3.9\"\ndynamic = [\"version\", \"readme\"]\ndependencies = []\n\n[tool.setuptools.dynamic]\nversion = {attr = \"my_project.__version__\"}\nreadme = {file = [\"README.md\"]}\n\n[tool.setuptools.packages.find]\ninclude = [\"my_project*\"]\n\n[tool.setuptools.package-data]\nmy_project = [\"py.typed\"]\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using 
diff --git a/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_no_license_application.snap b/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_no_license_application.snap
index db56281a..e7505c30 100644
--- a/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_no_license_application.snap
+++ b/src/snapshots/python_project__project_generator__tests__save_setuptools_pyproject_toml_file_no_license_application.snap
@@ -1,6 +1,5 @@
 ---
 source: src/project_generator.rs
 expression: content
-snapshot_kind: text
 ---
 "[build-system]\nrequires = [\"setuptools\", \"wheel\"]\nbuild-backend = \"setuptools.build_meta\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nrequires-python = \">=3.9\"\ndynamic = [\"version\", \"readme\"]\ndependencies = []\n\n[tool.setuptools.dynamic]\nversion = {attr = \"my_project.__version__\"}\nreadme = {file = [\"README.md\"]}\n\n[tool.setuptools.packages.find]\ninclude = [\"my_project*\"]\n\n[tool.setuptools.package-data]\nmy_project = [\"py.typed\"]\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n"
diff --git a/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_apache_application.snap b/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_apache_application.snap
index 40dbb799..1c2a571b 100644
--- a/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_apache_application.snap
+++ b/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_apache_application.snap
@@ -1,6 +1,5 @@
 ---
 source: src/project_generator.rs
 expression: content
-snapshot_kind: text
 ---
-"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n"
\"ruff==1.0.0\",\n]\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" +"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n\n[tool.uv]\nadd-bounds = \"exact\"\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_mit_application.snap b/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_mit_application.snap index 40dbb799..1c2a571b 100644 --- a/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_mit_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_mit_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- -"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" 
}\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" +"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nlicense = { file = \"LICENSE\" }\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n\n[tool.uv]\nadd-bounds = \"exact\"\n" diff --git a/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_no_license_application.snap b/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_no_license_application.snap index 23dc1d0a..50890f41 100644 --- a/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_no_license_application.snap +++ b/src/snapshots/python_project__project_generator__tests__save_uv_pyproject_toml_file_no_license_application.snap @@ -1,6 +1,5 @@ --- source: src/project_generator.rs expression: content -snapshot_kind: text --- 
-"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n" +"[build-system]\nrequires = [\"hatchling\"]\nbuild-backend = \"hatchling.build\"\n\n[project]\nname = \"my-project\"\ndescription = \"This is a test\"\nauthors = [\n { name = \"Arthur Dent\", email = \"authur@heartofgold.com\" }\n]\nreadme = \"README.md\"\nrequires-python = \">=3.9\"\ndynamic = [\"version\"]\ndependencies = []\n\n[dependency-groups]\ndev = [\n \"mypy[faster-cache]==1.0.0\",\n \"pre-commit==1.0.0\",\n \"pytest==1.0.0\",\n \"pytest-cov==1.0.0\",\n \"ruff==1.0.0\",\n]\n\n[tool.hatch.version]\npath = \"my_project/_version.py\"\n\n[tool.mypy]\ncheck_untyped_defs = true\ndisallow_untyped_defs = true\n\n[[tool.mypy.overrides]]\nmodule = [\"tests.*\"]\ndisallow_untyped_defs = false\n\n[tool.pytest.ini_options]\nminversion = \"6.0\"\naddopts = \"--cov=my_project --cov-report term-missing --no-cov-on-fail\"\n\n[tool.coverage.report]\nexclude_lines = [\"if __name__ == .__main__.:\", \"pragma: no cover\"]\n\n[tool.ruff]\nline-length = 100\ntarget-version = \"py39\"\nfix = true\n\n[tool.ruff.lint]\nselect = [\n \"E\", # pycodestyle\n \"B\", # flake8-bugbear\n \"W\", # Warning\n \"F\", # pyflakes\n \"UP\", # pyupgrade\n \"I001\", # unsorted-imports\n \"T201\", # print found\n \"T203\", # pprint found\n \"RUF022\", # Unsorted __all__\n \"RUF023\", # Unforted __slots__\n]\nignore=[\n # Recommended ignores by ruff when using formatter\n \"E501\",\n \"W191\",\n \"E111\",\n \"E114\",\n \"E117\",\n \"D206\",\n \"D300\",\n \"Q000\",\n \"Q001\",\n \"Q002\",\n \"Q003\",\n \"COM812\",\n \"COM819\",\n \"ISC001\",\n \"ISC002\",\n]\n\n[tool.uv]\nadd-bounds = \"exact\"\n" diff --git a/src/utils.rs b/src/utils.rs index c0acd945..0dbfbc4f 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,19 +1,38 @@ -use anyhow::Result; +use anyhow::{bail, Result}; pub fn is_python_312_or_greater(version: &str) -> Result { - let mut split_version = version.split('.'); - if let Some(v) = split_version.nth(1) { - let min = v.parse::()?; - if min >= 12 { - Ok(true) - } else { - Ok(false) - } + let 
@@ -1,19 +1,38 @@
-use anyhow::Result;
+use anyhow::{bail, Result};
 
 pub fn is_python_312_or_greater(version: &str) -> Result<bool> {
-    let mut split_version = version.split('.');
-    if let Some(v) = split_version.nth(1) {
-        let min = v.parse::<i32>()?;
-        if min >= 12 {
-            Ok(true)
-        } else {
-            Ok(false)
-        }
+    let version_parts = split_version(version)?;
+
+    if version_parts.1 >= 12 {
+        Ok(true)
+    } else {
+        Ok(false)
+    }
+}
+
+#[cfg(feature = "fastapi")]
+pub fn is_allowed_fastapi_python_version(version: &str) -> Result<bool> {
+    let version_parts = split_version(version)?;
+
+    if version_parts.0 >= 3 && version_parts.1 >= 11 {
+        Ok(true)
     } else {
         Ok(false)
     }
 }
 
+fn split_version(version: &str) -> Result<(i32, i32)> {
+    let split_version: Vec<&str> = version.split('.').collect();
+    if split_version.len() < 2 {
+        bail!("Major and minor versions not found");
+    }
+
+    let major = split_version[0].parse::<i32>()?;
+    let minor = split_version[1].parse::<i32>()?;
+
+    Ok((major, minor))
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -35,4 +54,25 @@ mod tests {
         let result = is_python_312_or_greater("3.11").unwrap();
         assert!(!result);
     }
+
+    #[cfg(feature = "fastapi")]
+    #[test]
+    fn test_is_allowed_fastapi_python_version() {
+        let result = is_allowed_fastapi_python_version("3.11").unwrap();
+        assert!(result);
+    }
+
+    #[cfg(feature = "fastapi")]
+    #[test]
+    fn test_is_unallowed_major_fastapi_python_version() {
+        let result = is_allowed_fastapi_python_version("2.11").unwrap();
+        assert!(!result);
+    }
+
+    #[cfg(feature = "fastapi")]
+    #[test]
+    fn test_is_unallowed_minor_fastapi_python_version() {
+        let result = is_allowed_fastapi_python_version("3.10").unwrap();
+        assert!(!result);
+    }
 }
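
A note on the version gate added in src/utils.rs: `is_allowed_fastapi_python_version` checks `major >= 3 && minor >= 11`, which is correct for every real CPython release today but would misclassify a hypothetical "4.0" (major 4 passes, minor 0 fails `minor >= 11`). If that edge case ever matters, Rust's lexicographic tuple ordering expresses the intent directly. A minimal standalone sketch, not part of the diff above; the `main` harness and its assertions are illustrative only:

```rust
use anyhow::{bail, Result};

// Same parsing idea as the split_version helper in the diff above:
// take the first two dot-separated components as (major, minor).
fn split_version(version: &str) -> Result<(i32, i32)> {
    let parts: Vec<&str> = version.split('.').collect();
    if parts.len() < 2 {
        bail!("Major and minor versions not found");
    }
    Ok((parts[0].parse()?, parts[1].parse()?))
}

fn main() -> Result<()> {
    // Tuples compare lexicographically: major first, then minor,
    // so any future major version ranks above every 3.x release.
    assert!(split_version("3.11")? >= (3, 11)); // minimum allowed
    assert!(!(split_version("3.10")? >= (3, 11))); // minor too old
    assert!(!(split_version("2.11")? >= (3, 11))); // major too old
    assert!(split_version("4.0")? >= (3, 11)); // future major still passes
    Ok(())
}
```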