diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b02b1658..50734be4 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,6 +1,6 @@ { - "name": "Poetry Cookiecutter", - "image": "mcr.microsoft.com/vscode/devcontainers/python:3.10", + "name": "Baseline app Cookiecutter", + "image": "mcr.microsoft.com/vscode/devcontainers/python:3.12", "onCreateCommand": "pip install commitizen cruft pre-commit && pre-commit install --install-hooks", "remoteUser": "vscode", "customizations": { diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..a6696ddf --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @davebulaval @dpothier diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..302a98db --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,42 @@ +version: 2 + +updates: + - package-ecosystem: github-actions + directory: / + schedule: + interval: monthly + commit-message: + prefix: "ci" + include: scope + groups: + ci-dependencies: + patterns: + - "*" + update-types: + - "minor" + - "patch" + ci-major-updates: + patterns: + - "*" + update-types: + - "major" + - package-ecosystem: pip + directory: / + schedule: + interval: monthly + commit-message: + prefix: "chore" + prefix-development: "build" + include: scope + groups: + dependencies: + patterns: + - "*" + update-types: + - "minor" + - "patch" + major-updates: + patterns: + - "*" + update-types: + - "major" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000..d884a3b4 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,14 @@ +## Summary + +- + +## Test plan + +- [ ] `pytest tests/ -v` passes locally +- [ ] YAML files are valid +- [ ] Generated project passes `ruff check` and `ruff format --check` + +## Checklist + +- [ ] I have performed a self-review of my changes +- [ ] PR title follows [Conventional 
Commits](https://www.conventionalcommits.org/) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..03beb0f9 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,30 @@ +name: CI + +on: + push: + branches: [main] + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.12", "3.13"] + + name: Unit tests — Python ${{ matrix.python-version }} + + steps: + - uses: actions/checkout@v6 + + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: pip + + - name: Install dependencies + run: pip install cookiecutter pytest pyyaml + + - name: Run tests + run: pytest tests/ -v diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml new file mode 100644 index 00000000..e1779dfa --- /dev/null +++ b/.github/workflows/pr.yml @@ -0,0 +1,19 @@ +name: PR + +on: + pull_request: + types: [edited, opened, reopened, synchronize] + +jobs: + title: + runs-on: ubuntu-latest + name: Check PR title + steps: + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Check PR title + run: | + pip install commitizen + cz check --message "${{ github.event.pull_request.title }}" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5f19accb..797ee6d4 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,4 +1,4 @@ -name: Test +name: Integration on: push: @@ -13,34 +13,45 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.9", "3.12"] - project-type: ["app", "package"] + python-version: ["3.12", "3.13"] + include: + - python-version: "3.12" + variant: full + extra-context: '{"project_type": "app", "project_name": "My Project", "python_version": "3.12", "with_fastapi_api": "1", "with_typer_cli": "1", "with_sentry": "0"}' + - python-version: "3.13" + variant: minimal + extra-context: '{"project_type": "app", "project_name": "My Project", "python_version": 
"3.13", "with_fastapi_api": "0", "with_typer_cli": "0", "with_sentry": "0"}' - name: Python ${{ matrix.python-version }} ${{ matrix.project-type }} + name: Python ${{ matrix.python-version }} — ${{ matrix.variant }} steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: path: template - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.12" + cache: pip - name: Scaffold Python project run: | pip install --no-input cruft - cruft create --no-input --extra-context '{"project_type": "${{ matrix.project-type }}", "project_name": "My Project", "python_version": "3.9", "__docker_image":"radixai/python-gpu:$PYTHON_VERSION-cuda11.8", "with_fastapi_api": "1", "with_typer_cli": "1"}' ./template/ + cruft create --no-input --extra-context '${{ matrix.extra-context }}' ./template/ + + - name: Verify cruft link + run: cruft check + working-directory: ./my-project/ - name: Set up Node.js uses: actions/setup-node@v4 with: - node-version: 21 + node-version: 22 - name: Install @devcontainers/cli - run: npm install --location=global @devcontainers/cli@0.58.0 + run: npm install --location=global @devcontainers/cli@latest - name: Start Dev Container with Python ${{ matrix.python-version }} run: | @@ -56,13 +67,3 @@ jobs: - name: Test project run: devcontainer exec --workspace-folder my-project poe test - - - name: Build app Docker image - if: ${{ matrix.project-type == 'app' }} - uses: docker/build-push-action@v5 - with: - build-args: | - SOURCE_BRANCH=${{ env.GITHUB_REF }} - SOURCE_COMMIT=${{ env.GITHUB_SHA }} - context: ./my-project/ - target: app diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 45ed1878..d440b9e5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,6 +3,19 @@ default_install_hook_types: [commit-msg, pre-commit] default_stages: [commit, manual] fail_fast: true repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - 
id: check-yaml + args: [--allow-multiple-documents, --unsafe] + - id: check-toml + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/rhysd/actionlint + rev: v1.7.11 + hooks: + - id: actionlint + files: ^\.github/workflows/ - repo: local hooks: - id: commitizen @@ -12,3 +25,17 @@ repos: require_serial: true language: system stages: [commit-msg] + - id: ruff-check + name: ruff check + entry: ruff check + args: ["--force-exclude"] + language: system + types_or: [python, pyi] + pass_filenames: false + - id: ruff-format + name: ruff format + entry: ruff format + args: [--force-exclude, --check] + language: system + types_or: [python, pyi] + pass_filenames: false diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..85bb0174 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,80 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/), +and this project adheres to [Semantic Versioning](https://semver.org/). 
+ +## [Unreleased] + +### Added + +- `CLAUDE.md` template for AI-assisted development in generated projects +- `pull_request_template.md` for the cookiecutter repo itself +- `CODEOWNERS` file (@davebulaval, @dpothier) +- Fast CI workflow (`ci.yml`) — unit tests on Python 3.12 + 3.13 (~20s) +- PR title check workflow (`pr.yml`) — conventional commits validation +- Integration test matrix — full (FastAPI + Typer) and minimal (bare) variants +- Cruft link verification step in integration tests +- codespell linter in pre-commit and pyproject.toml +- MkDocs Material for documentation (replaces pdoc) +- PR title check workflow for generated projects (`pr.yml`) +- `detect-secrets` (Yelp) pre-commit hook to block accidental credential commits +- `actionlint` pre-commit hook for GitHub Actions validation +- `ruff-check` and `ruff-format` pre-commit hooks for template code +- `pre-commit-hooks` (check-yaml, check-toml, end-of-file-fixer, trailing-whitespace) +- 71 unit tests for template generation (up from 0) + +### Changed + +- Conditionalized generated README sections (API, CLI, Docker) with Jinja +- Rewrote root README with CI badges, project structure, and developer guide +- Rewrote generated README: concise, dynamic, references MkDocs +- Modernized integration workflow: checkout v6, pip cache, renamed to "Integration" +- Fixed generated CONTRIBUTING.md typos and added codespell to tools list +- Fixed `.env.sample` reference to `.env.example` in generated README + +### Fixed + +- ruff lint errors in generated code (FURB171, PLC0415, PLR2004, PLR6201, B007, PERF102) +- codespell false positives (Jupyter) and real typos (developpement, developpers, formater) +- Jinja whitespace in cli.py imports causing ruff format failure +- Unused `import sys` and stale noqa comments (S310, BLE001) in cli.py +- Coverage failure in minimal config (added settings test) + +--- + +## Sprint 2 — Cookiecutter Enhancements + +### Added + +- pydantic-settings integration (replaces 
python-decouple) with `.env.example` +- Pydantic models (`models.py`) and service layer (`services.py`) stubs +- Rich API stubs: health endpoint, CRUD items, exception handlers, request logging middleware +- Rich CLI stubs: `info` command with Rich table, `greet` command with `Annotated` +- `.editorconfig` for cross-IDE consistency +- `.vscode/launch.json` with FastAPI, pytest, and current-file debug configs +- `CHANGELOG.md` for generated projects (Keep a Changelog format) +- `pre_gen_project.py` hook for input validation +- Sentry SDK integration (`with_sentry` parameter) +- Multi-Python CI matrix (tests on selected version + 3.13) +- Docker `HEALTHCHECK` instruction (conditional on FastAPI) +- pytest-asyncio support (conditional on FastAPI) + +### Changed + +- Replaced python-decouple with pydantic-settings +- Rewrote `api.py` with dependency injection, structured error handling, and logging +- Rewrote `cli.py` with `@app.callback()` and `Annotated` pattern +- Rewrote BDD test stubs for new API and CLI +- Renamed `.env_sample` to `.env.example` +- Removed Teamwork integration (workflow + PR template link) + +### Fixed + +- 5 P0 bugs that broke template generation +- pytest-bdd feature file paths +- pre-commit hook compatibility (pygrep-hooks tag, `--pytest-test-first`) +- ruff preview mode compliance (DOC201, DOC501, FAST001, PLR6301, etc.) 
+- mypy strict mode compliance +- typeguard conflict with typer.Context diff --git a/README.md b/README.md index 6bce2462..4f41e3af 100644 --- a/README.md +++ b/README.md @@ -1,88 +1,135 @@ -[![Open in Dev Containers](https://img.shields.io/static/v1?label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/radix-ai/poetry-cookiecutter) [![Open in GitHub Codespaces](https://img.shields.io/static/v1?label=GitHub%20Codespaces&message=Open&color=blue&logo=github)](https://github.com/codespaces/new?hide_repo_select=true&ref=main&repo=444870763) - -# 🍪 Poetry Cookiecutter - -A modern [Cookiecutter](https://github.com/cookiecutter/cookiecutter) template for scaffolding Python packages and apps. - -## 🍿 Demo - -See 👖 [Conformal Tights](https://github.com/radix-ai/conformal-tights) for an example of a Python package that is scaffolded with this template. Contributing to this package can be done with a single click by [starting a GitHub Codespace](https://github.com/codespaces/new?hide_repo_select=true&ref=main&repo=765698489&skip_quickstart=true) or [starting a Dev Container](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/radix-ai/conformal-tights). 
- -## 🎁 Features - -- 🧑‍💻 Quick and reproducible development environments with VS Code's [Dev Containers](https://code.visualstudio.com/docs/devcontainers/containers), PyCharm's [Docker Compose interpreter](https://www.jetbrains.com/help/pycharm/using-docker-compose-as-a-remote-interpreter.html#docker-compose-remote), and [GitHub Codespaces](https://github.com/features/codespaces) -- 🌈 Cross-platform support for Linux, macOS (Apple silicon and Intel), and Windows -- 🐚 Modern shell prompt with [Starship](https://github.com/starship/starship) -- 📦 Packaging and dependency management with [Poetry](https://github.com/python-poetry/poetry) -- 🚚 Installing from and publishing to private package repositories and [PyPI](https://pypi.org/) -- ⚡️ Task running with [Poe the Poet](https://github.com/nat-n/poethepoet) -- ✍️ Code formatting with [Ruff](https://github.com/charliermarsh/ruff) -- ✅ Code linting with [Pre-commit](https://pre-commit.com/), [Mypy](https://github.com/python/mypy), and [Ruff](https://github.com/charliermarsh/ruff) -- 🏷 Optionally follows the [Conventional Commits](https://www.conventionalcommits.org/) standard to automate [Semantic Versioning](https://semver.org/) and [Keep A Changelog](https://keepachangelog.com/) with [Commitizen](https://github.com/commitizen-tools/commitizen) -- 💌 Verified commits with [GPG](https://gnupg.org/) -- ♻️ Continuous integration with [GitHub Actions](https://docs.github.com/en/actions) or [GitLab CI/CD](https://docs.gitlab.com/ee/ci/) -- 🧪 Test coverage with [Coverage.py](https://github.com/nedbat/coveragepy) -- 🏗 Scaffolding updates with [Cookiecutter](https://github.com/cookiecutter/cookiecutter) and [Cruft](https://github.com/cruft/cruft) -- 🧰 Dependency updates with [Dependabot](https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/about-dependabot-version-updates) - -## ✨ Using 
+[![CI](https://github.com/Baseline-quebec/baseline-app-cookiecutter/actions/workflows/ci.yml/badge.svg)](https://github.com/Baseline-quebec/baseline-app-cookiecutter/actions/workflows/ci.yml) [![Integration](https://github.com/Baseline-quebec/baseline-app-cookiecutter/actions/workflows/test.yml/badge.svg)](https://github.com/Baseline-quebec/baseline-app-cookiecutter/actions/workflows/test.yml) [![Open in Dev Containers](https://img.shields.io/static/v1?label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/Baseline-quebec/baseline-app-cookiecutter) [![Open in GitHub Codespaces](https://img.shields.io/static/v1?label=GitHub%20Codespaces&message=Open&color=blue&logo=github)](https://github.com/codespaces/new?hide_repo_select=true&ref=main&repo=Baseline-quebec/baseline-app-cookiecutter) + +# Baseline App Cookiecutter + +A modern [Cookiecutter](https://github.com/cookiecutter/cookiecutter) template for scaffolding Python packages and apps at [Baseline](https://github.com/Baseline-quebec). 
+ +## Features + +- Quick and reproducible development environments with VS Code's [Dev Containers](https://code.visualstudio.com/docs/devcontainers/containers), PyCharm's [Docker Compose interpreter](https://www.jetbrains.com/help/pycharm/using-docker-compose-as-a-remote-interpreter.html#docker-compose-remote), and [GitHub Codespaces](https://github.com/features/codespaces) +- Cross-platform support for Linux, macOS (Apple silicon and Intel), and Windows +- Packaging and dependency management with [Poetry](https://github.com/python-poetry/poetry) +- Task running with [Poe the Poet](https://github.com/nat-n/poethepoet) +- Code formatting and linting with [Ruff](https://github.com/charliermarsh/ruff), [Mypy](https://github.com/python/mypy), and [Pre-commit](https://pre-commit.com/) +- Spell checking with [codespell](https://github.com/codespell-project/codespell) +- Documentation with [MkDocs Material](https://squidfunk.github.io/mkdocs-material/) +- Optional [Conventional Commits](https://www.conventionalcommits.org/) with [Commitizen](https://github.com/commitizen-tools/commitizen) +- Optional [FastAPI](https://github.com/tiangolo/fastapi) REST API with health check, CRUD stubs, and Sentry integration +- Optional [Typer](https://github.com/tiangolo/typer) CLI with Rich output +- Optional [pytest-bdd](https://github.com/pytest-dev/pytest-bdd) for BDD-style tests with Gherkin feature files +- Continuous integration with [GitHub Actions](https://docs.github.com/en/actions) +- Test coverage with [Coverage.py](https://github.com/nedbat/coveragepy) +- Scaffolding updates with [Cruft](https://github.com/cruft/cruft) +- Dependency updates with [Dependabot](https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/about-dependabot-version-updates) +- [Architecture Decision Records](https://cognitect.com/blog/2011/11/15/documenting-architecture-decisions) (ADR) template +- Claude Code instructions (`CLAUDE.md`) for 
AI-assisted development + +## Using ### Creating a new Python project -To create a new Python project with this template: - -1. Install the latest [Cruft](https://github.com/cruft/cruft) and [Cookiecutter](https://github.com/cookiecutter/cookiecutter) in your [Python environment](https://github.com/pyenv/pyenv-virtualenv) with: +1. Install [Cruft](https://github.com/cruft/cruft) and [Cookiecutter](https://github.com/cookiecutter/cookiecutter): ```sh pip install --upgrade "cruft>=2.12.0" "cookiecutter>=2.1.1" ``` -2. [Create a new repository](https://github.com/new) for your Python project, then clone it locally. -3. Run the following command in the parent directory of the cloned repository to apply the Poetry Cookiecutter template: +2. [Create a new repository](https://github.com/new) and clone it locally. + +3. Run the following command in the **parent directory** of the cloned repository: ```sh - cruft create -f https://github.com/radix-ai/poetry-cookiecutter + cruft create -f https://github.com/Baseline-quebec/baseline-app-cookiecutter ```
+ If your repository name differs from the project's slugified name - ⚠️ If your repository name ≠ the project's slugified name - - If your repository name differs from your project's slugified name (see `project_name` in the [Template parameters](https://github.com/radix-ai/poetry-cookiecutter#-template-parameters) below), you will need to copy the scaffolded project into the repository with: - - ```sh - cp -r {project-name}/ {repository-name}/ - ``` - -
- -### Updating your Python project - -To update your Python project to the latest template version: - -1. Update the project while verifying the existing template parameters and setting any new parameters, if there are any: + Copy the scaffolded project into the repository: ```sh - cruft update --cookiecutter-input + cp -r {project-name}/ {repository-name}/ ``` -2. If any of the file updates failed, resolve them by inspecting the corresponding `.rej` files. - -## 🤓 Template parameters - -| Parameter | Description | -| ------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `project_type`
["package", "app"] | Whether the project is a publishable Python package or a deployable Python app. | -| `project_name`
"Spline Reticulator" | The name of the project. Will be slugified to `snake_case` for importing and `kebab-case` for installing. For example, `My Package` will be `my_package` for importing and `my-package` for installing. | -| `project_description`
"A Python package that reticulates splines." | A single-line description of the project. | -| `project_url`
"" | The URL to the project's repository. | -| `author_name`
"John Smith" | The full name of the primary author of the project. | -| `author_email`
"" | The email address of the primary author of the project. | -| `python_version`
"3.10" | The minimum Python version that the project requires. | -| `development_environment`
["simple", "strict"] | Whether to configure the development environment with a focus on simplicity or with a focus on strictness. In strict mode, additional [Ruff rules](https://beta.ruff.rs/docs/rules/) are added, and tools such as [Mypy](https://github.com/python/mypy) and [Pytest](https://github.com/pytest-dev/pytest) are set to strict mode. | -| `with_conventional_commits`
["0", "1"] | If "1", [Commitizen](https://github.com/commitizen-tools/commitizen) will verify that your commits follow the [Conventional Commits](https://www.conventionalcommits.org/) standard. In return, `cz bump` may be used to automate [Semantic Versioning](https://semver.org/) and [Keep A Changelog](https://keepachangelog.com/). | -| `with_fastapi_api`
["0", "1"] | If "1", [FastAPI](https://github.com/tiangolo/fastapi) is added as a run time dependency, FastAPI API stubs and tests are added, a `poe api` command for serving the API is added. | -| `with_typer_cli`
["0", "1"] | If "1", [Typer](https://github.com/tiangolo/typer) is added as a run time dependency, Typer CLI stubs and tests are added, the package itself is registered as a CLI. | -| `continuous_integration`
["GitHub", "GitLab"] | Whether to include a [GitHub Actions](https://docs.github.com/en/actions) or a [GitLab CI/CD](https://docs.gitlab.com/ee/ci/) continuous integration workflow for testing the project, and publishing the package or deploying the app. | -| `private_package_repository_name`
"Private Package Repository" | Optional name of a private package repository to install packages from and publish this package to. | -| `private_package_repository_url`
"" | Optional URL of a private package repository to install packages from and publish this package to. Make sure to include the `/simple` suffix. For instance, when using a GitLab Package Registry this value should be of the form `https://gitlab.com/api/v4/projects/` `{project_id}` `/packages/pypi/simple`. | + + +4. Add the remote origin and push. + +### Updating an existing project + +```sh +cruft update --cookiecutter-input +``` + +If any file updates failed, resolve conflicts by inspecting the `.rej` files. + +## Developing this template + +### Quick reference + +| Command | Description | +|---------|-------------| +| `pip install cookiecutter pytest pyyaml` | Install test dependencies | +| `pytest tests/ -v` | Run unit tests (~60 tests, ~7s) | +| `pre-commit run --all-files` | Run linting on template code | + +### CI/CD + +This repository has three CI workflows: + +| Workflow | Trigger | What it does | +|----------|---------|-------------| +| **CI** (`ci.yml`) | Push / PR | Runs unit tests on Python 3.12 + 3.13 (~20s) | +| **PR** (`pr.yml`) | PR | Validates PR title follows conventional commits | +| **Integration** (`test.yml`) | Push / PR | Scaffolds a project, starts a devcontainer, runs `poe lint` + `poe test` (~3 min) | + +### Project structure + +``` +baseline-app-cookiecutter/ +├── cookiecutter.json # Template parameters +├── hooks/ +│ ├── pre_gen_project.py # Input validation +│ └── post_gen_project.py # Conditional file removal +├── tests/ +│ └── test_cookiecutter.py # Unit tests for the template +├── {{ cookiecutter.__project_name_kebab_case }}/ +│ ├── .devcontainer/ # Dev Container config +│ ├── .github/workflows/ # CI for generated projects +│ ├── src/{{ ... }}/ # Source code stubs +│ ├── tests/ # Test stubs +│ ├── pyproject.toml # Poetry config +│ └── ... 
+├── .github/ +│ ├── workflows/ci.yml # Unit tests +│ ├── workflows/pr.yml # PR title check +│ ├── workflows/test.yml # Integration tests +│ ├── dependabot.yml # Dependency updates +│ └── CODEOWNERS # Code owners +└── .pre-commit-config.yaml # Linting for template code +``` + +## Upstream sync + +This template is a fork of [superlinear-ai/substrate](https://github.com/superlinear-ai/substrate). The upstream has since migrated to [uv](https://github.com/astral-sh/uv) (replacing Poetry), [Copier](https://copier.readthedocs.io/) (replacing Cookiecutter), and [ty](https://github.com/astral-sh/ty) (replacing Mypy). These are major structural changes that would require reworking the entire Baseline toolchain. + +We intentionally stay on **Poetry + Cookiecutter + Mypy** to maintain compatibility with existing Baseline projects. Instead of a full upstream merge, we cherry-pick individual improvements that are independent of the build system migration. + +## Template parameters + +| Parameter | Description | +|-----------|-------------| +| `project_name`
"my-app" | The name of the project. Slugified to `snake_case` for importing and `kebab-case` for installing. | +| `project_description`
"A Python app that..." | A single-line description of the project. | +| `github_org`
"Baseline-quebec" | The GitHub organization or user that owns the repository. | +| `project_url`
auto | Automatically constructed from `github_org` and `project_name`. | +| `author_name`
"John Smith" | The full name of the primary author. | +| `author_email`
"john@example.com" | The email address of the primary author. | +| `license`
["Proprietary", "MIT", "Apache-2.0"] | The license. Generates a LICENSE file for MIT and Apache-2.0. | +| `python_version`
"3.12" | The minimum Python version. | +| `development_environment`
["strict", "simple"] | Strict mode enables additional Ruff rules, strict Mypy, and strict Pytest. | +| `with_conventional_commits`
["0", "1"] | Adds Commitizen for conventional commits. Auto-enabled in strict mode. | +| `with_fastapi_api`
["0", "1"] | Adds FastAPI with health endpoint, CRUD stubs, Pydantic models, and `poe api`. | +| `with_typer_cli`
["0", "1"] | Adds Typer CLI with `info`, `config`, and `health` commands. | +| `with_pytest_bdd`
["0", "1"] | Adds pytest-bdd with Gherkin feature files. Default: plain pytest. | +| `with_sentry`
["0", "1"] | Adds Sentry SDK with FastAPI integration. Requires `with_fastapi_api=1`. | diff --git a/cookiecutter.json b/cookiecutter.json index f548f9a9..c10869fa 100644 --- a/cookiecutter.json +++ b/cookiecutter.json @@ -1,29 +1,24 @@ { - "project_type": [ - "package", - "app" - ], - "project_name": "{% if cookiecutter.project_type == 'app' %}My App{% else %}My Package{% endif %}", + "project_type": "app", + "project_name": "my-app", "project_description": "A Python {{ cookiecutter.project_type }} that reticulates splines.", - "project_url": "https://github.com/user/my-{{ cookiecutter.project_type }}", + "github_org": "Baseline-quebec", + "project_url": "https://github.com/{{ cookiecutter.github_org }}/{{ cookiecutter.project_name|slugify }}", "author_name": "John Smith", "author_email": "john@example.com", - "python_version": "{% if cookiecutter.project_type == 'app' %}3.12{% else %}3.10{% endif %}", + "license": ["Proprietary", "MIT", "Apache-2.0"], + "python_version": "3.12", "development_environment": [ - "simple", - "strict" + "strict", + "simple" ], "with_conventional_commits": "{% if cookiecutter.development_environment == 'simple' %}0{% else %}1{% endif %}", - "with_fastapi_api": "0", - "with_typer_cli": "0", - "continuous_integration": [ - "GitHub", - "GitLab" - ], - "private_package_repository_name": "", - "private_package_repository_url": "", + "with_fastapi_api": "1", + "with_typer_cli": "1", + "with_pytest_bdd": "0", + "with_sentry": "0", "__docker_image": "python:$PYTHON_VERSION-slim", - "__docstring_style": "NumPy", + "__docstring_style": "Google", "__project_name_kebab_case": "{{ cookiecutter.project_name|slugify }}", "__project_name_snake_case": "{{ cookiecutter.project_name|slugify(separator='_') }}" } \ No newline at end of file diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 91428943..b56ba016 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -1,13 +1,20 @@ +"""Post-generation hook: remove files 
based on cookiecutter options.""" + import os import shutil # Read Cookiecutter configuration. project_name = "{{ cookiecutter.__project_name_snake_case }}" development_environment = "{{ cookiecutter.development_environment }}" +with_conventional_commits = int("{{ cookiecutter.with_conventional_commits }}") with_fastapi_api = int("{{ cookiecutter.with_fastapi_api }}") with_typer_cli = int("{{ cookiecutter.with_typer_cli }}") -continuous_integration = "{{ cookiecutter.continuous_integration }}" -is_application = "{{ cookiecutter.project_type == 'app' }}" == "True" +with_pytest_bdd = int("{{ cookiecutter.with_pytest_bdd }}") +license_choice = "{{ cookiecutter.license }}" + +# Remove PR title check workflow if conventional commits is disabled. +if not with_conventional_commits: + os.remove(".github/workflows/pr.yml") # Remove py.typed and Dependabot if not in strict mode. if development_environment != "strict": @@ -17,22 +24,27 @@ # Remove FastAPI if not selected. if not with_fastapi_api: os.remove(f"src/{project_name}/api.py") + os.remove(f"src/{project_name}/models.py") + os.remove(f"src/{project_name}/services.py") os.remove("tests/test_api.py") + if with_pytest_bdd: + os.remove("tests/features/api.feature") # Remove Typer if not selected. if not with_typer_cli: os.remove(f"src/{project_name}/cli.py") os.remove("tests/test_cli.py") + if with_pytest_bdd: + os.remove("tests/features/cli.feature") + +# Remove .vscode/ directory if not using FastAPI (no launch.json needed). +if not with_fastapi_api and not with_typer_cli: + shutil.rmtree(".vscode", ignore_errors=True) + +# Remove BDD test infrastructure when pytest-bdd is not selected. +if not with_pytest_bdd: + shutil.rmtree("tests/features", ignore_errors=True) -# Remove the continuous integration provider that is not selected. -if continuous_integration != "GitHub": - shutil.rmtree(".github/") -elif continuous_integration != "GitLab": - os.remove(".gitlab-ci.yml") - -# Remove unused GitHub Actions workflows. 
-if continuous_integration == "GitHub": - if is_application: - os.remove(".github/workflows/publish.yml") - else: - os.remove(".github/workflows/deploy.yml") +# Remove LICENSE file for proprietary projects. +if license_choice == "Proprietary": + os.remove("LICENSE") diff --git a/hooks/pre_gen_project.py b/hooks/pre_gen_project.py new file mode 100644 index 00000000..575397da --- /dev/null +++ b/hooks/pre_gen_project.py @@ -0,0 +1,34 @@ +"""Pre-generation hook: validate cookiecutter inputs.""" + +import re +import sys + +project_name = "{{ cookiecutter.project_name }}" +python_version = "{{ cookiecutter.python_version }}" +with_sentry = int("{{ cookiecutter.with_sentry }}") +with_fastapi_api = int("{{ cookiecutter.with_fastapi_api }}") + +# Validate project_name: letters, digits, spaces, hyphens. +if not re.match(r"^[A-Za-z0-9 -]+$", project_name): + print( + f"ERROR: Invalid project_name '{project_name}'. " + "Only letters, digits, spaces, and hyphens are allowed." + ) + sys.exit(1) + +# Validate python_version >= 3.10. +try: + major, minor = (int(x) for x in python_version.split(".")[:2]) + if (major, minor) < (3, 10): + print(f"ERROR: python_version must be >= 3.10, got '{python_version}'.") + sys.exit(1) +except ValueError: + print(f"ERROR: Invalid python_version '{python_version}'.") + sys.exit(1) + +# Warn if Sentry is enabled without FastAPI. +if with_sentry and not with_fastapi_api: + print( + "WARNING: with_sentry=1 has no effect without with_fastapi_api=1. " + "Sentry integration requires FastAPI." + ) diff --git a/tests/test_cookiecutter.py b/tests/test_cookiecutter.py new file mode 100644 index 00000000..d44ee8f4 --- /dev/null +++ b/tests/test_cookiecutter.py @@ -0,0 +1,722 @@ +"""Tests for the cookiecutter template generation. + +These tests validate that the template generates correctly with various +combinations of parameters and that post-generation hooks work as expected. 
+""" + +import os +import subprocess +from pathlib import Path + +import pytest +from cookiecutter.main import cookiecutter + + +TEMPLATE_DIR = str(Path(__file__).parent.parent) + + +@pytest.fixture +def output_dir(tmp_path: Path) -> Path: + """Provide a temporary output directory.""" + return tmp_path + + +def bake(output_dir: Path, **extra_context: str) -> Path: + """Run cookiecutter and return the generated project path.""" + defaults = { + "project_name": "test-project", + "github_org": "TestOrg", + "license": "MIT", + "python_version": "3.12", + "development_environment": "strict", + "with_fastapi_api": "1", + "with_typer_cli": "1", + "with_pytest_bdd": "0", + "with_sentry": "0", + } + defaults.update(extra_context) + cookiecutter( + TEMPLATE_DIR, + no_input=True, + output_dir=str(output_dir), + extra_context=defaults, + ) + return output_dir / "test-project" + + +# --------------------------------------------------------------------------- +# Basic generation tests +# --------------------------------------------------------------------------- + + +class TestBasicGeneration: + """Verify that the template generates without errors.""" + + def test_default_options(self, output_dir: Path) -> None: + """Template generates with default options.""" + project = bake(output_dir) + assert project.is_dir() + assert (project / "pyproject.toml").is_file() + assert (project / "src" / "test_project" / "__init__.py").is_file() + + def test_project_name_slugified(self, output_dir: Path) -> None: + """Project name is correctly slugified.""" + project = bake(output_dir, project_name="My Cool App") + expected = output_dir / "my-cool-app" + assert expected.is_dir() + assert (expected / "src" / "my_cool_app" / "__init__.py").is_file() + + +# --------------------------------------------------------------------------- +# License tests +# --------------------------------------------------------------------------- + + +class TestLicense: + """Verify license parameter behavior.""" + + 
def test_mit_license(self, output_dir: Path) -> None: + """MIT license generates a LICENSE file with MIT text.""" + project = bake(output_dir, license="MIT") + license_file = project / "LICENSE" + assert license_file.is_file() + content = license_file.read_text() + assert "MIT License" in content + assert "John Smith" in content + + def test_apache_license(self, output_dir: Path) -> None: + """Apache-2.0 license generates a LICENSE file with Apache text.""" + project = bake(output_dir, license="Apache-2.0") + license_file = project / "LICENSE" + assert license_file.is_file() + content = license_file.read_text() + assert "Apache License" in content + + def test_proprietary_no_license_file(self, output_dir: Path) -> None: + """Proprietary license removes the LICENSE file.""" + project = bake(output_dir, license="Proprietary") + assert not (project / "LICENSE").exists() + + def test_license_in_pyproject(self, output_dir: Path) -> None: + """License value is set in pyproject.toml.""" + project = bake(output_dir, license="MIT") + content = (project / "pyproject.toml").read_text() + assert 'license = "MIT"' in content + + +# --------------------------------------------------------------------------- +# github_org tests +# --------------------------------------------------------------------------- + + +class TestGithubOrg: + """Verify github_org parameter behavior.""" + + def test_github_org_in_repository_url(self, output_dir: Path) -> None: + """github_org is used in the repository URL.""" + project = bake(output_dir, github_org="MyOrg") + content = (project / "pyproject.toml").read_text() + assert "github.com/MyOrg/test-project" in content + + +# --------------------------------------------------------------------------- +# pytest-bdd tests +# --------------------------------------------------------------------------- + + +class TestPytestBdd: + """Verify pytest-bdd optional behavior.""" + + def test_bdd_off_no_features_dir(self, output_dir: Path) -> None: + """When 
pytest-bdd is off, tests/features/ does not exist.""" + project = bake(output_dir, with_pytest_bdd="0") + assert not (project / "tests" / "features").exists() + + def test_bdd_off_no_dep(self, output_dir: Path) -> None: + """When pytest-bdd is off, pytest-bdd is not in dependencies.""" + project = bake(output_dir, with_pytest_bdd="0") + content = (project / "pyproject.toml").read_text() + assert "pytest-bdd" not in content + + def test_bdd_off_plain_tests(self, output_dir: Path) -> None: + """When pytest-bdd is off, test files use plain pytest.""" + project = bake(output_dir, with_pytest_bdd="0") + test_import = (project / "tests" / "test_import.py").read_text() + assert "def test_import" in test_import + assert "pytest_bdd" not in test_import + + def test_bdd_on_features_dir(self, output_dir: Path) -> None: + """When pytest-bdd is on, tests/features/ exists.""" + project = bake(output_dir, with_pytest_bdd="1") + assert (project / "tests" / "features").is_dir() + assert (project / "tests" / "features" / "import.feature").is_file() + + def test_bdd_on_dep_present(self, output_dir: Path) -> None: + """When pytest-bdd is on, pytest-bdd is in dependencies.""" + project = bake(output_dir, with_pytest_bdd="1") + content = (project / "pyproject.toml").read_text() + assert "pytest-bdd" in content + + def test_bdd_on_bdd_tests(self, output_dir: Path) -> None: + """When pytest-bdd is on, test files use BDD style.""" + project = bake(output_dir, with_pytest_bdd="1") + test_import = (project / "tests" / "test_import.py").read_text() + assert "from pytest_bdd import" in test_import + assert "scenarios(" in test_import + + +# --------------------------------------------------------------------------- +# FastAPI / Typer toggle tests +# --------------------------------------------------------------------------- + + +class TestFastapiToggle: + """Verify with_fastapi_api parameter behavior.""" + + def test_fastapi_on(self, output_dir: Path) -> None: + """FastAPI files are present 
when enabled.""" + project = bake(output_dir, with_fastapi_api="1") + assert (project / "src" / "test_project" / "api.py").is_file() + assert (project / "src" / "test_project" / "models.py").is_file() + assert (project / "src" / "test_project" / "services.py").is_file() + assert (project / "tests" / "test_api.py").is_file() + + def test_fastapi_off(self, output_dir: Path) -> None: + """FastAPI files are absent when disabled.""" + project = bake(output_dir, with_fastapi_api="0") + assert not (project / "src" / "test_project" / "api.py").exists() + assert not (project / "src" / "test_project" / "models.py").exists() + assert not (project / "tests" / "test_api.py").exists() + + def test_fastapi_deps(self, output_dir: Path) -> None: + """FastAPI dependency is in pyproject.toml when enabled.""" + project = bake(output_dir, with_fastapi_api="1") + content = (project / "pyproject.toml").read_text() + assert "fastapi" in content + assert "uvicorn" in content + assert "gunicorn" in content + + +class TestTyperToggle: + """Verify with_typer_cli parameter behavior.""" + + def test_typer_on(self, output_dir: Path) -> None: + """Typer CLI files are present when enabled.""" + project = bake(output_dir, with_typer_cli="1") + assert (project / "src" / "test_project" / "cli.py").is_file() + assert (project / "tests" / "test_cli.py").is_file() + + def test_typer_off(self, output_dir: Path) -> None: + """Typer CLI files are absent when disabled.""" + project = bake(output_dir, with_typer_cli="0") + assert not (project / "src" / "test_project" / "cli.py").exists() + assert not (project / "tests" / "test_cli.py").exists() + + +# --------------------------------------------------------------------------- +# Sentry tests +# --------------------------------------------------------------------------- + + +class TestSentry: + """Verify with_sentry parameter behavior.""" + + def test_sentry_on(self, output_dir: Path) -> None: + """Sentry dependency is present when enabled.""" + project = 
bake(output_dir, with_sentry="1", with_fastapi_api="1") + content = (project / "pyproject.toml").read_text() + assert "sentry-sdk" in content + + def test_sentry_off(self, output_dir: Path) -> None: + """Sentry dependency is absent when disabled.""" + project = bake(output_dir, with_sentry="0") + content = (project / "pyproject.toml").read_text() + assert "sentry-sdk" not in content + + +# --------------------------------------------------------------------------- +# Development environment tests +# --------------------------------------------------------------------------- + + +class TestDevelopmentEnvironment: + """Verify strict vs simple mode.""" + + def test_strict_has_dependabot(self, output_dir: Path) -> None: + """Strict mode includes dependabot.yml.""" + project = bake(output_dir, development_environment="strict") + assert (project / ".github" / "dependabot.yml").is_file() + + def test_simple_no_dependabot(self, output_dir: Path) -> None: + """Simple mode removes dependabot.yml.""" + project = bake(output_dir, development_environment="simple") + assert not (project / ".github" / "dependabot.yml").exists() + + def test_strict_has_safety(self, output_dir: Path) -> None: + """Strict mode includes safety in dependencies.""" + project = bake(output_dir, development_environment="strict") + content = (project / "pyproject.toml").read_text() + assert "safety" in content + + def test_simple_no_safety(self, output_dir: Path) -> None: + """Simple mode does not include safety.""" + project = bake(output_dir, development_environment="simple") + content = (project / "pyproject.toml").read_text() + assert "safety" not in content + + +# --------------------------------------------------------------------------- +# Dockerfile tests +# --------------------------------------------------------------------------- + + +class TestDockerfile: + """Verify Dockerfile structure.""" + + def test_three_stages(self, output_dir: Path) -> None: + """Dockerfile has exactly 3 stages: base, 
dev, app.""" + project = bake(output_dir) + content = (project / "Dockerfile").read_text() + from_lines = [l.strip() for l in content.splitlines() if l.startswith("FROM")] + assert len(from_lines) == 3 + assert "AS base" in from_lines[0] + assert "AS dev" in from_lines[1] + assert "AS app" in from_lines[2] + + def test_poetry_version(self, output_dir: Path) -> None: + """Dockerfile uses Poetry 2.3.x.""" + project = bake(output_dir) + content = (project / "Dockerfile").read_text() + assert "POETRY_VERSION=2.3." in content + + def test_healthcheck_with_fastapi(self, output_dir: Path) -> None: + """Dockerfile has HEALTHCHECK when FastAPI is enabled.""" + project = bake(output_dir, with_fastapi_api="1") + content = (project / "Dockerfile").read_text() + assert "HEALTHCHECK" in content + + def test_no_healthcheck_without_fastapi(self, output_dir: Path) -> None: + """Dockerfile has no HEALTHCHECK when FastAPI is disabled.""" + project = bake(output_dir, with_fastapi_api="0") + content = (project / "Dockerfile").read_text() + assert "HEALTHCHECK" not in content + + +# --------------------------------------------------------------------------- +# CI workflow tests +# --------------------------------------------------------------------------- + + +class TestCIWorkflow: + """Verify CI workflow structure.""" + + def test_workflow_valid_yaml(self, output_dir: Path) -> None: + """CI workflow is valid YAML.""" + import yaml + + project = bake(output_dir) + content = (project / ".github" / "workflows" / "test.yml").read_text() + parsed = yaml.safe_load(content) + assert parsed["name"] == "Test" + assert "test" in parsed["jobs"] + + def test_docker_cache_step(self, output_dir: Path) -> None: + """CI workflow has Docker layer caching.""" + project = bake(output_dir) + content = (project / ".github" / "workflows" / "test.yml").read_text() + assert "actions/cache@v4" in content + assert "setup-buildx-action" in content + + +# 
--------------------------------------------------------------------------- +# CLI tests +# --------------------------------------------------------------------------- + + +class TestCLI: + """Verify CLI stub content.""" + + def test_cli_has_info_command(self, output_dir: Path) -> None: + """CLI has info command.""" + project = bake(output_dir, with_typer_cli="1") + content = (project / "src" / "test_project" / "cli.py").read_text() + assert "def info(" in content + + def test_cli_has_config_command(self, output_dir: Path) -> None: + """CLI has config command.""" + project = bake(output_dir, with_typer_cli="1") + content = (project / "src" / "test_project" / "cli.py").read_text() + assert "def config(" in content + + def test_cli_has_health_with_fastapi(self, output_dir: Path) -> None: + """CLI has health command when FastAPI is enabled.""" + project = bake(output_dir, with_typer_cli="1", with_fastapi_api="1") + content = (project / "src" / "test_project" / "cli.py").read_text() + assert "def health(" in content + + def test_cli_no_health_without_fastapi(self, output_dir: Path) -> None: + """CLI has no health command when FastAPI is disabled.""" + project = bake(output_dir, with_typer_cli="1", with_fastapi_api="0") + content = (project / "src" / "test_project" / "cli.py").read_text() + assert "def health(" not in content + + def test_cli_no_greet_command(self, output_dir: Path) -> None: + """CLI does not have the old greet command.""" + project = bake(output_dir, with_typer_cli="1") + content = (project / "src" / "test_project" / "cli.py").read_text() + assert "def greet(" not in content + + +# --------------------------------------------------------------------------- +# ADR / docs tests +# --------------------------------------------------------------------------- + + +class TestDocs: + """Verify documentation files.""" + + def test_adr_template_exists(self, output_dir: Path) -> None: + """ADR template exists.""" + project = bake(output_dir) + assert (project / 
"docs" / "decisions" / "adr_template.md").is_file() + + def test_first_adr_exists(self, output_dir: Path) -> None: + """First ADR (0001) exists.""" + project = bake(output_dir) + adr = project / "docs" / "decisions" / "0001-record-architecture-decisions.md" + assert adr.is_file() + assert "Record architecture decisions" in adr.read_text() + + def test_readme_has_docs_section(self, output_dir: Path) -> None: + """Generated README has Documentation section.""" + project = bake(output_dir) + content = (project / "README.md").read_text() + assert "## Documentation" in content + assert "Architecture Decision Records" in content + + +# --------------------------------------------------------------------------- +# Poe tasks tests +# --------------------------------------------------------------------------- + + +class TestPoeTasks: + """Verify poe tasks in pyproject.toml.""" + + def test_poe_update_task(self, output_dir: Path) -> None: + """poe update task is present.""" + project = bake(output_dir) + content = (project / "pyproject.toml").read_text() + assert "[tool.poe.tasks.update]" in content + assert "cruft update" in content + + def test_poe_lint_task(self, output_dir: Path) -> None: + """poe lint task is present.""" + project = bake(output_dir) + content = (project / "pyproject.toml").read_text() + assert "[tool.poe.tasks.lint]" in content + + def test_poe_test_task(self, output_dir: Path) -> None: + """poe test task is present.""" + project = bake(output_dir) + content = (project / "pyproject.toml").read_text() + assert "[tool.poe.tasks.test]" in content + + +# --------------------------------------------------------------------------- +# Full combination matrix tests +# --------------------------------------------------------------------------- + + +class TestCombinations: + """Test specific combinations that are likely to cause issues.""" + + def test_minimal_no_api_no_cli_no_bdd(self, output_dir: Path) -> None: + """Minimal project: no FastAPI, no Typer, no 
BDD.""" + project = bake( + output_dir, + with_fastapi_api="0", + with_typer_cli="0", + with_pytest_bdd="0", + with_sentry="0", + development_environment="simple", + ) + assert project.is_dir() + assert (project / "pyproject.toml").is_file() + assert (project / "tests" / "test_import.py").is_file() + # No feature files, no API/CLI tests + assert not (project / "tests" / "features").exists() + assert not (project / "tests" / "test_api.py").exists() + assert not (project / "tests" / "test_cli.py").exists() + + def test_full_everything_enabled(self, output_dir: Path) -> None: + """Full project: all options enabled.""" + project = bake( + output_dir, + license="MIT", + with_fastapi_api="1", + with_typer_cli="1", + with_pytest_bdd="1", + with_sentry="1", + development_environment="strict", + ) + assert project.is_dir() + assert (project / "LICENSE").is_file() + assert (project / "tests" / "features").is_dir() + assert (project / "tests" / "test_api.py").is_file() + assert (project / "tests" / "test_cli.py").is_file() + content = (project / "pyproject.toml").read_text() + assert "pytest-bdd" in content + assert "sentry-sdk" in content + assert "commitizen" in content + + def test_bdd_on_but_no_fastapi(self, output_dir: Path) -> None: + """BDD on but no FastAPI: api.feature should not exist.""" + project = bake( + output_dir, + with_fastapi_api="0", + with_typer_cli="1", + with_pytest_bdd="1", + ) + assert (project / "tests" / "features" / "import.feature").is_file() + assert (project / "tests" / "features" / "cli.feature").is_file() + assert not (project / "tests" / "features" / "api.feature").exists() + + def test_bdd_on_but_no_typer(self, output_dir: Path) -> None: + """BDD on but no Typer: cli.feature should not exist.""" + project = bake( + output_dir, + with_fastapi_api="1", + with_typer_cli="0", + with_pytest_bdd="1", + ) + assert (project / "tests" / "features" / "import.feature").is_file() + assert (project / "tests" / "features" / "api.feature").is_file() + 
assert not (project / "tests" / "features" / "cli.feature").exists() + + def test_pyproject_toml_valid_toml(self, output_dir: Path) -> None: + """Generated pyproject.toml is valid TOML.""" + import tomllib + + project = bake(output_dir) + content = (project / "pyproject.toml").read_bytes() + parsed = tomllib.loads(content.decode()) + assert "tool" in parsed + assert "poetry" in parsed["tool"] + + +# --------------------------------------------------------------------------- +# Sprint 4: codespell tests +# --------------------------------------------------------------------------- + + +class TestCodespell: + """Verify codespell hook and configuration.""" + + def test_codespell_in_pre_commit(self, output_dir: Path) -> None: + """codespell hook is present in pre-commit config.""" + project = bake(output_dir) + content = (project / ".pre-commit-config.yaml").read_text() + assert "id: codespell" in content + assert "entry: codespell" in content + + def test_codespell_dep_in_pyproject(self, output_dir: Path) -> None: + """codespell dependency is in test dependencies.""" + project = bake(output_dir) + content = (project / "pyproject.toml").read_text() + assert 'codespell = ">=2.4.0"' in content + + def test_codespell_config_in_pyproject(self, output_dir: Path) -> None: + """codespell configuration section exists in pyproject.toml.""" + import tomllib + + project = bake(output_dir) + parsed = tomllib.loads((project / "pyproject.toml").read_bytes().decode()) + assert "codespell" in parsed["tool"] + assert parsed["tool"]["codespell"]["check-filenames"] is True + + +# --------------------------------------------------------------------------- +# Sprint 4: PR title check workflow tests +# --------------------------------------------------------------------------- + + +class TestPRWorkflow: + """Verify PR title conventional commit check workflow.""" + + def test_pr_yml_exists_with_conventional_commits(self, output_dir: Path) -> None: + """pr.yml exists when conventional commits 
is enabled.""" + project = bake(output_dir, development_environment="strict") + assert (project / ".github" / "workflows" / "pr.yml").is_file() + + def test_pr_yml_absent_without_conventional_commits(self, output_dir: Path) -> None: + """pr.yml does not exist when conventional commits is disabled.""" + project = bake( + output_dir, + development_environment="simple", + with_conventional_commits="0", + ) + assert not (project / ".github" / "workflows" / "pr.yml").exists() + + def test_pr_yml_has_commitizen(self, output_dir: Path) -> None: + """pr.yml uses commitizen to check PR title.""" + project = bake(output_dir, development_environment="strict") + content = (project / ".github" / "workflows" / "pr.yml").read_text() + assert "commitizen" in content + assert "cz check" in content + + def test_pr_yml_valid_yaml(self, output_dir: Path) -> None: + """pr.yml is valid YAML.""" + import yaml + + project = bake(output_dir, development_environment="strict") + content = (project / ".github" / "workflows" / "pr.yml").read_text() + parsed = yaml.safe_load(content) + assert parsed["name"] == "PR" + assert "title" in parsed["jobs"] + + +# --------------------------------------------------------------------------- +# Sprint 4: actions/checkout v6 tests +# --------------------------------------------------------------------------- + + +class TestCheckoutVersion: + """Verify actions/checkout version bump.""" + + def test_checkout_v6(self, output_dir: Path) -> None: + """test.yml uses actions/checkout@v6.""" + project = bake(output_dir) + content = (project / ".github" / "workflows" / "test.yml").read_text() + assert "actions/checkout@v6" in content + assert "actions/checkout@v5" not in content + + +# --------------------------------------------------------------------------- +# Sprint 4: MkDocs Material tests +# --------------------------------------------------------------------------- + + +class TestMkDocs: + """Verify MkDocs Material replaces pdoc.""" + + def 
test_mkdocs_yml_exists(self, output_dir: Path) -> None: + """mkdocs.yml is generated.""" + project = bake(output_dir) + assert (project / "mkdocs.yml").is_file() + + def test_mkdocs_yml_has_project_name(self, output_dir: Path) -> None: + """mkdocs.yml contains the project name.""" + project = bake(output_dir) + content = (project / "mkdocs.yml").read_text() + assert "test-project" in content + + def test_mkdocs_material_dep(self, output_dir: Path) -> None: + """mkdocs-material is in dev dependencies.""" + project = bake(output_dir) + content = (project / "pyproject.toml").read_text() + assert "mkdocs-material" in content + + def test_pdoc_absent(self, output_dir: Path) -> None: + """pdoc is not in dependencies.""" + project = bake(output_dir) + content = (project / "pyproject.toml").read_text() + assert "pdoc" not in content + + def test_poe_docs_uses_mkdocs(self, output_dir: Path) -> None: + """poe docs task uses mkdocs.""" + project = bake(output_dir) + content = (project / "pyproject.toml").read_text() + assert "mkdocs" in content + assert "[tool.poe.tasks.docs]" in content + assert "--serve" in content + + def test_docs_index_md_exists(self, output_dir: Path) -> None: + """docs/index.md is generated.""" + project = bake(output_dir) + assert (project / "docs" / "index.md").is_file() + + +# --------------------------------------------------------------------------- +# CLAUDE.md tests +# --------------------------------------------------------------------------- + + +class TestClaudeMd: + """Verify CLAUDE.md is generated with correct content.""" + + def test_claude_md_exists(self, output_dir: Path) -> None: + """CLAUDE.md is generated.""" + project = bake(output_dir) + assert (project / "CLAUDE.md").is_file() + + def test_claude_md_has_project_name(self, output_dir: Path) -> None: + """CLAUDE.md contains the project name.""" + project = bake(output_dir) + content = (project / "CLAUDE.md").read_text() + assert "test-project" in content + + def 
test_claude_md_has_snake_case_import(self, output_dir: Path) -> None: + """CLAUDE.md references the correct import path.""" + project = bake(output_dir) + content = (project / "CLAUDE.md").read_text() + assert "test_project" in content + + def test_claude_md_has_fastapi_when_enabled(self, output_dir: Path) -> None: + """CLAUDE.md mentions FastAPI when enabled.""" + project = bake(output_dir, with_fastapi_api="1") + content = (project / "CLAUDE.md").read_text() + assert "FastAPI" in content + assert "poe api" in content + + def test_claude_md_no_fastapi_when_disabled(self, output_dir: Path) -> None: + """CLAUDE.md does not mention poe api when FastAPI is disabled.""" + project = bake(output_dir, with_fastapi_api="0") + content = (project / "CLAUDE.md").read_text() + assert "poe api" not in content + + +# --------------------------------------------------------------------------- +# README conditionalization tests +# --------------------------------------------------------------------------- + + +class TestReadmeConditional: + """Verify README sections are conditionalized correctly.""" + + def test_readme_has_api_section_with_fastapi(self, output_dir: Path) -> None: + """README has API section when FastAPI is enabled.""" + project = bake(output_dir, with_fastapi_api="1") + content = (project / "README.md").read_text() + assert "poe api" in content + + def test_readme_no_api_section_without_fastapi(self, output_dir: Path) -> None: + """README has no API section when FastAPI is disabled.""" + project = bake(output_dir, with_fastapi_api="0") + content = (project / "README.md").read_text() + assert "poe api" not in content + + def test_readme_has_cli_section_with_typer(self, output_dir: Path) -> None: + """README has CLI section when Typer is enabled.""" + project = bake(output_dir, with_typer_cli="1") + content = (project / "README.md").read_text() + assert "### CLI" in content + + def test_readme_no_cli_section_without_typer(self, output_dir: Path) -> None: + """README 
has no CLI section when Typer is disabled.""" + project = bake(output_dir, with_typer_cli="0") + content = (project / "README.md").read_text() + assert "### CLI" not in content + + def test_readme_has_env_example(self, output_dir: Path) -> None: + """README references .env.example (not .env.sample).""" + project = bake(output_dir) + content = (project / "README.md").read_text() + assert ".env.example" in content + assert ".env.sample" not in content + + def test_readme_has_mkdocs(self, output_dir: Path) -> None: + """README references MkDocs.""" + project = bake(output_dir) + content = (project / "README.md").read_text() + assert "poe docs" in content diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.devcontainer/devcontainer.json b/{{ cookiecutter.__project_name_kebab_case }}/.devcontainer/devcontainer.json index 0b4dd7e2..fa70925d 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/.devcontainer/devcontainer.json +++ b/{{ cookiecutter.__project_name_kebab_case }}/.devcontainer/devcontainer.json @@ -10,18 +10,16 @@ "vscode": { "extensions": [ "charliermarsh.ruff", - {%- if cookiecutter.continuous_integration == "GitHub" %} "GitHub.vscode-github-actions", "GitHub.vscode-pull-request-github", - {%- elif cookiecutter.continuous_integration == "GitLab" %} - "GitLab.gitlab-workflow", - {%- endif %} "ms-python.mypy-type-checker", "ms-python.python", "ms-toolsai.jupyter", "ryanluker.vscode-coverage-gutters", "tamasfe.even-better-toml", - "visualstudioexptteam.vscodeintellicode" + "visualstudioexptteam.vscodeintellicode", + "GitHub.copilot", + "GitHub.copilot-chat" ], "settings": { "coverage-gutters.coverageFileNames": [ @@ -39,11 +37,14 @@ "editor.formatOnSave": false }, "editor.rulers": [ - 100 + 99 ], "files.autoSave": "onFocusChange", - "jupyter.kernels.excludePythonEnvironments": ["/usr/local/bin/python"], + "jupyter.kernels.excludePythonEnvironments": [ + "/usr/local/bin/python" + ], "mypy-type-checker.importStrategy": "fromEnvironment", + 
"mypy-type-checker.preferDaemon": true, "notebook.codeActionsOnSave": { "notebook.source.fixAll": "explicit", "notebook.source.organizeImports": "explicit" @@ -53,9 +54,7 @@ "python.terminal.activateEnvironment": false, "python.testing.pytestEnabled": true, "ruff.importStrategy": "fromEnvironment", - {%- if cookiecutter.development_environment == "strict" %} - "ruff.logLevel": "warn", - {%- endif %} + "ruff.logLevel": "warning", "terminal.integrated.defaultProfile.linux": "zsh", "terminal.integrated.profiles.linux": { "zsh": { @@ -65,4 +64,4 @@ } } } -} \ No newline at end of file +} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.editorconfig b/{{ cookiecutter.__project_name_kebab_case }}/.editorconfig new file mode 100644 index 00000000..f9eca780 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/.editorconfig @@ -0,0 +1,29 @@ +# https://editorconfig.org +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*.py] +indent_style = space +indent_size = 4 +max_line_length = 99 + +[*.{yml,yaml,json,toml,css,html,js,jsx,ts,tsx}] +indent_style = space +indent_size = 2 + +[Makefile] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +[*.sh] +end_of_line = lf + +[*.bat] +end_of_line = crlf diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.env.example b/{{ cookiecutter.__project_name_kebab_case }}/.env.example new file mode 100644 index 00000000..80056188 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/.env.example @@ -0,0 +1,17 @@ +# --- General --- +APP_NAME={{ cookiecutter.project_name }} +LOG_LEVEL=INFO +DEBUG=false +{%- if cookiecutter.with_fastapi_api|int %} + +# --- FastAPI --- +API_HOST=0.0.0.0 +API_PORT=8000 +{%- endif %} +{%- if cookiecutter.with_sentry|int %} + +# --- Sentry --- +SENTRY_DSN= +SENTRY_ENVIRONMENT=development +SENTRY_TRACES_SAMPLE_RATE=0.1 +{%- endif %} diff --git a/{{ cookiecutter.__project_name_kebab_case 
}}/.gitattributes b/{{ cookiecutter.__project_name_kebab_case }}/.gitattributes new file mode 100644 index 00000000..219e1432 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/.gitattributes @@ -0,0 +1,28 @@ +# Auto detect text files and normalize line endings +* text=auto + +# Force LF for text files +*.py text eol=lf +*.sh text eol=lf +*.yml text eol=lf +*.yaml text eol=lf +*.md text eol=lf +*.txt text eol=lf +*.json text eol=lf +*.toml text eol=lf +Dockerfile text eol=lf + +# Windows scripts keep CRLF +*.ps1 text eol=crlf +*.bat text eol=crlf + +# Binary files +*.png binary +*.jpg binary +*.jpeg binary +*.gif binary +*.ico binary +*.zip binary +*.gz binary +*.tar binary +*.whl binary diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.github/dependabot.yml b/{{ cookiecutter.__project_name_kebab_case }}/.github/dependabot.yml index b402007e..7c976899 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/.github/dependabot.yml +++ b/{{ cookiecutter.__project_name_kebab_case }}/.github/dependabot.yml @@ -12,7 +12,15 @@ updates: groups: ci-dependencies: patterns: - - "*" + - "*" + update-types: + - "minor" + - "patch" + ci-major-updates: + patterns: + - "*" + update-types: + - "major" - package-ecosystem: pip directory: / schedule: @@ -22,15 +30,22 @@ updates: prefix-development: "build" include: scope allow: - {%- if cookiecutter.project_type == "app" %} - dependency-type: production - {%- endif %} - dependency-type: development versioning-strategy: increase groups: - {%- if cookiecutter.project_type == "app" %} runtime-dependencies: dependency-type: production - {%- endif %} development-dependencies: dependency-type: development + dependencies: + patterns: + - "*" + update-types: + - "minor" + - "patch" + major-updates: + patterns: + - "*" + update-types: + - "major" diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.github/pull_request_template.md b/{{ cookiecutter.__project_name_kebab_case }}/.github/pull_request_template.md new 
file mode 100644 index 00000000..322cc558 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/.github/pull_request_template.md @@ -0,0 +1,14 @@ +## Describe your changes + +## Checklist before opening a pull request +- [ ] I have merged or rebased main into my branch. I also ensure the origin of the PR is from main. +- [ ] I have run pytest and the tests pass. + +## Checklist before requesting a review +- [ ] I have performed a self-review of my code. +- [ ] I have added thorough tests. +- [ ] I checked that all docstrings and documentations are complete and up to date. + +## Checklist before merging into main +- [ ] I have bumped the version if needed. +- [ ] I pushed the tag to the repository. diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/deploy.yml b/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/deploy.yml deleted file mode 100644 index 8771a41c..00000000 --- a/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/deploy.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: Deploy - -on: - push: - tags: - - "v*.*.*" - workflow_dispatch: - inputs: - environment: - required: true - description: Deployment environment - default: development - type: choice - options: - - feature - - development - - test - - acceptance - - production - -env: - DEFAULT_DEPLOYMENT_ENVIRONMENT: feature - DOCKER_REGISTRY: ghcr.io - -jobs: - deploy: - runs-on: ubuntu-latest - - if: startsWith(github.ref, 'refs/tags/v') - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Log in to the Docker registry - uses: docker/login-action@v3 - with: - registry: {% raw %}${{ env.DOCKER_REGISTRY }}{% endraw %} - username: {% raw %}${{ github.actor }}{% endraw %} - password: {% raw %}${{ secrets.GITHUB_TOKEN }}{% endraw %} - - - name: Set Docker image tag - run: echo "GIT_TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - - name: Build and push Docker image - uses: docker/build-push-action@v5 - with: - context: . 
- push: true - {%- if cookiecutter.private_package_repository_name %} - secrets: | - "poetry_auth=[http-basic.{{ cookiecutter.private_package_repository_name|slugify }}] - username = ""{% raw %}${{{% endraw %} secrets.POETRY_HTTP_BASIC_{{ cookiecutter.private_package_repository_name|slugify(separator="_")|upper }}_USERNAME }}"" - password = ""{% raw %}${{{% endraw %} secrets.POETRY_HTTP_BASIC_{{ cookiecutter.private_package_repository_name|slugify(separator="_")|upper }}_PASSWORD }}"" - " - {%- endif %} - tags: | - {% raw %}${{ env.DOCKER_REGISTRY }}/${{ github.repository_owner }}/${{ github.repository }}:${{ github.event.inputs.environment || env.DEFAULT_DEPLOYMENT_ENVIRONMENT }}{% endraw %} - {% raw %}${{ env.DOCKER_REGISTRY }}/${{ github.repository_owner }}/${{ github.repository }}:${{ env.GIT_TAG }}{% endraw %} - target: app diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/pr.yml b/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/pr.yml new file mode 100644 index 00000000..ae833c2e --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/pr.yml @@ -0,0 +1,22 @@ +{%- if cookiecutter.with_conventional_commits|int %} +name: PR + +on: + pull_request: + types: [edited, opened, reopened, synchronize] + +jobs: + title: + runs-on: ubuntu-latest + name: Check PR title + steps: + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "{{ cookiecutter.python_version }}" + + - name: Check PR title + run: | + pip install commitizen + cz check --message "{% raw %}${{ github.event.pull_request.title }}{% endraw %}" +{%- endif %} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/publish.yml b/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/publish.yml deleted file mode 100644 index 503f9f6c..00000000 --- a/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/publish.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: Publish - -on: - release: - 
types: - - created - -jobs: - publish: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "{{ cookiecutter.python_version }}" - - - name: Install Poetry - run: pip install --no-input poetry - - - name: Publish package - run: | - {%- if cookiecutter.private_package_repository_name %} - poetry config repositories.private "{{ cookiecutter.private_package_repository_url.replace('simple/', '').replace('simple', '') }}" - poetry config http-basic.private "{% raw %}${{{% endraw %} secrets.POETRY_HTTP_BASIC_{{ cookiecutter.private_package_repository_name|slugify(separator="_")|upper }}_USERNAME }}" "{% raw %}${{{% endraw %} secrets.POETRY_HTTP_BASIC_{{ cookiecutter.private_package_repository_name|slugify(separator="_")|upper }}_PASSWORD }}" - poetry publish --build --repository private - {%- else %} - poetry config pypi-token.pypi "{% raw %}${{ secrets.POETRY_PYPI_TOKEN_PYPI }}{% endraw %}" - poetry publish --build - {%- endif %} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/test.yml b/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/test.yml index 2d5e58d6..d7e9ee36 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/test.yml +++ b/{{ cookiecutter.__project_name_kebab_case }}/.github/workflows/test.yml @@ -6,6 +6,7 @@ on: - main - master pull_request: + workflow_dispatch: jobs: test: @@ -14,26 +15,37 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["{{ cookiecutter.python_version }}"] + python-version: ["{{ cookiecutter.python_version }}"{% if cookiecutter.python_version != "3.13" %}, "3.13"{% endif %}] name: Python {% raw %}${{{% endraw %} matrix.python-version }} steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up Node.js uses: actions/setup-node@v4 with: - node-version: 21 + node-version: 22 - name: Install @devcontainers/cli - run: 
npm install --location=global @devcontainers/cli@0.58.0 + run: npm install --location=global @devcontainers/cli@latest + + - name: Cache Docker layers + uses: actions/cache@v4 + with: + path: /tmp/.buildx-cache + key: {% raw %}${{{% endraw %} runner.os }}-docker-{% raw %}${{{% endraw %} hashFiles('**/poetry.lock') }} + restore-keys: | + {% raw %}${{{% endraw %} runner.os }}-docker- + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 - name: Start Dev Container run: | git config --global init.defaultBranch main - PYTHON_VERSION={% raw %}${{{% endraw %} matrix.python-version }} devcontainer up --workspace-folder . + PYTHON_VERSION={% raw %}${{{% endraw %} matrix.python-version }} BUILDKIT_INLINE_CACHE=1 devcontainer up --workspace-folder . - name: Lint {{ cookiecutter.project_type }} run: devcontainer exec --workspace-folder . poe lint @@ -41,7 +53,9 @@ jobs: - name: Test {{ cookiecutter.project_type }} run: devcontainer exec --workspace-folder . poe test - - name: Upload coverage - uses: codecov/codecov-action@v4 + - name: Upload Coverage Report + uses: actions/upload-artifact@v4 with: - files: reports/coverage.xml + path: reports/htmlcov/ + name: coverage-report-{% raw %}${{{% endraw %} matrix.python-version }} + retention-days: 7 diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.gitignore b/{{ cookiecutter.__project_name_kebab_case }}/.gitignore index 3b903be6..e9050843 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/.gitignore +++ b/{{ cookiecutter.__project_name_kebab_case }}/.gitignore @@ -61,4 +61,5 @@ __pycache__/ .terraform/ # VS Code -.vscode/ +.vscode/* +!.vscode/launch.json diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.gitlab-ci.yml b/{{ cookiecutter.__project_name_kebab_case }}/.gitlab-ci.yml deleted file mode 100644 index cebce889..00000000 --- a/{{ cookiecutter.__project_name_kebab_case }}/.gitlab-ci.yml +++ /dev/null @@ -1,148 +0,0 @@ -stages: - - build - - test - - {% if cookiecutter.project_type == 
"package" %}publish{% else %}deploy{% endif %} - -variables: - DOCKER_TLS_CERTDIR: "/certs" - -.python_matrix: - parallel: - matrix: - - PYTHON_VERSION: ["{{ cookiecutter.python_version }}"] - -.install_devcontainers_cli: - cache: - paths: - - .apk_cache - - .npm_cache - before_script: - - mkdir -p .apk_cache && apk add --cache-dir .apk_cache npm - - npm install --cache .npm_cache --global --prefer-offline @devcontainers/cli@0.58.0 - -# Build the Dev Container. -Build: - extends: - - .python_matrix - - .install_devcontainers_cli - stage: build - image: docker:latest - services: - - docker:dind - script: - - | - # Log in to the Docker registry. - echo "$CI_REGISTRY_PASSWORD" | docker login --username "$CI_REGISTRY_USER" --password-stdin "$CI_REGISTRY" - - # Compute a hash for the Dev Container image. - export CI_IMAGE_SHA="$(sha1sum Dockerfile poetry.lock pyproject.toml | sha1sum | cut -c 1-8)" - echo "CI_IMAGE_SHA=$CI_IMAGE_SHA" >> .env - - # Build and push the Dev Container image, unless it already exists. - IMAGE_NAME="$CI_REGISTRY_IMAGE/devcontainer:$PYTHON_VERSION-$CI_IMAGE_SHA" - IMAGE_EXISTS=${IMAGE_EXISTS:-$(timeout 2s docker pull "$IMAGE_NAME" >/dev/null 2>&1 && echo $? || echo $?)} - if [ "$IMAGE_EXISTS" -ne 1 ]; then - echo "$IMAGE_NAME exists, skipping this job..." - else - {%- if cookiecutter.private_package_repository_name %} - echo "[http-basic.{{ cookiecutter.private_package_repository_name|slugify }}]" >> auth.toml - echo "username = \"gitlab-ci-token\"" >> auth.toml - echo "password = \"$CI_JOB_TOKEN\"" >> auth.toml - export $POETRY_AUTH_TOML_PATH=$(pwd)/auth.toml - {%- endif %} - devcontainer build --image-name "$IMAGE_NAME" --workspace-folder . - docker push "$IMAGE_NAME" - fi - artifacts: - reports: - dotenv: .env - -# Lint and test the {{ cookiecutter.project_type }}. 
-Test: - extends: - - .python_matrix - - .install_devcontainers_cli - stage: test - image: docker:latest - services: - - docker:dind - script: - - | - devcontainer up --cache-from "type=registry,ref=$CI_REGISTRY_IMAGE/devcontainer:$PYTHON_VERSION-$CI_IMAGE_SHA" --workspace-folder . - devcontainer exec --workspace-folder . git config --global --add safe.directory /workspaces/{{ cookiecutter.__project_name_kebab_case }} - devcontainer exec --workspace-folder . poe lint - devcontainer exec --workspace-folder . poe test - coverage: '/^TOTAL.*\s+(\d+(?:\.\d+)?)%/' - artifacts: - reports: - coverage_report: - coverage_format: cobertura - path: reports/coverage.xml - junit: - - reports/mypy.xml - - reports/pytest.xml - untracked: true - when: always - -{% if cookiecutter.project_type == "package" -%} -# Publish this package version to {% if cookiecutter.private_package_repository_name %}a private package repository{% else %}PyPI{% endif %}. -Publish: - stage: publish - image: $CI_REGISTRY_IMAGE/devcontainer:{{ cookiecutter.python_version }}-$CI_IMAGE_SHA - script: - {%- if cookiecutter.private_package_repository_name %} - - poetry config repositories.private "{{ cookiecutter.private_package_repository_url.replace('simple/', '').replace('simple', '') }}" - - poetry config http-basic.private "gitlab-ci-token" "$CI_JOB_TOKEN" - - poetry publish --build --repository private - {%- else %} - - poetry config pypi-token.pypi "$POETRY_PYPI_TOKEN_PYPI" - - poetry publish --build - {%- endif %} - only: - - tags -{%- else -%} -# Deploy the app to the Docker registry. -Deploy: - stage: deploy - image: docker:latest - services: - - docker:dind - script: - - | - # Log in to the Docker registry. - echo "$CI_REGISTRY_PASSWORD" | docker login --username "$CI_REGISTRY_USER" --password-stdin "$CI_REGISTRY" - - # Compile a list of tags for the image. 
- DOCKER_TAGS="" - if [ "$CI_COMMIT_BRANCH" = "$CI_DEFAULT_BRANCH" ]; then DOCKER_TAGS="$DOCKER_TAGS latest"; fi - if [ -n "$CI_COMMIT_TAG" ]; then DOCKER_TAGS="$DOCKER_TAGS $CI_COMMIT_TAG"; fi - if [ -n "$CI_ENVIRONMENT_NAME" ]; then DOCKER_TAGS="$DOCKER_TAGS $CI_ENVIRONMENT_NAME"; fi - DOCKER_TAGS_JOINED="" - for DOCKER_TAG in $DOCKER_TAGS; do - DOCKER_TAGS_JOINED="$DOCKER_TAGS_JOINED --tag $CI_REGISTRY_IMAGE:$DOCKER_TAG" - done - - # Build the app image. - {%- if cookiecutter.private_package_repository_name %} - echo "[http-basic.{{ cookiecutter.private_package_repository_name|slugify }}]" >> auth.toml - echo "username = \"gitlab-ci-token\"" >> auth.toml - echo "password = \"$CI_JOB_TOKEN\"" >> auth.toml - {%- endif %} - docker build \ - --cache-from "type=registry,ref=$CI_REGISTRY_IMAGE/devcontainer:{{ cookiecutter.python_version }}-$CI_IMAGE_SHA" \ - --pull \ - {%- if cookiecutter.private_package_repository_name %} - --secret id=poetry-auth,src=auth.toml \ - {%- endif %} - --target app \ - $DOCKER_TAGS_JOINED \ - . - - # Push the tags to the Docker registry. 
- for DOCKER_TAG in $DOCKER_TAGS; do - docker push "$CI_REGISTRY_IMAGE:$DOCKER_TAG" - done - only: - - tags - when: manual -{%- endif %} \ No newline at end of file diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.pre-commit-config.yaml b/{{ cookiecutter.__project_name_kebab_case }}/.pre-commit-config.yaml index 3da7ccef..a1c5cefd 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/.pre-commit-config.yaml +++ b/{{ cookiecutter.__project_name_kebab_case }}/.pre-commit-config.yaml @@ -1,6 +1,6 @@ # https://pre-commit.com default_install_hook_types: [commit-msg, pre-commit] -default_stages: [commit, manual] +default_stages: [pre-commit, manual] fail_fast: true repos: - repo: meta @@ -16,9 +16,10 @@ repos: - id: rst-inline-touching-normal - id: text-unicode-replacement-char - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v6.0.0 hooks: - id: check-added-large-files + args: ['--maxkb=50000'] - id: check-ast - id: check-builtin-literals - id: check-case-conflict @@ -34,14 +35,17 @@ repos: - id: debug-statements - id: destroyed-symlinks - id: detect-private-key + - repo: https://github.com/Yelp/detect-secrets + rev: v1.5.0 + hooks: + - id: detect-secrets + args: ['--baseline', '.secrets.baseline'] - id: end-of-file-fixer - types: [python] - id: fix-byte-order-marker - id: mixed-line-ending - id: name-tests-test args: [--pytest-test-first] - id: trailing-whitespace - types: [python] - repo: local hooks: {%- if cookiecutter.with_conventional_commits|int %} @@ -53,13 +57,26 @@ repos: language: system stages: [commit-msg] {%- endif %} + # Development mode: {{ cookiecutter.development_environment }} +{%- if cookiecutter.development_environment == "strict" %} + - id: ruff-check + name: ruff check + entry: ruff check + args: ["--force-exclude", "--extend-fixable=ERA001,F401,F841,T201,T203"] + require_serial: true + language: system + types_or: [python, pyi] + pass_filenames: false +{%- else %} - id: ruff-check name: ruff check entry: 
ruff check - args: ["--force-exclude", "--extend-fixable={% if cookiecutter.development_environment == "strict" %}ERA001,F401,F841,T201,T203{% else %}F401,F841{% endif %}"{% if cookiecutter.development_environment == "simple" %}, "--fix-only"{% endif %}] + args: ["--force-exclude", "--extend-fixable=F401,F841", "--fix-only"] require_serial: true language: system types_or: [python, pyi] + pass_filenames: false +{%- endif %} - id: ruff-format name: ruff format entry: ruff format @@ -67,6 +84,7 @@ repos: require_serial: true language: system types_or: [python, pyi] + pass_filenames: false {%- if cookiecutter.development_environment == "strict" %} - id: shellcheck name: shellcheck @@ -74,6 +92,7 @@ repos: args: [--check-sourced] language: system types: [shell] + pass_filenames: false {%- endif %} - id: poetry-check name: poetry check @@ -85,3 +104,10 @@ repos: entry: mypy language: system types: [python] + pass_filenames: false + - id: codespell + name: codespell + entry: codespell + require_serial: true + language: system + types_or: [markdown, python, pyi, toml, yaml] diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.secrets.baseline b/{{ cookiecutter.__project_name_kebab_case }}/.secrets.baseline new file mode 100644 index 00000000..ec6b4306 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/.secrets.baseline @@ -0,0 +1,127 @@ +{ + "version": "1.5.0", + "plugins_used": [ + { + "name": "ArtifactoryDetector" + }, + { + "name": "AWSKeyDetector" + }, + { + "name": "AzureStorageKeyDetector" + }, + { + "name": "Base64HighEntropyString", + "limit": 4.5 + }, + { + "name": "BasicAuthDetector" + }, + { + "name": "CloudantDetector" + }, + { + "name": "DiscordBotTokenDetector" + }, + { + "name": "GitHubTokenDetector" + }, + { + "name": "GitLabTokenDetector" + }, + { + "name": "HexHighEntropyString", + "limit": 3.0 + }, + { + "name": "IbmCloudIamDetector" + }, + { + "name": "IbmCosHmacDetector" + }, + { + "name": "IPPublicDetector" + }, + { + "name": 
"JwtTokenDetector" + }, + { + "name": "KeywordDetector", + "keyword_exclude": "" + }, + { + "name": "MailchimpDetector" + }, + { + "name": "NpmDetector" + }, + { + "name": "OpenAIDetector" + }, + { + "name": "PrivateKeyDetector" + }, + { + "name": "PypiTokenDetector" + }, + { + "name": "SendGridDetector" + }, + { + "name": "SlackDetector" + }, + { + "name": "SoftlayerDetector" + }, + { + "name": "SquareOAuthDetector" + }, + { + "name": "StripeDetector" + }, + { + "name": "TelegramBotTokenDetector" + }, + { + "name": "TwilioKeyDetector" + } + ], + "filters_used": [ + { + "path": "detect_secrets.filters.allowlist.is_line_allowlisted" + }, + { + "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", + "min_level": 2 + }, + { + "path": "detect_secrets.filters.heuristic.is_indirect_reference" + }, + { + "path": "detect_secrets.filters.heuristic.is_likely_id_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_lock_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_potential_uuid" + }, + { + "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" + }, + { + "path": "detect_secrets.filters.heuristic.is_sequential_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_swagger_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_templated_secret" + } + ], + "results": {}, + "generated_at": "2026-02-24T15:29:44Z" +} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/.vscode/launch.json b/{{ cookiecutter.__project_name_kebab_case }}/.vscode/launch.json new file mode 100644 index 00000000..8acb5228 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/.vscode/launch.json @@ -0,0 +1,36 @@ +{ + "version": "0.2.0", + "configurations": [ +{%- if cookiecutter.with_fastapi_api|int %} + { + "name": "FastAPI (uvicorn --reload)", + "type": "debugpy", + "request": "launch", + "module": "uvicorn", + "args": [ 
+ "{{ cookiecutter.__project_name_snake_case }}.api:app", + "--reload", + "--host", "0.0.0.0", + "--port", "8000" + ], + "jinja": true, + "envFile": "${workspaceFolder}/.env" + }, +{%- endif %} + { + "name": "pytest", + "type": "debugpy", + "request": "launch", + "module": "pytest", + "args": ["--no-header", "-vv"], + "jinja": true + }, + { + "name": "Current File", + "type": "debugpy", + "request": "launch", + "program": "${file}", + "jinja": true + } + ] +} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/CHANGELOG.md b/{{ cookiecutter.__project_name_kebab_case }}/CHANGELOG.md new file mode 100644 index 00000000..37473ff1 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/CHANGELOG.md @@ -0,0 +1,12 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +### Added + +- Initial project scaffolding with baseline-app-cookiecutter. diff --git a/{{ cookiecutter.__project_name_kebab_case }}/CLAUDE.md b/{{ cookiecutter.__project_name_kebab_case }}/CLAUDE.md new file mode 100644 index 00000000..115a50e9 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/CLAUDE.md @@ -0,0 +1,64 @@ +# {{ cookiecutter.project_name }} + +{{ cookiecutter.project_description }} + +## Conventions + +This project follows the [Baseline development conventions](https://github.com/Baseline-quebec/agents/blob/main/programmation/knowledge/conventions-baseline.md). + +### Key rules + +- **Language**: Code, commits, branches, and PRs in English. User-facing docs in French. 
+- **Naming**: `snake_case` (code), `kebab-case` (repos), `PascalCase` (classes), `UPPER_SNAKE_CASE` (constants) +- **Imports**: Absolute only (`from {{ cookiecutter.__project_name_snake_case }}.module import X`) +- **Type hints**: Required on all function signatures +- **Docstrings**: Google convention +- **Line length**: 99 characters max +- **Config**: Everything in `pyproject.toml` (no separate `.mypy.ini`, `.pylintrc`, etc.) + +## Project layout + +``` +src/{{ cookiecutter.__project_name_snake_case }}/ # source code (src layout) +tests/ # test suite +docs/ # MkDocs documentation + ADRs +pyproject.toml # Poetry config, tool settings +``` + +## Commands + +Always use `poetry run` — never `poetry shell`. + +```bash +poetry install # install dependencies +poetry run poe lint # ruff + mypy + pre-commit +poetry run poe test # pytest with coverage +poetry run poe docs --serve # serve MkDocs locally +{%- if cookiecutter.with_fastapi_api|int %} +poetry run poe api --dev # start FastAPI dev server +{%- endif %} +``` + +## Git workflow + +- **Branches**: `feat/`, `fix/`, `refactor/`, `docs/` + kebab-case description +- **Commits**: [Conventional Commits](https://www.conventionalcommits.org/) — `feat(scope): description` +- **PRs**: Squash merge only. PR title = final commit message on `main`. +- Never commit directly to `main`. 
+ +## Tech stack + +- **Package manager**: [Poetry](https://python-poetry.org/) +- **Task runner**: [Poe the Poet](https://github.com/nat-n/poethepoet) +- **Linting**: [Ruff](https://docs.astral.sh/ruff/) (linting + formatting) +- **Type checking**: [Mypy](https://mypy.readthedocs.io/) (strict mode) +- **Testing**: [pytest](https://docs.pytest.org/){% if cookiecutter.with_pytest_bdd|int %} + [pytest-bdd](https://pytest-bdd.readthedocs.io/){% endif %} + +- **CI**: GitHub Actions in devcontainer +{%- if cookiecutter.with_fastapi_api|int %} +- **API**: [FastAPI](https://fastapi.tiangolo.com/) + [Pydantic](https://docs.pydantic.dev/) +{%- endif %} +{%- if cookiecutter.with_typer_cli|int %} +- **CLI**: [Typer](https://typer.tiangolo.com/) + [Rich](https://rich.readthedocs.io/) +{%- endif %} +- **Config**: [pydantic-settings](https://docs.pydantic.dev/latest/concepts/pydantic_settings/) with `.env` file diff --git a/{{ cookiecutter.__project_name_kebab_case }}/CONTRIBUTING.md b/{{ cookiecutter.__project_name_kebab_case }}/CONTRIBUTING.md new file mode 100644 index 00000000..e7b7ea6e --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/CONTRIBUTING.md @@ -0,0 +1,174 @@ +# Contributing to {{ cookiecutter.project_name }} project + +## Using +{% if cookiecutter.with_fastapi_api|int -%} +To serve this app, run: + +```sh +docker compose up app +``` + +and open [localhost:8000](http://localhost:8000) in your browser. + +Within the Dev Container this is equivalent to: + +```sh +poe api +``` +{%- else -%} +To install and use this project: + +```sh +poetry install +poe test +``` +{%- endif %} + +## Contributing + +### Prerequisites + +
+1. Set up Git to use SSH + +1. [Generate an SSH key](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent#generating-a-new-ssh-key) and [add the SSH key to your GitHub account](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account). +1. Configure SSH to automatically load your SSH keys: + + ```sh + cat << EOF >> ~/.ssh/config + + Host * + AddKeysToAgent yes + IgnoreUnknown UseKeychain + UseKeychain yes + ForwardAgent yes + EOF + ``` + +
+ +
+2. Install Docker + +1. [Install Docker Desktop](https://www.docker.com/get-started). + - _Linux only_: + - Export your user's user id and group id so that [files created in the Dev Container are owned by your user](https://github.com/moby/moby/issues/3206): + + ```sh + cat << EOF >> ~/.bashrc + + export UID=$(id --user) + export GID=$(id --group) + EOF + ``` + +
+ +
+3. Install VS Code or PyCharm + +1. [Install VS Code](https://code.visualstudio.com/) and [VS Code's Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers). Alternatively, install [PyCharm](https://www.jetbrains.com/pycharm/download/). +2. _Optional:_ install a [Nerd Font](https://www.nerdfonts.com/font-downloads) such as [FiraCode Nerd Font](https://github.com/ryanoasis/nerd-fonts/tree/master/patched-fonts/FiraCode) and [configure VS Code](https://github.com/tonsky/FiraCode/wiki/VS-Code-Instructions) or [configure PyCharm](https://github.com/tonsky/FiraCode/wiki/Intellij-products-instructions) to use it. + +
+ +### Development environments + +
+Dev container environments + +You can develop "remotely" inside a container using one of the following development environments: + +1. ⭐️ _GitHub Codespaces_: click on _Code_ and select _Create codespace_ to start a Dev Container with [GitHub Codespaces](https://github.com/features/codespaces). +1. ⭐️ _Dev Container (with container volume)_: click on "Open in Dev Containers" to clone this repository in a container volume and create a Dev Container with VS Code. +1. _Dev Container_: clone this repository, open it with VS Code, and run Ctrl/⌘ + + P → _Dev Containers: Reopen in Container_. +1. _PyCharm_: clone this repository, open it with PyCharm, and [configure Docker Compose as a remote interpreter](https://www.jetbrains.com/help/pycharm/using-docker-compose-as-a-remote-interpreter.html#docker-compose-remote) with the `dev` service. +1. _Terminal_: clone this repository, open it with your terminal, and run `docker compose up --detach dev` to start a Dev Container in the background, and then run `docker compose exec dev zsh` to open a shell prompt in the Dev Container. + +
+ +
+Local development + +To develop locally, you'll have to install manually some tools. +First, initialize a virtual environment for the project with +```poetry shell``` +and then install the dependencies and the project with +```poetry install``` + +
+ +
+Extra steps + +After setting up the development environment, you'll have to create an .env file at the root of the project, copy the contents of .env.example into this new file and fill all of the variables with the appropriate values. + +
+ +### Development tools + +The following tools will be automatically installed by poetry to support development: + +- _commitizen_: This project follows the [Conventional Commits](https://www.conventionalcommits.org/) standard to automate [Semantic Versioning](https://semver.org/) and [Keep A Changelog](https://keepachangelog.com/) with [Commitizen](https://github.com/commitizen-tools/commitizen). + +- _pre-commit_: This project uses pre-commit hooks that enforces the submitted code to respect conventions and high quality standards. This includes [detect-secrets](https://github.com/Yelp/detect-secrets) which prevents accidentally committing credentials (API keys, tokens, passwords). If a secret is detected, the commit is blocked. To update the baseline after a false positive: `detect-secrets scan --baseline .secrets.baseline`. + +- _pytest_: This project uses the `pytest` framework for unit testing. + +- _ruff_: This project uses `ruff` to lint and automatically format code in order to maintain consistent code across all projects and developers. + +- _mypy_: This project uses the static type checker `mypy` to enforce type annotation and spot bugs before they can happen. + +- _codespell_: This project uses `codespell` to detect common spelling mistakes in code and documentation. + +We use the following integration tools: + +- _GitHub Actions_: This project uses GitHub Actions to execute verifications in the cloud before allowing to merge with the main branch. + +### Development workflow + +1. Open new branch. +1. Write code. +1. Write tests. +1. Run tests and linter with `poe test` and `poe lint`. +1. Commit code with `cz commit` and follow the instructions. + 1. Retry failed commit (after fixing) with `cz commit --retry`. + 1. _Only in emergencies_, commit with `git commit --no-verify`. +1. Push commits with `git push`. +1. When branch is fully functional, open a pull requests on GitHub and ask for a review. + 1. When approved, bump the versions with `cz bump`. 
 1. Build the docs with `poe docs`. + 1. Push to GitHub one last time before merging. +1. Repeat. + +### Good development practices + +- Most if not all functions and methods should be tested. + +- All functions, methods, modules and classes should have proper documentation. + +- All functions and methods should be properly annotated. + +- Commits should be atomic. + +- Dependency injection should be favored, and object instantiation should be made at the latest possible moment. + +- Always keep Separation of Concerns in mind. + +### Developing tips and tricks + +- Run `poe` from within the development environment to print a list of [Poe the Poet](https://github.com/nat-n/poethepoet) tasks available to run on this project. + +- Run `poetry add {package}` from within the development environment to install a runtime dependency and add it to `pyproject.toml` and `poetry.lock`. Add `--group test` or `--group dev` to install a CI or development dependency, respectively. + +- Run `poetry update` from within the development environment to upgrade all dependencies to the latest versions allowed by `pyproject.toml`. + +- Run `cz bump` to bump the app's version, update the `CHANGELOG.md`, and create a git tag. + +- Many VSCode extensions exist to help you code better and faster. We recommend the following ones: + - The "Python" extension and its suite ("Python", "Pylance", "Python Debugger") + - The "ruff" extension: Automatically shows which linting and formatting rules are failing directly in the code. Tip: bind keyboard shortcuts to format and fix linting errors easily and quickly. + - The "mypy" extension: Automatically shows which type annotations are invalid directly in the code. N.B.: the extension is fairly slow, at least on Windows. + - The "Docker" extension. + - The "DevContainer" extension. 
+ - The "Jupyter" extension: Allows you to edit and run notebooks directly in VS Code diff --git a/{{ cookiecutter.__project_name_kebab_case }}/Dockerfile b/{{ cookiecutter.__project_name_kebab_case }}/Dockerfile index 3ba3ea85..61ade24e 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/Dockerfile +++ b/{{ cookiecutter.__project_name_kebab_case }}/Dockerfile @@ -4,14 +4,26 @@ FROM {{ cookiecutter.__docker_image }} AS base # Remove docker-clean so we can keep the apt cache in Docker build cache. RUN rm /etc/apt/apt.conf.d/docker-clean -{%- if cookiecutter.development_environment == "strict" %} # Configure Python to print tracebacks on crash [1], and to not buffer stdout and stderr [2]. # [1] https://docs.python.org/3/using/cmdline.html#envvar-PYTHONFAULTHANDLER # [2] https://docs.python.org/3/using/cmdline.html#envvar-PYTHONUNBUFFERED -ENV PYTHONFAULTHANDLER 1 -ENV PYTHONUNBUFFERED 1 -{%- endif %} +ENV PYTHONFAULTHANDLER=1 +ENV PYTHONUNBUFFERED=1 + +# Install Poetry in a separate venv so it doesn't pollute the main venv. +ENV POETRY_VERSION=2.3.2 +ENV POETRY_VIRTUAL_ENV=/opt/poetry-env +RUN --mount=type=cache,target=/root/.cache/pip/ \ + python -m venv $POETRY_VIRTUAL_ENV && \ + $POETRY_VIRTUAL_ENV/bin/pip install poetry~=$POETRY_VERSION && \ + ln -s $POETRY_VIRTUAL_ENV/bin/poetry /usr/local/bin/poetry + +# Install compilers that may be required for certain packages or platforms. +RUN --mount=type=cache,target=/var/cache/apt/ \ + --mount=type=cache,target=/var/lib/apt/ \ + apt-get update && \ + apt-get install --no-install-recommends --yes build-essential # Create a non-root user and switch to it [1]. # [1] https://code.visualstudio.com/remote/advancedcontainers/add-nonroot-user @@ -23,48 +35,23 @@ RUN groupadd --gid $GID user && \ USER user # Create and activate a virtual environment. 
-ENV VIRTUAL_ENV /opt/{{ cookiecutter.__project_name_kebab_case }}-env -ENV PATH $VIRTUAL_ENV/bin:$PATH +ENV VIRTUAL_ENV=/opt/{{ cookiecutter.__project_name_kebab_case }}-env +ENV PATH=$VIRTUAL_ENV/bin:$PATH RUN python -m venv $VIRTUAL_ENV # Set the working directory. WORKDIR /workspaces/{{ cookiecutter.__project_name_kebab_case }}/ - - -FROM base as poetry - -USER root - -# Install Poetry in separate venv so it doesn't pollute the main venv. -ENV POETRY_VERSION 1.8.0 -ENV POETRY_VIRTUAL_ENV /opt/poetry-env -RUN --mount=type=cache,target=/root/.cache/pip/ \ - python -m venv $POETRY_VIRTUAL_ENV && \ - $POETRY_VIRTUAL_ENV/bin/pip install poetry~=$POETRY_VERSION && \ - ln -s $POETRY_VIRTUAL_ENV/bin/poetry /usr/local/bin/poetry - -# Install compilers that may be required for certain packages or platforms. -RUN --mount=type=cache,target=/var/cache/apt/ \ - --mount=type=cache,target=/var/lib/apt/ \ - apt-get update && \ - apt-get install --no-install-recommends --yes build-essential - -USER user - # Install the run time Python dependencies in the virtual environment. COPY --chown=user:user poetry.lock* pyproject.toml /workspaces/{{ cookiecutter.__project_name_kebab_case }}/ RUN mkdir -p /home/user/.cache/pypoetry/ && mkdir -p /home/user/.config/pypoetry/ && \ mkdir -p src/{{ cookiecutter.__project_name_snake_case }}/ && touch src/{{ cookiecutter.__project_name_snake_case }}/__init__.py && touch README.md RUN --mount=type=cache,uid=$UID,gid=$GID,target=/home/user/.cache/pypoetry/ \ - {%- if cookiecutter.private_package_repository_name %} - --mount=type=secret,id=poetry-auth,uid=$UID,gid=$GID,target=/home/user/.config/pypoetry/auth.toml \ - {%- endif %} poetry install --only main --all-extras --no-interaction -FROM poetry as dev +FROM base AS dev # Install development tools: curl, git, gpg, ssh, starship, sudo, vim, and zsh. 
USER root @@ -72,6 +59,7 @@ RUN --mount=type=cache,target=/var/cache/apt/ \ --mount=type=cache,target=/var/lib/apt/ \ apt-get update && \ apt-get install --no-install-recommends --yes curl git gnupg ssh sudo vim zsh && \ + git config --system safe.directory '*' && \ sh -c "$(curl -fsSL https://starship.rs/install.sh)" -- "--yes" && \ usermod --shell /usr/bin/zsh user && \ echo 'user ALL=(root) NOPASSWD:ALL' > /etc/sudoers.d/user && chmod 0440 /etc/sudoers.d/user @@ -79,9 +67,6 @@ USER user # Install the development Python dependencies in the virtual environment. RUN --mount=type=cache,uid=$UID,gid=$GID,target=/home/user/.cache/pypoetry/ \ - {%- if cookiecutter.private_package_repository_name %} - --mount=type=secret,id=poetry-auth,uid=$UID,gid=$GID,target=/home/user/.config/pypoetry/auth.toml \ - {%- endif %} poetry install --all-extras --no-interaction # Persist output generated during docker build so that we can restore it in the dev container. @@ -91,7 +76,7 @@ RUN mkdir -p /opt/build/poetry/ && cp poetry.lock /opt/build/poetry/ && \ mkdir -p /opt/build/git/ && cp .git/hooks/commit-msg .git/hooks/pre-commit /opt/build/git/ # Configure the non-root user's shell. -ENV ANTIDOTE_VERSION 1.8.6 +ENV ANTIDOTE_VERSION=1.8.6 RUN git clone --branch v$ANTIDOTE_VERSION --depth=1 https://github.com/mattmc3/antidote.git ~/.antidote/ && \ echo 'zsh-users/zsh-syntax-highlighting' >> ~/.zsh_plugins.txt && \ echo 'zsh-users/zsh-autosuggestions' >> ~/.zsh_plugins.txt && \ @@ -106,29 +91,20 @@ RUN git clone --branch v$ANTIDOTE_VERSION --depth=1 https://github.com/mattmc3/a echo 'bindkey "^[[B" history-beginning-search-forward' >> ~/.zshrc && \ mkdir ~/.history/ && \ zsh -c 'source ~/.zshrc' -{%- if cookiecutter.private_package_repository_name %} - -# Enable Poetry to read the private package repository credentials. 
-RUN ln -s /run/secrets/poetry-auth /home/user/.config/pypoetry/auth.toml -{%- endif %} -{%- if cookiecutter.project_type == "app" %} - - FROM base AS app -# Copy the virtual environment from the poetry stage. -COPY --from=poetry $VIRTUAL_ENV $VIRTUAL_ENV - # Copy the {{ cookiecutter.project_type }} source code to the working directory. -COPY --chown=user:user . . +COPY --chown=user:user ./src ./src +COPY --chown=user:user ./pyproject.toml . +COPY --chown=user:user ./poetry.lock . +{%- if cookiecutter.with_fastapi_api|int %} + +# Health check using stdlib only (no curl in slim images). +HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \ + CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')" # Expose the app. -{%- if cookiecutter.with_typer_cli|int %} -ENTRYPOINT ["/opt/{{ cookiecutter.__project_name_kebab_case }}-env/bin/{{ cookiecutter.__project_name_kebab_case }}"] -CMD [] -{%- else %} ENTRYPOINT ["/opt/{{ cookiecutter.__project_name_kebab_case }}-env/bin/poe"] -CMD [{% if cookiecutter.with_fastapi_api|int %}"api"{% else %}"app"{% endif %}] -{%- endif %} +CMD ["api"] {%- endif %} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/LICENSE b/{{ cookiecutter.__project_name_kebab_case }}/LICENSE new file mode 100644 index 00000000..6b358420 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/LICENSE @@ -0,0 +1,46 @@ +{% if cookiecutter.license == "MIT" -%} +MIT License + +Copyright (c) {{ cookiecutter.author_name }} + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and 
this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +{% elif cookiecutter.license == "Apache-2.0" -%} + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + Copyright {{ cookiecutter.author_name }} +{% elif cookiecutter.license == "Proprietary" -%} +Copyright {{ cookiecutter.author_name }}. All rights reserved. + +This software is proprietary and confidential. Unauthorized copying, distribution, +or use of this software, via any medium, is strictly prohibited. 
+{% endif -%} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/README.md b/{{ cookiecutter.__project_name_kebab_case }}/README.md index 586039f4..90b29bee 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/README.md +++ b/{{ cookiecutter.__project_name_kebab_case }}/README.md @@ -1,171 +1,97 @@ -[![Open in Dev Containers](https://img.shields.io/static/v1?label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url={{ cookiecutter.project_url.replace("https://", "git@").replace(".com/", ".com:") if cookiecutter.private_package_repository_url else cookiecutter.project_url }}) - # {{ cookiecutter.project_name }} {{ cookiecutter.project_description }} -{%- if cookiecutter.project_type == "package" or cookiecutter.with_typer_cli|int %} -## Installing +## Documentation -To install this package, run: +- [Architecture Decision Records](docs/decisions/) +- [MkDocs](https://{{ cookiecutter.github_org }}.github.io/{{ cookiecutter.__project_name_kebab_case }}/) (run `poe docs --serve` locally) -```sh -{% if cookiecutter.private_package_repository_name %}poetry add{% else %}pip install{% endif %} {{ cookiecutter.__project_name_kebab_case }} -``` -{%- endif %} +## Setup -## Using -{%- if cookiecutter.with_typer_cli|int %} +### Environment variables -To view the CLI help information, run: +Create a copy of `.env.example` and fill in the values: -```sh -{{ cookiecutter.__project_name_kebab_case }} --help +```bash +cp .env.example .env ``` -{%- elif cookiecutter.project_type == "app" %} -To serve this app, run: - -```sh -docker compose up app -``` -{%- if cookiecutter.with_fastapi_api|int %} +### Local setup -and open [localhost:8000](http://localhost:8000) in your browser. 
-{%- endif %} +Requirements: -Within the Dev Container this is equivalent to: +- [Python {{ cookiecutter.python_version }}](https://www.python.org/downloads/) +- [Poetry](https://python-poetry.org/docs/#installation) -```sh -poe {% if cookiecutter.with_fastapi_api|int %}api{% else %}app{% endif %} +```bash +poetry install ``` -{%- else %} -Example usage: +### Container setup -```python -import {{ cookiecutter.__project_name_snake_case }} +Requirements: [Docker](https://docs.docker.com/get-docker/) -... +```bash +docker compose up --build ``` -{%- endif %} -## Contributing +## Usage -
-Prerequisites +Run `poe` to see all available tasks. +{% if cookiecutter.with_fastapi_api|int %} +### API +{%- if cookiecutter.with_fastapi_api|int %} -
-1. Set up Git to use SSH +```bash +poe api --dev +``` -{% if cookiecutter.continuous_integration == "GitLab" -%} -1. [Generate an SSH key](https://docs.gitlab.com/ee/user/ssh.html#generate-an-ssh-key-pair) and [add the SSH key to your GitLab account](https://docs.gitlab.com/ee/user/ssh.html#add-an-ssh-key-to-your-gitlab-account). -{%- else -%} -1. [Generate an SSH key](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/generating-a-new-ssh-key-and-adding-it-to-the-ssh-agent#generating-a-new-ssh-key) and [add the SSH key to your GitHub account](https://docs.github.com/en/authentication/connecting-to-github-with-ssh/adding-a-new-ssh-key-to-your-github-account). -{%- endif %} -1. Configure SSH to automatically load your SSH keys: - ```sh - cat << EOF >> ~/.ssh/config - - Host * - AddKeysToAgent yes - IgnoreUnknown UseKeychain - UseKeychain yes - ForwardAgent yes - EOF - ``` - -
- -
-2. Install Docker - -1. [Install Docker Desktop](https://www.docker.com/get-started). - - _Linux only_: - - Export your user's user id and group id so that [files created in the Dev Container are owned by your user](https://github.com/moby/moby/issues/3206): - ```sh - cat << EOF >> ~/.bashrc - - export UID=$(id --user) - export GID=$(id --group) - {%- if cookiecutter.private_package_repository_name %} - export POETRY_AUTH_TOML_PATH="~/.config/pypoetry/auth.toml" - {%- endif %} - EOF - ``` - {%- if cookiecutter.private_package_repository_name %} - - _Windows only_: - - Export the location of your private package repository credentials so that Docker Compose can load these as a [build and run time secret](https://docs.docker.com/compose/compose-file/compose-file-v3/#secrets-configuration-reference): - ```bat - setx POETRY_AUTH_TOML_PATH %APPDATA%\pypoetry\auth.toml - ``` - {%- endif %} - -
- -
-3. Install VS Code or PyCharm - -1. [Install VS Code](https://code.visualstudio.com/) and [VS Code's Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers). Alternatively, install [PyCharm](https://www.jetbrains.com/pycharm/download/). -2. _Optional:_ install a [Nerd Font](https://www.nerdfonts.com/font-downloads) such as [FiraCode Nerd Font](https://github.com/ryanoasis/nerd-fonts/tree/master/patched-fonts/FiraCode) and [configure VS Code](https://github.com/tonsky/FiraCode/wiki/VS-Code-Instructions) or [configure PyCharm](https://github.com/tonsky/FiraCode/wiki/Intellij-products-instructions) to use it. - -
-{%- if cookiecutter.private_package_repository_name %} - -
-4. Configure Poetry to use the private package repository - -{% if cookiecutter.continuous_integration == "GitLab" -%} -1. [Create a personal access token](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#create-a-personal-access-token) with the `api` scope and use it to [add your private package repository credentials to your Poetry's `auth.toml` file](https://python-poetry.org/docs/repositories/#configuring-credentials): - ```toml - # Linux: ~/.config/pypoetry/auth.toml - # macOS: ~/Library/Application Support/pypoetry/auth.toml - # Windows: C:\Users\%USERNAME%\AppData\Roaming\pypoetry\auth.toml - [http-basic.{{ cookiecutter.private_package_repository_name|slugify }}] - username = "{personal access token name}" - password = "{personal access token}" - ``` -{%- else -%} -1. [Add your private package repository credentials to your Poetry's `auth.toml` file](https://python-poetry.org/docs/repositories/#configuring-credentials): - ```toml - # Linux: ~/.config/pypoetry/auth.toml - # macOS: ~/Library/Application Support/pypoetry/auth.toml - # Windows: C:\Users\%USERNAME%\AppData\Roaming\pypoetry\auth.toml - [http-basic.{{ cookiecutter.private_package_repository_name|slugify }}] - username = "{username}" - password = "{password}" - ``` +Access the API at [localhost:8000](http://localhost:8000) and the docs at [localhost:8000/docs](http://localhost:8000/docs). {%- endif %} +{% endif %} +{%- if cookiecutter.with_typer_cli|int %} -
-{%- endif %} +### CLI -
+```bash +poetry run {{ cookiecutter.__project_name_kebab_case }} info +poetry run {{ cookiecutter.__project_name_kebab_case }} config +{%- if cookiecutter.with_fastapi_api|int %} +poetry run {{ cookiecutter.__project_name_kebab_case }} health +{%- endif %} +``` +{%- endif %} -
-Development environments +### Common tasks -The following development environments are supported: -{% if cookiecutter.continuous_integration == "GitHub" %} -1. ⭐️ _GitHub Codespaces_: click on _Code_ and select _Create codespace_ to start a Dev Container with [GitHub Codespaces](https://github.com/features/codespaces). -{%- endif %} -1. ⭐️ _Dev Container (with container volume)_: click on [Open in Dev Containers](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url={{ cookiecutter.project_url.replace("https://", "git@").replace(".com/", ".com:") if cookiecutter.private_package_repository_url else cookiecutter.project_url }}) to clone this repository in a container volume and create a Dev Container with VS Code. -1. _Dev Container_: clone this repository, open it with VS Code, and run Ctrl/⌘ + + P → _Dev Containers: Reopen in Container_. -1. _PyCharm_: clone this repository, open it with PyCharm, and [configure Docker Compose as a remote interpreter](https://www.jetbrains.com/help/pycharm/using-docker-compose-as-a-remote-interpreter.html#docker-compose-remote) with the `dev` service. -1. _Terminal_: clone this repository, open it with your terminal, and run `docker compose up --detach dev` to start a Dev Container in the background, and then run `docker compose exec dev zsh` to open a shell prompt in the Dev Container. +```bash +poe test # run tests +poe lint # run linting +poe docs --serve # serve documentation locally +``` -
+## Project structure -
-Developing -{% if cookiecutter.with_conventional_commits|int %} -- This project follows the [Conventional Commits](https://www.conventionalcommits.org/) standard to automate [Semantic Versioning](https://semver.org/) and [Keep A Changelog](https://keepachangelog.com/) with [Commitizen](https://github.com/commitizen-tools/commitizen). +``` +{{ cookiecutter.__project_name_kebab_case }}/ +├── src/{{ cookiecutter.__project_name_snake_case }}/ # source code +│ ├── settings.py # pydantic-settings config +{%- if cookiecutter.with_fastapi_api|int %} +│ ├── api.py # FastAPI application {%- endif %} -- Run `poe` from within the development environment to print a list of [Poe the Poet](https://github.com/nat-n/poethepoet) tasks available to run on this project. -- Run `poetry add {package}` from within the development environment to install a run time dependency and add it to `pyproject.toml` and `poetry.lock`. Add `--group test` or `--group dev` to install a CI or development dependency, respectively. -- Run `poetry update` from within the development environment to upgrade all dependencies to the latest versions allowed by `pyproject.toml`. -{%- if cookiecutter.with_conventional_commits|int %} -- Run `cz bump` to bump the {{ cookiecutter.project_type }}'s version, update the `CHANGELOG.md`, and create a git tag. +{%- if cookiecutter.with_typer_cli|int %} +│ ├── cli.py # Typer CLI {%- endif %} +│ ├── models.py # Pydantic models +│ └── services.py # business logic +├── tests/ # test suite +├── docs/ # MkDocs + ADRs +├── pyproject.toml # Poetry config +├── Dockerfile # production image +└── docker-compose.yml # local development +``` + +## Contributing -
+See [CONTRIBUTING.md](CONTRIBUTING.md). diff --git a/{{ cookiecutter.__project_name_kebab_case }}/docker-compose.yml b/{{ cookiecutter.__project_name_kebab_case }}/docker-compose.yml index 07c1e58d..98f9a98f 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/docker-compose.yml +++ b/{{ cookiecutter.__project_name_kebab_case }}/docker-compose.yml @@ -1,26 +1,15 @@ -version: "3.9" - services: devcontainer: build: context: . target: dev - {%- if cookiecutter.private_package_repository_name %} - secrets: - - poetry-auth - {%- endif %} args: PYTHON_VERSION: ${PYTHON_VERSION:-{{ cookiecutter.python_version }}} UID: ${UID:-1000} GID: ${GID:-1000} - {%- if not cookiecutter.private_package_repository_name %} environment: - POETRY_PYPI_TOKEN_PYPI - {%- else %} - secrets: - - poetry-auth - {%- endif %} volumes: - ..:/workspaces - command-history-volume:/home/user/.history/ @@ -37,43 +26,26 @@ services: "sudo chown user $$SSH_AUTH_SOCK && cp --update /opt/build/poetry/poetry.lock /workspaces/{{ cookiecutter.__project_name_kebab_case }}/ && mkdir -p /workspaces/{{ cookiecutter.__project_name_kebab_case }}/.git/hooks/ && cp --update /opt/build/git/* /workspaces/{{ cookiecutter.__project_name_kebab_case }}/.git/hooks/ && zsh" ] environment: - {%- if not cookiecutter.private_package_repository_name %} - - POETRY_PYPI_TOKEN_PYPI - {%- endif %} - SSH_AUTH_SOCK=/run/host-services/ssh-auth.sock - {%- if cookiecutter.with_fastapi_api|int %} ports: - - "8000" - {%- endif %} + - "8000:8000" volumes: - ~/.gitconfig:/etc/gitconfig - ~/.ssh/known_hosts:/home/user/.ssh/known_hosts - ${SSH_AGENT_AUTH_SOCK:-/run/host-services/ssh-auth.sock}:/run/host-services/ssh-auth.sock profiles: - dev - {%- if cookiecutter.project_type == "app" %} - app: +{%- if cookiecutter.with_fastapi_api|int %} + api: build: context: . 
target: app - {%- if cookiecutter.private_package_repository_name %} - secrets: - - poetry-auth - {%- endif %} tty: true - {%- if cookiecutter.with_fastapi_api|int %} ports: - "8000:8000" - {%- endif %} profiles: - - app - {%- endif %} -{%- if cookiecutter.private_package_repository_name %} - -secrets: - poetry-auth: - file: "${POETRY_AUTH_TOML_PATH:-~/Library/Application Support/pypoetry/auth.toml}" + - api {%- endif %} volumes: diff --git a/{{ cookiecutter.__project_name_kebab_case }}/docs/decisions/0001-record-architecture-decisions.md b/{{ cookiecutter.__project_name_kebab_case }}/docs/decisions/0001-record-architecture-decisions.md new file mode 100644 index 00000000..b81a3958 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/docs/decisions/0001-record-architecture-decisions.md @@ -0,0 +1,23 @@ +--- +status: accepted +date: 2024-01-01 +--- +# 1. Record architecture decisions + +## Context and Problem Statement + +We need to record the architectural decisions made on this project. +When new team members join, they need to understand the reasoning behind past decisions. + +## Decision Outcome + +We will use Architecture Decision Records (ADRs), as described by Michael Nygard +in [Documenting Architecture Decisions](https://cognitect.com/blog/2011/11/15/documenting-architecture-decisions). + +ADRs are stored in `docs/decisions/` and follow the template in `adr_template.md`. + +### Consequences + +- Decisions are documented and easily discoverable. +- New team members can read the decision log to understand the project's history. +- Each ADR is small and focused on a single decision. diff --git a/{{ cookiecutter.__project_name_kebab_case }}/docs/decisions/adr_template.md b/{{ cookiecutter.__project_name_kebab_case }}/docs/decisions/adr_template.md new file mode 100644 index 00000000..1cfb4c68 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/docs/decisions/adr_template.md @@ -0,0 +1,79 @@ +--- +# These are optional elements. 
Feel free to remove any of them. +status: "{proposed | rejected | accepted | deprecated | … | superseded by [ADR-0005](0005-example.md)}" +date: {YYYY-MM-DD when the decision was last updated} +deciders: {list everyone involved in the decision} +consulted: {list everyone whose opinions are sought (typically subject-matter experts); and with whom there is a two-way communication} +informed: {list everyone who is kept up-to-date on progress; and with whom there is a one-way communication} +--- +# {short title of solved problem and solution} + +## Context and Problem Statement + +{Describe the context and problem statement, e.g., in free form using two to three sentences or in the form of an illustrative story. + You may want to articulate the problem in form of a question and add links to collaboration boards or issue management systems.} + + +## Decision Drivers + +* {decision driver 1, e.g., a force, facing concern, …} +* {decision driver 2, e.g., a force, facing concern, …} +* … + +## Considered Options + +* {title of option 1} +* {title of option 2} +* {title of option 3} +* … + +## Decision Outcome + +Chosen option: "{title of option 1}", because +{justification. e.g., only option, which meets k.o. criterion decision driver | which resolves force {force} | … | comes out best (see below)}. + + +### Consequences + +* Good, because {positive consequence, e.g., improvement of one or more desired qualities, …} +* Bad, because {negative consequence, e.g., compromising one or more desired qualities, …} +* … + + +### Confirmation + +{Describe how the implementation of/compliance with the ADR is confirmed. E.g., by a review or an ArchUnit test. 
+ Although we classify this element as optional, it is included in most ADRs.} + + +## Pros and Cons of the Options + +### {title of option 1} + + +{example | description | pointer to more information | …} + +* Good, because {argument a} +* Good, because {argument b} + +* Neutral, because {argument c} +* Bad, because {argument d} +* … + +### {title of other option} + +{example | description | pointer to more information | …} + +* Good, because {argument a} +* Good, because {argument b} +* Neutral, because {argument c} +* Bad, because {argument d} +* … + + +## More Information + +{You might want to provide additional evidence/confidence for the decision outcome here and/or + document the team agreement on the decision and/or + define when/how this decision should be realized and if/when it should be re-visited. +Links to other decisions and resources might appear here as well.} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/docs/index.md b/{{ cookiecutter.__project_name_kebab_case }}/docs/index.md new file mode 100644 index 00000000..907ffd34 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/docs/index.md @@ -0,0 +1,11 @@ +# {{ cookiecutter.project_name }} + +{{ cookiecutter.project_description }} + +## Getting started + +See the [README](https://github.com/{{ cookiecutter.github_org }}/{{ cookiecutter.__project_name_kebab_case }}#readme) for installation and usage instructions. + +## Architecture decisions + +Architecture Decision Records (ADRs) are documented in [docs/decisions/](decisions/). 
diff --git a/{{ cookiecutter.__project_name_kebab_case }}/mkdocs.yml b/{{ cookiecutter.__project_name_kebab_case }}/mkdocs.yml new file mode 100644 index 00000000..c74504fa --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/mkdocs.yml @@ -0,0 +1,12 @@ +site_name: {{ cookiecutter.project_name }} +site_description: {{ cookiecutter.project_description }} +site_url: {{ cookiecutter.project_url }} + +theme: + name: material + +nav: + - Home: index.md + +plugins: + - search diff --git a/{{ cookiecutter.__project_name_kebab_case }}/pyproject.toml b/{{ cookiecutter.__project_name_kebab_case }}/pyproject.toml index 56dbbc96..3d035fd8 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/pyproject.toml +++ b/{{ cookiecutter.__project_name_kebab_case }}/pyproject.toml @@ -9,6 +9,7 @@ description = "{{ cookiecutter.project_description }}" authors = ["{{ cookiecutter.author_name }} <{{ cookiecutter.author_email }}>"] readme = "README.md" repository = "{{ cookiecutter.project_url }}" +license = "{{ cookiecutter.license }}" {%- if cookiecutter.with_conventional_commits|int %} [tool.commitizen] # https://commitizen-tools.github.io/commitizen/config/ @@ -25,57 +26,56 @@ version_provider = "poetry" [tool.poetry.dependencies] # https://python-poetry.org/docs/dependency-specification/ {%- if cookiecutter.with_fastapi_api|int %} -coloredlogs = ">=15.0.1" -fastapi = { extras = ["all"], version = ">=0.110.1" } -gunicorn = ">=21.2.0" +fastapi = { extras = ["all"], version = ">=0.115.0" } +loguru = ">=0.7.3" +gunicorn = ">=23.0.0" {%- endif %} {%- if cookiecutter.project_type == "app" %} -poethepoet = ">=0.25.0" +poethepoet = ">=0.32.0" {%- endif %} +pydantic-settings = ">=2.7.0" python = ">={{ cookiecutter.python_version }},<4.0" +{%- if cookiecutter.with_sentry|int %} +sentry-sdk = { extras = ["fastapi"], version = ">=2.19.0" } +{%- endif %} {%- if cookiecutter.with_typer_cli|int %} -typer = { extras = ["all"], version = ">=0.12.0" } +typer = { extras = ["all"], version 
= ">=0.15.0" } {%- endif %} {%- if cookiecutter.with_fastapi_api|int %} -uvicorn = { extras = ["standard"], version = ">=0.29.0" } +uvicorn = { extras = ["standard"], version = ">=0.34.0" } {%- endif %} [tool.poetry.group.test.dependencies] # https://python-poetry.org/docs/master/managing-dependencies/ {%- if cookiecutter.with_conventional_commits|int %} -commitizen = ">=3.21.3" +commitizen = ">=4.1.0" {%- endif %} -coverage = { extras = ["toml"], version = ">=7.4.4" } -mypy = ">=1.9.0" -{%- if cookiecutter.project_type == "package" %} -poethepoet = ">=0.25.0" +coverage = { extras = ["toml"], version = ">=7.6.0" } +detect-secrets = ">=1.5.0" +mypy = ">=1.14.0" +pre-commit = ">=4.0.0" +pytest = ">=8.3.0" +{%- if cookiecutter.with_pytest_bdd|int %} +pytest-bdd = ">=8.1.0" {%- endif %} -pre-commit = ">=3.7.0" -pytest = ">=8.1.1" pytest-mock = ">=3.14.0" pytest-xdist = ">=3.5.0" -ruff = ">=0.3.5" +{%- if cookiecutter.with_fastapi_api|int %} +pytest-asyncio = ">=0.25.0" +{%- endif %} +httpx = ">=0.28.0" +codespell = ">=2.4.0" +ruff = ">=0.11.0" {%- if cookiecutter.development_environment == "strict" %} -safety = ">=3.1.0" +safety = ">=3.2.0" shellcheck-py = ">=0.10.0.1" -typeguard = ">=4.2.1" +typeguard = ">=4.4.0" {%- endif %} [tool.poetry.group.dev.dependencies] # https://python-poetry.org/docs/master/managing-dependencies/ cruft = ">=2.15.0" ipykernel = ">=6.29.4" ipywidgets = ">=8.1.2" -pdoc = ">=14.4.0" -{%- if cookiecutter.private_package_repository_name %} - -[[tool.poetry.source]] -name = "pypi" -priority = "default" - -[[tool.poetry.source]] # https://python-poetry.org/docs/repositories/#using-a-private-repository -name = "{{ cookiecutter.private_package_repository_name|slugify }}" -url = "{{ cookiecutter.private_package_repository_url }}" -priority = "explicit" -{%- endif %} +mkdocs-material = ">=9.5.0" [tool.coverage.report] # https://coverage.readthedocs.io/en/latest/config.html#report {%- if cookiecutter.development_environment == "strict" %} @@ -91,14 
+91,15 @@ command_line = "--module pytest" data_file = "reports/.coverage" source = ["src"] -[tool.coverage.xml] # https://coverage.readthedocs.io/en/latest/config.html#xml +[tool.coverage.html] # https://coverage.readthedocs.io/en/latest/cmd.html#html-reporting-coverage-html +directory = "reports/htmlcov" + +[tool.coverage.xml] # https://coverage.readthedocs.io/en/latest/cmd.html#cmd-xml output = "reports/coverage.xml" [tool.mypy] # https://mypy.readthedocs.io/en/latest/config_file.html junit_xml = "reports/mypy.xml" -{%- if cookiecutter.with_fastapi_api|int %} plugins = "pydantic.mypy" -{%- endif %} {%- if cookiecutter.development_environment == "strict" %} strict = true disallow_subclassing_any = false @@ -110,7 +111,16 @@ show_column_numbers = true show_error_codes = true show_error_context = true warn_unreachable = true -{%- if cookiecutter.development_environment == "strict" and cookiecutter.with_fastapi_api|int %} +files = ["src/**/*.py", "tests/**/*.py"] + +[[tool.mypy.overrides]] +module = "tests.*" +disable_error_code = [ + "method-assign", # Necessary when mocking + "attr-defined", # Mocked attributes are dynamically defined +] + +{%- if cookiecutter.development_environment == "strict" %} [tool.pydantic-mypy] # https://pydantic-docs.helpmanual.io/mypy_plugin/#configuring-the-plugin init_forbid_extra = true @@ -120,42 +130,83 @@ warn_untyped_fields = true {%- endif %} [tool.pytest.ini_options] # https://docs.pytest.org/en/latest/reference/reference.html#ini-options-ref -addopts = "--color=yes --doctest-modules --exitfirst --failed-first{% if cookiecutter.development_environment == 'strict' %} --strict-config --strict-markers --typeguard-packages={{ cookiecutter.__project_name_snake_case }}{% endif %} --verbosity=2 --junitxml=reports/pytest.xml" +# Development mode: {{ cookiecutter.development_environment }} {%- if cookiecutter.development_environment == "strict" %} -filterwarnings = ["error", "ignore::DeprecationWarning"] +addopts = "--color=yes 
--doctest-modules --exitfirst --failed-first --strict-config --strict-markers --typeguard-packages={{ cookiecutter.__project_name_snake_case }} --verbosity=2 --junitxml=reports/pytest.xml" +filterwarnings = ["error", "ignore::DeprecationWarning", "ignore::pytest.PytestUnraisableExceptionWarning"] +{%- else %} +addopts = "--color=yes --doctest-modules --exitfirst --failed-first --verbosity=2 --junitxml=reports/pytest.xml" {%- endif %} -testpaths = ["src", "tests"] +{%- if cookiecutter.with_fastapi_api|int %} +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" +{%- endif %} +testpaths = ["tests"] xfail_strict = true +{%- if cookiecutter.with_pytest_bdd|int %} +bdd_features_base_dir = "tests/features/" +{%- endif %} -[tool.ruff] # https://github.com/charliermarsh/ruff +[tool.ruff] # https://docs.astral.sh/ruff/ fix = true -line-length = 100 +line-length = 99 +preview = true src = ["src", "tests"] target-version = "py{{ cookiecutter.python_version.split('.')[:2]|join }}" [tool.ruff.lint] -ignore-init-module-imports = true {%- if cookiecutter.development_environment == "strict" %} -select = ["A", "ASYNC", "B", "BLE", "C4", "C90", "D", "DTZ", "E", "EM", "ERA", "F", "FBT", "FLY", "FURB", "G", "I", "ICN", "INP", "INT", "ISC", "LOG", "N", "NPY", "PERF", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "Q", "RET", "RSE", "RUF", "S", "SIM", "SLF", "SLOT", "T10", "T20", "TCH", "TID", "TRY", "UP", "W", "YTT"] -ignore = ["D203", "D213", "E501", "RET504", "S101", "S307"] +select = ["ALL"] +ignore = [ + "ANN002", # Missing type annotation for args. Complicates the code too much with the current annotation system. + "ANN003", # Missing type annotation for kwargs. Complicates the code too much with the current annotation system. + "COM812", # Missing trailing comma in a single-line list. Already handled by ruff formatter + "CPY001", # Copyright notice missing. Not always needed. + "E501", # Line too long. 
Already handled by ruff formatter + "E731", # Do not assign a lambda expression. Do not agree with this rule, it improves readability as it is self-documenting the lambda function. + "RET504", # Unnecessary assign before return. This is not bad, it helps debugging. + "S311", # Standard pseudo-random generators are not suitable for security/cryptographic purposes. Our use case is not security/cryptographic. + "TRY003", # Raise exception with vanilla arguments. Creating exception classes should be done only when really needed. + "W505", # Doc line too long. Editor autowraps doc, and it's too much work to fix it. + "ISC001", # single-line-implicit-string-concatenation, `z = "The quick " "brown fox."` becomes `z = "The quick brown fox."` This rule may cause conflicts when used with the formatter. + "DOC201", # `return` is not documented in docstring. Too verbose for most projects. + "DOC501", # Raised exception missing from docstring. Too verbose for most projects. +] unfixable = ["ERA001", "F401", "F841", "T201", "T203"] {%- else %} -select = ["A", "ASYNC", "B", "C4", "C90", "D", "DTZ", "E", "F", "FLY", "FURB", "I", "ISC", "LOG", "N", "NPY", "PERF", "PGH", "PIE", "PL", "PT", "Q", "RET", "RUF", "RSE", "SIM", "TID", "UP", "W", "YTT"] -ignore = ["D203", "D213", "E501", "PGH002", "PGH003", "RET504", "S101", "S307"] +select = ["A", "ASYNC", "B", "C4", "C90", "D", "DTZ", "E", "F", "FLY", "I", "ISC", "LOG", "N", "NPY", "PERF", "PGH", "PIE", "PL", "PT", "Q", "RET", "RUF", "RSE", "SIM", "TID", "UP", "W", "YTT"] +ignore = ["D203", "D213", "E501", "PGH003", "RET504"] unfixable = ["F401", "F841"] {%- endif %} +[tool.ruff.lint.per-file-ignores] +"tests/*" = ["S101", "INP001", "D100", "D101", "SLF"] + [tool.ruff.lint.flake8-tidy-imports] ban-relative-imports = "all" -{%- if cookiecutter.development_environment == "strict" %} + +[tool.ruff.lint.flake8-unused-arguments] +ignore-variadic-names = true # Allow unused *args and **kwargs in function signature + +[tool.ruff.lint.isort] 
+lines-after-imports = 2 [tool.ruff.lint.pycodestyle] -max-doc-length = 100 -{%- endif %} +max-doc-length = 99 [tool.ruff.lint.pydocstyle] convention = "{{ cookiecutter.__docstring_style|lower }}" +[tool.ruff.lint.pylint] # https://docs.astral.sh/ruff/settings/#lint_pylint +max-args = 6 +max-public-methods = 30 + +[tool.codespell] # https://github.com/codespell-project/codespell +builtin = "en-GB_to_en-US,clear,code,rare" +check-filenames = true +ignore-words-list = "jupyter" +skip = "./*_cache/,./.venv,./reports" + [tool.poe.tasks] # https://github.com/nat-n/poethepoet {%- if cookiecutter.with_fastapi_api|int %} @@ -201,35 +252,24 @@ convention = "{{ cookiecutter.__docstring_style|lower }}" type = "boolean" name = "dev" options = ["--dev"] -{%- elif cookiecutter.project_type == "app" %} - - [tool.poe.tasks.app] - help = "Serve the app" - - [[tool.poe.tasks.app.sequence]] - cmd = "echo 'Serving app...'" {%- endif %} [tool.poe.tasks.docs] - help = "Generate this {{ cookiecutter.project_type }}'s docs" - cmd = """ - pdoc - --docformat $docformat - --output-directory $outputdirectory - {{ cookiecutter.__project_name_snake_case }} + help = "Build or serve the documentation" + shell = """ + if [ $serve ] + then { + mkdocs serve + } else { + mkdocs build + } fi """ [[tool.poe.tasks.docs.args]] - help = "The docstring style (default: {{ cookiecutter.__docstring_style|lower }})" - name = "docformat" - options = ["--docformat"] - default = "{{ cookiecutter.__docstring_style|lower }}" - - [[tool.poe.tasks.docs.args]] - help = "The output directory (default: docs)" - name = "outputdirectory" - options = ["--output-directory"] - default = "docs" + help = "Serve the documentation locally with live reload" + type = "boolean" + name = "serve" + options = ["--serve"] [tool.poe.tasks.lint] help = "Lint this {{ cookiecutter.project_type }}" @@ -255,5 +295,12 @@ convention = "{{ cookiecutter.__docstring_style|lower }}" [[tool.poe.tasks.test.sequence]] cmd = "coverage report" + 
[[tool.poe.tasks.test.sequence]] + cmd = "coverage html" + [[tool.poe.tasks.test.sequence]] cmd = "coverage xml" + + [tool.poe.tasks.update] + help = "Update the project from the cookiecutter template" + cmd = "cruft update --cookiecutter-input" diff --git a/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/api.py b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/api.py index 02bd6a94..99cc4174 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/api.py +++ b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/api.py @@ -1,36 +1,136 @@ """{{ cookiecutter.project_name }} REST API.""" -import asyncio -import logging +import sys +import time from collections.abc import AsyncGenerator from contextlib import asynccontextmanager +from typing import Annotated -import coloredlogs -from fastapi import FastAPI +from fastapi import Depends, FastAPI, HTTPException, Request, Response +from fastapi.responses import JSONResponse +from loguru import logger +from starlette.exceptions import HTTPException as StarletteHTTPException +from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint + +from {{ cookiecutter.__project_name_snake_case }}.models import HealthResponse, Item, ItemCreate +from {{ cookiecutter.__project_name_snake_case }}.services import ItemService +from {{ cookiecutter.__project_name_snake_case }}.settings import settings @asynccontextmanager -async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: +async def lifespan(app: FastAPI) -> AsyncGenerator[None]: # noqa: ARG001, RUF029 """Handle FastAPI startup and shutdown events.""" - # Startup events: - # - Remove all handlers associated with the root logger object. - for handler in logging.root.handlers: - logging.root.removeHandler(handler) - # - Add coloredlogs' colored StreamHandler to the root logger. 
- coloredlogs.install() + logger.remove() + logger.add(sys.stderr, level=settings.log_level) +{%- if cookiecutter.with_sentry|int %} + + if settings.sentry_dsn: + import sentry_sdk + sentry_sdk.init( + dsn=settings.sentry_dsn, + environment=settings.sentry_environment, + traces_sample_rate=settings.sentry_traces_sample_rate, + ) + logger.info("Sentry initialized for environment '{}'", settings.sentry_environment) +{%- endif %} + yield - # Shutdown events. -app = FastAPI(lifespan=lifespan) +app = FastAPI(title=settings.app_name, lifespan=lifespan) + + +# --- Dependency injection -------------------------------------------------------- + + +def get_item_service() -> ItemService: + """Provide the shared ItemService instance.""" + return _item_service + + +_item_service = ItemService() + +ItemServiceDep = Annotated[ItemService, Depends(get_item_service)] + + +# --- Middleware ------------------------------------------------------------------ + + +class RequestLoggingMiddleware(BaseHTTPMiddleware): + """Log method, path, status code, and duration for every request.""" + + async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: # noqa: PLR6301 + """Process request and log timing information.""" + start = time.perf_counter() + response: Response = await call_next(request) + duration_ms = (time.perf_counter() - start) * 1000 + logger.info( + "{method} {path} {status} {duration:.1f}ms", + method=request.method, + path=request.url.path, + status=response.status_code, + duration=duration_ms, + ) + return response + + +app.add_middleware(RequestLoggingMiddleware) + + +# --- Exception handlers ---------------------------------------------------------- + + +@app.exception_handler(StarletteHTTPException) +def http_exception_handler(request: Request, exc: StarletteHTTPException) -> JSONResponse: # noqa: ARG001 + """Return a structured JSON response for HTTP exceptions.""" + logger.warning("HTTP {status}: {detail}", status=exc.status_code, 
detail=exc.detail) + return JSONResponse( + status_code=exc.status_code, + content={"detail": exc.detail}, + ) + + +@app.exception_handler(Exception) +def unhandled_exception_handler(request: Request, exc: Exception) -> JSONResponse: # noqa: ARG001 + """Return a 500 JSON response for unhandled exceptions.""" + logger.exception("Unhandled exception: {exc}", exc=exc) + return JSONResponse( + status_code=500, + content={"detail": "Internal server error"}, + ) + + +# --- Routes ---------------------------------------------------------------------- + + +@app.get("/health") +async def health() -> HealthResponse: + """Health check endpoint.""" + return HealthResponse() + + +@app.post("/items", status_code=201) +async def create_item(data: ItemCreate, service: ItemServiceDep) -> Item: + """Create a new item.""" + return service.create(data) + + +@app.get("/items") +async def list_items(service: ItemServiceDep) -> list[Item]: + """List all items.""" + return service.list_all() + +@app.get("/items/{item_id}") +async def get_item(item_id: int, service: ItemServiceDep) -> Item: + """Get a single item by id.""" + item = service.get(item_id) + if item is None: + raise HTTPException(status_code=404, detail=f"Item {item_id} not found") + return item -@app.get("/compute") -async def compute(n: int = 42) -> int: - """Compute the result of a CPU-bound function.""" - def fibonacci(n: int) -> int: - return n if n <= 1 else fibonacci(n - 1) + fibonacci(n - 2) +if __name__ == "__main__": + import uvicorn - result = await asyncio.to_thread(fibonacci, n) - return result + uvicorn.run(app, host=settings.api_host, port=settings.api_port, log_level="info") diff --git a/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/cli.py b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/cli.py index 142b1860..6130ea59 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/src/{{ 
cookiecutter.__project_name_snake_case }}/cli.py +++ b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/cli.py @@ -1,12 +1,72 @@ """{{ cookiecutter.project_name }} CLI.""" +{% if cookiecutter.with_fastapi_api|int -%} +import urllib.request +{% endif -%} +from typing import Annotated + import typer -from rich import print +from rich import print # noqa: A004 +from rich.table import Table + +from {{ cookiecutter.__project_name_snake_case }}.settings import Settings, settings + + +app = typer.Typer(help="{{ cookiecutter.project_name }} command-line interface.") + +_verbose: bool = False + -app = typer.Typer() +@app.callback() +def main( + verbose: Annotated[ # noqa: FBT002 + bool, typer.Option("--verbose", "-v", help="Enable verbose output.") + ] = False, +) -> None: + """{{ cookiecutter.project_name }} CLI.""" + global _verbose # noqa: PLW0603 + _verbose = verbose + + +@app.command() +def info() -> None: + """Display project metadata.""" + table = Table(title="{{ cookiecutter.project_name }}") + table.add_column("Key", style="cyan") + table.add_column("Value", style="green") + table.add_row("App name", settings.app_name) + table.add_row("Log level", settings.log_level) + table.add_row("Debug", str(settings.debug)) + if _verbose: + table.add_row("Settings class", type(settings).__name__) + print(table) + + +@app.command() +def config() -> None: + """Print current settings from environment and .env file.""" + table = Table(title="Settings") + table.add_column("Key", style="cyan") + table.add_column("Value", style="green") + for field_name in Settings.model_fields: + value = getattr(settings, field_name) + if _verbose or field_name != "sentry_dsn": + table.add_row(field_name, str(value)) + print(table) +{%- if cookiecutter.with_fastapi_api|int %} @app.command() -def fire(name: str = "Chell") -> None: - """Fire portal gun.""" - print(f"[bold red]Alert![/bold red] {name} fired [green]portal gun[/green] :boom:") +def health() -> 
None: + """Check the API health endpoint.""" + url = f"http://{settings.api_host}:{settings.api_port}/health" + if _verbose: + print(f"[dim]Checking {url}...[/dim]") + try: + with urllib.request.urlopen(url, timeout=5) as response: + data = response.read().decode() + print(f"[bold green]API is healthy:[/bold green] {data}") + except Exception as exc: + print(f"[bold red]API is unreachable:[/bold red] {exc}") + raise typer.Exit(code=1) from exc +{%- endif %} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/models.py b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/models.py new file mode 100644 index 00000000..b7f3dc06 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/models.py @@ -0,0 +1,23 @@ +"""{{ cookiecutter.project_name }} data models.""" + +from pydantic import BaseModel, Field + + +class HealthResponse(BaseModel): + """Health check response.""" + + status: str = "ok" + + +class ItemCreate(BaseModel): + """Schema for creating a new item.""" + + name: str = Field(min_length=1, max_length=100, description="Item name") + description: str = Field(default="", max_length=500, description="Item description") + price: float = Field(gt=0, description="Item price (must be positive)") + + +class Item(ItemCreate): + """Schema for a stored item.""" + + id: int = Field(description="Unique item identifier") diff --git a/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/services.py b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/services.py new file mode 100644 index 00000000..c7e78192 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/services.py @@ -0,0 +1,27 @@ +"""{{ cookiecutter.project_name }} service layer.""" + +from {{ 
cookiecutter.__project_name_snake_case }}.models import Item, ItemCreate + + +class ItemService: + """In-memory item service demonstrating the service layer pattern.""" + + def __init__(self) -> None: + """Initialize the service with an empty item store.""" + self._items: dict[int, Item] = {} + self._next_id: int = 1 + + def create(self, data: ItemCreate) -> Item: + """Create a new item and return it with an assigned id.""" + item = Item(id=self._next_id, **data.model_dump()) + self._items[self._next_id] = item + self._next_id += 1 + return item + + def get(self, item_id: int) -> Item | None: + """Return an item by id, or None if not found.""" + return self._items.get(item_id) + + def list_all(self) -> list[Item]: + """Return all items.""" + return list(self._items.values()) diff --git a/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/settings.py b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/settings.py new file mode 100644 index 00000000..e676d075 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/src/{{ cookiecutter.__project_name_snake_case }}/settings.py @@ -0,0 +1,25 @@ +"""{{ cookiecutter.project_name }} settings.""" + +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Settings(BaseSettings): + """Application settings loaded from environment variables and .env file.""" + + model_config = SettingsConfigDict(env_file=".env", extra="ignore") + + app_name: str = "{{ cookiecutter.project_name }}" + log_level: str = "INFO" + debug: bool = False +{%- if cookiecutter.with_fastapi_api|int %} + api_host: str = "0.0.0.0" # noqa: S104 + api_port: int = 8000 +{%- endif %} +{%- if cookiecutter.with_sentry|int %} + sentry_dsn: str = "" + sentry_environment: str = "development" + sentry_traces_sample_rate: float = 0.1 +{%- endif %} + + +settings = Settings() diff --git a/{{ cookiecutter.__project_name_kebab_case }}/tests/conftest.py b/{{ 
cookiecutter.__project_name_kebab_case }}/tests/conftest.py new file mode 100644 index 00000000..507b9fd5 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/tests/conftest.py @@ -0,0 +1,14 @@ +"""Shared fixtures for the test suite.""" +{%- if cookiecutter.with_fastapi_api|int %} + +import pytest +from fastapi.testclient import TestClient + +from {{ cookiecutter.__project_name_snake_case }}.api import app + + +@pytest.fixture +def api_client() -> TestClient: + """Provide a FastAPI test client.""" + return TestClient(app) +{%- endif %} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/tests/features/api.feature b/{{ cookiecutter.__project_name_kebab_case }}/tests/features/api.feature new file mode 100644 index 00000000..1be6f76c --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/tests/features/api.feature @@ -0,0 +1,19 @@ +Feature: REST API + The API should handle requests correctly. + + Scenario: Health endpoint returns ok + Given the API test client + When I request GET /health + Then the response status code should be 200 + And the response JSON should contain "status" = "ok" + + Scenario: Create an item + Given the API test client + When I create an item with name "Widget" and price 9.99 + Then the response status code should be 201 + And the response JSON should contain "name" = "Widget" + + Scenario: Get a non-existent item returns 404 + Given the API test client + When I request GET /items/999 + Then the response status code should be 404 diff --git a/{{ cookiecutter.__project_name_kebab_case }}/tests/features/cli.feature b/{{ cookiecutter.__project_name_kebab_case }}/tests/features/cli.feature new file mode 100644 index 00000000..5163e9dd --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/tests/features/cli.feature @@ -0,0 +1,26 @@ +Feature: CLI + The CLI should execute commands correctly. 
+ + Scenario: Info command displays metadata + Given a CLI runner + When I run the info command + Then the exit code should be 0 + And the output should contain "{{ cookiecutter.project_name }}" + + Scenario: Config command displays settings + Given a CLI runner + When I run the config command + Then the exit code should be 0 + And the output should contain "app_name" +{%- if cookiecutter.with_fastapi_api|int %} + + Scenario: Health command reports API status + Given a CLI runner + When I run the health command + Then the output should contain "API" +{%- endif %} + + Scenario: Verbose flag is accepted + Given a CLI runner + When I run the info command with verbose + Then the exit code should be 0 diff --git a/{{ cookiecutter.__project_name_kebab_case }}/tests/features/import.feature b/{{ cookiecutter.__project_name_kebab_case }}/tests/features/import.feature new file mode 100644 index 00000000..75392a31 --- /dev/null +++ b/{{ cookiecutter.__project_name_kebab_case }}/tests/features/import.feature @@ -0,0 +1,6 @@ +Feature: Package import + The package should be importable and correctly configured. 
+ + Scenario: Import the package + Given the package is installed + Then the package name should be a string diff --git a/{{ cookiecutter.__project_name_kebab_case }}/tests/test_api.py b/{{ cookiecutter.__project_name_kebab_case }}/tests/test_api.py index 34d0b5a9..4382cdfa 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/tests/test_api.py +++ b/{{ cookiecutter.__project_name_kebab_case }}/tests/test_api.py @@ -1,14 +1,79 @@ -"""Test {{ cookiecutter.project_name }} REST API.""" +{%- if cookiecutter.with_pytest_bdd|int -%} +{%- raw %}"""BDD step definitions for API tests."""{% endraw %} -import httpx from fastapi.testclient import TestClient +from httpx import Response +from pytest_bdd import given, parsers, scenarios, then, when from {{ cookiecutter.__project_name_snake_case }}.api import app + +scenarios("api.feature") + + +@given("the API test client", target_fixture="client") +def api_client() -> TestClient: + """Provide a test client for the API.""" + return TestClient(app) + + +@when( + parsers.cfparse("I request GET {path}"), + target_fixture="response", +) +def request_get(client: TestClient, path: str) -> Response: + """Send a GET request to the given path.""" + return client.get(path) + + +@when( + parsers.cfparse('I create an item with name "{name}" and price {price:f}'), + target_fixture="response", +) +def create_item(client: TestClient, name: str, price: float) -> Response: + """Send a POST request to create an item.""" + return client.post("/items", json={"name": name, "price": price}) + + +@then(parsers.cfparse("the response status code should be {code:d}")) +def check_status_code(response: Response, code: int) -> None: + """Verify the response status code.""" + assert response.status_code == code + + +@then(parsers.cfparse('the response JSON should contain "{key}" = "{value}"')) +def check_json_field(response: Response, key: str, value: str) -> None: + """Verify a field in the response JSON.""" + assert response.json()[key] == value +{%- 
else -%} +"""Tests for the REST API.""" + +from http import HTTPStatus + +from fastapi.testclient import TestClient + +from {{ cookiecutter.__project_name_snake_case }}.api import app + + client = TestClient(app) -def test_read_root() -> None: - """Test that reading the root is successful.""" - response = client.get("/compute", params={"n": 7}) - assert httpx.codes.is_success(response.status_code) +def test_health_endpoint() -> None: + """Health endpoint returns ok status.""" + response = client.get("/health") + assert response.status_code == HTTPStatus.OK + assert response.json()["status"] == "ok" + + +def test_create_item() -> None: + """Create an item via POST.""" + response = client.post("/items", json={"name": "Widget", "price": 9.99}) + assert response.status_code == HTTPStatus.CREATED + assert response.json()["name"] == "Widget" + + +def test_get_nonexistent_item() -> None: + """GET a non-existent item returns 404.""" + response = client.get("/items/999") + assert response.status_code == HTTPStatus.NOT_FOUND +{%- endif %} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/tests/test_cli.py b/{{ cookiecutter.__project_name_kebab_case }}/tests/test_cli.py index 7ff89f4f..ea6e0e70 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/tests/test_cli.py +++ b/{{ cookiecutter.__project_name_kebab_case }}/tests/test_cli.py @@ -1,15 +1,93 @@ -"""Test {{ cookiecutter.project_name }} CLI.""" +{%- if cookiecutter.with_pytest_bdd|int -%} +"""BDD step definitions for CLI tests.""" + +from pytest_bdd import given, parsers, scenarios, then, when +from typer.testing import CliRunner, Result + +from {{ cookiecutter.__project_name_snake_case }}.cli import app + + +scenarios("cli.feature") + + +@given("a CLI runner", target_fixture="runner") +def cli_runner() -> CliRunner: + """Provide a CLI test runner.""" + return CliRunner() + + +@when("I run the info command", target_fixture="result") +def run_info(runner: CliRunner) -> Result: + """Execute the info command.""" 
+ return runner.invoke(app, ["info"]) + + +@when("I run the config command", target_fixture="result") +def run_config(runner: CliRunner) -> Result: + """Execute the config command.""" + return runner.invoke(app, ["config"]) +{%- if cookiecutter.with_fastapi_api|int %} + + +@when("I run the health command", target_fixture="result") +def run_health(runner: CliRunner) -> Result: + """Execute the health command.""" + return runner.invoke(app, ["health"]) +{%- endif %} + + +@when("I run the info command with verbose", target_fixture="result") +def run_info_verbose(runner: CliRunner) -> Result: + """Execute the info command with verbose flag.""" + return runner.invoke(app, ["--verbose", "info"]) + + +@then(parsers.cfparse("the exit code should be {code:d}")) +def check_exit_code(result: Result, code: int) -> None: + """Verify the CLI exit code.""" + assert result.exit_code == code + + +@then(parsers.cfparse('the output should contain "{text}"')) +def check_output_contains(result: Result, text: str) -> None: + """Verify the CLI output contains expected text.""" + assert text in result.stdout +{%- else -%} +"""Tests for the CLI.""" from typer.testing import CliRunner from {{ cookiecutter.__project_name_snake_case }}.cli import app + runner = CliRunner() -def test_fire() -> None: - """Test that the fire command works as expected.""" - name = "GLaDOS" - result = runner.invoke(app, ["--name", name]) +def test_info_command() -> None: + """Info command displays project metadata.""" + result = runner.invoke(app, ["info"]) + assert result.exit_code == 0 + assert "{{ cookiecutter.project_name }}" in result.stdout + + +def test_config_command() -> None: + """Config command displays current settings.""" + result = runner.invoke(app, ["config"]) + assert result.exit_code == 0 + assert "app_name" in result.stdout +{%- if cookiecutter.with_fastapi_api|int %} + + +def test_health_command() -> None: + """Health command runs without error (no server to check).""" + result = 
runner.invoke(app, ["health"]) + # Exit code 1 is expected when no server is running. + assert result.exit_code in {0, 1} +{%- endif %} + + +def test_verbose_flag() -> None: + """Verbose flag is accepted.""" + result = runner.invoke(app, ["--verbose", "info"]) assert result.exit_code == 0 - assert name in result.stdout +{%- endif %} diff --git a/{{ cookiecutter.__project_name_kebab_case }}/tests/test_import.py b/{{ cookiecutter.__project_name_kebab_case }}/tests/test_import.py index 9cdcbcc0..b7ac90a1 100644 --- a/{{ cookiecutter.__project_name_kebab_case }}/tests/test_import.py +++ b/{{ cookiecutter.__project_name_kebab_case }}/tests/test_import.py @@ -1,8 +1,40 @@ -"""Test {{ cookiecutter.project_name }}.""" +{%- if cookiecutter.with_pytest_bdd|int -%} +"""BDD step definitions for package import tests.""" + +from types import ModuleType + +from pytest_bdd import given, scenarios, then + +import {{ cookiecutter.__project_name_snake_case }} + + +scenarios("import.feature") + + +@given("the package is installed", target_fixture="package") +def installed_package() -> ModuleType: + """Return the installed package module.""" + return {{ cookiecutter.__project_name_snake_case }} + + +@then("the package name should be a string") +def check_package_name(package: ModuleType) -> None: + """Verify the package has a valid name.""" + assert isinstance(package.__name__, str) +{%- else -%} +"""Tests for package import.""" import {{ cookiecutter.__project_name_snake_case }} +from {{ cookiecutter.__project_name_snake_case }}.settings import Settings, settings def test_import() -> None: - """Test that the {{ cookiecutter.project_type }} can be imported.""" + """Verify the package can be imported and has a valid name.""" assert isinstance({{ cookiecutter.__project_name_snake_case }}.__name__, str) + + +def test_settings_defaults() -> None: + """Verify settings can be instantiated with defaults.""" + assert isinstance(settings, Settings) + assert isinstance(settings.app_name, str) 
+{%- endif %}