diff --git a/.devcontainer/README.md b/.devcontainer/README.md deleted file mode 100644 index 36c1399..0000000 --- a/.devcontainer/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# Dev Container Configuration - -This directory contains the configuration files for setting up a development container. -These configurations are compatible with **GitHub Codespaces**, **Visual Studio Code**, -and **JetBrains IDEs**, and provide a pre-configured environment with all necessary -dependencies for development. - -## GitHub Codespaces - -To launch a dev container using GitHub Codespaces: - -1. Navigate to the repository's main page. -2. Click the **"Code"** button. -3. Select the **"Codespaces"** tab. -4. Click the **"+"** button to create a new codespace. - -The container will be initialized automatically using the configurations in this -directory. - -[GitHub Codespaces Documentation](https://docs.github.com/en/codespaces/developing-in-a-codespace/creating-a-codespace-for-a-repository) - -## Visual Studio Code - -To use the dev container in VS Code: - -1. Open the root folder of the repository in Visual Studio Code. -2. A prompt will appear asking if you want to reopen the folder in a dev container. -3. Confirm by selecting **"Reopen in Container"**. - -[VS Code Dev Containers Guide](https://code.visualstudio.com/docs/devcontainers/tutorial) - -## JetBrains IDEs - -To open the dev container in a JetBrains IDE (e.g., IntelliJ IDEA, PyCharm): - -1. Open the `.devcontainer/devcontainer.json` file in your IDE. -2. Click the Docker icon that appears in the UI. -3. Follow the prompts to create and open the dev container. 
- -[JetBrains Dev Container Integration Guide](https://www.jetbrains.com/help/idea/connect-to-devcontainer.html) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json deleted file mode 100644 index a2652e3..0000000 --- a/.devcontainer/devcontainer.json +++ /dev/null @@ -1,78 +0,0 @@ -{ - "image": "mcr.microsoft.com/vscode/devcontainers/python:3.11", - "postCreateCommand": "sh ./.devcontainer/setup.sh", - "customizations": { - "vscode": { - "settings": { - "todo-tree.regex.enableMultiLine": true, - "editor.rulers": [ - { - "column": 72, - "color": "#4a4f63" - }, - { - "column": 88, - "color": "#7a8ad1" - } - ], - "autoDocstring.docstringFormat": "google-notypes", - "autoDocstring.startOnNewLine": true, - "better-comments.tags": [ - { - "tag": "!!", - "color": "#F6FF33", - "strikethrough": false, - "backgroundColor": "transparent" - }, - { - "tag": "#!", - "color": "#3498DB", - "strikethrough": false, - "backgroundColor": "transparent" - }, - { - "tag": "TODO", - "color": "#FF8C00", - "strikethrough": false, - "backgroundColor": "transparent" - }, - { - "tag": "//", - "color": "#68FF33", - "strikethrough": false, - "backgroundColor": "transparent" - }, - { - "tag": "**", - "color": "#FF33EC", - "strikethrough": false, - "backgroundColor": "transparent" - } - ], - "[python]": { - "editor.formatOnType": true - }, - "python.defaultInterpreterPath": "/home/jovyan/envs/env/bin/python", - "[jsonc]": { - "editor.defaultFormatter": "vscode.json-language-features" - }, - "[json]": { - "editor.defaultFormatter": "esbenp.prettier-vscode" - }, - "prettier.printWidth": 88, - "prettier.proseWrap": "always" - }, - "extensions": [ - "njpwerner.autodocstring", - "aaron-bond.better-comments", - "esbenp.prettier-vscode", - "ms-python.python", - "ms-python.debugpy", - "Gruntfuggly.todo-tree" - ] - } - }, - "features": { - "ghcr.io/devcontainers/features/github-cli:1": {} - } -} diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh deleted file mode 100644 
index 658115b..0000000 --- a/.devcontainer/setup.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -set -e - -echo "Installing uv..." -curl -LsSf https://astral.sh/uv/install.sh | sh > /dev/null 2>&1 -echo "✅ uv installed." - -echo "Installing system dependencies..." -sudo apt-get update > /dev/null 2>&1 -sudo apt-get install -y build-essential > /dev/null 2>&1 -echo "✅ System dependencies installed." - -echo "Installing Python dependencies with Makefile..." -make install > /dev/null 2>&1 - -echo "✅ Devcontainer setup complete." \ No newline at end of file diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index fa54ff5..0f0cf75 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -27,8 +27,8 @@ You can contribute by: Start by opening an issue to describe your proposed change or the problem you encountered. This helps maintainers review and guide the work before coding begins. -> For minor changes, such as typo fixes, you may skip this step and submit a pull -> request directly. +> For minor changes, such as typo fixes, you may skip this step and submit a pull request +> directly. ### Step 2: Make Your Changes @@ -75,7 +75,7 @@ To set up locally: 3. Clone the repository and run: ```bash -make install +make setup ``` > If using `uv`, a compatible virtual environment will be created automatically. diff --git a/.github/actions/build-mkdocs/action.yml b/.github/actions/build-mkdocs/action.yml index fb2ddd0..847a095 100644 --- a/.github/actions/build-mkdocs/action.yml +++ b/.github/actions/build-mkdocs/action.yml @@ -18,15 +18,8 @@ runs: - name: Setup gh-pages Branch shell: bash run: | - # Store current branch - CURRENT_BRANCH=$(git branch --show-current) - - # Check if gh-pages branch exists - if git ls-remote --heads origin gh-pages | grep -q gh-pages; then - echo "gh-pages branch exists, fetching..." - git fetch origin gh-pages:gh-pages - else - echo "gh-pages branch doesn't exist, creating..." + if ! 
git ls-remote --heads origin gh-pages | grep -q gh-pages; then + CURRENT_BRANCH=$(git branch --show-current) git checkout --orphan gh-pages git reset --hard git commit --allow-empty -m "Initial gh-pages commit" @@ -34,14 +27,8 @@ runs: git checkout "$CURRENT_BRANCH" fi - - name: Build and Deploy Docs with Mike + - name: Build and Deploy Docs shell: bash run: | - echo "Deploying Docs Version: ${{ inputs.docs-version }}" uv run mike deploy --push --update-aliases "${{ inputs.docs-version }}" latest - - - name: Set Default Version to Docs - shell: bash - run: | - echo "Setting 'Latest' as Default Version" uv run mike set-default --push latest diff --git a/.github/actions/create-release/action.yml b/.github/actions/create-release/action.yml index 3d91904..4afd27d 100644 --- a/.github/actions/create-release/action.yml +++ b/.github/actions/create-release/action.yml @@ -26,52 +26,24 @@ runs: run: | if gh release view "${{ inputs.version }}" >/dev/null 2>&1; then echo "exists=true" >> $GITHUB_OUTPUT - echo "Release ${{ inputs.version }} already exists" else echo "exists=false" >> $GITHUB_OUTPUT - echo "Release ${{ inputs.version }} doesn't exist" fi env: GH_TOKEN: ${{ inputs.token }} - - name: Generate Release Notes + - name: Create Release if: steps.check_release.outputs.exists == 'false' shell: bash run: | - echo "Generating release notes..." LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "") + RANGE="${LAST_TAG:+$LAST_TAG..HEAD}" + COMMITS=$(git log ${RANGE:---max-count=10} --pretty=format:"- %s" --no-merges) - if [ -z "$LAST_TAG" ]; then - echo "No previous tags found, using last 10 commits..." 
- COMMITS=$(git log --pretty=format:"- %s" --no-merges -10) - else - echo "Previous tag found: $LAST_TAG" - COMMITS=$(git log "${LAST_TAG}..HEAD" --pretty=format:"- %s" --no-merges) - fi - - cat > release-notes.md << EOF - ## Changes - - $COMMITS - EOF - - echo "Release notes generated:" - cat release-notes.md - - - name: Create Release - if: steps.check_release.outputs.exists == 'false' - shell: bash - run: | - echo "Creating release ${{ inputs.version }}..." gh release create "${{ inputs.version }}" \ --title "${{ inputs.version }}" \ - --notes-file release-notes.md - echo "Release ${{ inputs.version }} created successfully" + --notes "## Changes + + $COMMITS" env: GH_TOKEN: ${{ inputs.token }} - - - name: Skip Release - if: steps.check_release.outputs.exists == 'true' - shell: bash - run: | - echo "Skipping release creation - ${{ inputs.version }} already exists" diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml index 2a553f6..ef503ec 100644 --- a/.github/actions/setup-python-env/action.yml +++ b/.github/actions/setup-python-env/action.yml @@ -25,39 +25,20 @@ runs: enable-cache: true - name: Install dependencies with uv - id: install-deps shell: bash run: | - # Check if we should install all extras - if [ -z "${{ inputs.uv-group }}" ] && [ -z "${{ inputs.uv-extra }}" ]; then - echo "Installing all extras (default when no group or extra specified)..." - uv sync --all-extras - elif [ "${{ inputs.uv-extra }}" = "--all-extras" ]; then - echo "Installing all extras (explicitly requested)..." - if [ -n "${{ inputs.uv-group }}" ]; then - echo "Note: Installing all extras overrides the specified group: ${{ inputs.uv-group }}" - fi - uv sync --all-extras + ARGS="" + if [ "${{ inputs.uv-extra }}" = "--all-extras" ] || [ -z "${{ inputs.uv-group }}${{ inputs.uv-extra }}" ]; then + ARGS="--all-extras" else - echo "Installing with group: ${{ inputs.uv-group }}, and extra: ${{ inputs.uv-extra }}..." 
- if [ -n "${{ inputs.uv-group }}" ] && [ -n "${{ inputs.uv-extra }}" ]; then - uv sync --group ${{ inputs.uv-group }} --extra ${{ inputs.uv-extra }} - elif [ -n "${{ inputs.uv-group }}" ]; then - uv sync --group ${{ inputs.uv-group }} - elif [ -n "${{ inputs.uv-extra }}" ]; then - uv sync --extra ${{ inputs.uv-extra }} - else - uv sync - fi + [ -n "${{ inputs.uv-group }}" ] && ARGS="$ARGS --group ${{ inputs.uv-group }}" + [ -n "${{ inputs.uv-extra }}" ] && ARGS="$ARGS --extra ${{ inputs.uv-extra }}" fi + uv sync $ARGS - name: Verify uv and environment - id: verify shell: bash run: | - echo "uv version:" uv --version - echo "Virtual environments:" uv venv list - echo "Python version:" uv run python --version diff --git a/.github/actions/test-code/action.yml b/.github/actions/test-code/action.yml new file mode 100644 index 0000000..7ebb725 --- /dev/null +++ b/.github/actions/test-code/action.yml @@ -0,0 +1,23 @@ +name: Test Code +description: Run Python tests with Pytest + +inputs: + src-project-folder: + description: "Directory where the project is located" + required: true + default: "src" + + src-tests-folder: + description: "Directory where the tests are located" + required: true + default: "tests" + +runs: + using: composite + steps: + - name: Run tests with Pytest + shell: bash + run: | + if [ -d "${{ inputs.src-tests-folder }}" ] && [ -n "$(find ${{ inputs.src-tests-folder }} -name 'test_*.py')" ]; then + uv run pytest ${{ inputs.src-tests-folder }} + fi diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index 706cfcc..72dff19 100644 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -8,10 +8,11 @@ on: env: SRC_PYTHON_VERSION: "3.11" + TEST_PATH: "tests" jobs: - setup: - name: Setup Code + setup-test: + name: Setup and Test runs-on: ubuntu-latest steps: @@ -25,11 +26,16 @@ jobs: uv-group: "pipeline" uv-extra: "--all-extras" + - name: Run Tests + uses: ./.github/actions/test-code + with: + src-tests-folder: 
${{ env.TEST_PATH }} + build-deploy-docs: if: github.ref == 'refs/heads/main' name: Build MkDocs Documentation runs-on: ubuntu-latest - needs: setup + needs: setup-test permissions: contents: write diff --git a/AGENTS.md b/AGENTS.md index 8c67fba..cc37180 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -9,9 +9,9 @@ many optional settings and Jinja2 template blocks. Before interacting with the template, make sure to: -1. **Review the README**: The `README.md` provides a high-level overview of the - template, its features, CI/CD workflow, and instructions to generate new projects. - Key features include: +1. **Review the README**: The `README.md` provides a high-level overview of the template, + its features, CI/CD workflow, and instructions to generate new projects. Key features + include: - Linting & type checking (Ruff & Mypy) - Security scanning (Bandit) @@ -36,7 +36,7 @@ Before interacting with the template, make sure to: - Create new projects inside the `workspaces/` directory. - Run the Makefile targets to validate functionality: - - `make install` → installs dependencies + - `make setup` → installs dependencies - `make pipeline` → runs linting, type checking, security analysis, complexity checks, and tests - `make all` → full workflow including documentation preview diff --git a/Makefile b/Makefile index 2c43880..37d7a58 100644 --- a/Makefile +++ b/Makefile @@ -1,12 +1,13 @@ .PHONY: setup \ clean-cache-temp-files \ - lint code-check \ + lint code-check test \ doc \ pipeline all .DEFAULT_GOAL := all -SRC_PROJECT_HOOKS ?= hooks +PATH_PROJECT_ROOT ?= . +TEST_PATH ?= tests setup: @echo "Installing dependencies..." @@ -23,23 +24,27 @@ clean-cache-temp-files: lint: @echo "Running lint checks..." 
- @uv run isort $(SRC_PROJECT_HOOKS)/ - @uv run ruff check --fix $(SRC_PROJECT_HOOKS)/ - @uv run ruff format $(SRC_PROJECT_HOOKS)/ + @uv run isort $(PATH_PROJECT_ROOT) + @uv run ruff check --fix $(PATH_PROJECT_ROOT) + @uv run ruff format $(PATH_PROJECT_ROOT) @echo "✅ Linting complete." code-check: @echo "Running static code checks..." - @uv run mypy $(SRC_PROJECT_HOOKS)/ - @uv run complexipy -f $(SRC_PROJECT_HOOKS)/ - @uv run bandit -r $(SRC_PROJECT_HOOKS)/ + @uv run mypy $(PATH_PROJECT_ROOT) + @uv run complexipy -f $(PATH_PROJECT_ROOT) @echo "✅ Code and security checks complete." +test: + @echo "Running tests..." + @uv run pytest $(TEST_PATH) -v + @echo "✅ Tests complete." + doc: @echo "Serving documentation..." @uv run mkdocs serve -pipeline: clean-cache-temp-files lint code-check +pipeline: clean-cache-temp-files lint code-check test @echo "✅ Pipeline complete." all: setup pipeline doc diff --git a/README.md b/README.md index 9977947..41a2157 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ grouped dependency installations. `Makefile`: ```bash - make install + make setup ``` This installs development, testing, and documentation tools as defined in diff --git a/docs/content/makefile-commands.md b/docs/content/makefile-commands.md index c319e75..11779a2 100644 --- a/docs/content/makefile-commands.md +++ b/docs/content/makefile-commands.md @@ -42,7 +42,6 @@ readability and maintainability. It integrates several tools: - **isort**: Organizes imports alphabetically and categorically. - **Ruff**: A high-performance linter that detects and fixes style issues. -- **nbqa**: Applies linting to Jupyter notebooks. Typical automatic fixes include removing unused imports, correcting spacing and formatting, and reorganizing imports according to PEP 8 conventions. 
This command is diff --git a/docs/content/pyproject-configuration.md b/docs/content/pyproject-configuration.md index 03546c0..c6a8f4c 100644 --- a/docs/content/pyproject-configuration.md +++ b/docs/content/pyproject-configuration.md @@ -48,7 +48,6 @@ automation: - `complexipy`: Measures cyclomatic complexity to identify potentially unmaintainable code. - `isort`: Sorts imports according to standard conventions. -- `nbqa`: Extends linting capabilities to Jupyter notebooks. - `deadcode`: Detects unused or unreachable code. - `pre-commit`: Manages Git pre-commit hooks for automated checks. diff --git a/docs/index.md b/docs/index.md index 9f48e68..0ff2747 100644 --- a/docs/index.md +++ b/docs/index.md @@ -54,7 +54,7 @@ grouped dependency installations. `Makefile`: ```bash - make install + make setup ``` This installs development, testing, and documentation tools as defined in diff --git a/mkdocs.yml b/mkdocs.yml index 9f66c19..7dc1487 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -2,21 +2,19 @@ site_name: Python project template site_description: Python project template repo_name: python-project-template repo_url: https://github.com/danibcorr/python-project-template -site_url: https://https://danibcorr.github.io/python-project-template +site_url: https://danibcorr.github.io/python-project-template theme: name: "material" language: en palette: - - media: "(prefers-color-scheme: dark)" - scheme: slate + - scheme: slate primary: black accent: black toggle: icon: material/weather-night name: "Switch to light mode" - - media: "(prefers-color-scheme: light)" - scheme: default + - scheme: default primary: black accent: black toggle: @@ -54,6 +52,9 @@ theme: use_directory_urls: false markdown_extensions: + - tables + - toc: + toc_depth: 2 - admonition - pymdownx.details - pymdownx.superfences: diff --git a/pyproject.toml b/pyproject.toml index 4d41e49..ceee8b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "python-project-template" -version = 
"2.5.1" +version = "2.6.0" description = "Python project template" authors = [ {name = "Daniel Bazo Correa", email = "none@none.com"}, @@ -34,11 +34,11 @@ required-environments = [ [dependency-groups] pipeline = [ - "bandit==1.9.2", "complexipy==5.1.0", "mypy==1.19.1", "ruff==0.14.11", - "isort==7.0.0" + "isort==7.0.0", + "pytest==9.0.2" ] documentation = [ "mkdocs==1.6.1", @@ -50,7 +50,6 @@ documentation = [ "mike==2.1.3" ] - [tool.ruff] line-length = 88 indent-width = 4 @@ -82,7 +81,7 @@ exclude = [ "site-packages", "venv", ] -extend-exclude = ["*.ipynb"] +extend-exclude = ["*.ipynb", "{{*}}"] [tool.ruff.lint] select = ["E", "F", "UP", "B", "SIM"] @@ -101,16 +100,20 @@ docstring-code-line-length = 88 [tool.mypy] check_untyped_defs = true ignore_missing_imports = true +explicit_package_bases = true exclude = [ "^(build|dist|venv)", - ".venv/" + ".venv/", + "{{ cookiecutter.project_name }}", ] cache_dir = "/dev/null" +[tool.complexipy] +exclude = ["{{ cookiecutter.project_name }}"] + [tool.isort] -known_first_party = ["{{ cookiecutter.project_module_name }}"] sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] import_heading_stdlib = "Standard libraries" import_heading_thirdparty = "3pps" import_heading_firstparty = "Own modules" -line_length = 88 \ No newline at end of file +line_length = 88 diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..93c6851 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,16 @@ +# 3pps +import pytest + + +@pytest.fixture(autouse=True) +def reset_modules(): + """ + Reset imported modules between tests. + + Currently a placeholder that only yields; extend it to clear module state if test isolation is needed.
+ + Returns: + None + """ + + yield diff --git a/tests/test_cookiecutter_template.py b/tests/test_cookiecutter_template.py new file mode 100644 index 0000000..de405f7 --- /dev/null +++ b/tests/test_cookiecutter_template.py @@ -0,0 +1,333 @@ +# Standard libraries +import json +import subprocess +from pathlib import Path + +# 3pps +import pytest + + +@pytest.fixture +def template_dir() -> Path: + """ + Get the template directory path. + + Returns: + The absolute path to the cookiecutter template root + directory. + """ + + return Path(__file__).parent.parent + + +@pytest.fixture +def cookiecutter_config() -> dict: + """ + Load cookiecutter configuration. + + Returns: + Dictionary containing the parsed cookiecutter.json + configuration. + """ + + config_path = Path(__file__).parent.parent / "cookiecutter.json" + return json.loads(config_path.read_text()) + + +def test_cookiecutter_json_exists(template_dir: Path) -> None: + """ + Test that cookiecutter.json exists. + + Args: + template_dir: The absolute path to the cookiecutter template + root directory. + + Returns: + None + """ + + assert (template_dir / "cookiecutter.json").exists() + + +def test_cookiecutter_json_valid(cookiecutter_config: dict) -> None: + """ + Test that cookiecutter.json is valid JSON with required fields. + + Args: + cookiecutter_config: Dictionary containing the parsed + cookiecutter.json configuration. + + Returns: + None + """ + + required_fields = [ + "project_name", + "project_module_name", + "project_test_folder_name", + "project_version", + ] + for field in required_fields: + assert field in cookiecutter_config + + +def test_template_generation(tmp_path: Path, template_dir: Path) -> None: + """ + Test that cookiecutter generates a valid project. + + Args: + tmp_path: Temporary directory path provided by pytest. + template_dir: The absolute path to the cookiecutter template + root directory. 
+ + Returns: + None + """ + + result = subprocess.run( + [ + "uv", + "run", + "cookiecutter", + str(template_dir), + "--no-input", + "--output-dir", + str(tmp_path), + ], + capture_output=True, + text=True, + ) + + assert result.returncode == 0, f"Failed: {result.stderr}" + + generated_project = tmp_path / "project-name" + assert generated_project.exists() + + +def test_generated_project_structure(tmp_path: Path, template_dir: Path) -> None: + """ + Test that generated project has correct structure. + + Args: + tmp_path: Temporary directory path provided by pytest. + template_dir: The absolute path to the cookiecutter template + root directory. + + Returns: + None + """ + + subprocess.run( + [ + "uv", + "run", + "cookiecutter", + str(template_dir), + "--no-input", + "--output-dir", + str(tmp_path), + ], + check=True, + ) + + project = tmp_path / "project-name" + expected_files = [ + "pyproject.toml", + "Makefile", + "README.md", + "mkdocs.yml", + ".gitignore", + ] + + for file in expected_files: + assert (project / file).exists(), f"Missing {file}" + + +def test_generated_project_folders(tmp_path: Path, template_dir: Path) -> None: + """ + Test that generated project has correct folders. + + Args: + tmp_path: Temporary directory path provided by pytest. + template_dir: The absolute path to the cookiecutter template + root directory. 
+ + Returns: + None + """ + + subprocess.run( + [ + "uv", + "run", + "cookiecutter", + str(template_dir), + "--no-input", + "--output-dir", + str(tmp_path), + ], + check=True, + ) + + project = tmp_path / "project-name" + expected_folders = ["src", "tests", "docs", "config"] + + for folder in expected_folders: + assert (project / folder).exists(), f"Missing {folder}" + + +@pytest.mark.parametrize( + "folder_option,folder_name", + [ + ("add_dev_container_folder", ".devcontainer"), + ("add_vscode_folder", ".vscode"), + ("add_notebooks_folder", "notebooks"), + ("add_prompts_folder", "prompts"), + ], +) +def test_optional_folders_yes(tmp_path: Path, template_dir: Path, folder_option: str, folder_name: str) -> None: + """ + Test that optional folders are created when enabled. + + Args: + tmp_path: Temporary directory path provided by pytest. + template_dir: The absolute path to the cookiecutter template + root directory. + folder_option: Cookiecutter configuration option name. + folder_name: Name of the folder to verify. + + Returns: + None + """ + + subprocess.run( + [ + "uv", + "run", + "cookiecutter", + str(template_dir), + "--no-input", + f"{folder_option}=yes", + "--output-dir", + str(tmp_path), + ], + check=True, + ) + + project = tmp_path / "project-name" + assert (project / folder_name).exists() + + +@pytest.mark.parametrize( + "folder_option,folder_name", + [ + ("add_dev_container_folder", ".devcontainer"), + ("add_vscode_folder", ".vscode"), + ("add_notebooks_folder", "notebooks"), + ("add_prompts_folder", "prompts"), + ], +) +def test_optional_folders_no(tmp_path: Path, template_dir: Path, folder_option: str, folder_name: str) -> None: + """ + Test that optional folders are removed when disabled. + + Args: + tmp_path: Temporary directory path provided by pytest. + template_dir: The absolute path to the cookiecutter template + root directory. + folder_option: Cookiecutter configuration option name. + folder_name: Name of the folder to verify. 
+ + Returns: + None + """ + + subprocess.run( + [ + "uv", + "run", + "cookiecutter", + str(template_dir), + "--no-input", + f"{folder_option}=no", + "--output-dir", + str(tmp_path), + ], + check=True, + ) + + project = tmp_path / "project-name" + assert not (project / folder_name).exists() + + +def test_generated_pyproject_valid(tmp_path: Path, template_dir: Path) -> None: + """ + Test that generated pyproject.toml is valid. + + Args: + tmp_path: Temporary directory path provided by pytest. + template_dir: The absolute path to the cookiecutter template + root directory. + + Returns: + None + """ + + subprocess.run( + [ + "uv", + "run", + "cookiecutter", + str(template_dir), + "--no-input", + "project_name=test-project", + "project_module_name=test_module", + "--output-dir", + str(tmp_path), + ], + check=True, + ) + + pyproject = tmp_path / "test-project" / "pyproject.toml" + content = pyproject.read_text() + + assert "test-project" in content + assert "test_module" in content + assert "[project]" in content + assert "[tool.uv]" in content + + +def test_makefile_commands_exist(tmp_path: Path, template_dir: Path) -> None: + """ + Test that Makefile has required commands. + + Args: + tmp_path: Temporary directory path provided by pytest. + template_dir: The absolute path to the cookiecutter template + root directory. 
+ + Returns: + None + """ + + subprocess.run( + [ + "uv", + "run", + "cookiecutter", + str(template_dir), + "--no-input", + "--output-dir", + str(tmp_path), + ], + check=True, + ) + + makefile = tmp_path / "project-name" / "Makefile" + content = makefile.read_text() + + required_commands = ["setup", "lint", "code-check", "tests", "pipeline"] + for cmd in required_commands: + assert f"{cmd}:" in content, f"Missing command: {cmd}" + + assert "uv sync" in content or "uv run" in content diff --git a/tests/test_post_gen_project.py b/tests/test_post_gen_project.py new file mode 100644 index 0000000..68b1ccf --- /dev/null +++ b/tests/test_post_gen_project.py @@ -0,0 +1,121 @@ +# Standard libraries +from pathlib import Path +from unittest.mock import patch + +# 3pps +import pytest + +# Own modules +from hooks.post_gen_project import remove + + +@pytest.fixture +def mock_project_dir(tmp_path: Path) -> Path: + """ + Create a temporary project directory with test folders. + + Generates a mock project structure containing the standard + optional folders (.devcontainer, .vscode, notebooks, prompts) + with a test file in each. + + Args: + tmp_path: Temporary directory path provided by pytest. + + Returns: + The path to the temporary project directory. + """ + + folders = [".devcontainer", ".vscode", "notebooks", "prompts"] + for folder in folders: + (tmp_path / folder).mkdir() + (tmp_path / folder / "test.txt").write_text("test") + + return tmp_path + + +def test_remove_file(tmp_path: Path) -> None: + """ + Test removing a file. + + Args: + tmp_path: Temporary directory path provided by pytest. + + Returns: + None + """ + + test_file = tmp_path / "test.txt" + test_file.write_text("test") + assert test_file.exists() + + remove(test_file) + assert not test_file.exists() + + +def test_remove_directory(tmp_path: Path) -> None: + """ + Test removing a directory. + + Args: + tmp_path: Temporary directory path provided by pytest. 
+ + Returns: + None + """ + + test_dir = tmp_path / "test_dir" + test_dir.mkdir() + (test_dir / "file.txt").write_text("test") + assert test_dir.exists() + + remove(test_dir) + assert not test_dir.exists() + + +def test_remove_nonexistent_path(tmp_path: Path) -> None: + """ + Test removing a non-existent path does not raise error. + + Args: + tmp_path: Temporary directory path provided by pytest. + + Returns: + None + """ + + nonexistent = tmp_path / "nonexistent" + remove(nonexistent) + + +@patch("hooks.post_gen_project.project_dir") +@patch("hooks.post_gen_project.remove") +def test_folder_removal_logic(mock_remove, mock_project_dir, tmp_path: Path) -> None: + """ + Test that folders are removed when not enabled. + + Verifies the post-generation hook correctly removes optional + folders based on cookiecutter configuration. + + Args: + mock_remove: Mock of the remove function. + mock_project_dir: Mock of the project directory path. + tmp_path: Temporary directory path provided by pytest. 
+ + Returns: + None + """ + + mock_project_dir.__truediv__ = lambda self, x: tmp_path / x + + folders = { + ".devcontainer": "no", + ".vscode": "yes", + "notebooks": "no", + "prompts": "yes", + } + + for folder, enabled in folders.items(): + if enabled != "yes": + mock_remove(mock_project_dir / folder) + + assert mock_remove.call_count == 2 diff --git a/uv.lock b/uv.lock index c6fcb93..3be8271 100644 --- a/uv.lock +++ b/uv.lock @@ -57,20 +57,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/02/e3/a4fa1946722c4c7b063cc25043a12d9ce9b4323777f89643be74cef2993c/backrefs-6.1-py39-none-any.whl", hash = "sha256:a9e99b8a4867852cad177a6430e31b0f6e495d65f8c6c134b68c14c3c95bf4b0", size = 381058, upload-time = "2025-11-15T14:52:06.698Z" }, ] -[[package]] -name = "bandit" -version = "1.9.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyyaml", marker = "sys_platform == 'linux'" }, - { name = "rich", marker = "sys_platform == 'linux'" }, - { name = "stevedore", marker = "sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cf/72/f704a97aac430aeb704fa16435dfa24fbeaf087d46724d0965eb1f756a2c/bandit-1.9.2.tar.gz", hash = "sha256:32410415cd93bf9c8b91972159d5cf1e7f063a9146d70345641cd3877de348ce", size = 4241659, upload-time = "2025-11-23T21:36:18.722Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/55/1a/5b0320642cca53a473e79c7d273071b5a9a8578f9e370b74da5daa2768d7/bandit-1.9.2-py3-none-any.whl", hash = "sha256:bda8d68610fc33a6e10b7a8f1d61d92c8f6c004051d5e946406be1fb1b16a868", size = 134377, upload-time = "2025-11-23T21:36:17.39Z" }, -] - [[package]] name = "beautifulsoup4" version = "4.14.3" @@ -325,6 +311,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, 
upload-time = "2025-01-03T18:51:54.306Z" }, ] +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + [[package]] name = "isort" version = "7.0.0" @@ -689,6 +684,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, ] +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + [[package]] name = "pydantic" version = "2.12.5" @@ -800,6 +804,21 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/8b/40/2614036cdd416452f5bf98ec037f38a1afb17f327cb8e6b652d4729e0af8/pyparsing-3.3.1-py3-none-any.whl", hash = "sha256:023b5e7e5520ad96642e2c6db4cb683d3970bd640cdf7115049a6e9c3682df82", size = 121793, upload-time = "2025-12-23T03:14:02.103Z" }, ] +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "iniconfig", marker = "sys_platform == 'linux'" }, + { name = "packaging", marker = "sys_platform == 'linux'" }, + { name = "pluggy", marker = "sys_platform == 'linux'" }, + { name = "pygments", marker = "sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -814,7 +833,7 @@ wheels = [ [[package]] name = "python-project-template" -version = "2.5.0" +version = "2.6.0" source = { virtual = "." 
} dependencies = [ { name = "cookiecutter", marker = "sys_platform == 'linux'" }, @@ -831,10 +850,10 @@ documentation = [ { name = "mkdocs-material", marker = "sys_platform == 'linux'" }, ] pipeline = [ - { name = "bandit", marker = "sys_platform == 'linux'" }, { name = "complexipy", marker = "sys_platform == 'linux'" }, { name = "isort", marker = "sys_platform == 'linux'" }, { name = "mypy", marker = "sys_platform == 'linux'" }, + { name = "pytest", marker = "sys_platform == 'linux'" }, { name = "ruff", marker = "sys_platform == 'linux'" }, ] @@ -852,10 +871,10 @@ documentation = [ { name = "mkdocs-material", specifier = "==9.7.1" }, ] pipeline = [ - { name = "bandit", specifier = "==1.9.2" }, { name = "complexipy", specifier = "==5.1.0" }, { name = "isort", specifier = "==7.0.0" }, { name = "mypy", specifier = "==1.19.1" }, + { name = "pytest", specifier = "==9.0.2" }, { name = "ruff", specifier = "==0.14.11" }, ] @@ -1001,15 +1020,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/f3/b67d6ea49ca9154453b6d70b34ea22f3996b9fa55da105a79d8732227adc/soupsieve-2.8.1-py3-none-any.whl", hash = "sha256:a11fe2a6f3d76ab3cf2de04eb339c1be5b506a8a47f2ceb6d139803177f85434", size = 36710, upload-time = "2025-12-18T13:50:33.267Z" }, ] -[[package]] -name = "stevedore" -version = "5.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/5b/496f8abebd10c3301129abba7ddafd46c71d799a70c44ab080323987c4c9/stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945", size = 516074, upload-time = "2025-11-20T10:06:07.264Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820", size = 54428, upload-time = "2025-11-20T10:06:05.946Z" }, -] - [[package]] name = "text-unidecode" 
version = "1.3" diff --git a/{{ cookiecutter.project_name }}/.devcontainer/setup.sh b/{{ cookiecutter.project_name }}/.devcontainer/setup.sh index 658115b..dbf3bd8 100644 --- a/{{ cookiecutter.project_name }}/.devcontainer/setup.sh +++ b/{{ cookiecutter.project_name }}/.devcontainer/setup.sh @@ -12,6 +12,6 @@ sudo apt-get install -y build-essential > /dev/null 2>&1 echo "✅ System dependencies installed." echo "Installing Python dependencies with Makefile..." -make install > /dev/null 2>&1 +make setup > /dev/null 2>&1 echo "✅ Devcontainer setup complete." \ No newline at end of file diff --git a/{{ cookiecutter.project_name }}/.github/CONTRIBUTING.md b/{{ cookiecutter.project_name }}/.github/CONTRIBUTING.md index 085b1a4..2078707 100644 --- a/{{ cookiecutter.project_name }}/.github/CONTRIBUTING.md +++ b/{{ cookiecutter.project_name }}/.github/CONTRIBUTING.md @@ -71,7 +71,7 @@ To work locally: 3. Run: ```bash - make install + make setup ``` > The required Python version is defined in `pyproject.toml`. If using `uv`, it will @@ -123,5 +123,5 @@ Not a fan of writing code? You can still help by: - Suggesting features or usability improvements - Helping triage and respond to issues -Thank you for being part of {{ cookiecutter.project_name }}'s journey. We’re thrilled to have -you here. If you have questions, feel free to reach out or open an issue. +Thank you for being part of {{ cookiecutter.project_name }}'s journey. We’re thrilled to +have you here. If you have questions, feel free to reach out or open an issue. 
diff --git a/{{ cookiecutter.project_name }}/.github/actions/build-mkdocs/action.yml b/{{ cookiecutter.project_name }}/.github/actions/build-mkdocs/action.yml index fb2ddd0..847a095 100644 --- a/{{ cookiecutter.project_name }}/.github/actions/build-mkdocs/action.yml +++ b/{{ cookiecutter.project_name }}/.github/actions/build-mkdocs/action.yml @@ -18,15 +18,8 @@ runs: - name: Setup gh-pages Branch shell: bash run: | - # Store current branch - CURRENT_BRANCH=$(git branch --show-current) - - # Check if gh-pages branch exists - if git ls-remote --heads origin gh-pages | grep -q gh-pages; then - echo "gh-pages branch exists, fetching..." - git fetch origin gh-pages:gh-pages - else - echo "gh-pages branch doesn't exist, creating..." + if ! git ls-remote --heads origin gh-pages | grep -q gh-pages; then + CURRENT_BRANCH=$(git branch --show-current) git checkout --orphan gh-pages git reset --hard git commit --allow-empty -m "Initial gh-pages commit" @@ -34,14 +27,8 @@ runs: git checkout "$CURRENT_BRANCH" fi - - name: Build and Deploy Docs with Mike + - name: Build and Deploy Docs shell: bash run: | - echo "Deploying Docs Version: ${{ inputs.docs-version }}" uv run mike deploy --push --update-aliases "${{ inputs.docs-version }}" latest - - - name: Set Default Version to Docs - shell: bash - run: | - echo "Setting 'Latest' as Default Version" uv run mike set-default --push latest diff --git a/{{ cookiecutter.project_name }}/.github/actions/create-release/action.yml b/{{ cookiecutter.project_name }}/.github/actions/create-release/action.yml index 3d91904..4afd27d 100644 --- a/{{ cookiecutter.project_name }}/.github/actions/create-release/action.yml +++ b/{{ cookiecutter.project_name }}/.github/actions/create-release/action.yml @@ -26,52 +26,24 @@ runs: run: | if gh release view "${{ inputs.version }}" >/dev/null 2>&1; then echo "exists=true" >> $GITHUB_OUTPUT - echo "Release ${{ inputs.version }} already exists" else echo "exists=false" >> $GITHUB_OUTPUT - echo "Release ${{ 
inputs.version }} doesn't exist" fi env: GH_TOKEN: ${{ inputs.token }} - - name: Generate Release Notes + - name: Create Release if: steps.check_release.outputs.exists == 'false' shell: bash run: | - echo "Generating release notes..." LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "") + RANGE="${LAST_TAG:+$LAST_TAG..HEAD}" + COMMITS=$(git log ${RANGE:---max-count=10} --pretty=format:"- %s" --no-merges) - if [ -z "$LAST_TAG" ]; then - echo "No previous tags found, using last 10 commits..." - COMMITS=$(git log --pretty=format:"- %s" --no-merges -10) - else - echo "Previous tag found: $LAST_TAG" - COMMITS=$(git log "${LAST_TAG}..HEAD" --pretty=format:"- %s" --no-merges) - fi - - cat > release-notes.md << EOF - ## Changes - - $COMMITS - EOF - - echo "Release notes generated:" - cat release-notes.md - - - name: Create Release - if: steps.check_release.outputs.exists == 'false' - shell: bash - run: | - echo "Creating release ${{ inputs.version }}..." gh release create "${{ inputs.version }}" \ --title "${{ inputs.version }}" \ - --notes-file release-notes.md - echo "Release ${{ inputs.version }} created successfully" + --notes "## Changes + + $COMMITS" env: GH_TOKEN: ${{ inputs.token }} - - - name: Skip Release - if: steps.check_release.outputs.exists == 'true' - shell: bash - run: | - echo "Skipping release creation - ${{ inputs.version }} already exists" diff --git a/{{ cookiecutter.project_name }}/.github/actions/lint-code/action.yml b/{{ cookiecutter.project_name }}/.github/actions/lint-code/action.yml index a2e8165..0c07777 100644 --- a/{{ cookiecutter.project_name }}/.github/actions/lint-code/action.yml +++ b/{{ cookiecutter.project_name }}/.github/actions/lint-code/action.yml @@ -11,26 +11,17 @@ runs: using: composite steps: - name: Lint with Ruff - id: ruff - run: | - echo "Running Ruff..." 
- uv run ruff check ${{ inputs.src-project-folder }}/ shell: bash + run: uv run ruff check ${{ inputs.src-project-folder }}/ - - name: Checking Imports with isort - run: | - echo "Running isort..." - uv run isort --check ${{ inputs.src-project-folder }}/ + - name: Check Imports with isort shell: bash + run: uv run isort --check ${{ inputs.src-project-folder }}/ - - name: Checking Cognitive Complexity with complexipy - run: | - echo "Running complexipy..." - uv run complexipy -f ${{ inputs.src-project-folder }}/ + - name: Check Cognitive Complexity shell: bash + run: uv run complexipy -f ${{ inputs.src-project-folder }}/ - - name: Running Mypy Type Checker - run: | - echo "Running Mypy..." - uv run mypy ${{ inputs.src-project-folder }}/ + - name: Run Mypy Type Checker shell: bash + run: uv run mypy ${{ inputs.src-project-folder }}/ diff --git a/{{ cookiecutter.project_name }}/.github/actions/security/action.yml b/{{ cookiecutter.project_name }}/.github/actions/security/action.yml index 93cb09f..3c15540 100644 --- a/{{ cookiecutter.project_name }}/.github/actions/security/action.yml +++ b/{{ cookiecutter.project_name }}/.github/actions/security/action.yml @@ -16,8 +16,7 @@ runs: using: composite steps: - name: Security Scan with Bandit - id: bandit shell: bash run: | - echo "Running Bandit..." 
- uv run bandit -r "${{ inputs.src-project-folder }}/" --exclude "${{ inputs.src-exclude }}" + uv run bandit -r "${{ inputs.src-project-folder }}/" --exclude "${{ + inputs.src-exclude }}" diff --git a/{{ cookiecutter.project_name }}/.github/actions/setup-python-env/action.yml b/{{ cookiecutter.project_name }}/.github/actions/setup-python-env/action.yml index 2a553f6..ef503ec 100644 --- a/{{ cookiecutter.project_name }}/.github/actions/setup-python-env/action.yml +++ b/{{ cookiecutter.project_name }}/.github/actions/setup-python-env/action.yml @@ -25,39 +25,20 @@ runs: enable-cache: true - name: Install dependencies with uv - id: install-deps shell: bash run: | - # Check if we should install all extras - if [ -z "${{ inputs.uv-group }}" ] && [ -z "${{ inputs.uv-extra }}" ]; then - echo "Installing all extras (default when no group or extra specified)..." - uv sync --all-extras - elif [ "${{ inputs.uv-extra }}" = "--all-extras" ]; then - echo "Installing all extras (explicitly requested)..." - if [ -n "${{ inputs.uv-group }}" ]; then - echo "Note: Installing all extras overrides the specified group: ${{ inputs.uv-group }}" - fi - uv sync --all-extras + ARGS="" + if [ "${{ inputs.uv-extra }}" = "--all-extras" ] || [ -z "${{ inputs.uv-group }}${{ inputs.uv-extra }}" ]; then + ARGS="--all-extras" else - echo "Installing with group: ${{ inputs.uv-group }}, and extra: ${{ inputs.uv-extra }}..." 
- if [ -n "${{ inputs.uv-group }}" ] && [ -n "${{ inputs.uv-extra }}" ]; then - uv sync --group ${{ inputs.uv-group }} --extra ${{ inputs.uv-extra }} - elif [ -n "${{ inputs.uv-group }}" ]; then - uv sync --group ${{ inputs.uv-group }} - elif [ -n "${{ inputs.uv-extra }}" ]; then - uv sync --extra ${{ inputs.uv-extra }} - else - uv sync - fi + [ -n "${{ inputs.uv-group }}" ] && ARGS="$ARGS --group ${{ inputs.uv-group }}" + [ -n "${{ inputs.uv-extra }}" ] && ARGS="$ARGS --extra ${{ inputs.uv-extra }}" fi + uv sync $ARGS - name: Verify uv and environment - id: verify shell: bash run: | - echo "uv version:" uv --version - echo "Virtual environments:" uv venv list - echo "Python version:" uv run python --version diff --git a/{{ cookiecutter.project_name }}/.github/actions/test-code/action.yml b/{{ cookiecutter.project_name }}/.github/actions/test-code/action.yml index 250cbdd..7ebb725 100644 --- a/{{ cookiecutter.project_name }}/.github/actions/test-code/action.yml +++ b/{{ cookiecutter.project_name }}/.github/actions/test-code/action.yml @@ -1,4 +1,5 @@ name: Test Code +description: Run Python tests with Pytest inputs: src-project-folder: @@ -15,14 +16,8 @@ runs: using: composite steps: - name: Run tests with Pytest - id: run-pytest shell: bash run: | - echo "Checking if tests directory exists..." - if [ -d "${{ inputs.src-tests-folder }}" ] && [ $(find ${{ inputs.src-tests-folder }} -name "test_*.py" | wc -l) -gt 0 ]; then - echo "Running tests..." + if [ -d "${{ inputs.src-tests-folder }}" ] && [ -n "$(find ${{ inputs.src-tests-folder }} -name 'test_*.py')" ]; then uv run pytest ${{ inputs.src-tests-folder }} - echo "Tests complete." - else - echo "No tests directory found or no test files. Skipping tests." 
fi diff --git a/{{ cookiecutter.project_name }}/.github/workflows/workflow.yml b/{{ cookiecutter.project_name }}/.github/workflows/workflow.yml index 0b1ae03..fcc9188 100644 --- a/{{ cookiecutter.project_name }}/.github/workflows/workflow.yml +++ b/{{ cookiecutter.project_name }}/.github/workflows/workflow.yml @@ -8,7 +8,7 @@ on: env: SRC_PROJECT_FOLDER: "{{ cookiecutter.project_module_name }}" - SRC_PROJECT_TESTS: "{{ cookiecutter.project_test_folder_name }}" + TEST_PATH: "{{ cookiecutter.project_test_folder_name }}" SRC_PYTHON_VERSION: "{{ cookiecutter.project_version_python }}" jobs: @@ -36,13 +36,13 @@ jobs: uses: ./.github/actions/test-code with: src-project-folder: ${{'{{'}} env.SRC_PROJECT_FOLDER {{'}}'}} - src-tests-folder: ${{'{{'}} env.SRC_PROJECT_TESTS {{'}}'}} + src-tests-folder: ${{'{{'}} env.TEST_PATH {{'}}'}} - name: Security Scan uses: ./.github/actions/security with: src-project-folder: ${{'{{'}} env.SRC_PROJECT_FOLDER {{'}}'}} - src-exclude: ${{'{{'}} env.SRC_PROJECT_TESTS {{'}}'}} + src-exclude: ${{'{{'}} env.TEST_PATH {{'}}'}} build-deploy-docs: if: github.ref == 'refs/heads/main' diff --git a/{{ cookiecutter.project_name }}/.pre-commit-config.yaml b/{{ cookiecutter.project_name }}/.pre-commit-config.yaml index bcb0570..11fc356 100644 --- a/{{ cookiecutter.project_name }}/.pre-commit-config.yaml +++ b/{{ cookiecutter.project_name }}/.pre-commit-config.yaml @@ -3,7 +3,7 @@ repos: hooks: - id: local-check name: Makefile Validation - entry: make pipeline + entry: make pre-commit language: system pass_filenames: false always_run: true diff --git a/{{ cookiecutter.project_name }}/.vscode/settings.json b/{{ cookiecutter.project_name }}/.vscode/settings.json index 3e0aaad..d899291 100644 --- a/{{ cookiecutter.project_name }}/.vscode/settings.json +++ b/{{ cookiecutter.project_name }}/.vscode/settings.json @@ -47,7 +47,6 @@ "[python]": { "editor.formatOnType": true }, - "python.defaultInterpreterPath": "/home/jovyan/envs/env/bin/python", "[jsonc]": { 
"editor.defaultFormatter": "vscode.json-language-features" }, diff --git a/{{ cookiecutter.project_name }}/Makefile b/{{ cookiecutter.project_name }}/Makefile index 9cf36e4..a2d472b 100644 --- a/{{ cookiecutter.project_name }}/Makefile +++ b/{{ cookiecutter.project_name }}/Makefile @@ -7,9 +7,9 @@ .DEFAULT_GOAL := all -SRC_PROJECT_NAME ?= {{ cookiecutter.project_module_name }} -SRC_PROJECT_TESTS ?= {{ cookiecutter.project_test_folder_name }} -SRC_PROJECT_NOTEBOOKS_FOLDER ?= notebooks +SOURCE_PATH ?= {{ cookiecutter.project_module_name }} +TEST_PATH ?= {{ cookiecutter.project_test_folder_name }} +NOTEBOOKS_PATH ?= notebooks setup: @echo "Installing dependencies..." @@ -27,28 +27,26 @@ clean-cache-temp-files: lint: @echo "Running lint checks..." - @uv run isort $(SRC_PROJECT_NAME)/ - @uv run nbqa isort $(SRC_PROJECT_NOTEBOOKS_FOLDER)/ - @uv run ruff check --fix $(SRC_PROJECT_NAME)/ - @uv run ruff format $(SRC_PROJECT_NAME)/ - @uv run nbqa ruff $(SRC_PROJECT_NOTEBOOKS_FOLDER)/ + @uv run isort $(SOURCE_PATH) + @uv run ruff check --fix $(SOURCE_PATH) + @uv run ruff format $(SOURCE_PATH) @echo "✅ Linting complete." code-check: @echo "Running static code checks..." - @uv run mypy $(SRC_PROJECT_NAME)/ - @uv run complexipy -f $(SRC_PROJECT_NAME)/ - @uv run bandit -r $(SRC_PROJECT_NAME)/ --exclude $(SRC_PROJECT_TESTS) + @uv run mypy $(SOURCE_PATH) + @uv run complexipy -f $(SOURCE_PATH) + @uv run bandit -r $(SOURCE_PATH) --exclude $(TEST_PATH) @echo "✅ Code and security checks complete." check-dead-code: @echo "Checking dead code..." - @uv run deadcode $(SRC_PROJECT_NAME) + @uv run deadcode $(SOURCE_PATH) @echo "✅ Dead code check complete." tests: @echo "Running tests..." - @uv run pytest $(SRC_PROJECT_TESTS)/ + @uv run pytest $(TEST_PATH) @echo "✅ Tests complete." doc: @@ -58,5 +56,8 @@ doc: pipeline: clean-cache-temp-files lint code-check tests @echo "✅ Pipeline complete." +pre-commit: clean-cache-temp-files lint code-check + @echo "✅ Pipeline pre-commit complete." 
+ all: setup pipeline doc @echo "✅ All tasks complete." diff --git a/{{ cookiecutter.project_name }}/README.md b/{{ cookiecutter.project_name }}/README.md index aef59ec..7aa3122 100644 --- a/{{ cookiecutter.project_name }}/README.md +++ b/{{ cookiecutter.project_name }}/README.md @@ -38,7 +38,7 @@ collaboration, and maintainability: ├── models/ <- Training, inference, and model-related logic. │ ├── __init__.py │ ├── train.py <- Scripts and functions to train machine learning models. - │ └── predict.py <- Functions for generating model predictions. + │ └── inference.py <- Scripts and functions for model inference. └── utils/ <- Utility scripts and helper functions used across modules. ├── __init__.py └── utils.py diff --git a/{{ cookiecutter.project_name }}/pyproject.toml b/{{ cookiecutter.project_name }}/pyproject.toml index 9a5eeb2..2059fd7 100644 --- a/{{ cookiecutter.project_name }}/pyproject.toml +++ b/{{ cookiecutter.project_name }}/pyproject.toml @@ -46,7 +46,6 @@ pipeline = [ "pytest-order==1.3.0", "ruff==0.14.11", "isort==7.0.0", - "nbqa==1.9.1", "deadcode==2.4.1", "pre-commit==4.5.1", ] diff --git a/{{ cookiecutter.project_name }}/{{ cookiecutter.project_module_name }}/__init__.py b/{{ cookiecutter.project_name }}/{{ cookiecutter.project_module_name }}/__init__.py index df8c9b2..d5a6a8a 100644 --- a/{{ cookiecutter.project_name }}/{{ cookiecutter.project_module_name }}/__init__.py +++ b/{{ cookiecutter.project_name }}/{{ cookiecutter.project_module_name }}/__init__.py @@ -1,5 +1,5 @@ -# Own modules -from {{ cookiecutter.project_module_name }}.version import __version__ +# 3pps +from {{cookiecutter.project_module_name}}.version import __version__ # Define all names to be imported __all__: list[str] = ["__version__"] diff --git a/{{ cookiecutter.project_name }}/{{ cookiecutter.project_module_name }}/version.py b/{{ cookiecutter.project_name }}/{{ cookiecutter.project_module_name }}/version.py index af52d68..5da51c0 100644 --- a/{{ cookiecutter.project_name 
}}/{{ cookiecutter.project_module_name }}/version.py +++ b/{{ cookiecutter.project_name }}/{{ cookiecutter.project_module_name }}/version.py @@ -5,5 +5,4 @@ # Standard libraries import importlib.metadata - -__version__: str = importlib.metadata.version("{{ cookiecutter.project_module_name }}") \ No newline at end of file +__version__: str = importlib.metadata.version("{{ cookiecutter.project_name }}")