diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..00d113e --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,270 @@ +name: CI/CD Pipeline + +on: + push: + branches: + - main + pull_request: + types: [opened, synchronize, reopened] + workflow_dispatch: + inputs: + bump_type: + description: 'Version bump type' + required: true + type: choice + options: + - patch + - minor + - major + description: + description: 'Release description (optional)' + required: false + type: string + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + CARGO_TERM_COLOR: always + +jobs: + # Test on multiple OS - main CI job + test: + name: Test (${{ matrix.os }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + steps: + - uses: actions/checkout@v4 + + - name: Setup Rust (nightly-2022-08-22) + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly-2022-08-22 + + - name: Cache cargo registry + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Run tests + run: cargo test --all-features --verbose + + - name: Run doc tests + run: cargo test --doc --verbose + + # Build package - only runs if tests pass + build: + name: Build Package + runs-on: ubuntu-latest + needs: [test] + steps: + - uses: actions/checkout@v4 + + - name: Setup Rust (nightly-2022-08-22) + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly-2022-08-22 + + - name: Cache cargo registry + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-build-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo-build- + + - name: Build release + run: cargo build --release --verbose + + - name: Check package + run: cargo package --list + + # Check for changelog fragments in PRs + changelog: + name: Changelog Fragment Check + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Check for changelog fragments + run: | + # Get list of fragment files (excluding README and template) + FRAGMENTS=$(find changelog.d -name "*.md" ! -name "README.md" 2>/dev/null | wc -l) + + # Get changed files in PR + CHANGED_FILES=$(git diff --name-only origin/${{ github.base_ref }}...HEAD) + + # Check if any source files changed (excluding docs and config) + SOURCE_CHANGED=$(echo "$CHANGED_FILES" | grep -E "^(src/|tests/|scripts/|examples/)" | wc -l) + + if [ "$SOURCE_CHANGED" -gt 0 ] && [ "$FRAGMENTS" -eq 0 ]; then + echo "::warning::No changelog fragment found. Please add a changelog entry in changelog.d/" + echo "" + echo "To create a changelog fragment:" + echo " Create a new .md file in changelog.d/ with your changes" + echo "" + echo "See changelog.d/README.md for more information." 
+ # Note: This is a warning, not a failure, to allow flexibility + # Change 'exit 0' to 'exit 1' to make it required + exit 0 + fi + + echo "Changelog check passed" + + # Automatic release on push to main using changelog fragments + # This job automatically bumps version based on fragments in changelog.d/ + auto-release: + name: Auto Release + needs: [test, build] + if: github.event_name == 'push' && github.ref == 'refs/heads/main' + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup Rust (nightly-2022-08-22) + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly-2022-08-22 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20.x' + + - name: Configure git + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Determine bump type from changelog fragments + id: bump_type + run: node scripts/get-bump-type.mjs + + - name: Check if version already released or no fragments + id: check + run: | + # Check if there are changelog fragments + if [ "${{ steps.bump_type.outputs.has_fragments }}" != "true" ]; then + # No fragments - check if current version tag exists + CURRENT_VERSION=$(grep -Po '(?<=^version = ")[^"]*' Cargo.toml) + if git rev-parse "v$CURRENT_VERSION" >/dev/null 2>&1; then + echo "No changelog fragments and v$CURRENT_VERSION already released" + echo "should_release=false" >> $GITHUB_OUTPUT + else + echo "No changelog fragments but v$CURRENT_VERSION not yet released" + echo "should_release=true" >> $GITHUB_OUTPUT + echo "skip_bump=true" >> $GITHUB_OUTPUT + fi + else + echo "Found changelog fragments, proceeding with release" + echo "should_release=true" >> $GITHUB_OUTPUT + echo "skip_bump=false" >> $GITHUB_OUTPUT + fi + + - name: Collect changelog and bump version + id: version + if: steps.check.outputs.should_release == 'true' && steps.check.outputs.skip_bump != 'true' + run: | + node scripts/version-and-commit.mjs \ + --bump-type "${{ steps.bump_type.outputs.bump_type }}" + + - name: Get current version + id: current_version + if: steps.check.outputs.should_release == 'true' + run: | + CURRENT_VERSION=$(grep -Po '(?<=^version = ")[^"]*' Cargo.toml) + echo "version=$CURRENT_VERSION" >> $GITHUB_OUTPUT + + - name: Build release + if: steps.check.outputs.should_release == 'true' + run: cargo build --release + + - name: Create GitHub Release + if: steps.check.outputs.should_release == 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + node scripts/create-github-release.mjs \ + --release-version "${{ steps.current_version.outputs.version }}" \ + --repository "${{ github.repository }}" + + # Manual release via workflow_dispatch - only after CI passes + manual-release: + name: Manual Release + needs: [test, build] + if: github.event_name == 'workflow_dispatch' + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup Rust (nightly-2022-08-22) + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly-2022-08-22 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20.x' + + - name: Configure git + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + - name: Collect changelog fragments + run: | + # Check if 
there are any fragments to collect + FRAGMENTS=$(find changelog.d -name "*.md" ! -name "README.md" 2>/dev/null | wc -l) + if [ "$FRAGMENTS" -gt 0 ]; then + echo "Found $FRAGMENTS changelog fragment(s), collecting..." + node scripts/collect-changelog.mjs + else + echo "No changelog fragments found, skipping collection" + fi + + - name: Version and commit + id: version + run: | + node scripts/version-and-commit.mjs \ + --bump-type "${{ github.event.inputs.bump_type }}" \ + --description "${{ github.event.inputs.description }}" + + - name: Build release + if: steps.version.outputs.version_committed == 'true' || steps.version.outputs.already_released == 'true' + run: cargo build --release + + - name: Create GitHub Release + if: steps.version.outputs.version_committed == 'true' || steps.version.outputs.already_released == 'true' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + node scripts/create-github-release.mjs \ + --release-version "${{ steps.version.outputs.new_version }}" \ + --repository "${{ github.repository }}" diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..260fedb --- /dev/null +++ b/.gitignore @@ -0,0 +1,64 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# See https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +# Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + +# Generated by cargo mutants +# Contains mutation testing data +**/mutants.out*/ + +# IDE and editor files +.idea/ +.vscode/ +*.swp +*.swo +*~ + +# OS files +.DS_Store +Thumbs.db + +# Python virtual environments (for scripts) +.venv/ +venv/ +__pycache__/ +*.pyc +*.pyo + +# Coverage reports +*.lcov +coverage/ +tarpaulin-report.html + +# Benchmark results +criterion/ + +# Documentation build output +doc/ + +# Local development files +.env +.env.local +*.local + +# Log files +*.log +logs/ + +# Profiling data +*.profraw +*.profdata + +# Temporary files +tmp.file +*.tmp diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..88f176a --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,8 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..5ee64ed --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,279 @@ +# Contributing to platform-mem + +Thank you for your interest in contributing! This document provides guidelines and instructions for contributing to this project. + +## Development Setup + +1. **Fork and clone the repository** + + ```bash + git clone https://github.com/YOUR-USERNAME/mem-rs.git + cd mem-rs + ``` + +2. **Install Rust** + + This project requires a specific nightly Rust version. Install Rust using rustup: + + ```bash + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh + ``` + + The toolchain will be automatically installed when you build the project (configured in `rust-toolchain.toml`). + +3. **Build the project** + + ```bash + cargo build + ``` + +## Development Workflow + +1. **Create a feature branch** + + ```bash + git checkout -b feature/my-feature + ``` + +2. 
**Make your changes** + + - Write code following the project's style guidelines + - Add tests for any new functionality + - Update documentation as needed + +3. **Run tests** + + ```bash + # Run all tests + cargo test + + # Run tests with verbose output + cargo test --verbose + + # Run doc tests + cargo test --doc + + # Run a specific test + cargo test test_name + ``` + +4. **Add a changelog fragment** + + For any user-facing changes, create a changelog fragment: + + ```bash + # Create a new file in changelog.d/ + # Format: YYYYMMDD_HHMMSS_description.md + touch changelog.d/$(date +%Y%m%d_%H%M%S)_my_change.md + ``` + + Edit the file to document your changes: + + ```markdown + --- + bump: patch + --- + + ### Added + - Description of new feature + + ### Fixed + - Description of bug fix + ``` + + **Why fragments?** This prevents merge conflicts in CHANGELOG.md when multiple PRs are open simultaneously. + +5. **Commit your changes** + + ```bash + git add . + git commit -m "feat: add new feature" + ``` + +6. **Push and create a Pull Request** + + ```bash + git push origin feature/my-feature + ``` + + Then create a Pull Request on GitHub. + +## Code Style Guidelines + +This project uses: + +- **Nightly Rust features** for memory management +- **cargo test** for testing + +### Code Standards + +- Follow Rust idioms and best practices +- Use documentation comments (`///`) for all public APIs +- Write tests for all new functionality +- Keep functions focused and reasonably sized +- Keep files under 1000 lines +- Use meaningful variable and function names + +### Documentation Format + +Use Rust documentation comments: + +```rust +/// Brief description of the function. +/// +/// Longer description if needed. +/// +/// # Arguments +/// +/// * `arg1` - Description of arg1 +/// * `arg2` - Description of arg2 +/// +/// # Returns +/// +/// Description of return value +/// +/// # Errors +/// +/// Description of when errors are returned +/// +/// # Examples +/// +/// ``` +/// use platform_mem::example_function; +/// let result = example_function(1, 2); +/// assert_eq!(result, 3); +/// ``` +pub fn example_function(arg1: i32, arg2: i32) -> i32 { + arg1 + arg2 +} +``` + +## Testing Guidelines + +- Write tests for all new features +- Maintain or improve test coverage +- Use descriptive test names +- Organize tests in modules when appropriate +- Use `#[cfg(test)]` for test-only code + +Example test structure: + +```rust +#[cfg(test)] +mod tests { + use super::*; + + mod my_feature_tests { + use super::*; + + #[test] + fn test_basic_functionality() { + assert_eq!(my_function(), expected_result); + } + + #[test] + fn test_edge_case() { + assert_eq!(my_function(edge_case_input), expected_result); + } + } +} +``` + +## Pull Request Process + +1. Ensure all tests pass locally +2. Update documentation if needed +3. Add a changelog fragment (see step 4 in Development Workflow) +4. Ensure the PR description clearly describes the changes +5. Link any related issues in the PR description +6. Wait for CI checks to pass +7. Address any review feedback + +## Changelog Management + +This project uses a fragment-based changelog system similar to [Scriv](https://scriv.readthedocs.io/) (Python) and [Changesets](https://github.com/changesets/changesets) (JavaScript). 
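+
+After a release, each fragment's body is folded into `CHANGELOG.md` under a heading for the new version, with the `bump:` frontmatter stripped. As a rough sketch (the version number and date below are illustrative only), the fragment from step 4 of the Development Workflow above would appear as:
+
+```markdown
+## [0.2.0] - 2025-01-15
+
+### Added
+- Description of new feature
+
+### Fixed
+- Description of bug fix
+```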
+ +### Creating a Fragment + +```bash +# Create a new fragment with timestamp +touch changelog.d/$(date +%Y%m%d_%H%M%S)_description.md +``` + +### Fragment Categories + +Use these categories in your fragments: + +- **Added**: New features +- **Changed**: Changes to existing functionality +- **Deprecated**: Features that will be removed in future +- **Removed**: Features that were removed +- **Fixed**: Bug fixes +- **Security**: Security-related changes + +### During Release + +Fragments are automatically collected into CHANGELOG.md during the release process. The release workflow: + +1. Collects all fragments +2. Updates CHANGELOG.md with the new version entry +3. Removes processed fragment files +4. Bumps the version in Cargo.toml +5. Creates a git tag and GitHub release + +## Project Structure + +``` +. +├── .github/workflows/ # GitHub Actions CI/CD +├── changelog.d/ # Changelog fragments +│ ├── README.md # Fragment instructions +│ └── *.md # Individual changelog fragments +├── experiments/ # Example and experiment scripts +├── scripts/ # Utility scripts for releases +├── src/ +│ ├── lib.rs # Library entry point +│ ├── alloc.rs # Memory allocation +│ ├── file_mapped.rs # File-mapped memory +│ ├── raw_mem.rs # Raw memory trait +│ ├── raw_place.rs # Raw place utilities +│ └── utils.rs # Utility functions +├── tests/ # Integration tests +├── .gitignore # Git ignore patterns +├── Cargo.toml # Project configuration +├── CHANGELOG.md # Project changelog +├── CONTRIBUTING.md # This file +├── LICENSE # Unlicense (public domain) +├── README.md # Project README +└── rust-toolchain.toml # Rust toolchain configuration +``` + +## Release Process + +This project uses semantic versioning (MAJOR.MINOR.PATCH): + +- **MAJOR**: Breaking changes +- **MINOR**: New features (backward compatible) +- **PATCH**: Bug fixes (backward compatible) + +Releases are managed through GitHub releases. The CI/CD pipeline automatically: + +1. Runs tests on all platforms (Ubuntu, macOS, Windows) +2. Collects changelog fragments on merge to main +3. Bumps version based on fragment bump types +4. Creates GitHub releases + +## Getting Help + +- Open an issue for bugs or feature requests +- Use discussions for questions and general help +- Check existing issues and PRs before creating new ones + +## Code of Conduct + +- Be respectful and inclusive +- Provide constructive feedback +- Focus on what is best for the community +- Show empathy towards other community members + +Thank you for contributing! diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..8affb39 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,295 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "env_logger" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "libc" +version = "0.2.178" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "memchr" +version = "2.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" + +[[package]] +name = "memmap2" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f49388d20533534cd19360ad3d6a7dadc885944aa802ba3995040c5ec11288c6" +dependencies = [ + "libc", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "platform-mem" +version = "0.1.0-pre+beta.2" +dependencies = [ + "memmap2", + "paste", + "quickcheck", + "quickcheck_macros", + "tempfile", + "thiserror", +] + +[[package]] +name = "proc-macro2" +version = "1.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quickcheck" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" +dependencies = [ + "env_logger", + "log", + "rand", +] + +[[package]] +name = "quickcheck_macros" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f71ee38b42f8459a88d3362be6f9b841ad2d5421844f61eb1c59c11bff3ac14a" +dependencies = [ + 
"proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "syn" +version = "2.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6f671d4b5ffdb8eadec19c0ae67fe2639df8684bd7bc4b83d986b8db549cf01" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempfile" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +dependencies = [ + "cfg-if", + "fastrand", + "libc", + "redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "thiserror" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "winapi" +version = "0.3.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/changelog.d/20251228_012500_cicd_setup.md b/changelog.d/20251228_012500_cicd_setup.md new file mode 100644 index 0000000..49fb53b --- /dev/null +++ b/changelog.d/20251228_012500_cicd_setup.md @@ -0,0 +1,12 @@ +--- +bump: minor +--- + +### Added +- CI/CD pipeline with GitHub Actions for automated testing and releases +- Multi-platform testing (Ubuntu, macOS, Windows) +- Automated version bumping and changelog management +- Scripts for release automation (version bump, changelog collection, GitHub releases) +- Contributing guidelines and development workflow documentation +- Comprehensive test coverage (72 new tests) +- Toolchain configuration for nightly-2022-08-22 Rust diff --git a/changelog.d/README.md b/changelog.d/README.md new file mode 100644 index 0000000..b3437e3 --- /dev/null +++ b/changelog.d/README.md @@ -0,0 +1,135 @@ +# Changelog Fragments + +This directory contains changelog fragments that will be collected into `CHANGELOG.md` during releases. + +## How to Add a Changelog Fragment + +When making changes that should be documented in the changelog, create a fragment file: + +```bash +# Create a new fragment with timestamp +touch changelog.d/$(date +%Y%m%d_%H%M%S)_description.md + +# Or manually create a file matching the pattern: YYYYMMDD_HHMMSS_description.md +``` + +## Fragment Format + +Each fragment should include a **frontmatter section** specifying the version bump type: + +```markdown +--- +bump: patch +--- + +### Fixed +- Description of bug fix +``` + +### Bump Types + +Use semantic versioning bump types in the frontmatter: + +- **`major`**: Breaking changes (incompatible API changes) +- **`minor`**: New features (backward compatible) +- **`patch`**: Bug fixes (backward compatible) + +### Content Categories + +Use these categories in your fragment content: + +```markdown +--- +bump: minor +--- + +### Added +- Description of new feature + +### Changed +- Description of change to existing functionality + +### Fixed +- Description of bug fix + +### Removed +- Description of removed feature + +### Deprecated +- Description of deprecated feature + +### Security +- Description of security fix +``` + +## Examples + +### Adding a new feature (minor bump) + +```markdown +--- +bump: minor +--- + +### Added +- New async processing mode for batch operations +``` + +### Fixing a bug (patch bump) + +```markdown +--- +bump: patch +--- + +### Fixed +- Fixed memory leak in connection pool handling +``` + +### Breaking change (major bump) + +```markdown +--- +bump: major +--- + +### Changed +- Renamed `process()` to `process_async()` - this is a breaking change + +### Removed +- Removed deprecated `legacy_mode` option +``` + +## Why Fragments? + +Using changelog fragments (similar to [Changesets](https://github.com/changesets/changesets) in JavaScript and [Scriv](https://scriv.readthedocs.io/) in Python): + +1. 
**No merge conflicts**: Multiple PRs can add fragments without conflicts +2. **Per-PR documentation**: Each PR documents its own changes +3. **Automated version bumping**: Version bump type is specified per-change +4. **Automated collection**: Fragments are automatically collected during release +5. **Consistent format**: Template ensures consistent changelog entries + +## How It Works + +1. **During PR**: Add a fragment file with your changes and bump type +2. **On merge to main**: The release workflow automatically: + - Reads all fragment files and determines the highest bump type + - Bumps the version in `Cargo.toml` accordingly + - Collects fragments into `CHANGELOG.md` + - Creates a git tag and GitHub release + - Removes processed fragment files + +## Multiple PRs and Bump Priority + +When multiple PRs are merged before a release, all pending fragments are processed together. The **highest** bump type wins: + +- If any fragment specifies `major`, the release is a major version bump +- Otherwise, if any specifies `minor`, the release is a minor version bump +- Otherwise, the release is a patch version bump + +This ensures that breaking changes are never missed, even when combined with smaller changes. + +## Default Behavior + +If a fragment doesn't include a bump type in the frontmatter, it defaults to `patch`. diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 0000000..d7c5705 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "nightly-2022-08-22" diff --git a/scripts/bump-version.mjs b/scripts/bump-version.mjs new file mode 100644 index 0000000..579aafc --- /dev/null +++ b/scripts/bump-version.mjs @@ -0,0 +1,119 @@ +#!/usr/bin/env node + +/** + * Bump version in Cargo.toml + * Usage: node scripts/bump-version.mjs --bump-type [--dry-run] + * + * Uses link-foundation libraries: + * - use-m: Dynamic package loading without package.json dependencies + * - lino-arguments: Unified configuration from CLI args, env vars, and .lenv files + */ + +import { readFileSync, writeFileSync } from 'fs'; + +// Load use-m dynamically +const { use } = eval( + await (await fetch('https://unpkg.com/use-m/use.js')).text() +); + +// Import lino-arguments for CLI argument parsing +const { makeConfig } = await use('lino-arguments'); + +// Parse CLI arguments +const config = makeConfig({ + yargs: ({ yargs, getenv }) => + yargs + .option('bump-type', { + type: 'string', + default: getenv('BUMP_TYPE', ''), + describe: 'Version bump type: major, minor, or patch', + choices: ['major', 'minor', 'patch'], + }) + .option('dry-run', { + type: 'boolean', + default: false, + describe: 'Show what would be done without making changes', + }), +}); + +const { bumpType, dryRun } = config; + +if (!bumpType || !['major', 'minor', 'patch'].includes(bumpType)) { + console.error( + 'Usage: node scripts/bump-version.mjs --bump-type [--dry-run]' + ); + process.exit(1); +} + +/** + * Get current version from Cargo.toml + * @returns {{major: number, minor: number, patch: number}} + */ +function getCurrentVersion() { + const cargoToml = readFileSync('Cargo.toml', 'utf-8'); + const match = cargoToml.match(/^version\s*=\s*"(\d+)\.(\d+)\.(\d+)"/m); + + if (!match) { + console.error('Error: Could not parse version from Cargo.toml'); + process.exit(1); + } + + return { + major: parseInt(match[1], 10), + minor: parseInt(match[2], 10), + patch: parseInt(match[3], 10), + }; +} + +/** + * Calculate new version based on bump type + * @param {{major: number, minor: number, patch: 
number}} current + * @param {string} bumpType + * @returns {string} + */ +function calculateNewVersion(current, bumpType) { + const { major, minor, patch } = current; + + switch (bumpType) { + case 'major': + return `${major + 1}.0.0`; + case 'minor': + return `${major}.${minor + 1}.0`; + case 'patch': + return `${major}.${minor}.${patch + 1}`; + default: + throw new Error(`Invalid bump type: ${bumpType}`); + } +} + +/** + * Update version in Cargo.toml + * @param {string} newVersion + */ +function updateCargoToml(newVersion) { + let cargoToml = readFileSync('Cargo.toml', 'utf-8'); + cargoToml = cargoToml.replace( + /^(version\s*=\s*")[^"]+(")/m, + `$1${newVersion}$2` + ); + writeFileSync('Cargo.toml', cargoToml, 'utf-8'); +} + +try { + const current = getCurrentVersion(); + const currentStr = `${current.major}.${current.minor}.${current.patch}`; + const newVersion = calculateNewVersion(current, bumpType); + + console.log(`Current version: ${currentStr}`); + console.log(`New version: ${newVersion}`); + + if (dryRun) { + console.log('Dry run - no changes made'); + } else { + updateCargoToml(newVersion); + console.log('Updated Cargo.toml'); + } +} catch (error) { + console.error('Error:', error.message); + process.exit(1); +} diff --git a/scripts/check-file-size.mjs b/scripts/check-file-size.mjs new file mode 100644 index 0000000..4f2aedc --- /dev/null +++ b/scripts/check-file-size.mjs @@ -0,0 +1,100 @@ +#!/usr/bin/env node + +/** + * Check for files exceeding the maximum allowed line count + * Exits with error code 1 if any files exceed the limit + * + * Uses link-foundation libraries: + * - use-m: Dynamic package loading without package.json dependencies + */ + +import { readFileSync, readdirSync, statSync } from 'fs'; +import { join, relative, extname } from 'path'; + +const MAX_LINES = 1000; +const FILE_EXTENSIONS = ['.rs']; +const EXCLUDE_PATTERNS = ['target', '.git', 'node_modules']; + +/** + * Check if a path should be excluded + * @param {string} path + * @returns {boolean} + */ +function shouldExclude(path) { + return EXCLUDE_PATTERNS.some((pattern) => path.includes(pattern)); +} + +/** + * Recursively find all Rust files in a directory + * @param {string} directory + * @returns {string[]} + */ +function findRustFiles(directory) { + const files = []; + + function walkDir(dir) { + const entries = readdirSync(dir, { withFileTypes: true }); + + for (const entry of entries) { + const fullPath = join(dir, entry.name); + + if (shouldExclude(fullPath)) { + continue; + } + + if (entry.isDirectory()) { + walkDir(fullPath); + } else if (entry.isFile() && FILE_EXTENSIONS.includes(extname(entry.name))) { + files.push(fullPath); + } + } + } + + walkDir(directory); + return files; +} + +/** + * Count lines in a file + * @param {string} filePath + * @returns {number} + */ +function countLines(filePath) { + const content = readFileSync(filePath, 'utf-8'); + return content.split('\n').length; +} + +try { + const cwd = process.cwd(); + console.log(`\nChecking Rust files for maximum ${MAX_LINES} lines...\n`); + + const files = findRustFiles(cwd); + const violations = []; + + for (const file of files) { + const lineCount = countLines(file); + if (lineCount > MAX_LINES) { + violations.push({ + file: relative(cwd, file), + lines: lineCount, + }); + } + } + + if (violations.length === 0) { + console.log('All files are within the line limit\n'); + process.exit(0); + } else { + console.log('Found files exceeding the line limit:\n'); + for (const violation of violations) { + console.log( + ` 
${violation.file}: ${violation.lines} lines (exceeds ${MAX_LINES})` + ); + } + console.log(`\nPlease refactor these files to be under ${MAX_LINES} lines\n`); + process.exit(1); + } +} catch (error) { + console.error('Error:', error.message); + process.exit(1); +} diff --git a/scripts/collect-changelog.mjs b/scripts/collect-changelog.mjs new file mode 100644 index 0000000..a8c59ac --- /dev/null +++ b/scripts/collect-changelog.mjs @@ -0,0 +1,170 @@ +#!/usr/bin/env node + +/** + * Collect changelog fragments into CHANGELOG.md + * This script collects all .md files from changelog.d/ (except README.md) + * and prepends them to CHANGELOG.md, then removes the processed fragments. + * + * Uses link-foundation libraries: + * - use-m: Dynamic package loading without package.json dependencies + */ + +import { + readFileSync, + writeFileSync, + readdirSync, + unlinkSync, + existsSync, +} from 'fs'; +import { join } from 'path'; + +const CHANGELOG_DIR = 'changelog.d'; +const CHANGELOG_FILE = 'CHANGELOG.md'; +const INSERT_MARKER = ''; + +/** + * Get version from Cargo.toml + * @returns {string} + */ +function getVersionFromCargo() { + const cargoToml = readFileSync('Cargo.toml', 'utf-8'); + const match = cargoToml.match(/^version\s*=\s*"([^"]+)"/m); + + if (!match) { + console.error('Error: Could not find version in Cargo.toml'); + process.exit(1); + } + + return match[1]; +} + +/** + * Strip frontmatter from markdown content + * @param {string} content - Markdown content potentially with frontmatter + * @returns {string} - Content without frontmatter + */ +function stripFrontmatter(content) { + const frontmatterMatch = content.match(/^---\s*\n[\s\S]*?\n---\s*\n([\s\S]*)$/); + if (frontmatterMatch) { + return frontmatterMatch[1].trim(); + } + return content.trim(); +} + +/** + * Collect all changelog fragments + * @returns {string} + */ +function collectFragments() { + if (!existsSync(CHANGELOG_DIR)) { + return ''; + } + + const files = readdirSync(CHANGELOG_DIR) + .filter((f) => f.endsWith('.md') && f !== 'README.md') + .sort(); + + const fragments = []; + for (const file of files) { + const rawContent = readFileSync(join(CHANGELOG_DIR, file), 'utf-8'); + // Strip frontmatter (which contains bump type metadata) + const content = stripFrontmatter(rawContent); + if (content) { + fragments.push(content); + } + } + + return fragments.join('\n\n'); +} + +/** + * Update CHANGELOG.md with collected fragments + * @param {string} version + * @param {string} fragments + */ +function updateChangelog(version, fragments) { + const dateStr = new Date().toISOString().split('T')[0]; + const newEntry = `\n## [${version}] - ${dateStr}\n\n${fragments}\n`; + + if (existsSync(CHANGELOG_FILE)) { + let content = readFileSync(CHANGELOG_FILE, 'utf-8'); + + if (content.includes(INSERT_MARKER)) { + content = content.replace(INSERT_MARKER, `${INSERT_MARKER}${newEntry}`); + } else { + // Insert after the first ## heading + const lines = content.split('\n'); + let insertIndex = -1; + + for (let i = 0; i < lines.length; i++) { + if (lines[i].startsWith('## [')) { + insertIndex = i; + break; + } + } + + if (insertIndex >= 0) { + lines.splice(insertIndex, 0, newEntry); + content = lines.join('\n'); + } else { + // Append after the main heading + content += newEntry; + } + } + + writeFileSync(CHANGELOG_FILE, content, 'utf-8'); + } else { + const content = `# Changelog + +All notable changes to this project will be documented in this file. 
+ +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +${INSERT_MARKER} +${newEntry} +`; + writeFileSync(CHANGELOG_FILE, content, 'utf-8'); + } + + console.log(`Updated CHANGELOG.md with version ${version}`); +} + +/** + * Remove processed changelog fragments + */ +function removeFragments() { + if (!existsSync(CHANGELOG_DIR)) { + return; + } + + const files = readdirSync(CHANGELOG_DIR).filter( + (f) => f.endsWith('.md') && f !== 'README.md' + ); + + for (const file of files) { + const filePath = join(CHANGELOG_DIR, file); + unlinkSync(filePath); + console.log(`Removed ${filePath}`); + } +} + +try { + const version = getVersionFromCargo(); + console.log(`Collecting changelog fragments for version ${version}`); + + const fragments = collectFragments(); + + if (!fragments) { + console.log('No changelog fragments found'); + process.exit(0); + } + + updateChangelog(version, fragments); + removeFragments(); + + console.log('Changelog collection complete'); +} catch (error) { + console.error('Error:', error.message); + process.exit(1); +} diff --git a/scripts/create-github-release.mjs b/scripts/create-github-release.mjs new file mode 100644 index 0000000..1d82c96 --- /dev/null +++ b/scripts/create-github-release.mjs @@ -0,0 +1,110 @@ +#!/usr/bin/env node + +/** + * Create GitHub Release from CHANGELOG.md + * Usage: node scripts/create-github-release.mjs --release-version --repository + * + * Uses link-foundation libraries: + * - use-m: Dynamic package loading without package.json dependencies + * - command-stream: Modern shell command execution with streaming support + * - lino-arguments: Unified configuration from CLI args, env vars, and .lenv files + */ + +import { readFileSync, existsSync } from 'fs'; + +// Load use-m dynamically +const { use } = eval( + await (await fetch('https://unpkg.com/use-m/use.js')).text() +); + +// Import link-foundation libraries +const { $ } = await use('command-stream'); +const { makeConfig } = await use('lino-arguments'); + +// Parse CLI arguments +// Note: Using --release-version instead of --version to avoid conflict with yargs' built-in --version flag +const config = makeConfig({ + yargs: ({ yargs, getenv }) => + yargs + .option('release-version', { + type: 'string', + default: getenv('VERSION', ''), + describe: 'Version number (e.g., 1.0.0)', + }) + .option('repository', { + type: 'string', + default: getenv('REPOSITORY', ''), + describe: 'GitHub repository (e.g., owner/repo)', + }), +}); + +const { releaseVersion: version, repository } = config; + +if (!version || !repository) { + console.error('Error: Missing required arguments'); + console.error( + 'Usage: node scripts/create-github-release.mjs --release-version --repository ' + ); + process.exit(1); +} + +const tag = `v${version}`; + +console.log(`Creating GitHub release for ${tag}...`); + +/** + * Extract changelog content for a specific version + * @param {string} version + * @returns {string} + */ +function getChangelogForVersion(version) { + const changelogPath = 'CHANGELOG.md'; + + if (!existsSync(changelogPath)) { + return `Release v${version}`; + } + + const content = readFileSync(changelogPath, 'utf-8'); + + // Find the section for this version + const escapedVersion = version.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + const pattern = new RegExp( + `## \\[${escapedVersion}\\].*?\\n([\\s\\S]*?)(?=\\n## \\[|$)` + ); + const match = content.match(pattern); + + if (match) { + return 
match[1].trim(); + } + + return `Release v${version}`; +} + +try { + const releaseNotes = getChangelogForVersion(version); + + // Create release using GitHub API with JSON input + // This avoids shell escaping issues + const payload = JSON.stringify({ + tag_name: tag, + name: `v${version}`, + body: releaseNotes, + }); + + try { + await $`gh api repos/${repository}/releases -X POST --input -`.run({ + stdin: payload, + }); + console.log(`Created GitHub release: ${tag}`); + } catch (error) { + // Check if release already exists + if (error.message && error.message.includes('already exists')) { + console.log(`Release ${tag} already exists, skipping`); + } else { + throw error; + } + } +} catch (error) { + console.error('Error creating release:', error.message); + process.exit(1); +} diff --git a/scripts/get-bump-type.mjs b/scripts/get-bump-type.mjs new file mode 100644 index 0000000..ff9f77c --- /dev/null +++ b/scripts/get-bump-type.mjs @@ -0,0 +1,152 @@ +#!/usr/bin/env node + +/** + * Parse changelog fragments and determine version bump type + * + * This script reads changeset fragments from changelog.d/ and determines + * the version bump type based on the frontmatter in each fragment. + * + * Fragment format: + * --- + * bump: patch|minor|major + * --- + * + * ### Added + * - Your changes here + * + * Usage: node scripts/get-bump-type.mjs [--default ] + * + * Uses link-foundation libraries: + * - use-m: Dynamic package loading without package.json dependencies + * - lino-arguments: Unified configuration from CLI args, env vars, and .lenv files + */ + +import { readFileSync, readdirSync, existsSync, appendFileSync } from 'fs'; +import { join } from 'path'; + +// Load use-m dynamically +const { use } = eval( + await (await fetch('https://unpkg.com/use-m/use.js')).text() +); + +// Import lino-arguments for CLI argument parsing +const { makeConfig } = await use('lino-arguments'); + +// Parse CLI arguments +const config = makeConfig({ + yargs: ({ yargs, getenv }) => + yargs + .option('default', { + type: 'string', + default: getenv('DEFAULT_BUMP', 'patch'), + describe: 'Default bump type if no fragments specify one', + choices: ['major', 'minor', 'patch'], + }), +}); + +const { default: defaultBump } = config; + +const CHANGELOG_DIR = 'changelog.d'; + +// Bump type priority (higher = more significant) +const BUMP_PRIORITY = { + patch: 1, + minor: 2, + major: 3, +}; + +/** + * Parse frontmatter from a markdown file + * @param {string} content - File content + * @returns {{bump?: string, content: string}} + */ +function parseFrontmatter(content) { + const frontmatterMatch = content.match(/^---\s*\n([\s\S]*?)\n---\s*\n([\s\S]*)$/); + + if (!frontmatterMatch) { + return { content }; + } + + const frontmatter = frontmatterMatch[1]; + const body = frontmatterMatch[2]; + + // Parse YAML-like frontmatter (simple key: value format) + const data = {}; + for (const line of frontmatter.split('\n')) { + const match = line.match(/^\s*(\w+)\s*:\s*(.+?)\s*$/); + if (match) { + data[match[1]] = match[2]; + } + } + + return { ...data, content: body }; +} + +/** + * Get all changelog fragments and determine bump type + * @returns {{bumpType: string, fragmentCount: number}} + */ +function determineBumpType() { + if (!existsSync(CHANGELOG_DIR)) { + console.log(`No ${CHANGELOG_DIR} directory found`); + return { bumpType: defaultBump, fragmentCount: 0 }; + } + + const files = readdirSync(CHANGELOG_DIR) + .filter((f) => f.endsWith('.md') && f !== 'README.md') + .sort(); + + if (files.length === 0) { + console.log('No 
changelog fragments found'); + return { bumpType: defaultBump, fragmentCount: 0 }; + } + + let highestPriority = 0; + let highestBumpType = defaultBump; + + for (const file of files) { + const content = readFileSync(join(CHANGELOG_DIR, file), 'utf-8'); + const { bump } = parseFrontmatter(content); + + if (bump && BUMP_PRIORITY[bump]) { + const priority = BUMP_PRIORITY[bump]; + if (priority > highestPriority) { + highestPriority = priority; + highestBumpType = bump; + } + console.log(`Fragment ${file}: bump=${bump}`); + } else { + console.log(`Fragment ${file}: no bump specified, using default`); + } + } + + return { bumpType: highestBumpType, fragmentCount: files.length }; +} + +/** + * Append to GitHub Actions output file + * @param {string} key + * @param {string} value + */ +function setOutput(key, value) { + const outputFile = process.env.GITHUB_OUTPUT; + if (outputFile) { + appendFileSync(outputFile, `${key}=${value}\n`); + } + // Also log for visibility + console.log(`Output: ${key}=${value}`); +} + +try { + const { bumpType, fragmentCount } = determineBumpType(); + + console.log(`\nDetermined bump type: ${bumpType} (from ${fragmentCount} fragment(s))`); + + setOutput('bump_type', bumpType); + setOutput('fragment_count', String(fragmentCount)); + setOutput('has_fragments', fragmentCount > 0 ? 'true' : 'false'); + +} catch (error) { + console.error('Error:', error.message); + process.exit(1); +} diff --git a/scripts/version-and-commit.mjs b/scripts/version-and-commit.mjs new file mode 100644 index 0000000..4e66286 --- /dev/null +++ b/scripts/version-and-commit.mjs @@ -0,0 +1,276 @@ +#!/usr/bin/env node + +/** + * Bump version in Cargo.toml and commit changes + * Used by the CI/CD pipeline for releases + * + * Usage: node scripts/version-and-commit.mjs --bump-type [--description ] + * + * Uses link-foundation libraries: + * - use-m: Dynamic package loading without package.json dependencies + * - command-stream: Modern shell command execution with streaming support + * - lino-arguments: Unified configuration from CLI args, env vars, and .lenv files + */ + +import { readFileSync, writeFileSync, appendFileSync, readdirSync, existsSync } from 'fs'; +import { join } from 'path'; + +// Load use-m dynamically +const { use } = eval( + await (await fetch('https://unpkg.com/use-m/use.js')).text() +); + +// Import link-foundation libraries +const { $ } = await use('command-stream'); +const { makeConfig } = await use('lino-arguments'); + +// Parse CLI arguments +const config = makeConfig({ + yargs: ({ yargs, getenv }) => + yargs + .option('bump-type', { + type: 'string', + default: getenv('BUMP_TYPE', ''), + describe: 'Version bump type: major, minor, or patch', + choices: ['major', 'minor', 'patch'], + }) + .option('description', { + type: 'string', + default: getenv('DESCRIPTION', ''), + describe: 'Release description', + }), +}); + +const { bumpType, description } = config; + +if (!bumpType || !['major', 'minor', 'patch'].includes(bumpType)) { + console.error( + 'Usage: node scripts/version-and-commit.mjs --bump-type [--description ]' + ); + process.exit(1); +} + +/** + * Append to GitHub Actions output file + * @param {string} key + * @param {string} value + */ +function setOutput(key, value) { + const outputFile = process.env.GITHUB_OUTPUT; + if (outputFile) { + appendFileSync(outputFile, `${key}=${value}\n`); + } + // Also log for visibility + console.log(`::set-output name=${key}::${value}`); +} + +/** + * Get current version from Cargo.toml + * @returns {{major: number, minor: number, 
patch: number}} + */ +function getCurrentVersion() { + const cargoToml = readFileSync('Cargo.toml', 'utf-8'); + const match = cargoToml.match(/^version\s*=\s*"(\d+)\.(\d+)\.(\d+)"/m); + + if (!match) { + console.error('Error: Could not parse version from Cargo.toml'); + process.exit(1); + } + + return { + major: parseInt(match[1], 10), + minor: parseInt(match[2], 10), + patch: parseInt(match[3], 10), + }; +} + +/** + * Calculate new version based on bump type + * @param {{major: number, minor: number, patch: number}} current + * @param {string} bumpType + * @returns {string} + */ +function calculateNewVersion(current, bumpType) { + const { major, minor, patch } = current; + + switch (bumpType) { + case 'major': + return `${major + 1}.0.0`; + case 'minor': + return `${major}.${minor + 1}.0`; + case 'patch': + return `${major}.${minor}.${patch + 1}`; + default: + throw new Error(`Invalid bump type: ${bumpType}`); + } +} + +/** + * Update version in Cargo.toml + * @param {string} newVersion + */ +function updateCargoToml(newVersion) { + let cargoToml = readFileSync('Cargo.toml', 'utf-8'); + cargoToml = cargoToml.replace( + /^(version\s*=\s*")[^"]+(")/m, + `$1${newVersion}$2` + ); + writeFileSync('Cargo.toml', cargoToml, 'utf-8'); + console.log(`Updated Cargo.toml to version ${newVersion}`); +} + +/** + * Check if a git tag exists for this version + * @param {string} version + * @returns {Promise} + */ +async function checkTagExists(version) { + try { + await $`git rev-parse v${version}`.run({ capture: true }); + return true; + } catch { + return false; + } +} + +/** + * Strip frontmatter from markdown content + * @param {string} content - Markdown content potentially with frontmatter + * @returns {string} - Content without frontmatter + */ +function stripFrontmatter(content) { + const frontmatterMatch = content.match(/^---\s*\n[\s\S]*?\n---\s*\n([\s\S]*)$/); + if (frontmatterMatch) { + return frontmatterMatch[1].trim(); + } + return content.trim(); +} + +/** + * Collect changelog fragments and update CHANGELOG.md + * @param {string} version + */ +function collectChangelog(version) { + const changelogDir = 'changelog.d'; + const changelogFile = 'CHANGELOG.md'; + + if (!existsSync(changelogDir)) { + return; + } + + const files = readdirSync(changelogDir).filter( + (f) => f.endsWith('.md') && f !== 'README.md' + ); + + if (files.length === 0) { + return; + } + + const fragments = files + .sort() + .map((f) => { + const rawContent = readFileSync(join(changelogDir, f), 'utf-8'); + // Strip frontmatter (which contains bump type metadata) + return stripFrontmatter(rawContent); + }) + .filter(Boolean) + .join('\n\n'); + + if (!fragments) { + return; + } + + const dateStr = new Date().toISOString().split('T')[0]; + const newEntry = `\n## [${version}] - ${dateStr}\n\n${fragments}\n`; + + if (existsSync(changelogFile)) { + let content = readFileSync(changelogFile, 'utf-8'); + const lines = content.split('\n'); + let insertIndex = -1; + + for (let i = 0; i < lines.length; i++) { + if (lines[i].startsWith('## [')) { + insertIndex = i; + break; + } + } + + if (insertIndex >= 0) { + lines.splice(insertIndex, 0, newEntry); + content = lines.join('\n'); + } else { + content += newEntry; + } + + writeFileSync(changelogFile, content, 'utf-8'); + } + + console.log(`Collected ${files.length} changelog fragment(s)`); +} + +async function main() { + try { + // Configure git + await $`git config user.name "github-actions[bot]"`; + await $`git config user.email "github-actions[bot]@users.noreply.github.com"`; + + 
const current = getCurrentVersion(); + const newVersion = calculateNewVersion(current, bumpType); + + // Check if this version was already released + if (await checkTagExists(newVersion)) { + console.log(`Tag v${newVersion} already exists`); + setOutput('already_released', 'true'); + setOutput('new_version', newVersion); + return; + } + + // Update version in Cargo.toml + updateCargoToml(newVersion); + + // Collect changelog fragments + collectChangelog(newVersion); + + // Stage Cargo.toml and CHANGELOG.md + await $`git add Cargo.toml CHANGELOG.md`; + + // Check if there are changes to commit + try { + await $`git diff --cached --quiet`.run({ capture: true }); + // No changes to commit + console.log('No changes to commit'); + setOutput('version_committed', 'false'); + setOutput('new_version', newVersion); + return; + } catch { + // There are changes to commit (git diff exits with 1 when there are differences) + } + + // Commit changes + const commitMsg = description + ? `chore: release v${newVersion}\n\n${description}` + : `chore: release v${newVersion}`; + await $`git commit -m ${commitMsg}`; + console.log(`Committed version ${newVersion}`); + + // Create tag + const tagMsg = description + ? `Release v${newVersion}\n\n${description}` + : `Release v${newVersion}`; + await $`git tag -a v${newVersion} -m ${tagMsg}`; + console.log(`Created tag v${newVersion}`); + + // Push changes and tag + await $`git push`; + await $`git push --tags`; + console.log('Pushed changes and tags'); + + setOutput('version_committed', 'true'); + setOutput('new_version', newVersion); + } catch (error) { + console.error('Error:', error.message); + process.exit(1); + } +} + +main(); diff --git a/src/lib.rs b/src/lib.rs index 446d51e..59ddc8e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,10 +1,16 @@ #![feature( allocator_api, + unchecked_math, + maybe_uninit_slice, slice_ptr_get, ptr_as_uninit, + inline_const, slice_range, + maybe_uninit_write_slice, unboxed_closures, - fn_traits + fn_traits, + let_else, + nonnull_slice_from_raw_parts )] // special lint #![cfg_attr(not(test), forbid(clippy::unwrap_used))] @@ -14,7 +20,7 @@ mod alloc; mod file_mapped; -mod raw_mem; +pub mod raw_mem; mod raw_place; mod utils; diff --git a/src/raw_mem.rs b/src/raw_mem.rs index 4cdaade..66e4138 100644 --- a/src/raw_mem.rs +++ b/src/raw_mem.rs @@ -58,7 +58,7 @@ pub trait RawMem { /// # Safety /// Caller must guarantee that `fill` makes the uninitialized part valid for - /// [`assume_init_mut`](prim@slice#method.assume_init_mut) + /// [`MaybeUninit::slice_assume_init_mut`] /// /// ### Incorrect usage /// ```no_run @@ -128,7 +128,7 @@ pub trait RawMem { } /// # Safety - /// [`Item`] must satisfy [initialization invariant][inv] for [`std::mem::zeroed`] + /// [`Item`] must satisfy [initialization invariant][inv] for [`mem::zeroed`] /// /// [`Item`]: Self::Item /// [inv]: MaybeUninit#initialization-invariant @@ -232,7 +232,7 @@ pub trait RawMem { let Range { start, end } = slice::range(range, ..self.allocated().len()); unsafe { self.grow(end - start, |_, (within, uninit)| { - uninit.write_clone_of_slice(&within[start..end]); + MaybeUninit::write_slice_cloned(uninit, &within[start..end]); }) } } @@ -243,7 +243,7 @@ pub trait RawMem { { unsafe { self.grow(src.len(), |_, (_, uninit)| { - uninit.write_clone_of_slice(src); + MaybeUninit::write_slice_cloned(uninit, src); }) } } @@ -396,9 +396,9 @@ pub mod uninit { // SAFETY: this raw slice will contain only initialized objects // that's why, it is allowed to drop it. 
unsafe { - ptr::drop_in_place( - self.slice.get_unchecked_mut(..self.init).assume_init_mut(), - ); + ptr::drop_in_place(MaybeUninit::slice_assume_init_mut( + self.slice.get_unchecked_mut(..self.init), + )); } } } diff --git a/src/raw_place.rs b/src/raw_place.rs index 646ac8d..1dfa8a6 100644 --- a/src/raw_place.rs +++ b/src/raw_place.rs @@ -73,7 +73,7 @@ impl RawPlace { self.len = cap; // `len` is same `cap` only if `uninit` was init - uninit.assume_init_mut() + MaybeUninit::slice_assume_init_mut(uninit) } pub fn shrink_to(&mut self, cap: usize) { diff --git a/tests/coverage.rs b/tests/coverage.rs new file mode 100644 index 0000000..3e510e8 --- /dev/null +++ b/tests/coverage.rs @@ -0,0 +1,758 @@ +//! Comprehensive tests for 100% code coverage + +#![feature(allocator_api)] +#![feature(assert_matches)] + +use platform_mem::{Alloc, ErasedMem, Error, FileMapped, Global, RawMem, Result, System, TempFile}; +use std::alloc::Global as GlobalAlloc; +use std::assert_matches::assert_matches; +use std::io; + +// ============================================================================ +// Alloc tests +// ============================================================================ + +mod alloc_tests { + use super::*; + + #[test] + fn new_creates_empty_alloc() { + let alloc: Alloc = Alloc::new(GlobalAlloc); + assert_eq!(alloc.allocated().len(), 0); + } + + #[test] + fn allocated_returns_empty_slice_initially() { + let alloc: Alloc = Alloc::new(GlobalAlloc); + assert!(alloc.allocated().is_empty()); + } + + #[test] + fn allocated_mut_returns_empty_slice_initially() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + assert!(alloc.allocated_mut().is_empty()); + } + + #[test] + fn grow_increases_capacity() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + alloc.grow_filled(10, 42).unwrap(); + assert_eq!(alloc.allocated().len(), 10); + assert_eq!(alloc.allocated(), &[42u64; 10]); + } + + #[test] + fn grow_capacity_overflow() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + let result = alloc.grow_filled(usize::MAX, 0); + assert_matches!(result, Err(Error::CapacityOverflow)); + } + + #[test] + fn shrink_decreases_capacity() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + alloc.grow_filled(10, 42).unwrap(); + alloc.shrink(5).unwrap(); + assert_eq!(alloc.allocated().len(), 5); + } + + #[test] + fn shrink_to_zero() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + alloc.grow_filled(10, 42).unwrap(); + alloc.shrink(10).unwrap(); + assert_eq!(alloc.allocated().len(), 0); + } + + #[test] + fn shrink_on_empty_does_nothing() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + alloc.shrink(0).unwrap(); + assert_eq!(alloc.allocated().len(), 0); + } + + #[test] + #[should_panic(expected = "Tried to shrink to a larger capacity")] + fn shrink_beyond_capacity_panics() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + alloc.grow_filled(5, 42).unwrap(); + alloc.shrink(10).unwrap(); + } + + #[test] + fn multiple_grows() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + alloc.grow_filled(5, 1).unwrap(); + alloc.grow_filled(5, 2).unwrap(); + alloc.grow_filled(5, 3).unwrap(); + assert_eq!(alloc.allocated().len(), 15); + assert_eq!(&alloc.allocated()[..5], &[1, 1, 1, 1, 1]); + assert_eq!(&alloc.allocated()[5..10], &[2, 2, 2, 2, 2]); + assert_eq!(&alloc.allocated()[10..15], &[3, 3, 3, 3, 3]); + } + + #[test] + fn drop_cleans_up_memory() { + let mut alloc: Alloc = Alloc::new(GlobalAlloc); + alloc.grow_filled(5, String::from("hello")).unwrap(); + // Drop should clean up allocated strings + } + + 
+    #[test]
+    fn debug_format() {
+        let alloc: Alloc<u64, _> = Alloc::new(GlobalAlloc);
+        let debug_str = format!("{:?}", alloc);
+        assert!(debug_str.contains("Alloc"));
+    }
+}
+
+// ============================================================================
+// Global/System allocator wrapper tests
+// ============================================================================
+
+mod wrapper_tests {
+    use super::*;
+
+    #[test]
+    fn global_new() {
+        let global: Global<u64> = Global::new();
+        assert_eq!(global.allocated().len(), 0);
+    }
+
+    #[test]
+    fn global_default() {
+        let global: Global<u64> = Global::default();
+        assert_eq!(global.allocated().len(), 0);
+    }
+
+    #[test]
+    fn global_grow_and_shrink() {
+        let mut global: Global<u64> = Global::new();
+        global.grow_filled(10, 42).unwrap();
+        assert_eq!(global.allocated().len(), 10);
+        global.shrink(5).unwrap();
+        assert_eq!(global.allocated().len(), 5);
+    }
+
+    #[test]
+    fn global_allocated_mut() {
+        let mut global: Global<u64> = Global::new();
+        global.grow_filled(5, 0).unwrap();
+        global.allocated_mut()[0] = 42;
+        assert_eq!(global.allocated()[0], 42);
+    }
+
+    #[test]
+    fn global_size_hint() {
+        let global: Global<u64> = Global::new();
+        assert_eq!(global.size_hint(), None);
+    }
+
+    #[test]
+    fn global_debug() {
+        let global: Global<u64> = Global::new();
+        let debug_str = format!("{:?}", global);
+        assert!(debug_str.contains("Global"));
+    }
+
+    #[test]
+    fn system_new() {
+        let system: System<u64> = System::new();
+        assert_eq!(system.allocated().len(), 0);
+    }
+
+    #[test]
+    fn system_default() {
+        let system: System<u64> = System::default();
+        assert_eq!(system.allocated().len(), 0);
+    }
+
+    #[test]
+    fn system_grow_and_shrink() {
+        let mut system: System<u64> = System::new();
+        system.grow_filled(10, 42).unwrap();
+        assert_eq!(system.allocated().len(), 10);
+        system.shrink(5).unwrap();
+        assert_eq!(system.allocated().len(), 5);
+    }
+}
+
+// ============================================================================
+// TempFile tests
+// ============================================================================
+
+mod tempfile_tests {
+    use super::*;
+
+    #[test]
+    fn tempfile_new() {
+        let tempfile = TempFile::<u64>::new().unwrap();
+        assert_eq!(tempfile.allocated().len(), 0);
+    }
+
+    #[test]
+    fn tempfile_new_in() {
+        let tempfile = TempFile::<u64>::new_in(".").unwrap();
+        assert_eq!(tempfile.allocated().len(), 0);
+    }
+
+    #[test]
+    fn tempfile_grow() {
+        let mut tempfile = TempFile::<u64>::new().unwrap();
+        tempfile.grow_filled(10, 42).unwrap();
+        assert_eq!(tempfile.allocated().len(), 10);
+        assert_eq!(tempfile.allocated(), &[42u64; 10]);
+    }
+
+    #[test]
+    fn tempfile_shrink() {
+        let mut tempfile = TempFile::<u64>::new().unwrap();
+        tempfile.grow_filled(10, 42).unwrap();
+        tempfile.shrink(5).unwrap();
+        assert_eq!(tempfile.allocated().len(), 5);
+    }
+
+    #[test]
+    fn tempfile_debug() {
+        let tempfile = TempFile::<u64>::new().unwrap();
+        let debug_str = format!("{:?}", tempfile);
+        assert!(debug_str.contains("TempFile"));
+    }
+}
+
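+// Sketch: `TempFile` exposes the same `RawMem` surface as the in-memory
+// backends above, so slice-based growth works the same way. Assumes
+// `grow_from_slice` (a provided `RawMem` method, exercised for `Global`
+// further below) is usable for `u64` items here as well.
+#[test]
+fn tempfile_grow_from_slice_sketch() -> Result<()> {
+    let mut mem = TempFile::<u64>::new()?;
+    mem.grow_from_slice(&[1, 2, 3])?;
+    assert_eq!(mem.allocated(), &[1, 2, 3]);
+    Ok(())
+}
+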
+// ============================================================================
+// FileMapped tests
+// ============================================================================
+
+mod file_mapped_tests {
+    use super::*;
+    use std::fs;
+
+    fn cleanup_test_file(path: &str) {
+        let _ = fs::remove_file(path);
+    }
+
+    #[test]
+    fn from_path_creates_new_file() -> Result<()> {
+        const FILE: &str = "test_from_path.bin";
+        cleanup_test_file(FILE);
+
+        let mem = FileMapped::<u64>::from_path(FILE)?;
+        assert_eq!(mem.allocated().len(), 0);
+
+        cleanup_test_file(FILE);
+        Ok(())
+    }
+
+    #[test]
+    fn grow_and_access() -> Result<()> {
+        const FILE: &str = "test_grow_access.bin";
+        cleanup_test_file(FILE);
+
+        {
+            let mut mem = FileMapped::<u64>::from_path(FILE)?;
+            mem.grow_filled(10, 42)?;
+            assert_eq!(mem.allocated().len(), 10);
+            mem.allocated_mut()[0] = 123;
+        }
+
+        cleanup_test_file(FILE);
+        Ok(())
+    }
+
+    #[test]
+    fn shrink_memory() -> Result<()> {
+        const FILE: &str = "test_shrink.bin";
+        cleanup_test_file(FILE);
+
+        {
+            let mut mem = FileMapped::<u64>::from_path(FILE)?;
+            mem.grow_filled(10, 42)?;
+            mem.shrink(5)?;
+            assert_eq!(mem.allocated().len(), 5);
+        }
+
+        cleanup_test_file(FILE);
+        Ok(())
+    }
+
+    #[test]
+    fn capacity_overflow() -> Result<()> {
+        const FILE: &str = "test_overflow.bin";
+        cleanup_test_file(FILE);
+
+        let mut mem = FileMapped::<u64>::from_path(FILE)?;
+        let result = mem.grow_filled(usize::MAX, 0);
+        assert_matches!(result, Err(Error::CapacityOverflow));
+
+        cleanup_test_file(FILE);
+        Ok(())
+    }
+
+    #[test]
+    fn debug_format() -> Result<()> {
+        const FILE: &str = "test_debug.bin";
+        cleanup_test_file(FILE);
+
+        let mem = FileMapped::<u64>::from_path(FILE)?;
+        let debug_str = format!("{:?}", mem);
+        assert!(debug_str.contains("FileMapped"));
+
+        cleanup_test_file(FILE);
+        Ok(())
+    }
+
+    #[test]
+    fn grow_assumed_with_existing_file() -> Result<()> {
+        const FILE: &str = "test_assumed.bin";
+        cleanup_test_file(FILE);
+
+        unsafe {
+            let mut mem = FileMapped::<u8>::from_path(FILE)?;
+            // File is zeroed, so we can assume it's initialized for u8
+            let _slice = mem.grow_assumed(100)?;
+        }
+
+        cleanup_test_file(FILE);
+        Ok(())
+    }
+}
+
+// ============================================================================
+// RawMem trait method tests
+// ============================================================================
+
+mod raw_mem_tests {
+    use super::*;
+
+    #[test]
+    fn grow_zeroed() {
+        let mut mem = Global::<u64>::new();
+        unsafe {
+            mem.grow_zeroed(10).unwrap();
+        }
+        assert_eq!(mem.allocated(), &[0u64; 10]);
+    }
+
+    #[test]
+    fn grow_zeroed_exact() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(5, 1).unwrap();
+        unsafe {
+            mem.grow_zeroed_exact(5).unwrap();
+        }
+        // First 5 should be 1, next 5 should be 0
+        assert_eq!(&mem.allocated()[..5], &[1, 1, 1, 1, 1]);
+        // The grow_zeroed_exact zeros from inited onwards
+    }
+
+    #[test]
+    fn grow_with() {
+        let mut mem = Global::<u64>::new();
+        let mut counter = 0u64;
+        mem.grow_with(5, || {
+            counter += 1;
+            counter
+        })
+        .unwrap();
+        assert_eq!(mem.allocated(), &[1, 2, 3, 4, 5]);
+    }
+
+    #[test]
+    fn grow_with_exact() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(3, 0).unwrap();
+        let mut counter = 0u64;
+        unsafe {
+            mem.grow_with_exact(3, || {
+                counter += 1;
+                counter
+            })
+            .unwrap();
+        }
+        assert_eq!(mem.allocated().len(), 6);
+    }
+
+    #[test]
+    fn grow_filled_exact() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(3, 0).unwrap();
+        unsafe {
+            mem.grow_filled_exact(3, 42).unwrap();
+        }
+        assert_eq!(mem.allocated().len(), 6);
+    }
+
+    #[test]
+    fn grow_within_range() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(5, 0).unwrap();
+        mem.allocated_mut()[0] = 1;
+        mem.allocated_mut()[1] = 2;
+        mem.allocated_mut()[2] = 3;
+        mem.grow_within(0..3).unwrap();
+        assert_eq!(mem.allocated().len(), 8);
+        assert_eq!(&mem.allocated()[5..8], &[1, 2, 3]);
+    }
+
+    #[test]
+    fn grow_from_slice() {
+        let mut mem = Global::<u64>::new();
+        let slice = [1, 2, 3, 4, 5];
+        mem.grow_from_slice(&slice).unwrap();
+        assert_eq!(mem.allocated(), &slice);
+    }
+
+    #[test]
+    fn size_hint_returns_none() {
+        let mem = Global::<u64>::new();
+        assert_eq!(mem.size_hint(), None);
+    }
+}
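+
+// Sketch of code that is generic over any backend. The bound assumes `RawMem`
+// exposes `Item` as an associated type (as the `Self::Item` links in
+// src/raw_mem.rs suggest) rather than as a generic parameter.
+fn grow_and_sum<M: RawMem<Item = u64>>(mem: &mut M) -> Result<u64> {
+    mem.grow_filled(3, 2)?;
+    Ok(mem.allocated().iter().sum())
+}
+
+#[test]
+fn generic_backend_sketch() -> Result<()> {
+    assert_eq!(grow_and_sum(&mut Global::<u64>::new())?, 6);
+    assert_eq!(grow_and_sum(&mut TempFile::<u64>::new()?)?, 6);
+    Ok(())
+}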
+
+// ============================================================================
+// ErasedMem tests
+// ============================================================================
+
+mod erased_mem_tests {
+    use super::*;
+
+    #[test]
+    fn box_dyn_erased_mem() {
+        let mut mem: Box<dyn ErasedMem<u64>> = Box::new(Global::<u64>::new());
+        mem.grow_filled(5, 42).unwrap();
+        assert_eq!(mem.allocated().len(), 5);
+    }
+
+    #[test]
+    fn box_dyn_erased_mem_sync() {
+        let mut mem: Box<dyn ErasedMem<u64> + Sync> = Box::new(Global::<u64>::new());
+        mem.grow_filled(5, 42).unwrap();
+        assert_eq!(mem.allocated().len(), 5);
+    }
+
+    #[test]
+    fn box_dyn_erased_mem_sync_send() {
+        let mut mem: Box<dyn ErasedMem<u64> + Sync + Send> = Box::new(Global::<u64>::new());
+        mem.grow_filled(5, 42).unwrap();
+        assert_eq!(mem.allocated().len(), 5);
+    }
+
+    #[test]
+    fn mutable_reference_as_erased_mem() {
+        let mut inner = Global::<u64>::new();
+        // Use ErasedMem through a mutable reference, which implements RawMem
+        let mem: &mut Global<u64> = &mut inner;
+        mem.grow_filled(5, 42).unwrap();
+        assert_eq!(mem.allocated().len(), 5);
+    }
+
+    #[test]
+    fn erased_shrink() {
+        let mut mem: Box<dyn ErasedMem<u64>> = Box::new(Global::<u64>::new());
+        mem.grow_filled(10, 42).unwrap();
+        mem.shrink(5).unwrap();
+        assert_eq!(mem.allocated().len(), 5);
+    }
+
+    #[test]
+    fn erased_size_hint() {
+        let mem: Box<dyn ErasedMem<u64>> = Box::new(Global::<u64>::new());
+        assert_eq!(mem.size_hint(), None);
+    }
+
+    #[test]
+    fn erased_allocated_mut() {
+        let mut mem: Box<dyn ErasedMem<u64>> = Box::new(Global::<u64>::new());
+        mem.grow_filled(5, 0).unwrap();
+        mem.allocated_mut()[0] = 42;
+        assert_eq!(mem.allocated()[0], 42);
+    }
+}
+
+// ============================================================================
+// Error type tests
+// ============================================================================
+
+mod error_tests {
+    use super::*;
+    use std::alloc::Layout;
+
+    #[test]
+    fn error_display_capacity_overflow() {
+        let err = Error::CapacityOverflow;
+        assert!(err.to_string().contains("capacity"));
+    }
+
+    #[test]
+    fn error_display_overgrow() {
+        let err = Error::OverGrow { to_grow: 100, available: 50 };
+        let msg = err.to_string();
+        assert!(msg.contains("100"));
+        assert!(msg.contains("50"));
+    }
+
+    #[test]
+    fn error_display_alloc_error() {
+        let layout = Layout::from_size_align(1024, 8).unwrap();
+        let err = Error::AllocError { layout, non_exhaustive: () };
+        assert!(err.to_string().contains("allocation"));
+    }
+
+    #[test]
+    fn error_display_system() {
+        let io_err = io::Error::new(io::ErrorKind::Other, "test error");
+        let err = Error::from(io_err);
+        assert!(err.to_string().contains("test error"));
+    }
+
+    #[test]
+    fn error_debug() {
+        let err = Error::CapacityOverflow;
+        let debug_str = format!("{:?}", err);
+        assert!(debug_str.contains("CapacityOverflow"));
+    }
+}
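+
+// Sketch of mapping the error variants checked above to user-facing text.
+// Only the variants visible in this file are matched; the name of the
+// io-backed variant produced by `Error::from(io::Error)` is not assumed,
+// so it falls into the catch-all arm.
+fn describe(err: &Error) -> String {
+    match err {
+        Error::CapacityOverflow => "requested capacity overflows usize".into(),
+        Error::OverGrow { to_grow, available } => {
+            format!("tried to grow by {to_grow}, only {available} available")
+        }
+        Error::AllocError { layout, .. } => {
+            format!("allocation of {} bytes failed", layout.size())
+        }
+        _ => err.to_string(),
+    }
+}
+
+#[test]
+fn describe_error_sketch() {
+    assert!(describe(&Error::CapacityOverflow).contains("capacity"));
+    assert!(describe(&Error::OverGrow { to_grow: 8, available: 2 }).contains("8"));
+}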
+
+// ============================================================================
+// uninit module tests
+// ============================================================================
+
+mod uninit_tests {
+    use std::mem::MaybeUninit;
+
+    #[test]
+    fn fill_initializes_slice() {
+        let mut data: [MaybeUninit<u64>; 5] = unsafe { MaybeUninit::uninit().assume_init() };
+        platform_mem::raw_mem::uninit::fill(&mut data, 42);
+        for item in data.iter() {
+            assert_eq!(unsafe { item.assume_init() }, 42);
+        }
+    }
+
+    #[test]
+    fn fill_with_initializes_slice() {
+        let mut data: [MaybeUninit<u64>; 5] = unsafe { MaybeUninit::uninit().assume_init() };
+        let mut counter = 0u64;
+        platform_mem::raw_mem::uninit::fill_with(&mut data, || {
+            counter += 1;
+            counter
+        });
+        for (i, item) in data.iter().enumerate() {
+            assert_eq!(unsafe { item.assume_init() }, (i + 1) as u64);
+        }
+    }
+
+    #[test]
+    fn fill_empty_slice() {
+        let mut data: [MaybeUninit<u64>; 0] = [];
+        platform_mem::raw_mem::uninit::fill(&mut data, 42);
+    }
+
+    #[test]
+    fn fill_with_empty_slice() {
+        let mut data: [MaybeUninit<u64>; 0] = [];
+        platform_mem::raw_mem::uninit::fill_with(&mut data, || 42);
+    }
+
+    #[test]
+    fn fill_single_element() {
+        let mut data: [MaybeUninit<u64>; 1] = unsafe { MaybeUninit::uninit().assume_init() };
+        platform_mem::raw_mem::uninit::fill(&mut data, 42);
+        assert_eq!(unsafe { data[0].assume_init() }, 42);
+    }
+}
+
+// ============================================================================
+// RawPlace tests (via indirect usage)
+// ============================================================================
+
+mod raw_place_tests {
+    use super::*;
+
+    #[test]
+    fn raw_place_through_alloc() {
+        // RawPlace is used internally by Alloc
+        let mut alloc = Global::<u64>::new();
+
+        // Test initial state (dangling)
+        assert_eq!(alloc.allocated().len(), 0);
+
+        // Test growth (handle_fill)
+        alloc.grow_filled(10, 42).unwrap();
+        assert_eq!(alloc.allocated().len(), 10);
+
+        // Test shrinking (shrink_to)
+        alloc.shrink(5).unwrap();
+        assert_eq!(alloc.allocated().len(), 5);
+
+        // Test multiple operations
+        alloc.grow_filled(5, 0).unwrap();
+        assert_eq!(alloc.allocated().len(), 10);
+    }
+
+    #[test]
+    fn raw_place_through_file_mapped() -> Result<()> {
+        let mut mem = TempFile::<u64>::new()?;
+
+        // Test initial state
+        assert_eq!(mem.allocated().len(), 0);
+
+        // Test growth
+        mem.grow_filled(10, 42)?;
+        assert_eq!(mem.allocated().len(), 10);
+
+        // Test shrinking
+        mem.shrink(5)?;
+        assert_eq!(mem.allocated().len(), 5);
+
+        Ok(())
+    }
+}
+
+// ============================================================================
+// Thread safety tests
+// ============================================================================
+
+mod thread_safety_tests {
+    use super::*;
+
+    fn assert_send<T: Send>() {}
+    fn assert_sync<T: Sync>() {}
+
+    #[test]
+    fn global_is_send_sync() {
+        assert_send::<Global<u64>>();
+        assert_sync::<Global<u64>>();
+    }
+
+    #[test]
+    fn system_is_send_sync() {
+        assert_send::<System<u64>>();
+        assert_sync::<System<u64>>();
+    }
+
+    #[test]
+    fn tempfile_is_send_sync() {
+        assert_send::<TempFile<u64>>();
+        assert_sync::<TempFile<u64>>();
+    }
+
+    #[test]
+    fn file_mapped_is_send_sync() {
+        assert_send::<FileMapped<u64>>();
+        assert_sync::<FileMapped<u64>>();
+    }
+
+    #[test]
+    fn alloc_is_send_sync() {
+        assert_send::<Alloc<u64, GlobalAlloc>>();
+        assert_sync::<Alloc<u64, GlobalAlloc>>();
+    }
+}
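+
+// Sketch: because `Global<u64>` is `Send` (asserted above), a backend can be
+// moved into a worker thread and handed back through the join handle.
+#[test]
+fn move_backend_across_threads_sketch() {
+    let mut mem = Global::<u64>::new();
+    mem.grow_filled(3, 1).unwrap();
+
+    let handle = std::thread::spawn(move || {
+        mem.grow_filled(2, 2).unwrap();
+        mem
+    });
+
+    let mem = handle.join().unwrap();
+    assert_eq!(mem.allocated().len(), 5);
+}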
+
+// ============================================================================
+// Drop with complex types tests
+// ============================================================================
+
+mod drop_tests {
+    use super::*;
+    use std::sync::atomic::{AtomicUsize, Ordering};
+    use std::sync::Arc;
+
+    #[test]
+    fn drop_with_arc() {
+        let counter = Arc::new(AtomicUsize::new(0));
+
+        {
+            let mut mem = Global::<Arc<AtomicUsize>>::new();
+            for _ in 0..5 {
+                let c = counter.clone();
+                c.fetch_add(1, Ordering::SeqCst);
+                mem.grow_filled(1, c).unwrap();
+            }
+            // 5 clones in the vector + 1 original
+            assert!(Arc::strong_count(&counter) > 1);
+        }
+
+        // After drop, only the original should remain
+        assert_eq!(Arc::strong_count(&counter), 1);
+    }
+
+    #[test]
+    fn shrink_drops_elements() {
+        let counter = Arc::new(AtomicUsize::new(0));
+        let mut mem = Global::<Arc<AtomicUsize>>::new();
+
+        for _ in 0..10 {
+            let c = counter.clone();
+            mem.grow_filled(1, c).unwrap();
+        }
+
+        let before_shrink = Arc::strong_count(&counter);
+        mem.shrink(5).unwrap();
+        let after_shrink = Arc::strong_count(&counter);
+
+        // Should have dropped 5 references
+        assert_eq!(before_shrink - after_shrink, 5);
+    }
+}
+
+// ============================================================================
+// Edge case tests
+// ============================================================================
+
+mod edge_cases {
+    use super::*;
+
+    #[test]
+    fn grow_zero_elements() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(0, 42).unwrap();
+        assert_eq!(mem.allocated().len(), 0);
+    }
+
+    #[test]
+    fn shrink_zero_elements() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(10, 42).unwrap();
+        mem.shrink(0).unwrap();
+        assert_eq!(mem.allocated().len(), 10);
+    }
+
+    #[test]
+    fn grow_from_empty_slice() {
+        let mut mem = Global::<u64>::new();
+        let empty: [u64; 0] = [];
+        mem.grow_from_slice(&empty).unwrap();
+        assert_eq!(mem.allocated().len(), 0);
+    }
+
+    #[test]
+    fn grow_within_empty_range() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(5, 42).unwrap();
+        mem.grow_within(0..0).unwrap();
+        assert_eq!(mem.allocated().len(), 5);
+    }
+
+    #[test]
+    fn allocated_modification() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(5, 0).unwrap();
+
+        // Modify all elements
+        for (i, elem) in mem.allocated_mut().iter_mut().enumerate() {
+            *elem = i as u64;
+        }
+
+        assert_eq!(mem.allocated(), &[0, 1, 2, 3, 4]);
+    }
+
+    #[test]
+    fn large_allocation() {
+        let mut mem = Global::<u64>::new();
+        mem.grow_filled(1_000_000, 0).unwrap();
+        assert_eq!(mem.allocated().len(), 1_000_000);
+    }
+}
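+
+// Sketch combining the two copy-based growth paths from src/raw_mem.rs:
+// `grow_from_slice` appends a cloned slice, and `grow_within` re-appends an
+// already-allocated range, with the semantics exercised in `grow_within_range`.
+#[test]
+fn grow_from_slice_then_grow_within_sketch() {
+    let mut mem = Global::<u64>::new();
+    mem.grow_from_slice(&[1, 2, 3]).unwrap();
+    mem.grow_within(0..2).unwrap();
+    assert_eq!(mem.allocated(), &[1, 2, 3, 1, 2]);
+}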