diff --git a/.cargo/config.toml b/.cargo/config.toml
index 1cc12d1..83a36c4 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -1,11 +1,3 @@
-[source.crates-io]
-replace-with = 'rsproxy-sparse'
-[source.rsproxy]
-registry = "https://rsproxy.cn/crates.io-index"
-[source.rsproxy-sparse]
-registry = "sparse+https://rsproxy.cn/index/"
-[registries.rsproxy]
-index = "https://rsproxy.cn/crates.io-index"
 [net]
 git-fetch-with-cli = true
 
@@ -19,4 +11,7 @@ rustflags = [
     "-C", "link-arg=Ole32.lib",
     "-C", "link-arg=User32.lib",
     "-C", "link-arg=msvcrtd.lib"
-]
\ No newline at end of file
+]
+
+[env]
+RUST_LOG="info"
\ No newline at end of file
diff --git a/.github/workflows/build-setup.yml b/.github/workflows/build-setup.yml
new file mode 100644
index 0000000..a8122b3
--- /dev/null
+++ b/.github/workflows/build-setup.yml
@@ -0,0 +1,8 @@
+- name: Install vcpkg build dependencies
+  run: sudo apt-get --assume-yes install nasm
+
+- name: Install vcpkg
+  run: cargo install cargo-vcpkg
+
+- name: Build vcpkg dependencies
+  run: cargo vcpkg --verbose build
diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml
new file mode 100644
index 0000000..7f6d474
--- /dev/null
+++ b/.github/workflows/cd.yml
@@ -0,0 +1,26 @@
+name: Continuous Deployment
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  release-plz:
+    name: Release-plz
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          token: ${{ secrets.RELEASE_PLZ_TOKEN }}
+
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@stable
+
+      - name: Run release-plz
+        uses: MarcoIeni/release-plz-action@v0.5
+        env:
+          GITHUB_TOKEN: ${{ secrets.RELEASE_PLZ_TOKEN }}
+          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 191e6fa..0365e4d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,30 +1,41 @@
-name: Cargo Build & Test
-
-on:
-  push:
-  pull_request:
-
-env:
-  CARGO_TERM_COLOR: always
-
-# TODO: setup versioning and relase with knope
-jobs:
-  build_and_test:
-    name: Rust project - latest
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        toolchain:
-          - stable
-          - beta
-          - nightly
-    steps:
-      - uses: actions/checkout@v3
-      - name: install rust
-        run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }} && rustup component add rustfmt
-      - name: Install vcpkg build deps
-        run: sudo apt-get --assume-yes install nasm
-      - run: cargo install cargo-vcpkg
-      - run: cargo vcpkg --verbose build
-      - run: cargo build --verbose
-      - run: cargo test --verbose
+name: Cargo Build & Test
+
+on:
+  push:
+    branches:
+      - "*"
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  test:
+    name: Test
+    runs-on: ubuntu-latest
+    # Run only on pushes to branches with open PRs
+    if: github.event_name == 'push' && contains(fromJson('["refs/heads/main"]'), github.ref) == false && github.event.repository.pulls != '[]'
+    strategy:
+      matrix:
+        toolchain:
+          - stable
+          - beta
+          - nightly
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install rust
+        run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }} && rustup component add rustfmt
+
+      - name: Install vcpkg build dependencies
+        run: sudo apt-get --assume-yes install nasm
+
+      - name: Install vcpkg
+        run: cargo install cargo-vcpkg
+
+      - name: Build vcpkg dependencies
+        run: cargo vcpkg --verbose build
+
+      - name: Build
+        run: cargo build --verbose
+
+      - name: Run tests
+        run: cargo test --verbose
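
The new [env] table in .cargo/config.toml makes cargo export RUST_LOG=info to the processes it launches (cargo run, cargo test), so the binary logs at info level by default. A minimal sketch of how that is picked up, assuming the program initializes the env_logger dependency already declared in Cargo.toml (the diff does not show main()'s logger setup, so this is illustrative only):

    use log::{debug, info};

    fn main() {
        // env_logger filters on the RUST_LOG environment variable; with the
        // [env] table above, `cargo run` exports RUST_LOG=info automatically.
        env_logger::init();
        info!("visible at the default level set in .cargo/config.toml");
        debug!("hidden unless RUST_LOG=debug (or trace) overrides it");
    }
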
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..b7aac82
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,289 @@
+# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
+#
+# Copyright 2022-2024, axodotdev
+# SPDX-License-Identifier: MIT or Apache-2.0
+#
+# CI that:
+#
+# * checks for a Git Tag that looks like a release
+# * builds artifacts with cargo-dist (archives, installers, hashes)
+# * uploads those artifacts to temporary workflow zip
+# * on success, uploads the artifacts to a GitHub Release
+#
+# Note that the GitHub Release will be created with a generated
+# title/body based on your changelogs.
+
+name: Release
+permissions:
+  "contents": "write"
+
+# This task will run whenever you push a git tag that looks like a version
+# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
+# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
+# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
+# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
+#
+# If PACKAGE_NAME is specified, then the announcement will be for that
+# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
+#
+# If PACKAGE_NAME isn't specified, then the announcement will be for all
+# (cargo-dist-able) packages in the workspace with that version (this mode is
+# intended for workspaces with only one dist-able package, or with all dist-able
+# packages versioned/released in lockstep).
+#
+# If you push multiple tags at once, separate instances of this workflow will
+# spin up, creating an independent announcement for each one. However, GitHub
+# will hard limit this to 3 tags per commit, as it will assume more tags is a
+# mistake.
+#
+# If there's a prerelease-style suffix to the version, then the release(s)
+# will be marked as a prerelease.
+on:
+  pull_request:
+  push:
+    tags:
+      - '**[0-9]+.[0-9]+.[0-9]+*'
+
+jobs:
+  # Run 'cargo dist plan' (or host) to determine what tasks we need to do
+  plan:
+    runs-on: "ubuntu-20.04"
+    outputs:
+      val: ${{ steps.plan.outputs.manifest }}
+      tag: ${{ !github.event.pull_request && github.ref_name || '' }}
+      tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }}
+      publishing: ${{ !github.event.pull_request }}
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: Install cargo-dist
+        # we specify bash to get pipefail; it guards against the `curl` command
+        # failing. otherwise `sh` won't catch that `curl` returned non-0
+        shell: bash
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.0/cargo-dist-installer.sh | sh"
+      - name: Cache cargo-dist
+        uses: actions/upload-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/cargo-dist
+      # sure would be cool if github gave us proper conditionals...
+      # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
+      # functionality based on whether this is a pull_request, and whether it's from a fork.
+      # (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
+      # but also really annoying to build CI around when it needs secrets to work right.)
+      - id: plan
+        run: |
+          cargo dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json
+          echo "cargo dist ran successfully"
+          cat plan-dist-manifest.json
+          echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
+      - name: "Upload dist-manifest.json"
+        uses: actions/upload-artifact@v4
+        with:
+          name: artifacts-plan-dist-manifest
+          path: plan-dist-manifest.json
+
+  # Build and packages all the platform-specific things
+  build-local-artifacts:
+    name: build-local-artifacts (${{ join(matrix.targets, ', ') }})
+    # Let the initial task tell us to not run (currently very blunt)
+    needs:
+      - plan
+    if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }}
+    strategy:
+      fail-fast: false
+      # Target platforms/runners are computed by cargo-dist in create-release.
+      # Each member of the matrix has the following arguments:
+      #
+      # - runner: the github runner
+      # - dist-args: cli flags to pass to cargo dist
+      # - install-dist: expression to run to install cargo-dist on the runner
+      #
+      # Typically there will be:
+      # - 1 "global" task that builds universal installers
+      # - N "local" tasks that build each platform's binaries and platform-specific installers
+      matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }}
+    runs-on: ${{ matrix.runner }}
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json
+    steps:
+      - name: enable windows longpaths
+        run: |
+          git config --global core.longpaths true
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: "Install vcpkg build dependencies"
+        run: "sudo apt-get --assume-yes install nasm"
+      - name: "Install vcpkg"
+        run: "cargo install cargo-vcpkg"
+      - name: "Build vcpkg dependencies"
+        run: "cargo vcpkg --verbose build"
+      - name: Install cargo-dist
+        run: ${{ matrix.install_dist }}
+      # Get the dist-manifest
+      - name: Fetch local artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: artifacts-*
+          path: target/distrib/
+          merge-multiple: true
+      - name: Install dependencies
+        run: |
+          ${{ matrix.packages_install }}
+      - name: Build artifacts
+        run: |
+          # Actually do builds and make zips and whatnot
+          cargo dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json
+          echo "cargo dist ran successfully"
+      - id: cargo-dist
+        name: Post-build
+        # We force bash here just because github makes it really hard to get values up
+        # to "real" actions without writing to env-vars, and writing to env-vars has
+        # inconsistent syntax between shell and powershell.
+        shell: bash
+        run: |
+          # Parse out what we just built and upload it to scratch storage
+          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
+          jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
+          echo "EOF" >> "$GITHUB_OUTPUT"
+
+          cp dist-manifest.json "$BUILD_MANIFEST_NAME"
+      - name: "Upload artifacts"
+        uses: actions/upload-artifact@v4
+        with:
+          name: artifacts-build-local-${{ join(matrix.targets, '_') }}
+          path: |
+            ${{ steps.cargo-dist.outputs.paths }}
+            ${{ env.BUILD_MANIFEST_NAME }}
+
+  # Build and package all the platform-agnostic(ish) things
+  build-global-artifacts:
+    needs:
+      - plan
+      - build-local-artifacts
+    runs-on: "ubuntu-20.04"
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: Install cached cargo-dist
+        uses: actions/download-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/
+      - run: chmod +x ~/.cargo/bin/cargo-dist
+      # Get all the local artifacts for the global tasks to use (for e.g. checksums)
+      - name: Fetch local artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: artifacts-*
+          path: target/distrib/
+          merge-multiple: true
+      - id: cargo-dist
+        shell: bash
+        run: |
+          cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
+          echo "cargo dist ran successfully"
+
+          # Parse out what we just built and upload it to scratch storage
+          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
+          jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
+          echo "EOF" >> "$GITHUB_OUTPUT"
+
+          cp dist-manifest.json "$BUILD_MANIFEST_NAME"
+      - name: "Upload artifacts"
+        uses: actions/upload-artifact@v4
+        with:
+          name: artifacts-build-global
+          path: |
+            ${{ steps.cargo-dist.outputs.paths }}
+            ${{ env.BUILD_MANIFEST_NAME }}
+  # Determines if we should publish/announce
+  host:
+    needs:
+      - plan
+      - build-local-artifacts
+      - build-global-artifacts
+    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
+    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    runs-on: "ubuntu-20.04"
+    outputs:
+      val: ${{ steps.host.outputs.manifest }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: Install cached cargo-dist
+        uses: actions/download-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/
+      - run: chmod +x ~/.cargo/bin/cargo-dist
+      # Fetch artifacts from scratch-storage
+      - name: Fetch artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: artifacts-*
+          path: target/distrib/
+          merge-multiple: true
+      - id: host
+        shell: bash
+        run: |
+          cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
+          echo "artifacts uploaded and released successfully"
+          cat dist-manifest.json
+          echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v4 + with: + # Overwrite the previous copy + name: artifacts-dist-manifest + path: dist-manifest.json + # Create a GitHub Release while uploading all files to it + - name: "Download GitHub Artifacts" + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: artifacts + merge-multiple: true + - name: Cleanup + run: | + # Remove the granular manifests + rm -f artifacts/*-dist-manifest.json + - name: Create GitHub Release + env: + PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}" + ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}" + ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}" + RELEASE_COMMIT: "${{ github.sha }}" + run: | + # Write and read notes from a file to avoid quoting breaking things + echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt + + gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* + + announce: + needs: + - plan + - host + # use "always() && ..." to allow us to wait for all publish jobs while + # still allowing individual publish jobs to skip themselves (for prereleases). + # "host" however must run to completion, no skipping allowed! + if: ${{ always() && needs.host.result == 'success' }} + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive diff --git a/Cargo.toml b/Cargo.toml index cf7c661..7c97ace 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,27 +1,62 @@ [package] -name = "direct_play_nice" -version = "0.1.0-alpha.1" -edition = "2021" +name = "direct_play_nice" +description = "CLI program that converts video files to direct-play-compatible formats." 
+license = "GPLv3" +license-files = "LICENSE" +documentation = "https://github.com/ns-mkusper/direct-play-nice" +homepage = "https://github.com/ns-mkusper/direct-play-nice" +repository = "https://github.com/ns-mkusper/direct-play-nice" +version = "0.1.0-alpha.1" +edition = "2021" + [dependencies] -anyhow = "1.0.86" -clap = { version = "4.5.9", features = ["derive"] } -cstr = "0.2.12" -env_logger = "0.11.3" -log = "0.4.22" +anyhow = "1.0.86" +clap = { version = "4.5.9", features = ["derive"] } +cstr = "0.2.12" +env_logger = "0.11.3" +log = "0.4.22" # TODO: upgrade to ffmpeg7 once in vcpkg # see: https://github.com/microsoft/vcpkg/pull/38011 -rsmpeg = { version = "0.15.1", default-features = false, features = ["ffmpeg6"] } -rusty_ffmpeg = { version = "0.14.1", features = ["link_vcpkg_ffmpeg"] } -syslog = "6.1.1" +rsmpeg = { version = "0.15.1", default-features = false, features = ["ffmpeg6"] } +rusty_ffmpeg = { version = "0.14.1", features = ["link_vcpkg_ffmpeg"] } +serde = { version = "1.0.204", features = ["derive"] } +strum = "0.26.3" +strum_macros = "0.26.4" +syslog = "6.1.1" +toml = "0.8.16" +vcpkg = "0.2.15" [patch.crates-io] -rusty_ffmpeg = { git = "https://github.com/CCExtractor/rusty_ffmpeg.git" } +rusty_ffmpeg = { git = "https://github.com/CCExtractor/rusty_ffmpeg.git", rev = "4011018dacd67e0651c5118d9f698e37bd80ac68" } + +# The profile that 'cargo dist' will build with +[profile.dist] +inherits = "release" +lto = "thin" + +# Config for 'cargo dist' +[workspace.metadata.dist] +# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax) +cargo-dist-version = "0.22.0" +# CI backends to support +ci = "github" +# The installers to generate for each app +installers = ["shell", "powershell"] +# Target platforms to build apps for (Rust target-triple syntax) +targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] +# Path that installers should place binaries in +install-path = "CARGO_HOME" +# Whether to install an updater program +install-updater = false +# customize cargo-dist build env (install vcpkg and vcpkg build, etc) +# see: https://opensource.axo.dev/cargo-dist/book/ci/customizing.html#customizing-build-setup +github-build-setup = "build-setup.yml" [package.metadata.vcpkg] -dependencies = ["ffmpeg[x264,vpx]", "x264[asm,core,default-features]"] -git = "https://github.com/microsoft/vcpkg" -rev = "c4467cb686f92671f0172aa8299a77d908175b4e" +dependencies = ["ffmpeg[x264,vpx,freetype,fontconfig]", "x264[asm,core,default-features]"] +git = "https://github.com/microsoft/vcpkg" +rev = "c4467cb686f92671f0172aa8299a77d908175b4e" [package.metadata.vcpkg.target] -x86_64-pc-windows-msvc = {triplet = "x64-windows-static-md" } +x86_64-pc-windows-msvc = { triplet = "x64-windows-static" } diff --git a/cliff.toml b/cliff.toml new file mode 100644 index 0000000..4d368fa --- /dev/null +++ b/cliff.toml @@ -0,0 +1,79 @@ +# git-cliff ~ default configuration file +# https://git-cliff.org/docs/configuration +# +# Lines starting with "#" are comments. +# Configuration options are organized into tables and keys. +# See documentation for more information on available options. 
+
+[changelog]
+# template for the changelog header
+header = """
+# Changelog\n
+All notable changes to this project will be documented in this file.\n
+"""
+# template for the changelog body
+# https://keats.github.io/tera/docs/#introduction
+body = """
+{% if version %}\
+    ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
+{% else %}\
+    ## [unreleased]
+{% endif %}\
+{% for group, commits in commits | group_by(attribute="group") %}
+    ### {{ group | striptags | trim | upper_first }}
+    {% for commit in commits %}
+        - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
+            {% if commit.breaking %}[**breaking**] {% endif %}\
+            {{ commit.message | upper_first }}\
+    {% endfor %}
+{% endfor %}\n
+"""
+# template for the changelog footer
+footer = """
+<!-- generated by git-cliff -->
+"""
+# remove the leading and trailing whitespace from the templates
+trim = true
+# postprocessors
+postprocessors = [
+  # { pattern = '<REPO>', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL
+]
+
+[git]
+# parse the commits based on https://www.conventionalcommits.org
+conventional_commits = true
+# filter out the commits that are not conventional
+filter_unconventional = true
+# process each line of a commit as an individual commit
+split_commits = false
+# regex for preprocessing the commit messages
+commit_preprocessors = [
+  # Replace issue numbers
+  #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))"},
+  # Check spelling of the commit with https://github.com/crate-ci/typos
+  # If the spelling is incorrect, it will be automatically fixed.
+  #{ pattern = '.*', replace_command = 'typos --write-changes -' },
+]
+# regex for parsing and grouping commits
+commit_parsers = [
+  { message = "^feat", group = "🚀 Features" },
+  { message = "^fix", group = "🐛 Bug Fixes" },
+  { message = "^doc", group = "📚 Documentation" },
+  { message = "^perf", group = "⚡ Performance" },
+  { message = "^refactor", group = "🚜 Refactor" },
+  { message = "^style", group = "🎨 Styling" },
+  { message = "^test", group = "🧪 Testing" },
+  { message = "^chore\\(release\\): prepare for", skip = true },
+  { message = "^chore\\(deps.*\\)", skip = true },
+  { message = "^chore\\(pr\\)", skip = true },
+  { message = "^chore\\(pull\\)", skip = true },
+  { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" },
+  { body = ".*security", group = "🛡️ Security" },
+  { message = "^revert", group = "◀️ Revert" },
+]
+# filter out the commits that are not matched by commit parsers
+filter_commits = false
+# sort the tags topologically
+topo_order = false
+# sort the commits inside sections by oldest/newest order
+sort_commits = "oldest"
diff --git a/release-plz.toml b/release-plz.toml
new file mode 100644
index 0000000..a411f82
--- /dev/null
+++ b/release-plz.toml
@@ -0,0 +1,27 @@
+[workspace]
+# path of the git-cliff configuration
+changelog_config = "cliff.toml"
+
+# enable changelog updates
+changelog_update = true
+
+# update dependencies with `cargo update`
+dependencies_update = true
+
+# create tags for the releases
+git_tag_enable = true
+
+# disable GitHub releases
+git_release_enable = false
+
+# labels for the release PR
+pr_labels = ["release"]
+
+# disallow updating repositories with uncommitted changes
+allow_dirty = false
+
+# disallow packaging with uncommitted changes
+publish_allow_dirty = false
+
+# disable running `cargo-semver-checks`
+semver_check = false
diff --git a/src/config.rs b/src/config.rs
new file mode 100644
index 0000000..c0f3385
--- /dev/null
+++ b/src/config.rs
@@ -0,0 +1,19 @@
+//! config file utilities
+// use crate::streaming_devices::streaming_device::StreamingDevice;
+// use anyhow::Error;
+// use serde::Deserialize;
+// use std::fs;
+// use std::path::Path;
+// use toml::from_str;
+
+// #[derive(Deserialize, Debug)]
+// pub struct Config {
+//     // TODO: implement complete config file
+//     streaming_devices: Vec<StreamingDevice>,
+// }
+
+// pub fn parse_config_from_toml<P: AsRef<Path>>(path: P) -> Result<Config, Error> {
+//     let toml_content = fs::read_to_string(path).unwrap();
+//     let config: Config = from_str(&toml_content)?;
+//     Ok(config)
+// }
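
The module above ships fully commented out. A sketch of what it might look like once enabled, using the serde and toml dependencies declared in Cargo.toml; the DeviceEntry stand-in is hypothetical, since the real StreamingDevice does not derive Deserialize yet:

    //! Hypothetical, enabled version of the src/config.rs stub.
    use std::{fs, path::Path};

    use anyhow::Error;
    use serde::Deserialize;

    // Stand-in for StreamingDevice, which would need #[derive(Deserialize)].
    #[derive(Deserialize, Debug)]
    pub struct DeviceEntry {
        pub model: String,
    }

    #[derive(Deserialize, Debug)]
    pub struct Config {
        pub streaming_devices: Vec<DeviceEntry>,
    }

    pub fn parse_config_from_toml<P: AsRef<Path>>(path: P) -> Result<Config, Error> {
        // Propagate read errors instead of unwrapping, unlike the stub.
        let toml_content = fs::read_to_string(path)?;
        let config: Config = toml::from_str(&toml_content)?;
        Ok(config)
    }
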
diff --git a/src/main.rs b/src/main.rs
index 6f9f48e..cd3c7e4 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,6 +1,6 @@
 use anyhow::{anyhow, bail, Context, Result};
-use clap::Parser;
-use log::{debug, error, info};
+use clap::{value_parser, Parser};
+use log::{debug, error, warn};
 use rsmpeg::avcodec::{AVCodec, AVCodecContext, AVPacket};
 use rsmpeg::avformat::{AVFormatContextInput, AVFormatContextOutput, AVStreamMut, AVStreamRef};
 use rsmpeg::avutil::{ra, AVAudioFifo, AVChannelLayout, AVFrame, AVSamples};
@@ -8,19 +8,36 @@ use rsmpeg::error::RsmpegError;
 use rsmpeg::ffi::{self};
 use rsmpeg::swresample::SwrContext;
 use rsmpeg::swscale::SwsContext;
+use std::path::PathBuf;
 use std::{
     ffi::{CStr, CString},
     sync::atomic::{AtomicI64, Ordering},
 };
+use streaming_devices::{H264Level, H264Profile, Resolution, StreamingDevice};
+
+mod config;
+mod streaming_devices;
 
 // TODO: Make doc comments
+// TODO: switch to enum to allow for different modes
+// see: https://github.com/clap-rs/clap/discussions/3711#discussioncomment-2717657
 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
 struct Args {
+    /// List of StreamingDevice
+    #[arg(short, long, value_enum, value_delimiter = ',', value_parser = |s: &_| Args::get_device_by_name(streaming_devices::STREAMING_DEVICES, s))]
+    streaming_devices: Option<Vec<&'static StreamingDevice>>,
+
+    /// Path to the configuration file
+    #[arg(short, long, value_parser = value_parser!(PathBuf))]
+    config_file: Option<PathBuf>,
+
+    /// Video file to convert
     #[arg(value_parser = Args::parse_cstring)]
     input_file: CString,
 
+    /// Our output direct-play-compatible video file
     #[arg(value_parser = Args::parse_cstring)]
     output_file: CString,
 }
@@ -29,11 +46,18 @@ impl Args {
     fn parse_cstring(s: &str) -> Result<CString, String> {
         CString::new(s).map_err(|e| format!("Invalid CString: {}", e))
     }
+
+    fn get_device_by_name<'a>(
+        devices: &'a [StreamingDevice],
+        model: &str,
+    ) -> Result<&'a StreamingDevice, String> {
+        devices
+            .iter()
+            .find(|device| device.model == model)
+            .ok_or_else(|| format!("Provided streaming device model {} not found!", model))
+    }
 }
 
-// TODO: Register all codecs and formats
-// TODO: ensure audio streams have the same metadata
-// inspired by https://github.com/larksuite/rsmpeg/blob/master/tests/ffmpeg_examples/transcode_aac.rs
 pub enum StreamExtras {
     Some((SwrContext, AVAudioFifo)),
     None,
 }
@@ -174,14 +198,14 @@ fn process_video_stream(
     loop {
         let frame = match stream_processing_context.decode_context.receive_frame() {
             Ok(frame) => {
-                info!("Successfully processed frame!");
+                debug!("Successfully processed frame!");
                 frame
             }
             Err(RsmpegError::DecoderDrainError) | Err(RsmpegError::DecoderFlushedError) => {
                 break;
             }
             Err(e) => {
-                info!("Decoder receive frame error: {}", e);
+                error!("Decoder receive frame error: {}", e);
                 break;
             }
         };
@@ -234,6 +258,8 @@ fn process_audio_stream(
     output_format_context: &mut AVFormatContextOutput,
     packet: &mut AVPacket,
 ) -> Result<()> {
+    // TODO: ensure audio streams have the same metadata
+    // based on https://github.com/larksuite/rsmpeg/blob/master/tests/ffmpeg_examples/transcode_aac.rs
     packet.rescale_ts(
         input_stream.time_base,
         stream_processing_context.decode_context.time_base,
@@ -439,6 +465,8 @@ fn set_video_codec_par(
     decode_context: &mut AVCodecContext,
     encode_context: &mut AVCodecContext,
     output_stream: &mut AVStreamMut,
+    h264_profile: H264Profile, // TODO: handle cases somewhere when target video codec is NOT h264
+    h264_level: H264Level,
 ) {
     encode_context.set_sample_rate(decode_context.sample_rate);
     encode_context.set_width(decode_context.width);
@@ -451,8 +479,8 @@ fn set_video_codec_par(
     encode_context.set_sample_aspect_ratio(decode_context.sample_aspect_ratio);
     // TODO: find a safe way to do this
     unsafe {
-        (*encode_context.as_mut_ptr()).profile = ffi::FF_PROFILE_H264_HIGH as i32;
-        (*encode_context.as_mut_ptr()).level = 41;
+        (*encode_context.as_mut_ptr()).profile = h264_profile as i32;
+        (*encode_context.as_mut_ptr()).level = h264_level as i32;
     }
     output_stream.set_codecpar(encode_context.extract_codecpar());
 }
@@ -508,7 +536,17 @@ fn set_subtitle_codec_par(
     output_stream.set_codecpar(encode_context.extract_codecpar());
 }
 
-fn convert_video_file(input_file: &CStr, output_file: &CStr) -> Result<(), anyhow::Error> {
+/// Takes input video files and outputs direct-play-compatible video files
+fn convert_video_file(
+    input_file: &CStr,
+    output_file: &CStr,
+    target_video_codec: ffi::AVCodecID,
+    target_audio_codec: ffi::AVCodecID,
+    min_h264_profile: H264Profile,
+    min_h264_level: H264Level,
+    min_fps: u32,
+    min_resolution: Resolution,
+) -> Result<(), anyhow::Error> {
     let mut input_format_context = AVFormatContextInput::open(input_file, None, &mut None)?;
     input_format_context.dump(0, input_file)?;
 
@@ -517,10 +555,20 @@ fn convert_video_file(input_file: &CStr, output_file: &CStr) -> Result<(), anyho
     let mut stream_contexts: Vec<StreamProcessingContext> = Vec::new();
 
     for stream in input_format_context.streams() {
+        // TODO: ID streams either unsupported in output container type or without a supported decoder and skip them, producing a warning for each skipped.
+        let input_codec_type = stream.codecpar().codec_type;
+        // TODO: implement support for attachments
+        if input_codec_type == ffi::AVMEDIA_TYPE_ATTACHMENT {
+            warn!("Warning: Input file contains attachment streams, which may not be handled correctly. Skipping...");
+            continue;
+        }
+
+        let mut output_stream = output_format_context.new_stream();
+
         let input_stream_codecpar = stream.codecpar();
         let input_codec_id = input_stream_codecpar.codec_id;
         let decoder = AVCodec::find_decoder(input_codec_id)
-            .with_context(|| anyhow!("decoder ({}) not found.", input_codec_id))?;
+            .with_context(|| anyhow!("Decoder not found for stream {}.", output_stream.index))?;
         let mut decode_context = AVCodecContext::new(&decoder);
         decode_context.apply_codecpar(&input_stream_codecpar)?;
         decode_context.set_time_base(stream.time_base); // TODO: needed?
@@ -529,7 +577,6 @@ fn convert_video_file(input_file: &CStr, output_file: &CStr) -> Result<(), anyho
         }
         decode_context.open(None)?;
 
-        let mut output_stream = output_format_context.new_stream();
         let mut encode_context: AVCodecContext;
         let media_type: ffi::AVMediaType;
         let mut frame_buffer: Option<AVAudioFifo> = None;
@@ -538,17 +585,23 @@
         match decode_context.codec_type {
             ffi::AVMEDIA_TYPE_VIDEO => {
                 output_stream.set_metadata(stream.metadata().as_deref().cloned());
-                let encoder = AVCodec::find_encoder(ffi::AV_CODEC_ID_H264)
-                    .expect("Could not find H264 encoder");
+                let encoder = AVCodec::find_encoder(target_video_codec)
+                    .expect("Could not find video encoder");
                 encode_context = AVCodecContext::new(&encoder);
                 media_type = ffi::AVMEDIA_TYPE_VIDEO;
 
-                set_video_codec_par(&mut decode_context, &mut encode_context, &mut output_stream);
+                set_video_codec_par(
+                    &mut decode_context,
+                    &mut encode_context,
+                    &mut output_stream,
+                    min_h264_profile,
+                    min_h264_level,
+                );
             }
             ffi::AVMEDIA_TYPE_AUDIO => {
                 output_stream.set_metadata(stream.metadata().as_deref().cloned());
-                let encoder = AVCodec::find_encoder(ffi::AV_CODEC_ID_AAC)
-                    .expect("Could not find AAC encoder");
+                let encoder = AVCodec::find_encoder(target_audio_codec)
+                    .expect("Could not find audio encoder");
                 encode_context = AVCodecContext::new(&encoder);
                 media_type = ffi::AVMEDIA_TYPE_AUDIO;
@@ -697,12 +750,52 @@
 }
 
 fn main() -> Result<()> {
+    // FFMPEG TRACE LOGGING
     // unsafe {
-    //     ffi::av_log_set_level(ffi::AV_LOG_TRACE as i32); // Set the log level to TRACE (most verbose)
+    //     ffi::av_log_set_level(ffi::AV_LOG_TRACE as i32);
     // }
     let args = Args::parse();
 
-    convert_video_file(&args.input_file, &args.output_file)?;
-    Ok(())
+    // CLI ARG validation
+    // TODO: Use clap integration for this?
+    if args.streaming_devices.is_none() && args.config_file.is_none() {
+        eprintln!("Error: At least one of --streaming-devices or --config-file is required.");
+        std::process::exit(1);
+    }
+
+    // TODO: implement config file
+    if args.config_file.is_some() {
+        eprintln!("Error: The --config-file option is not implemented yet.");
+        std::process::exit(1);
+    }
+
+    // TODO: add reading of config file
+    // let config_file = args.config_file.unwrap();
+
+    // let config_streaming_devices = config::parse_config_from_toml(config_file).unwrap();
+
+    let cli_arg_streaming_devices = args.streaming_devices.unwrap();
+
+    let streaming_devices = &cli_arg_streaming_devices;
+
+    let common_video_codec = StreamingDevice::get_common_video_codec(streaming_devices)?;
+    let common_audio_codec = StreamingDevice::get_common_audio_codec(streaming_devices)?;
+    let min_h264_profile = StreamingDevice::get_min_h264_profile(streaming_devices)?;
+    let min_h264_level = StreamingDevice::get_min_h264_level(streaming_devices)?;
+    let min_fps = StreamingDevice::get_min_fps(streaming_devices)?;
+    let min_resolution = StreamingDevice::get_min_resolution(streaming_devices)?;
+
+    // TODO: Check if video file is already compatible and skip if it is
+
+    convert_video_file(
+        &args.input_file,
+        &args.output_file,
+        common_video_codec,
+        common_audio_codec,
+        min_h264_profile,
+        min_h264_level,
+        min_fps,
+        min_resolution,
+    )
 }
diff --git a/src/streaming_devices/mod.rs b/src/streaming_devices/mod.rs
new file mode 100644
index 0000000..e8f1fbb
--- /dev/null
+++ b/src/streaming_devices/mod.rs
@@ -0,0 +1,47 @@
+pub(crate) mod streaming_device;
+
+use rsmpeg::ffi::{self};
+pub use streaming_device::{H264Level, H264Profile, Resolution, StreamingDevice};
+
+/// All supported `StreamingDevices`
+pub const STREAMING_DEVICES: &[StreamingDevice; 3] = &[
+    // Google Cast
+    // see: https://developers.google.com/cast/docs/media
+
+    // Chromecast (1st gen)
+    StreamingDevice {
+        name: "Chromecast (1st gen)",
+        model: "chromecast_1st_gen",
+        maker: "Google",
+        video_codec: [Some(ffi::AV_CODEC_ID_H264), None, None, None, None],
+        audio_codec: [Some(ffi::AV_CODEC_ID_AAC), None, None, None, None],
+        max_h264_profile: H264Profile::High,
+        max_h264_level: H264Level::Level4_1,
+        max_fps: 30,
+        max_resolution: Resolution::Resolution1080p,
+    },
+    // Chromecast (2nd gen)
+    StreamingDevice {
+        name: "Chromecast (2nd gen)",
+        model: "chromecast_2nd_gen",
+        maker: "Google",
+        video_codec: [Some(ffi::AV_CODEC_ID_H264), None, None, None, None],
+        audio_codec: [Some(ffi::AV_CODEC_ID_AAC), None, None, None, None],
+        max_h264_profile: H264Profile::High,
+        max_h264_level: H264Level::Level4_1,
+        max_fps: 60,
+        max_resolution: Resolution::Resolution1080p,
+    },
+    // Chromecast Ultra
+    StreamingDevice {
+        name: "Chromecast Ultra",
+        model: "chromecast_ultra",
+        maker: "Google",
+        video_codec: [Some(ffi::AV_CODEC_ID_H264), None, None, None, None],
+        audio_codec: [Some(ffi::AV_CODEC_ID_AAC), None, None, None, None],
+        max_h264_profile: H264Profile::High,
+        max_h264_level: H264Level::Level4_2,
+        max_fps: 60,
+        max_resolution: Resolution::Resolution2160p,
+    },
+];
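
For reference, a hypothetical unit test (not part of the diff) showing how the comma-separated --streaming-devices values resolve against this table through Args::get_device_by_name in src/main.rs:

    #[cfg(test)]
    mod device_lookup_tests {
        use super::*;

        #[test]
        fn resolves_models_from_the_device_table() {
            let ultra =
                Args::get_device_by_name(streaming_devices::STREAMING_DEVICES, "chromecast_ultra")
                    .expect("model is listed in STREAMING_DEVICES");
            assert_eq!(ultra.name, "Chromecast Ultra");

            // Unknown models produce the error string that clap reports to the user.
            assert!(
                Args::get_device_by_name(streaming_devices::STREAMING_DEVICES, "roku_4k").is_err()
            );
        }
    }
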
diff --git a/src/streaming_devices/streaming_device.rs b/src/streaming_devices/streaming_device.rs
new file mode 100644
index 0000000..d3f9fc7
--- /dev/null
+++ b/src/streaming_devices/streaming_device.rs
@@ -0,0 +1,226 @@
+//! Streaming devices and their direct-play specs
+
+use std::{clone::Clone, cmp::Ordering};
+
+use anyhow::{anyhow, bail, Error};
+use rusty_ffmpeg::ffi;
+use strum::IntoEnumIterator;
+use strum_macros::EnumIter;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Resolution {
+    Resolution480p,
+    Resolution720p,
+    Resolution1080p,
+    Resolution1440p,
+    Resolution2160p,
+}
+
+impl Resolution {
+    fn to_dimensions(&self) -> (u32, u32) {
+        match self {
+            Resolution::Resolution480p => (640, 480),
+            Resolution::Resolution720p => (1280, 720),
+            Resolution::Resolution1080p => (1920, 1080),
+            Resolution::Resolution1440p => (2560, 1440),
+            Resolution::Resolution2160p => (3840, 2160),
+        }
+    }
+
+    fn from_resolution(x: u32, y: u32) -> Resolution {
+        match (x, y) {
+            (640, 480) => Resolution::Resolution480p,
+            (1280, 720) => Resolution::Resolution720p,
+            (1920, 1080) => Resolution::Resolution1080p,
+            (2560, 1440) => Resolution::Resolution1440p,
+            (3840, 2160) => Resolution::Resolution2160p,
+            _ => Resolution::Resolution1080p, // TODO: What should we do if an unsupported resolution is provided?
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy, Eq, Ord, EnumIter)]
+pub enum H264Level {
+    Level1 = 10,
+    Level1_1 = 11,
+    Level1_2 = 12,
+    Level1_3 = 13,
+    Level2 = 20,
+    Level2_1 = 21,
+    Level2_2 = 22,
+    Level3 = 30,
+    Level3_1 = 31,
+    Level3_2 = 32,
+    Level4 = 40,
+    Level4_1 = 41,
+    Level4_2 = 42,
+    Level5 = 50,
+    Level5_1 = 51,
+    Level5_2 = 52,
+}
+
+impl PartialEq for H264Level {
+    fn eq(&self, other: &Self) -> bool {
+        (*self as u32) == (*other as u32)
+    }
+}
+
+impl PartialOrd for H264Level {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some((*self as u32).cmp(&(*other as u32)))
+    }
+}
+
+impl TryFrom<i32> for H264Level {
+    type Error = &'static str;
+
+    fn try_from(value: i32) -> Result<Self, Self::Error> {
+        match value {
+            10 => Ok(H264Level::Level1),
+            11 => Ok(H264Level::Level1_1),
+            12 => Ok(H264Level::Level1_2),
+            13 => Ok(H264Level::Level1_3),
+            20 => Ok(H264Level::Level2),
+            21 => Ok(H264Level::Level2_1),
+            22 => Ok(H264Level::Level2_2),
+            30 => Ok(H264Level::Level3),
+            31 => Ok(H264Level::Level3_1),
+            32 => Ok(H264Level::Level3_2),
+            40 => Ok(H264Level::Level4),
+            41 => Ok(H264Level::Level4_1),
+            42 => Ok(H264Level::Level4_2),
+            50 => Ok(H264Level::Level5),
+            51 => Ok(H264Level::Level5_1),
+            52 => Ok(H264Level::Level5_2),
+            _ => Err("Invalid H.264 level value"),
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub enum H264Profile {
+    Baseline = ffi::FF_PROFILE_H264_BASELINE as isize,
+    Main = ffi::FF_PROFILE_H264_MAIN as isize,
+    Extended = ffi::FF_PROFILE_H264_EXTENDED as isize,
+    High = ffi::FF_PROFILE_H264_HIGH as isize,
+    High10 = ffi::FF_PROFILE_H264_HIGH_10 as isize,
+    High422 = ffi::FF_PROFILE_H264_HIGH_422 as isize,
+    High444 = ffi::FF_PROFILE_H264_HIGH_444 as isize,
+}
+
+#[derive(Clone, Debug)]
+pub struct StreamingDevice {
+    pub name: &'static str,
+    pub max_h264_profile: H264Profile,
+    pub max_h264_level: H264Level,
+    pub max_fps: u32,
+    pub max_resolution: Resolution,
+    pub model: &'static str,
+    pub maker: &'static str,
+    pub audio_codec: [Option<ffi::AVCodecID>; 5],
+    pub video_codec: [Option<ffi::AVCodecID>; 5],
+}
+
+impl StreamingDevice {
+    /// Finds the intersection of video codecs among all `StreamingDevice`s
+    pub fn get_common_video_codec(
+        devices: &Vec<&StreamingDevice>,
+    ) -> Result<ffi::AVCodecID, Error> {
+        let mut common_codecs: Vec<ffi::AVCodecID> = devices[0]
+            .video_codec
+            .iter()
+            .filter_map(|codec_id| *codec_id)
+            .collect();
+
+        for device in &devices[1..] {
+            common_codecs.retain(|codec_id| device.video_codec.contains(&Some(*codec_id)));
+        }
+
+        common_codecs.into_iter().next().ok_or_else(|| {
+            anyhow!("No common video codec found among the provided streaming devices.")
+        })
+    }
+
+    /// Finds the intersection of audio codecs among all `StreamingDevice`s
+    pub fn get_common_audio_codec(
+        devices: &Vec<&StreamingDevice>,
+    ) -> Result<ffi::AVCodecID, Error> {
+        let mut common_codecs: Vec<ffi::AVCodecID> = devices[0]
+            .audio_codec
+            .iter()
+            .filter_map(|codec_id| *codec_id)
+            .collect();
+
+        for device in &devices[1..] {
+            common_codecs.retain(|codec_id| device.audio_codec.contains(&Some(*codec_id)));
+        }
+
+        common_codecs.into_iter().next().ok_or_else(|| {
+            anyhow!("No common audio codec found among the provided streaming devices.")
+        })
+    }
+
+    /// Gets the minimum H.264 profile among all `StreamingDevice`s
+    pub fn get_min_h264_profile(
+        devices: &Vec<&StreamingDevice>,
+    ) -> Result<H264Profile, Error> {
+        let mut min_profile = H264Profile::High444; // TODO: implement more intelligent max method
+
+        for device in devices {
+            if device.max_h264_profile < min_profile {
+                min_profile = device.max_h264_profile;
+            }
+        }
+
+        Ok(min_profile)
+    }
+
+    /// Gets the minimum H.264 level among all `StreamingDevice`s
+    pub fn get_min_h264_level(devices: &Vec<&StreamingDevice>) -> Result<H264Level, Error> {
+        let mut min_level = H264Level::iter().max_by_key(|level| *level as i32).unwrap();
+
+        for device in devices {
+            if device.max_h264_level < min_level {
+                min_level = device.max_h264_level;
+            }
+        }
+
+        Ok(min_level)
+    }
+
+    /// Gets the minimum FPS among all `StreamingDevice`s
+    pub fn get_min_fps(devices: &Vec<&StreamingDevice>) -> Result<u32, Error> {
+        let mut min_fps = u32::MAX;
+
+        for device in devices {
+            if device.max_fps < min_fps {
+                min_fps = device.max_fps;
+            }
+        }
+
+        Ok(min_fps)
+    }
+
+    /// Gets the minimum resolution of all provided `StreamingDevice`s
+    pub fn get_min_resolution(devices: &Vec<&StreamingDevice>) -> Result<Resolution, Error> {
+        let mut min_res = (u32::MAX, u32::MAX);
+
+        for device in devices {
+            let res = device.max_resolution.to_dimensions();
+            if res.0 < min_res.0 || res.1 < min_res.1 {
+                min_res = res;
+            }
+        }
+
+        // TODO: Better safety feature to avoid undesired resolutions?
+        if min_res.0 < 1 || min_res.1 < 1 {
+            bail!(
+                "Target resolution ({}, {}) too small. Exiting...",
+                min_res.0,
+                min_res.1
+            );
+        }
+
+        Ok(Resolution::from_resolution(min_res.0, min_res.1))
+    }
+}
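
Taken together, these helpers pick the strictest target that every selected device can direct-play. An illustrative check (hypothetical test placed in src/streaming_devices/mod.rs, where STREAMING_DEVICES, ffi, and the re-exported types are all in scope) of what main() computes before calling convert_video_file:

    #[test]
    fn chromecast_family_resolves_to_strictest_common_target() -> anyhow::Result<()> {
        let devices: Vec<&StreamingDevice> = STREAMING_DEVICES.iter().collect();

        // H.264/AAC is the only codec pair every entry advertises.
        assert_eq!(
            StreamingDevice::get_common_video_codec(&devices)?,
            ffi::AV_CODEC_ID_H264
        );
        assert_eq!(
            StreamingDevice::get_common_audio_codec(&devices)?,
            ffi::AV_CODEC_ID_AAC
        );

        // The 1st-gen Chromecast caps the group at Level 4.1, 30 fps, and 1080p.
        assert_eq!(
            StreamingDevice::get_min_h264_level(&devices)?,
            H264Level::Level4_1
        );
        assert_eq!(StreamingDevice::get_min_fps(&devices)?, 30);
        assert_eq!(
            StreamingDevice::get_min_resolution(&devices)?,
            Resolution::Resolution1080p
        );
        Ok(())
    }
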