From bfb5cd4d6a52e475514ba82024591c56cb06db54 Mon Sep 17 00:00:00 2001 From: Claude Date: Thu, 5 Feb 2026 18:03:05 +0000 Subject: [PATCH 1/5] Add cargo xtask as alternative to just This adds a pure Rust task runner using the cargo-xtask pattern, which requires no additional dependencies beyond Rust itself. Commands are now more expressive with proper subcommands: - `cargo xtask pub fmp4 bbb` instead of `just pub bbb` - `cargo xtask pub hls bbb` instead of `just pub-hls bbb` - `cargo xtask auth key` instead of `just auth-key` - `cargo xtask serve hls bbb` instead of `just serve-hls bbb` All existing just commands are supported via xtask equivalents. https://claude.ai/code/session_01VVPt25Sduerr9A8aFvRXtt --- .cargo/config.toml | 3 + Cargo.lock | 7 + Cargo.toml | 1 + rs/xtask/Cargo.toml | 8 + rs/xtask/src/main.rs | 1331 ++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 1350 insertions(+) create mode 100644 rs/xtask/Cargo.toml create mode 100644 rs/xtask/src/main.rs diff --git a/.cargo/config.toml b/.cargo/config.toml index bff29e6e1..488284fb6 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,2 +1,5 @@ [build] rustflags = ["--cfg", "tokio_unstable"] + +[alias] +xtask = "run --package xtask --" diff --git a/Cargo.lock b/Cargo.lock index d8b09acfc..f7d6409a9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6006,6 +6006,13 @@ dependencies = [ "xml-rs", ] +[[package]] +name = "xtask" +version = "0.1.0" +dependencies = [ + "clap", +] + [[package]] name = "yasna" version = "0.5.2" diff --git a/Cargo.toml b/Cargo.toml index a615d694c..719cd5b91 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,6 +10,7 @@ members = [ "rs/moq-relay", "rs/moq-token", "rs/moq-token-cli", + "rs/xtask", ] resolver = "2" diff --git a/rs/xtask/Cargo.toml b/rs/xtask/Cargo.toml new file mode 100644 index 000000000..b16a6252a --- /dev/null +++ b/rs/xtask/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "xtask" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +clap 
= { version = "4", features = ["derive"] } diff --git a/rs/xtask/src/main.rs b/rs/xtask/src/main.rs new file mode 100644 index 000000000..4ce0b5ecd --- /dev/null +++ b/rs/xtask/src/main.rs @@ -0,0 +1,1331 @@ +//! xtask - Development task runner for MoQ +//! +//! Run with: `cargo xtask ` +//! +//! This replaces the justfile with a pure Rust solution. + +use std::{ + env, fs, + io::IsTerminal, + path::{Path, PathBuf}, + process::{Command, ExitCode, Stdio}, +}; + +use clap::{Parser, Subcommand}; + +/// MoQ development task runner +#[derive(Parser)] +#[command(name = "xtask", about = "MoQ development task runner")] +struct Cli { + #[command(subcommand)] + command: Commands, +} + +#[derive(Subcommand)] +enum Commands { + /// Install dependencies + Install, + + /// Run all development services (relay, web server, publish bbb) + Dev, + + /// Run a localhost relay server without authentication + Relay { + /// Additional arguments to pass to moq-relay + #[arg(trailing_var_arg = true)] + args: Vec, + }, + + /// Run a cluster of relay servers + Cluster, + + /// Run a localhost root server + Root, + + /// Run a localhost leaf server + Leaf, + + /// Authentication commands + Auth { + #[command(subcommand)] + command: AuthCommands, + }, + + /// Download test videos + Download { + /// Video name (bbb, tos, av1, hevc) + name: String, + }, + + /// Publish media to a relay + Pub { + #[command(subcommand)] + command: PubCommands, + }, + + /// Serve media directly (without relay) + Serve { + #[command(subcommand)] + command: ServeCommands, + }, + + /// Run the web development server + Web { + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + }, + + /// Clock broadcast commands + Clock { + /// Action: publish or subscribe + action: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + /// Additional arguments + #[arg(trailing_var_arg = true)] + args: Vec, + }, + + /// Run CI checks + Check { + 
#[command(subcommand)] + command: Option<CheckCommands>, + }, + + /// Run tests + Test { + #[command(subcommand)] + command: Option<TestCommands>, + }, + + /// Auto-fix linting issues + Fix, + + /// Build all packages + Build, + + /// Upgrade dependencies + Update, + + /// Tokio console commands + Console { + #[command(subcommand)] + command: ConsoleCommands, + }, + + /// Serve documentation locally + Doc, + + /// Throttle UDP traffic for testing (macOS only) + Throttle, +} + +#[derive(Subcommand)] +enum AuthCommands { + /// Generate a random secret key for authentication + Key, + /// Generate authentication tokens for local development + Token, +} + +#[derive(Subcommand)] +enum PubCommands { + /// Publish using fMP4 format (default) + Fmp4 { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + /// Additional arguments + #[arg(trailing_var_arg = true)] + args: Vec<String>, + }, + /// Publish using HLS format + Hls { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + relay: String, + }, + /// Publish using H.264 Annex B format + H264 { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + /// Additional arguments + #[arg(trailing_var_arg = true)] + args: Vec<String>, + }, + /// Publish using Iroh transport + Iroh { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Relay URL + url: String, + /// Prefix for the broadcast name + #[arg(default_value = "")] + prefix: String, + }, + /// Publish using GStreamer (deprecated) + Gst { + /// Video name + name: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + }, +} + +#[derive(Subcommand)] +enum ServeCommands { + /// Serve using fMP4 format + Fmp4 { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Additional arguments (e.g., --iroh-enabled) + 
#[arg(trailing_var_arg = true)] + args: Vec<String>, + }, + /// Generate and serve an HLS stream for testing + Hls { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// HTTP server port + #[arg(default_value = "8000")] + port: String, + }, +} + +#[derive(Subcommand)] +enum CheckCommands { + /// Run comprehensive checks including all feature combinations + All, +} + +#[derive(Subcommand)] +enum TestCommands { + /// Run comprehensive tests including all feature combinations + All, +} + +#[derive(Subcommand)] +enum ConsoleCommands { + /// Connect to the relay server (port 6680) + Relay, + /// Connect to the publisher (port 6681) + Pub, +} + +fn main() -> ExitCode { + let cli = Cli::parse(); + + let result = match cli.command { + Commands::Install => install(), + Commands::Dev => dev(), + Commands::Relay { args } => relay(&args), + Commands::Cluster => cluster(), + Commands::Root => root(), + Commands::Leaf => leaf(), + Commands::Auth { command } => match command { + AuthCommands::Key => auth_key(), + AuthCommands::Token => auth_token(), + }, + Commands::Download { name } => download(&name), + Commands::Pub { command } => match command { + PubCommands::Fmp4 { name, url, args } => pub_fmp4(&name, &url, &args), + PubCommands::Hls { name, relay } => pub_hls(&name, &relay), + PubCommands::H264 { name, url, args } => pub_h264(&name, &url, &args), + PubCommands::Iroh { name, url, prefix } => pub_iroh(&name, &url, &prefix), + PubCommands::Gst { name: _, url: _ } => pub_gst(), + }, + Commands::Serve { command } => match command { + ServeCommands::Fmp4 { name, args } => serve_fmp4(&name, &args), + ServeCommands::Hls { name, port } => serve_hls(&name, &port), + }, + Commands::Web { url } => web(&url), + Commands::Clock { action, url, args } => clock(&action, &url, &args), + Commands::Check { command } => match command { + Some(CheckCommands::All) => check_all(), + None => check(), + }, + Commands::Test { command } => match command { + Some(TestCommands::All) => test_all(), + 
None => test(), + }, + Commands::Fix => fix(), + Commands::Build => build(), + Commands::Update => update(), + Commands::Console { command } => match command { + ConsoleCommands::Relay => console_relay(), + ConsoleCommands::Pub => console_pub(), + }, + Commands::Doc => doc(), + Commands::Throttle => throttle(), + }; + + match result { + Ok(()) => ExitCode::SUCCESS, + Err(e) => { + eprintln!("Error: {e}"); + ExitCode::FAILURE + } + } +} + +// Helper functions + +fn project_root() -> PathBuf { + Path::new(env!("CARGO_MANIFEST_DIR")) + .parent() + .expect("xtask should be in rs/xtask") + .parent() + .expect("rs should have a parent") + .to_path_buf() +} + +fn run(program: &str, args: &[&str]) -> Result<(), String> { + let status = Command::new(program) + .args(args) + .current_dir(project_root()) + .status() + .map_err(|e| format!("Failed to run {program}: {e}"))?; + + if status.success() { + Ok(()) + } else { + Err(format!("{program} failed with status: {status}")) + } +} + +fn run_with_env(program: &str, args: &[&str], env: &[(&str, &str)]) -> Result<(), String> { + let mut cmd = Command::new(program); + cmd.args(args).current_dir(project_root()); + for (k, v) in env { + cmd.env(k, v); + } + let status = cmd + .status() + .map_err(|e| format!("Failed to run {program}: {e}"))?; + + if status.success() { + Ok(()) + } else { + Err(format!("{program} failed with status: {status}")) + } +} + +fn run_in_dir(program: &str, args: &[&str], dir: &Path) -> Result<(), String> { + let status = Command::new(program) + .args(args) + .current_dir(dir) + .status() + .map_err(|e| format!("Failed to run {program}: {e}"))?; + + if status.success() { + Ok(()) + } else { + Err(format!("{program} failed with status: {status}")) + } +} + +fn cargo() -> String { + env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()) +} + +fn is_tty() -> bool { + std::io::stdout().is_terminal() +} + +fn file_exists(path: &Path) -> bool { + path.exists() +} + +fn command_exists(cmd: &str) -> bool { + 
Command::new("which") + .arg(cmd) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + .map(|s| s.success()) + .unwrap_or(false) +} + +// Command implementations + +fn install() -> Result<(), String> { + run("bun", &["install"])?; + run( + &cargo(), + &[ + "install", + "--locked", + "cargo-shear", + "cargo-sort", + "cargo-upgrades", + "cargo-edit", + "cargo-hack", + ], + ) +} + +fn dev() -> Result<(), String> { + run("bun", &["install"])?; + run(&cargo(), &["build"])?; + run( + "bun", + &[ + "run", + "concurrently", + "--kill-others", + "--names", + "srv,bbb,web", + "--prefix-colors", + "auto", + "cargo xtask relay", + "sleep 1 && cargo xtask pub fmp4 bbb http://localhost:4443/anon", + "sleep 2 && cargo xtask web http://localhost:4443/anon", + ], + ) +} + +fn relay(args: &[String]) -> Result<(), String> { + let mut cmd_args = vec![ + "run", + "--bin", + "moq-relay", + "--", + "dev/relay.toml", + ]; + let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect(); + cmd_args.extend(args_refs); + + run_with_env( + &cargo(), + &cmd_args, + &[("TOKIO_CONSOLE_BIND", "127.0.0.1:6680")], + ) +} + +fn cluster() -> Result<(), String> { + run("bun", &["install"])?; + auth_token()?; + run(&cargo(), &["build", "--bin", "moq-relay"])?; + + // Read JWT tokens + let root = project_root(); + let demo_cli_jwt = fs::read_to_string(root.join("dev/demo-cli.jwt")) + .map_err(|e| format!("Failed to read demo-cli.jwt: {e}"))? + .trim() + .to_string(); + let demo_web_jwt = fs::read_to_string(root.join("dev/demo-web.jwt")) + .map_err(|e| format!("Failed to read demo-web.jwt: {e}"))? 
+ .trim() + .to_string(); + + run( + "bun", + &[ + "run", + "concurrently", + "--kill-others", + "--names", + "root,leaf,bbb,tos,web", + "--prefix-colors", + "auto", + "cargo xtask root", + "sleep 1 && cargo xtask leaf", + &format!( + "sleep 2 && cargo xtask pub fmp4 bbb 'http://localhost:4444/demo?jwt={demo_cli_jwt}'" + ), + &format!( + "sleep 3 && cargo xtask pub fmp4 tos 'http://localhost:4443/demo?jwt={demo_cli_jwt}'" + ), + &format!( + "sleep 4 && cargo xtask web 'http://localhost:4443/demo?jwt={demo_web_jwt}'" + ), + ], + ) +} + +fn root() -> Result<(), String> { + auth_key()?; + run(&cargo(), &["run", "--bin", "moq-relay", "--", "dev/root.toml"]) +} + +fn leaf() -> Result<(), String> { + auth_token()?; + run(&cargo(), &["run", "--bin", "moq-relay", "--", "dev/leaf.toml"]) +} + +fn auth_key() -> Result<(), String> { + let root = project_root(); + let key_path = root.join("dev/root.jwk"); + + if !file_exists(&key_path) { + // Remove any existing JWT files + for entry in fs::read_dir(root.join("dev")).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + if entry.path().extension().map(|e| e == "jwt").unwrap_or(false) { + fs::remove_file(entry.path()).map_err(|e| e.to_string())?; + } + } + run( + &cargo(), + &["run", "--bin", "moq-token", "--", "--key", "dev/root.jwk", "generate"], + )?; + } + Ok(()) +} + +fn auth_token() -> Result<(), String> { + auth_key()?; + + let root = project_root(); + + // Generate demo-web.jwt + if !file_exists(&root.join("dev/demo-web.jwt")) { + let output = Command::new(cargo()) + .args([ + "run", + "--quiet", + "--bin", + "moq-token", + "--", + "--key", + "dev/root.jwk", + "sign", + "--root", + "demo", + "--subscribe", + "", + "--publish", + "me", + ]) + .current_dir(&root) + .output() + .map_err(|e| format!("Failed to run moq-token: {e}"))?; + + if !output.status.success() { + return Err(format!( + "moq-token failed: {}", + String::from_utf8_lossy(&output.stderr) + )); + } + fs::write(root.join("dev/demo-web.jwt"), output.stdout) + .map_err(|e| format!("Failed to write demo-web.jwt: {e}"))?; + } + + // Generate demo-cli.jwt + if !file_exists(&root.join("dev/demo-cli.jwt")) { + let output = Command::new(cargo()) + .args([ + "run", + "--quiet", + "--bin", + "moq-token", + "--", + "--key", + "dev/root.jwk", + "sign", + "--root", + "demo", + "--publish", + "", + ]) + .current_dir(&root) + .output() + .map_err(|e| format!("Failed to run moq-token: {e}"))?; + + if !output.status.success() { + return Err(format!( + "moq-token failed: {}", + String::from_utf8_lossy(&output.stderr) + )); + } + fs::write(root.join("dev/demo-cli.jwt"), output.stdout) + .map_err(|e| format!("Failed to write demo-cli.jwt: {e}"))?; + } + + // Generate root.jwt + if !file_exists(&root.join("dev/root.jwt")) { + let output = Command::new(cargo()) + .args([ + "run", + "--quiet", + "--bin", + "moq-token", + "--", + "--key", + "dev/root.jwk", + "sign", + "--root", + "", + "--subscribe", + "", + "--publish", + "", + "--cluster", + ]) + .current_dir(&root) + .output() + 
.map_err(|e| format!("Failed to run moq-token: {e}"))?; + + if !output.status.success() { + return Err(format!( + "moq-token failed: {}", + String::from_utf8_lossy(&output.stderr) + )); + } + fs::write(root.join("dev/root.jwt"), output.stdout) + .map_err(|e| format!("Failed to write root.jwt: {e}"))?; + } + + Ok(()) +} + +fn download_url(name: &str) -> Result<&'static str, String> { + match name { + "bbb" => Ok("http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4"), + "tos" => Ok("http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/TearsOfSteel.mp4"), + "av1" => Ok("http://download.opencontent.netflix.com.s3.amazonaws.com/AV1/Sparks/Sparks-5994fps-AV1-10bit-1920x1080-2194kbps.mp4"), + "hevc" => Ok("https://test-videos.co.uk/vids/jellyfish/mp4/h265/1080/Jellyfish_1080_10s_30MB.mp4"), + _ => Err(format!("Unknown video name: {name}. Use: bbb, tos, av1, hevc")), + } +} + +fn download(name: &str) -> Result<(), String> { + let root = project_root(); + let mp4_path = root.join(format!("dev/{name}.mp4")); + let fmp4_path = root.join(format!("dev/{name}.fmp4")); + + // Download if not exists + if !file_exists(&mp4_path) { + let url = download_url(name)?; + println!("Downloading {name}.mp4..."); + run("curl", &["-fsSL", url, "-o", mp4_path.to_str().unwrap()])?; + } + + // Convert to fmp4 if not exists + if !file_exists(&fmp4_path) { + println!("Converting to fragmented MP4..."); + run( + "ffmpeg", + &[ + "-loglevel", + "error", + "-i", + mp4_path.to_str().unwrap(), + "-c:v", + "copy", + "-f", + "mp4", + "-movflags", + "cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame", + fmp4_path.to_str().unwrap(), + ], + )?; + } + + Ok(()) +} + +fn ffmpeg_cmaf(input: &str) -> Command { + let mut cmd = Command::new("ffmpeg"); + cmd.args([ + "-hide_banner", + "-v", + "quiet", + "-stream_loop", + "-1", + "-re", + "-i", + input, + "-c", + "copy", + "-f", + "mp4", + "-movflags", + 
"cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame", + "-", + ]) + .current_dir(project_root()) + .stdout(Stdio::piped()); + cmd +} + +fn pub_fmp4(name: &str, url: &str, args: &[String]) -> Result<(), String> { + download(name)?; + run(&cargo(), &["build", "--bin", "moq"])?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.fmp4")); + + let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); + + let mut moq_args = vec![ + "run".to_string(), + "--bin".to_string(), + "moq".to_string(), + "--".to_string(), + ]; + moq_args.extend(args.iter().cloned()); + moq_args.extend([ + "publish".to_string(), + "--url".to_string(), + url.to_string(), + "--name".to_string(), + name.to_string(), + "fmp4".to_string(), + ]); + + let status = Command::new(cargo()) + .args(&moq_args) + .current_dir(&root) + .stdin(ffmpeg_stdout) + .status() + .map_err(|e| format!("Failed to run moq: {e}"))?; + + ffmpeg.wait().ok(); + + if status.success() { + Ok(()) + } else { + Err(format!("moq failed with status: {status}")) + } +} + +fn pub_hls(name: &str, relay: &str) -> Result<(), String> { + download(name)?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.mp4")); + let out_dir = root.join(format!("dev/{name}")); + + // Clean and create output directory + if out_dir.exists() { + fs::remove_dir_all(&out_dir).map_err(|e| e.to_string())?; + } + fs::create_dir_all(&out_dir).map_err(|e| e.to_string())?; + + println!(">>> Generating HLS stream to disk (1280x720 + 256x144)..."); + + // Start ffmpeg in the background + let mut ffmpeg = Command::new("ffmpeg") + .args([ + "-hide_banner", + "-loglevel", + "warning", + "-re", + "-stream_loop", + "-1", + "-i", + input.to_str().unwrap(), + "-filter_complex", + "[0:v]split=2[v0][v1];[v0]scale=-2:720[v720];[v1]scale=-2:144[v144]", + "-map", + "[v720]", + "-map", + "[v144]", + "-map", + 
"0:a:0", + "-r", + "25", + "-preset", + "veryfast", + "-g", + "50", + "-keyint_min", + "50", + "-sc_threshold", + "0", + "-c:v:0", + "libx264", + "-profile:v:0", + "high", + "-level:v:0", + "4.1", + "-pix_fmt:v:0", + "yuv420p", + "-tag:v:0", + "avc1", + "-b:v:0", + "4M", + "-maxrate:v:0", + "4.4M", + "-bufsize:v:0", + "8M", + "-c:v:1", + "libx264", + "-profile:v:1", + "high", + "-level:v:1", + "4.1", + "-pix_fmt:v:1", + "yuv420p", + "-tag:v:1", + "avc1", + "-b:v:1", + "300k", + "-maxrate:v:1", + "330k", + "-bufsize:v:1", + "600k", + "-c:a", + "aac", + "-b:a", + "128k", + "-f", + "hls", + "-hls_time", + "2", + "-hls_list_size", + "6", + "-hls_flags", + "independent_segments+delete_segments", + "-hls_segment_type", + "fmp4", + "-master_pl_name", + "master.m3u8", + "-var_stream_map", + "v:0,agroup:audio,name:720 v:1,agroup:audio,name:144 a:0,agroup:audio,name:audio", + "-hls_segment_filename", + &format!("{}/v%v/segment_%09d.m4s", out_dir.display()), + &format!("{}/v%v/stream.m3u8", out_dir.display()), + ]) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + // Wait for master playlist + println!(">>> Waiting for HLS playlist generation..."); + let master_path = out_dir.join("master.m3u8"); + for _ in 0..60 { + if file_exists(&master_path) { + break; + } + std::thread::sleep(std::time::Duration::from_millis(500)); + } + + if !file_exists(&master_path) { + ffmpeg.kill().ok(); + return Err("master.m3u8 not generated in time".to_string()); + } + + // Wait for variant playlists + println!(">>> Waiting for variant playlists..."); + std::thread::sleep(std::time::Duration::from_secs(2)); + + // Run moq to ingest from local files + println!(">>> Running with --passthrough flag"); + let moq_result = run( + &cargo(), + &[ + "run", + "--bin", + "moq", + "--", + "publish", + "--url", + relay, + "--name", + name, + "hls", + "--playlist", + &format!("{}/master.m3u8", out_dir.display()), + "--passthrough", + ], + ); + + ffmpeg.kill().ok(); + ffmpeg.wait().ok(); + 
+ moq_result +} + +fn pub_h264(name: &str, url: &str, args: &[String]) -> Result<(), String> { + download(name)?; + run(&cargo(), &["build", "--bin", "moq"])?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.fmp4")); + + let mut ffmpeg = Command::new("ffmpeg") + .args([ + "-hide_banner", + "-v", + "quiet", + "-stream_loop", + "-1", + "-re", + "-i", + input.to_str().unwrap(), + "-c:v", + "copy", + "-an", + "-bsf:v", + "h264_mp4toannexb", + "-f", + "h264", + "-", + ]) + .current_dir(&root) + .stdout(Stdio::piped()) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); + + let mut moq_args = vec![ + "run".to_string(), + "--bin".to_string(), + "moq".to_string(), + "--".to_string(), + ]; + moq_args.extend(args.iter().cloned()); + moq_args.extend([ + "publish".to_string(), + "--url".to_string(), + url.to_string(), + "--name".to_string(), + name.to_string(), + "--format".to_string(), + "annex-b".to_string(), + ]); + + let status = Command::new(cargo()) + .args(&moq_args) + .current_dir(&root) + .stdin(ffmpeg_stdout) + .status() + .map_err(|e| format!("Failed to run moq: {e}"))?; + + ffmpeg.wait().ok(); + + if status.success() { + Ok(()) + } else { + Err(format!("moq failed with status: {status}")) + } +} + +fn pub_iroh(name: &str, url: &str, prefix: &str) -> Result<(), String> { + download(name)?; + run(&cargo(), &["build", "--bin", "moq"])?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.fmp4")); + let broadcast_name = format!("{prefix}{name}"); + + let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); + + let status = Command::new(cargo()) + .args([ + "run", + "--bin", + "moq", + "--", + "--iroh-enabled", + "publish", + "--url", + url, + "--name", + &broadcast_name, + "fmp4", + ]) + .current_dir(&root) + .stdin(ffmpeg_stdout) + 
.status() + .map_err(|e| format!("Failed to run moq: {e}"))?; + + ffmpeg.wait().ok(); + + if status.success() { + Ok(()) + } else { + Err(format!("moq failed with status: {status}")) + } +} + +fn pub_gst() -> Result<(), String> { + println!("GStreamer plugin has moved to: https://github.com/moq-dev/gstreamer"); + println!("Install and use hang-gst directly for GStreamer functionality"); + Ok(()) +} + +fn serve_fmp4(name: &str, args: &[String]) -> Result<(), String> { + download(name)?; + run(&cargo(), &["build", "--bin", "moq"])?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.fmp4")); + + let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); + + let mut moq_args = vec![ + "run".to_string(), + "--bin".to_string(), + "moq".to_string(), + "--".to_string(), + ]; + moq_args.extend(args.iter().cloned()); + moq_args.extend([ + "serve".to_string(), + "--listen".to_string(), + "[::]:4443".to_string(), + "--tls-generate".to_string(), + "localhost".to_string(), + "--name".to_string(), + name.to_string(), + "fmp4".to_string(), + ]); + + let status = Command::new(cargo()) + .args(&moq_args) + .current_dir(&root) + .stdin(ffmpeg_stdout) + .status() + .map_err(|e| format!("Failed to run moq: {e}"))?; + + ffmpeg.wait().ok(); + + if status.success() { + Ok(()) + } else { + Err(format!("moq failed with status: {status}")) + } +} + +fn serve_hls(name: &str, port: &str) -> Result<(), String> { + download(name)?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.mp4")); + let out_dir = root.join(format!("dev/{name}")); + + // Clean and create output directory + if out_dir.exists() { + fs::remove_dir_all(&out_dir).map_err(|e| e.to_string())?; + } + fs::create_dir_all(&out_dir).map_err(|e| e.to_string())?; + + println!(">>> Starting HLS stream generation..."); + println!(">>> Master playlist: 
http://localhost:{port}/master.m3u8"); + + // Start ffmpeg + let mut ffmpeg = Command::new("ffmpeg") + .args([ + "-loglevel", + "warning", + "-re", + "-stream_loop", + "-1", + "-i", + input.to_str().unwrap(), + "-map", + "0:v:0", + "-map", + "0:v:0", + "-map", + "0:a:0", + "-r", + "25", + "-preset", + "veryfast", + "-g", + "50", + "-keyint_min", + "50", + "-sc_threshold", + "0", + "-c:v:0", + "libx264", + "-profile:v:0", + "high", + "-level:v:0", + "4.1", + "-pix_fmt:v:0", + "yuv420p", + "-tag:v:0", + "avc1", + "-bsf:v:0", + "dump_extra", + "-b:v:0", + "4M", + "-vf:0", + "scale=1920:-2", + "-c:v:1", + "libx264", + "-profile:v:1", + "high", + "-level:v:1", + "4.1", + "-pix_fmt:v:1", + "yuv420p", + "-tag:v:1", + "avc1", + "-bsf:v:1", + "dump_extra", + "-b:v:1", + "300k", + "-vf:1", + "scale=256:-2", + "-c:a", + "aac", + "-b:a", + "128k", + "-f", + "hls", + "-hls_time", + "2", + "-hls_list_size", + "12", + "-hls_flags", + "independent_segments+delete_segments", + "-hls_segment_type", + "fmp4", + "-master_pl_name", + "master.m3u8", + "-var_stream_map", + "v:0,agroup:audio v:1,agroup:audio a:0,agroup:audio", + "-hls_segment_filename", + &format!("{}/v%v/segment_%09d.m4s", out_dir.display()), + &format!("{}/v%v/stream.m3u8", out_dir.display()), + ]) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + std::thread::sleep(std::time::Duration::from_secs(2)); + println!(">>> HTTP server: http://localhost:{port}/"); + + let http_result = run_in_dir( + "python3", + &["-m", "http.server", port], + &out_dir, + ); + + ffmpeg.kill().ok(); + ffmpeg.wait().ok(); + + http_result +} + +fn web(url: &str) -> Result<(), String> { + let root = project_root(); + let demo_dir = root.join("js/hang-demo"); + + let mut cmd = Command::new("bun"); + cmd.args(["run", "dev"]) + .current_dir(&demo_dir) + .env("VITE_RELAY_URL", url); + + let status = cmd.status().map_err(|e| format!("Failed to run bun: {e}"))?; + + if status.success() { + Ok(()) + } else { + Err(format!("bun 
failed with status: {status}")) + } +} + +fn clock(action: &str, url: &str, args: &[String]) -> Result<(), String> { + if action != "publish" && action != "subscribe" { + return Err(format!( + "action must be 'publish' or 'subscribe', got '{action}'" + )); + } + + let mut cmd_args = vec![ + "run", + "--bin", + "moq-clock", + "--", + "--url", + url, + "--broadcast", + "clock", + ]; + let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect(); + cmd_args.extend(args_refs); + cmd_args.push(action); + + run(&cargo(), &cmd_args) +} + +fn check() -> Result<(), String> { + // JS checks + run("bun", &["install", "--frozen-lockfile"])?; + if is_tty() { + run("bun", &["run", "--filter=*", "--elide-lines=0", "check"])?; + } else { + run("bun", &["run", "--filter=*", "check"])?; + } + run("bun", &["biome", "check"])?; + + // Rust checks + run(&cargo(), &["check", "--all-targets", "--all-features"])?; + run( + &cargo(), + &[ + "clippy", + "--all-targets", + "--all-features", + "--", + "-D", + "warnings", + ], + )?; + run(&cargo(), &["fmt", "--all", "--check"])?; + + // Documentation warnings + run_with_env( + &cargo(), + &["doc", "--no-deps", "--workspace"], + &[("RUSTDOCFLAGS", "-D warnings")], + )?; + + // cargo-shear + run(&cargo(), &["shear"])?; + + // cargo-sort + run(&cargo(), &["sort", "--workspace", "--check"])?; + + // tofu checks (if available) + if command_exists("tofu") { + let root = project_root(); + run_in_dir("tofu", &["fmt", "-check", "-recursive"], &root.join("cdn"))?; + } + + // nix checks (if available) + if command_exists("nix") { + run("nix", &["flake", "check"])?; + } + + Ok(()) +} + +fn check_all() -> Result<(), String> { + check()?; + + println!("Checking all feature combinations for hang..."); + run( + &cargo(), + &[ + "hack", + "check", + "--package", + "hang", + "--each-feature", + "--no-dev-deps", + ], + ) +} + +fn test() -> Result<(), String> { + // JS tests + run("bun", &["install", "--frozen-lockfile"])?; + if is_tty() { + run("bun", 
&["run", "--filter=*", "--elide-lines=0", "test"])?; + } else { + run("bun", &["run", "--filter=*", "test"])?; + } + + // Rust tests + run(&cargo(), &["test", "--all-targets", "--all-features"]) +} + +fn test_all() -> Result<(), String> { + test()?; + + println!("Testing all feature combinations for hang..."); + run( + &cargo(), + &["hack", "test", "--package", "hang", "--each-feature"], + ) +} + +fn fix() -> Result<(), String> { + // JS fixes + run("bun", &["install"])?; + run("bun", &["biome", "check", "--write"])?; + + // Rust fixes + run( + &cargo(), + &[ + "clippy", + "--fix", + "--allow-staged", + "--allow-dirty", + "--all-targets", + "--all-features", + ], + )?; + run(&cargo(), &["fmt", "--all"])?; + + // cargo-shear + run(&cargo(), &["shear", "--fix"])?; + + // cargo-sort + run(&cargo(), &["sort", "--workspace"])?; + + // tofu fixes (if available) + if command_exists("tofu") { + let root = project_root(); + run_in_dir("tofu", &["fmt", "-recursive"], &root.join("cdn"))?; + } + + Ok(()) +} + +fn build() -> Result<(), String> { + run("bun", &["run", "--filter=*", "build"])?; + run(&cargo(), &["build"]) +} + +fn update() -> Result<(), String> { + run("bun", &["update"])?; + run("bun", &["outdated"])?; + + // Update patch versions + run(&cargo(), &["update"])?; + + // Update incompatible versions + run(&cargo(), &["upgrade", "--incompatible"])?; + + // Update nix flake + run("nix", &["flake", "update"]) +} + +fn console_relay() -> Result<(), String> { + run("tokio-console", &["http://127.0.0.1:6680"]) +} + +fn console_pub() -> Result<(), String> { + run("tokio-console", &["http://127.0.0.1:6681"]) +} + +fn doc() -> Result<(), String> { + let root = project_root(); + run_in_dir("bun", &["run", "dev"], &root.join("doc")) +} + +fn throttle() -> Result<(), String> { + run("dev/throttle", &[]) +} From 1fbef5767c63b019297b2927489d0b6b11c3066b Mon Sep 17 00:00:00 2001 From: Claude Date: Thu, 5 Feb 2026 18:14:33 +0000 Subject: [PATCH 2/5] Rename xtask to x and add shell 
script - Renamed rs/xtask to rs/x for brevity - Added ./x shell script for easy invocation - Now run tasks with `./x check` or `cargo x check` https://claude.ai/code/session_01VVPt25Sduerr9A8aFvRXtt --- .cargo/config.toml | 2 +- Cargo.lock | 14 +++++++------- Cargo.toml | 2 +- rs/{xtask => x}/Cargo.toml | 2 +- rs/{xtask => x}/src/main.rs | 6 +++--- x | 4 ++++ 6 files changed, 17 insertions(+), 13 deletions(-) rename rs/{xtask => x}/Cargo.toml (89%) rename rs/{xtask => x}/src/main.rs (99%) create mode 100755 x diff --git a/.cargo/config.toml b/.cargo/config.toml index 488284fb6..3f726677c 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -2,4 +2,4 @@ rustflags = ["--cfg", "tokio_unstable"] [alias] -xtask = "run --package xtask --" +x = "run --package x --" diff --git a/Cargo.lock b/Cargo.lock index f7d6409a9..829c56b8c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5972,6 +5972,13 @@ dependencies = [ "web-sys", ] +[[package]] +name = "x" +version = "0.1.0" +dependencies = [ + "clap", +] + [[package]] name = "x509-parser" version = "0.18.0" @@ -6006,13 +6013,6 @@ dependencies = [ "xml-rs", ] -[[package]] -name = "xtask" -version = "0.1.0" -dependencies = [ - "clap", -] - [[package]] name = "yasna" version = "0.5.2" diff --git a/Cargo.toml b/Cargo.toml index 719cd5b91..427fd5505 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,7 @@ members = [ "rs/moq-relay", "rs/moq-token", "rs/moq-token-cli", - "rs/xtask", + "rs/x", ] resolver = "2" diff --git a/rs/xtask/Cargo.toml b/rs/x/Cargo.toml similarity index 89% rename from rs/xtask/Cargo.toml rename to rs/x/Cargo.toml index b16a6252a..19178b1d8 100644 --- a/rs/xtask/Cargo.toml +++ b/rs/x/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "xtask" +name = "x" version = "0.1.0" edition = "2021" publish = false diff --git a/rs/xtask/src/main.rs b/rs/x/src/main.rs similarity index 99% rename from rs/xtask/src/main.rs rename to rs/x/src/main.rs index 4ce0b5ecd..cdb6b014d 100644 --- a/rs/xtask/src/main.rs +++ 
b/rs/x/src/main.rs @@ -1,6 +1,6 @@ -//! xtask - Development task runner for MoQ +//! Development task runner for MoQ //! -//! Run with: `cargo xtask <command>` +//! Run with: `./x <command>` or `cargo x <command>` //! //! This replaces the justfile with a pure Rust solution. @@ -15,7 +15,7 @@ use clap::{Parser, Subcommand}; /// MoQ development task runner #[derive(Parser)] -#[command(name = "xtask", about = "MoQ development task runner")] +#[command(name = "x", about = "MoQ development task runner")] struct Cli { #[command(subcommand)] command: Commands, diff --git a/x b/x new file mode 100755 index 000000000..1e97a40d3 --- /dev/null +++ b/x @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +# Task runner for MoQ development +# Usage: ./x <command> [args...] +exec cargo x "$@" From ed320c0e8226f087cbe20b1615e3c7b31a5e24db Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Thu, 5 Feb 2026 11:46:28 -0800 Subject: [PATCH 3/5] fmt --- rs/x/src/main.rs | 2238 ++++++++++++++++++++++------------------ 1 file changed, 1089 insertions(+), 1149 deletions(-) diff --git a/rs/x/src/main.rs b/rs/x/src/main.rs index cdb6b014d..add48fb69 100644 --- a/rs/x/src/main.rs +++ b/rs/x/src/main.rs @@ -5,10 +5,10 @@ //! This replaces the justfile with a pure Rust solution. 
use std::{ - env, fs, - io::IsTerminal, - path::{Path, PathBuf}, - process::{Command, ExitCode, Stdio}, + env, fs, + io::IsTerminal, + path::{Path, PathBuf}, + process::{Command, ExitCode, Stdio}, }; use clap::{Parser, Subcommand}; @@ -17,580 +17,552 @@ use clap::{Parser, Subcommand}; #[derive(Parser)] #[command(name = "x", about = "MoQ development task runner")] struct Cli { - #[command(subcommand)] - command: Commands, + #[command(subcommand)] + command: Commands, } #[derive(Subcommand)] enum Commands { - /// Install dependencies - Install, - - /// Run all development services (relay, web server, publish bbb) - Dev, - - /// Run a localhost relay server without authentication - Relay { - /// Additional arguments to pass to moq-relay - #[arg(trailing_var_arg = true)] - args: Vec, - }, - - /// Run a cluster of relay servers - Cluster, - - /// Run a localhost root server - Root, - - /// Run a localhost leaf server - Leaf, - - /// Authentication commands - Auth { - #[command(subcommand)] - command: AuthCommands, - }, - - /// Download test videos - Download { - /// Video name (bbb, tos, av1, hevc) - name: String, - }, - - /// Publish media to a relay - Pub { - #[command(subcommand)] - command: PubCommands, - }, - - /// Serve media directly (without relay) - Serve { - #[command(subcommand)] - command: ServeCommands, - }, - - /// Run the web development server - Web { - /// Relay URL - #[arg(default_value = "http://localhost:4443/anon")] - url: String, - }, - - /// Clock broadcast commands - Clock { - /// Action: publish or subscribe - action: String, - /// Relay URL - #[arg(default_value = "http://localhost:4443/anon")] - url: String, - /// Additional arguments - #[arg(trailing_var_arg = true)] - args: Vec, - }, - - /// Run CI checks - Check { - #[command(subcommand)] - command: Option, - }, - - /// Run tests - Test { - #[command(subcommand)] - command: Option, - }, - - /// Auto-fix linting issues - Fix, - - /// Build all packages - Build, - - /// Upgrade dependencies - 
Update, - - /// Tokio console commands - Console { - #[command(subcommand)] - command: ConsoleCommands, - }, - - /// Serve documentation locally - Doc, - - /// Throttle UDP traffic for testing (macOS only) - Throttle, + /// Install dependencies + Install, + + /// Run all development services (relay, web server, publish bbb) + Dev, + + /// Run a localhost relay server without authentication + Relay { + /// Additional arguments to pass to moq-relay + #[arg(trailing_var_arg = true)] + args: Vec, + }, + + /// Run a cluster of relay servers + Cluster, + + /// Run a localhost root server + Root, + + /// Run a localhost leaf server + Leaf, + + /// Authentication commands + Auth { + #[command(subcommand)] + command: AuthCommands, + }, + + /// Download test videos + Download { + /// Video name (bbb, tos, av1, hevc) + name: String, + }, + + /// Publish media to a relay + Pub { + #[command(subcommand)] + command: PubCommands, + }, + + /// Serve media directly (without relay) + Serve { + #[command(subcommand)] + command: ServeCommands, + }, + + /// Run the web development server + Web { + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + }, + + /// Clock broadcast commands + Clock { + /// Action: publish or subscribe + action: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + /// Additional arguments + #[arg(trailing_var_arg = true)] + args: Vec, + }, + + /// Run CI checks + Check { + #[command(subcommand)] + command: Option, + }, + + /// Run tests + Test { + #[command(subcommand)] + command: Option, + }, + + /// Auto-fix linting issues + Fix, + + /// Build all packages + Build, + + /// Upgrade dependencies + Update, + + /// Tokio console commands + Console { + #[command(subcommand)] + command: ConsoleCommands, + }, + + /// Serve documentation locally + Doc, + + /// Throttle UDP traffic for testing (macOS only) + Throttle, } #[derive(Subcommand)] enum AuthCommands { - /// Generate a random secret 
key for authentication - Key, - /// Generate authentication tokens for local development - Token, + /// Generate a random secret key for authentication + Key, + /// Generate authentication tokens for local development + Token, } #[derive(Subcommand)] enum PubCommands { - /// Publish using fMP4 format (default) - Fmp4 { - /// Video name (bbb, tos, av1, hevc) - name: String, - /// Relay URL - #[arg(default_value = "http://localhost:4443/anon")] - url: String, - /// Additional arguments - #[arg(trailing_var_arg = true)] - args: Vec, - }, - /// Publish using HLS format - Hls { - /// Video name (bbb, tos, av1, hevc) - name: String, - /// Relay URL - #[arg(default_value = "http://localhost:4443/anon")] - relay: String, - }, - /// Publish using H.264 Annex B format - H264 { - /// Video name (bbb, tos, av1, hevc) - name: String, - /// Relay URL - #[arg(default_value = "http://localhost:4443/anon")] - url: String, - /// Additional arguments - #[arg(trailing_var_arg = true)] - args: Vec, - }, - /// Publish using Iroh transport - Iroh { - /// Video name (bbb, tos, av1, hevc) - name: String, - /// Relay URL - url: String, - /// Prefix for the broadcast name - #[arg(default_value = "")] - prefix: String, - }, - /// Publish using GStreamer (deprecated) - Gst { - /// Video name - name: String, - /// Relay URL - #[arg(default_value = "http://localhost:4443/anon")] - url: String, - }, + /// Publish using fMP4 format (default) + Fmp4 { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + /// Additional arguments + #[arg(trailing_var_arg = true)] + args: Vec, + }, + /// Publish using HLS format + Hls { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + relay: String, + }, + /// Publish using H.264 Annex B format + H264 { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Relay URL + #[arg(default_value = 
"http://localhost:4443/anon")] + url: String, + /// Additional arguments + #[arg(trailing_var_arg = true)] + args: Vec, + }, + /// Publish using Iroh transport + Iroh { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Relay URL + url: String, + /// Prefix for the broadcast name + #[arg(default_value = "")] + prefix: String, + }, + /// Publish using GStreamer (deprecated) + Gst { + /// Video name + name: String, + /// Relay URL + #[arg(default_value = "http://localhost:4443/anon")] + url: String, + }, } #[derive(Subcommand)] enum ServeCommands { - /// Serve using fMP4 format - Fmp4 { - /// Video name (bbb, tos, av1, hevc) - name: String, - /// Additional arguments (e.g., --iroh-enabled) - #[arg(trailing_var_arg = true)] - args: Vec, - }, - /// Generate and serve an HLS stream for testing - Hls { - /// Video name (bbb, tos, av1, hevc) - name: String, - /// HTTP server port - #[arg(default_value = "8000")] - port: String, - }, + /// Serve using fMP4 format + Fmp4 { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// Additional arguments (e.g., --iroh-enabled) + #[arg(trailing_var_arg = true)] + args: Vec, + }, + /// Generate and serve an HLS stream for testing + Hls { + /// Video name (bbb, tos, av1, hevc) + name: String, + /// HTTP server port + #[arg(default_value = "8000")] + port: String, + }, } #[derive(Subcommand)] enum CheckCommands { - /// Run comprehensive checks including all feature combinations - All, + /// Run comprehensive checks including all feature combinations + All, } #[derive(Subcommand)] enum TestCommands { - /// Run comprehensive tests including all feature combinations - All, + /// Run comprehensive tests including all feature combinations + All, } #[derive(Subcommand)] enum ConsoleCommands { - /// Connect to the relay server (port 6680) - Relay, - /// Connect to the publisher (port 6681) - Pub, + /// Connect to the relay server (port 6680) + Relay, + /// Connect to the publisher (port 6681) + Pub, } fn main() -> ExitCode { 
- let cli = Cli::parse(); - - let result = match cli.command { - Commands::Install => install(), - Commands::Dev => dev(), - Commands::Relay { args } => relay(&args), - Commands::Cluster => cluster(), - Commands::Root => root(), - Commands::Leaf => leaf(), - Commands::Auth { command } => match command { - AuthCommands::Key => auth_key(), - AuthCommands::Token => auth_token(), - }, - Commands::Download { name } => download(&name), - Commands::Pub { command } => match command { - PubCommands::Fmp4 { name, url, args } => pub_fmp4(&name, &url, &args), - PubCommands::Hls { name, relay } => pub_hls(&name, &relay), - PubCommands::H264 { name, url, args } => pub_h264(&name, &url, &args), - PubCommands::Iroh { name, url, prefix } => pub_iroh(&name, &url, &prefix), - PubCommands::Gst { name: _, url: _ } => pub_gst(), - }, - Commands::Serve { command } => match command { - ServeCommands::Fmp4 { name, args } => serve_fmp4(&name, &args), - ServeCommands::Hls { name, port } => serve_hls(&name, &port), - }, - Commands::Web { url } => web(&url), - Commands::Clock { action, url, args } => clock(&action, &url, &args), - Commands::Check { command } => match command { - Some(CheckCommands::All) => check_all(), - None => check(), - }, - Commands::Test { command } => match command { - Some(TestCommands::All) => test_all(), - None => test(), - }, - Commands::Fix => fix(), - Commands::Build => build(), - Commands::Update => update(), - Commands::Console { command } => match command { - ConsoleCommands::Relay => console_relay(), - ConsoleCommands::Pub => console_pub(), - }, - Commands::Doc => doc(), - Commands::Throttle => throttle(), - }; - - match result { - Ok(()) => ExitCode::SUCCESS, - Err(e) => { - eprintln!("Error: {e}"); - ExitCode::FAILURE - } - } + let cli = Cli::parse(); + + let result = match cli.command { + Commands::Install => install(), + Commands::Dev => dev(), + Commands::Relay { args } => relay(&args), + Commands::Cluster => cluster(), + Commands::Root => root(), + 
Commands::Leaf => leaf(), + Commands::Auth { command } => match command { + AuthCommands::Key => auth_key(), + AuthCommands::Token => auth_token(), + }, + Commands::Download { name } => download(&name), + Commands::Pub { command } => match command { + PubCommands::Fmp4 { name, url, args } => pub_fmp4(&name, &url, &args), + PubCommands::Hls { name, relay } => pub_hls(&name, &relay), + PubCommands::H264 { name, url, args } => pub_h264(&name, &url, &args), + PubCommands::Iroh { name, url, prefix } => pub_iroh(&name, &url, &prefix), + PubCommands::Gst { name: _, url: _ } => pub_gst(), + }, + Commands::Serve { command } => match command { + ServeCommands::Fmp4 { name, args } => serve_fmp4(&name, &args), + ServeCommands::Hls { name, port } => serve_hls(&name, &port), + }, + Commands::Web { url } => web(&url), + Commands::Clock { action, url, args } => clock(&action, &url, &args), + Commands::Check { command } => match command { + Some(CheckCommands::All) => check_all(), + None => check(), + }, + Commands::Test { command } => match command { + Some(TestCommands::All) => test_all(), + None => test(), + }, + Commands::Fix => fix(), + Commands::Build => build(), + Commands::Update => update(), + Commands::Console { command } => match command { + ConsoleCommands::Relay => console_relay(), + ConsoleCommands::Pub => console_pub(), + }, + Commands::Doc => doc(), + Commands::Throttle => throttle(), + }; + + match result { + Ok(()) => ExitCode::SUCCESS, + Err(e) => { + eprintln!("Error: {e}"); + ExitCode::FAILURE + } + } } // Helper functions fn project_root() -> PathBuf { - Path::new(env!("CARGO_MANIFEST_DIR")) - .parent() - .expect("xtask should be in rs/xtask") - .parent() - .expect("rs should have a parent") - .to_path_buf() + Path::new(env!("CARGO_MANIFEST_DIR")) + .parent() + .expect("xtask should be in rs/xtask") + .parent() + .expect("rs should have a parent") + .to_path_buf() } fn run(program: &str, args: &[&str]) -> Result<(), String> { - let status = 
Command::new(program) - .args(args) - .current_dir(project_root()) - .status() - .map_err(|e| format!("Failed to run {program}: {e}"))?; - - if status.success() { - Ok(()) - } else { - Err(format!("{program} failed with status: {status}")) - } + let status = Command::new(program) + .args(args) + .current_dir(project_root()) + .status() + .map_err(|e| format!("Failed to run {program}: {e}"))?; + + if status.success() { + Ok(()) + } else { + Err(format!("{program} failed with status: {status}")) + } } fn run_with_env(program: &str, args: &[&str], env: &[(&str, &str)]) -> Result<(), String> { - let mut cmd = Command::new(program); - cmd.args(args).current_dir(project_root()); - for (k, v) in env { - cmd.env(k, v); - } - let status = cmd - .status() - .map_err(|e| format!("Failed to run {program}: {e}"))?; - - if status.success() { - Ok(()) - } else { - Err(format!("{program} failed with status: {status}")) - } + let mut cmd = Command::new(program); + cmd.args(args).current_dir(project_root()); + for (k, v) in env { + cmd.env(k, v); + } + let status = cmd.status().map_err(|e| format!("Failed to run {program}: {e}"))?; + + if status.success() { + Ok(()) + } else { + Err(format!("{program} failed with status: {status}")) + } } fn run_in_dir(program: &str, args: &[&str], dir: &Path) -> Result<(), String> { - let status = Command::new(program) - .args(args) - .current_dir(dir) - .status() - .map_err(|e| format!("Failed to run {program}: {e}"))?; - - if status.success() { - Ok(()) - } else { - Err(format!("{program} failed with status: {status}")) - } + let status = Command::new(program) + .args(args) + .current_dir(dir) + .status() + .map_err(|e| format!("Failed to run {program}: {e}"))?; + + if status.success() { + Ok(()) + } else { + Err(format!("{program} failed with status: {status}")) + } } fn cargo() -> String { - env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()) + env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()) } fn is_tty() -> bool { - 
std::io::stdout().is_terminal() + std::io::stdout().is_terminal() } fn file_exists(path: &Path) -> bool { - path.exists() + path.exists() } fn command_exists(cmd: &str) -> bool { - Command::new("which") - .arg(cmd) - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - .map(|s| s.success()) - .unwrap_or(false) + Command::new("which") + .arg(cmd) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + .map(|s| s.success()) + .unwrap_or(false) } // Command implementations fn install() -> Result<(), String> { - run("bun", &["install"])?; - run( - &cargo(), - &[ - "install", - "--locked", - "cargo-shear", - "cargo-sort", - "cargo-upgrades", - "cargo-edit", - "cargo-hack", - ], - ) + run("bun", &["install"])?; + run( + &cargo(), + &[ + "install", + "--locked", + "cargo-shear", + "cargo-sort", + "cargo-upgrades", + "cargo-edit", + "cargo-hack", + ], + ) } fn dev() -> Result<(), String> { - run("bun", &["install"])?; - run(&cargo(), &["build"])?; - run( - "bun", - &[ - "run", - "concurrently", - "--kill-others", - "--names", - "srv,bbb,web", - "--prefix-colors", - "auto", - "cargo xtask relay", - "sleep 1 && cargo xtask pub fmp4 bbb http://localhost:4443/anon", - "sleep 2 && cargo xtask web http://localhost:4443/anon", - ], - ) + run("bun", &["install"])?; + run(&cargo(), &["build"])?; + run( + "bun", + &[ + "run", + "concurrently", + "--kill-others", + "--names", + "srv,bbb,web", + "--prefix-colors", + "auto", + "cargo xtask relay", + "sleep 1 && cargo xtask pub fmp4 bbb http://localhost:4443/anon", + "sleep 2 && cargo xtask web http://localhost:4443/anon", + ], + ) } fn relay(args: &[String]) -> Result<(), String> { - let mut cmd_args = vec![ - "run", - "--bin", - "moq-relay", - "--", - "dev/relay.toml", - ]; - let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect(); - cmd_args.extend(args_refs); - - run_with_env( - &cargo(), - &cmd_args, - &[("TOKIO_CONSOLE_BIND", "127.0.0.1:6680")], - ) + let mut cmd_args = vec!["run", "--bin", 
"moq-relay", "--", "dev/relay.toml"]; + let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect(); + cmd_args.extend(args_refs); + + run_with_env(&cargo(), &cmd_args, &[("TOKIO_CONSOLE_BIND", "127.0.0.1:6680")]) } fn cluster() -> Result<(), String> { - run("bun", &["install"])?; - auth_token()?; - run(&cargo(), &["build", "--bin", "moq-relay"])?; - - // Read JWT tokens - let root = project_root(); - let demo_cli_jwt = fs::read_to_string(root.join("dev/demo-cli.jwt")) - .map_err(|e| format!("Failed to read demo-cli.jwt: {e}"))? - .trim() - .to_string(); - let demo_web_jwt = fs::read_to_string(root.join("dev/demo-web.jwt")) - .map_err(|e| format!("Failed to read demo-web.jwt: {e}"))? - .trim() - .to_string(); - - run( - "bun", - &[ - "run", - "concurrently", - "--kill-others", - "--names", - "root,leaf,bbb,tos,web", - "--prefix-colors", - "auto", - "cargo xtask root", - "sleep 1 && cargo xtask leaf", - &format!( - "sleep 2 && cargo xtask pub fmp4 bbb 'http://localhost:4444/demo?jwt={demo_cli_jwt}'" - ), - &format!( - "sleep 3 && cargo xtask pub fmp4 tos 'http://localhost:4443/demo?jwt={demo_cli_jwt}'" - ), - &format!( - "sleep 4 && cargo xtask web 'http://localhost:4443/demo?jwt={demo_web_jwt}'" - ), - ], - ) + run("bun", &["install"])?; + auth_token()?; + run(&cargo(), &["build", "--bin", "moq-relay"])?; + + // Read JWT tokens + let root = project_root(); + let demo_cli_jwt = fs::read_to_string(root.join("dev/demo-cli.jwt")) + .map_err(|e| format!("Failed to read demo-cli.jwt: {e}"))? + .trim() + .to_string(); + let demo_web_jwt = fs::read_to_string(root.join("dev/demo-web.jwt")) + .map_err(|e| format!("Failed to read demo-web.jwt: {e}"))? 
+ .trim() + .to_string(); + + run( + "bun", + &[ + "run", + "concurrently", + "--kill-others", + "--names", + "root,leaf,bbb,tos,web", + "--prefix-colors", + "auto", + "cargo xtask root", + "sleep 1 && cargo xtask leaf", + &format!("sleep 2 && cargo xtask pub fmp4 bbb 'http://localhost:4444/demo?jwt={demo_cli_jwt}'"), + &format!("sleep 3 && cargo xtask pub fmp4 tos 'http://localhost:4443/demo?jwt={demo_cli_jwt}'"), + &format!("sleep 4 && cargo xtask web 'http://localhost:4443/demo?jwt={demo_web_jwt}'"), + ], + ) } fn root() -> Result<(), String> { - auth_key()?; - run(&cargo(), &["run", "--bin", "moq-relay", "--", "dev/root.toml"]) + auth_key()?; + run(&cargo(), &["run", "--bin", "moq-relay", "--", "dev/root.toml"]) } fn leaf() -> Result<(), String> { - auth_token()?; - run(&cargo(), &["run", "--bin", "moq-relay", "--", "dev/leaf.toml"]) + auth_token()?; + run(&cargo(), &["run", "--bin", "moq-relay", "--", "dev/leaf.toml"]) } fn auth_key() -> Result<(), String> { - let root = project_root(); - let key_path = root.join("dev/root.jwk"); - - if !file_exists(&key_path) { - // Remove any existing JWT files - for entry in fs::read_dir(root.join("dev")).map_err(|e| e.to_string())? { - let entry = entry.map_err(|e| e.to_string())?; - if entry.path().extension().map(|e| e == "jwt").unwrap_or(false) { - fs::remove_file(entry.path()).map_err(|e| e.to_string())?; - } - } - run( - &cargo(), - &["run", "--bin", "moq-token", "--", "--key", "dev/root.jwk", "generate"], - )?; - } - Ok(()) + let root = project_root(); + let key_path = root.join("dev/root.jwk"); + + if !file_exists(&key_path) { + // Remove any existing JWT files + for entry in fs::read_dir(root.join("dev")).map_err(|e| e.to_string())? 
{ + let entry = entry.map_err(|e| e.to_string())?; + if entry.path().extension().map(|e| e == "jwt").unwrap_or(false) { + fs::remove_file(entry.path()).map_err(|e| e.to_string())?; + } + } + run( + &cargo(), + &["run", "--bin", "moq-token", "--", "--key", "dev/root.jwk", "generate"], + )?; + } + Ok(()) } fn auth_token() -> Result<(), String> { - auth_key()?; - - let root = project_root(); - - // Generate demo-web.jwt - if !file_exists(&root.join("dev/demo-web.jwt")) { - let output = Command::new(cargo()) - .args([ - "run", - "--quiet", - "--bin", - "moq-token", - "--", - "--key", - "dev/root.jwk", - "sign", - "--root", - "demo", - "--subscribe", - "", - "--publish", - "me", - ]) - .current_dir(&root) - .output() - .map_err(|e| format!("Failed to run moq-token: {e}"))?; - - if !output.status.success() { - return Err(format!( - "moq-token failed: {}", - String::from_utf8_lossy(&output.stderr) - )); - } - fs::write(root.join("dev/demo-web.jwt"), output.stdout) - .map_err(|e| format!("Failed to write demo-web.jwt: {e}"))?; - } - - // Generate demo-cli.jwt - if !file_exists(&root.join("dev/demo-cli.jwt")) { - let output = Command::new(cargo()) - .args([ - "run", - "--quiet", - "--bin", - "moq-token", - "--", - "--key", - "dev/root.jwk", - "sign", - "--root", - "demo", - "--publish", - "", - ]) - .current_dir(&root) - .output() - .map_err(|e| format!("Failed to run moq-token: {e}"))?; - - if !output.status.success() { - return Err(format!( - "moq-token failed: {}", - String::from_utf8_lossy(&output.stderr) - )); - } - fs::write(root.join("dev/demo-cli.jwt"), output.stdout) - .map_err(|e| format!("Failed to write demo-cli.jwt: {e}"))?; - } - - // Generate root.jwt - if !file_exists(&root.join("dev/root.jwt")) { - let output = Command::new(cargo()) - .args([ - "run", - "--quiet", - "--bin", - "moq-token", - "--", - "--key", - "dev/root.jwk", - "sign", - "--root", - "", - "--subscribe", - "", - "--publish", - "", - "--cluster", - ]) - .current_dir(&root) - .output() - 
.map_err(|e| format!("Failed to run moq-token: {e}"))?; - - if !output.status.success() { - return Err(format!( - "moq-token failed: {}", - String::from_utf8_lossy(&output.stderr) - )); - } - fs::write(root.join("dev/root.jwt"), output.stdout) - .map_err(|e| format!("Failed to write root.jwt: {e}"))?; - } - - Ok(()) + auth_key()?; + + let root = project_root(); + + // Generate demo-web.jwt + if !file_exists(&root.join("dev/demo-web.jwt")) { + let output = Command::new(cargo()) + .args([ + "run", + "--quiet", + "--bin", + "moq-token", + "--", + "--key", + "dev/root.jwk", + "sign", + "--root", + "demo", + "--subscribe", + "", + "--publish", + "me", + ]) + .current_dir(&root) + .output() + .map_err(|e| format!("Failed to run moq-token: {e}"))?; + + if !output.status.success() { + return Err(format!("moq-token failed: {}", String::from_utf8_lossy(&output.stderr))); + } + fs::write(root.join("dev/demo-web.jwt"), output.stdout) + .map_err(|e| format!("Failed to write demo-web.jwt: {e}"))?; + } + + // Generate demo-cli.jwt + if !file_exists(&root.join("dev/demo-cli.jwt")) { + let output = Command::new(cargo()) + .args([ + "run", + "--quiet", + "--bin", + "moq-token", + "--", + "--key", + "dev/root.jwk", + "sign", + "--root", + "demo", + "--publish", + "", + ]) + .current_dir(&root) + .output() + .map_err(|e| format!("Failed to run moq-token: {e}"))?; + + if !output.status.success() { + return Err(format!("moq-token failed: {}", String::from_utf8_lossy(&output.stderr))); + } + fs::write(root.join("dev/demo-cli.jwt"), output.stdout) + .map_err(|e| format!("Failed to write demo-cli.jwt: {e}"))?; + } + + // Generate root.jwt + if !file_exists(&root.join("dev/root.jwt")) { + let output = Command::new(cargo()) + .args([ + "run", + "--quiet", + "--bin", + "moq-token", + "--", + "--key", + "dev/root.jwk", + "sign", + "--root", + "", + "--subscribe", + "", + "--publish", + "", + "--cluster", + ]) + .current_dir(&root) + .output() + .map_err(|e| format!("Failed to run moq-token: 
{e}"))?; + + if !output.status.success() { + return Err(format!("moq-token failed: {}", String::from_utf8_lossy(&output.stderr))); + } + fs::write(root.join("dev/root.jwt"), output.stdout).map_err(|e| format!("Failed to write root.jwt: {e}"))?; + } + + Ok(()) } fn download_url(name: &str) -> Result<&'static str, String> { - match name { + match name { "bbb" => Ok("http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4"), "tos" => Ok("http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/TearsOfSteel.mp4"), "av1" => Ok("http://download.opencontent.netflix.com.s3.amazonaws.com/AV1/Sparks/Sparks-5994fps-AV1-10bit-1920x1080-2194kbps.mp4"), @@ -600,732 +572,700 @@ fn download_url(name: &str) -> Result<&'static str, String> { } fn download(name: &str) -> Result<(), String> { - let root = project_root(); - let mp4_path = root.join(format!("dev/{name}.mp4")); - let fmp4_path = root.join(format!("dev/{name}.fmp4")); - - // Download if not exists - if !file_exists(&mp4_path) { - let url = download_url(name)?; - println!("Downloading {name}.mp4..."); - run("curl", &["-fsSL", url, "-o", mp4_path.to_str().unwrap()])?; - } - - // Convert to fmp4 if not exists - if !file_exists(&fmp4_path) { - println!("Converting to fragmented MP4..."); - run( - "ffmpeg", - &[ - "-loglevel", - "error", - "-i", - mp4_path.to_str().unwrap(), - "-c:v", - "copy", - "-f", - "mp4", - "-movflags", - "cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame", - fmp4_path.to_str().unwrap(), - ], - )?; - } - - Ok(()) + let root = project_root(); + let mp4_path = root.join(format!("dev/{name}.mp4")); + let fmp4_path = root.join(format!("dev/{name}.fmp4")); + + // Download if not exists + if !file_exists(&mp4_path) { + let url = download_url(name)?; + println!("Downloading {name}.mp4..."); + run("curl", &["-fsSL", url, "-o", mp4_path.to_str().unwrap()])?; + } + + // Convert to fmp4 if not exists + if !file_exists(&fmp4_path) { + println!("Converting to fragmented 
MP4..."); + run( + "ffmpeg", + &[ + "-loglevel", + "error", + "-i", + mp4_path.to_str().unwrap(), + "-c:v", + "copy", + "-f", + "mp4", + "-movflags", + "cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame", + fmp4_path.to_str().unwrap(), + ], + )?; + } + + Ok(()) } fn ffmpeg_cmaf(input: &str) -> Command { - let mut cmd = Command::new("ffmpeg"); - cmd.args([ - "-hide_banner", - "-v", - "quiet", - "-stream_loop", - "-1", - "-re", - "-i", - input, - "-c", - "copy", - "-f", - "mp4", - "-movflags", - "cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame", - "-", - ]) - .current_dir(project_root()) - .stdout(Stdio::piped()); - cmd + let mut cmd = Command::new("ffmpeg"); + cmd.args([ + "-hide_banner", + "-v", + "quiet", + "-stream_loop", + "-1", + "-re", + "-i", + input, + "-c", + "copy", + "-f", + "mp4", + "-movflags", + "cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame", + "-", + ]) + .current_dir(project_root()) + .stdout(Stdio::piped()); + cmd } fn pub_fmp4(name: &str, url: &str, args: &[String]) -> Result<(), String> { - download(name)?; - run(&cargo(), &["build", "--bin", "moq"])?; - - let root = project_root(); - let input = root.join(format!("dev/{name}.fmp4")); - - let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) - .spawn() - .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; - - let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); - - let mut moq_args = vec![ - "run".to_string(), - "--bin".to_string(), - "moq".to_string(), - "--".to_string(), - ]; - moq_args.extend(args.iter().cloned()); - moq_args.extend([ - "publish".to_string(), - "--url".to_string(), - url.to_string(), - "--name".to_string(), - name.to_string(), - "fmp4".to_string(), - ]); - - let status = Command::new(cargo()) - .args(&moq_args) - .current_dir(&root) - .stdin(ffmpeg_stdout) - .status() - .map_err(|e| format!("Failed to run moq: {e}"))?; - - ffmpeg.wait().ok(); - - if status.success() { - Ok(()) - } else { - Err(format!("moq failed with status: {status}")) - 
} + download(name)?; + run(&cargo(), &["build", "--bin", "moq"])?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.fmp4")); + + let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); + + let mut moq_args = vec![ + "run".to_string(), + "--bin".to_string(), + "moq".to_string(), + "--".to_string(), + ]; + moq_args.extend(args.iter().cloned()); + moq_args.extend([ + "publish".to_string(), + "--url".to_string(), + url.to_string(), + "--name".to_string(), + name.to_string(), + "fmp4".to_string(), + ]); + + let status = Command::new(cargo()) + .args(&moq_args) + .current_dir(&root) + .stdin(ffmpeg_stdout) + .status() + .map_err(|e| format!("Failed to run moq: {e}"))?; + + ffmpeg.wait().ok(); + + if status.success() { + Ok(()) + } else { + Err(format!("moq failed with status: {status}")) + } } fn pub_hls(name: &str, relay: &str) -> Result<(), String> { - download(name)?; - - let root = project_root(); - let input = root.join(format!("dev/{name}.mp4")); - let out_dir = root.join(format!("dev/{name}")); - - // Clean and create output directory - if out_dir.exists() { - fs::remove_dir_all(&out_dir).map_err(|e| e.to_string())?; - } - fs::create_dir_all(&out_dir).map_err(|e| e.to_string())?; - - println!(">>> Generating HLS stream to disk (1280x720 + 256x144)..."); - - // Start ffmpeg in the background - let mut ffmpeg = Command::new("ffmpeg") - .args([ - "-hide_banner", - "-loglevel", - "warning", - "-re", - "-stream_loop", - "-1", - "-i", - input.to_str().unwrap(), - "-filter_complex", - "[0:v]split=2[v0][v1];[v0]scale=-2:720[v720];[v1]scale=-2:144[v144]", - "-map", - "[v720]", - "-map", - "[v144]", - "-map", - "0:a:0", - "-r", - "25", - "-preset", - "veryfast", - "-g", - "50", - "-keyint_min", - "50", - "-sc_threshold", - "0", - "-c:v:0", - "libx264", - "-profile:v:0", - "high", - "-level:v:0", - "4.1", - "-pix_fmt:v:0", - 
"yuv420p", - "-tag:v:0", - "avc1", - "-b:v:0", - "4M", - "-maxrate:v:0", - "4.4M", - "-bufsize:v:0", - "8M", - "-c:v:1", - "libx264", - "-profile:v:1", - "high", - "-level:v:1", - "4.1", - "-pix_fmt:v:1", - "yuv420p", - "-tag:v:1", - "avc1", - "-b:v:1", - "300k", - "-maxrate:v:1", - "330k", - "-bufsize:v:1", - "600k", - "-c:a", - "aac", - "-b:a", - "128k", - "-f", - "hls", - "-hls_time", - "2", - "-hls_list_size", - "6", - "-hls_flags", - "independent_segments+delete_segments", - "-hls_segment_type", - "fmp4", - "-master_pl_name", - "master.m3u8", - "-var_stream_map", - "v:0,agroup:audio,name:720 v:1,agroup:audio,name:144 a:0,agroup:audio,name:audio", - "-hls_segment_filename", - &format!("{}/v%v/segment_%09d.m4s", out_dir.display()), - &format!("{}/v%v/stream.m3u8", out_dir.display()), - ]) - .spawn() - .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; - - // Wait for master playlist - println!(">>> Waiting for HLS playlist generation..."); - let master_path = out_dir.join("master.m3u8"); - for _ in 0..60 { - if file_exists(&master_path) { - break; - } - std::thread::sleep(std::time::Duration::from_millis(500)); - } - - if !file_exists(&master_path) { - ffmpeg.kill().ok(); - return Err("master.m3u8 not generated in time".to_string()); - } - - // Wait for variant playlists - println!(">>> Waiting for variant playlists..."); - std::thread::sleep(std::time::Duration::from_secs(2)); - - // Run moq to ingest from local files - println!(">>> Running with --passthrough flag"); - let moq_result = run( - &cargo(), - &[ - "run", - "--bin", - "moq", - "--", - "publish", - "--url", - relay, - "--name", - name, - "hls", - "--playlist", - &format!("{}/master.m3u8", out_dir.display()), - "--passthrough", - ], - ); - - ffmpeg.kill().ok(); - ffmpeg.wait().ok(); - - moq_result + download(name)?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.mp4")); + let out_dir = root.join(format!("dev/{name}")); + + // Clean and create output directory + if 
out_dir.exists() { + fs::remove_dir_all(&out_dir).map_err(|e| e.to_string())?; + } + fs::create_dir_all(&out_dir).map_err(|e| e.to_string())?; + + println!(">>> Generating HLS stream to disk (1280x720 + 256x144)..."); + + // Start ffmpeg in the background + let mut ffmpeg = Command::new("ffmpeg") + .args([ + "-hide_banner", + "-loglevel", + "warning", + "-re", + "-stream_loop", + "-1", + "-i", + input.to_str().unwrap(), + "-filter_complex", + "[0:v]split=2[v0][v1];[v0]scale=-2:720[v720];[v1]scale=-2:144[v144]", + "-map", + "[v720]", + "-map", + "[v144]", + "-map", + "0:a:0", + "-r", + "25", + "-preset", + "veryfast", + "-g", + "50", + "-keyint_min", + "50", + "-sc_threshold", + "0", + "-c:v:0", + "libx264", + "-profile:v:0", + "high", + "-level:v:0", + "4.1", + "-pix_fmt:v:0", + "yuv420p", + "-tag:v:0", + "avc1", + "-b:v:0", + "4M", + "-maxrate:v:0", + "4.4M", + "-bufsize:v:0", + "8M", + "-c:v:1", + "libx264", + "-profile:v:1", + "high", + "-level:v:1", + "4.1", + "-pix_fmt:v:1", + "yuv420p", + "-tag:v:1", + "avc1", + "-b:v:1", + "300k", + "-maxrate:v:1", + "330k", + "-bufsize:v:1", + "600k", + "-c:a", + "aac", + "-b:a", + "128k", + "-f", + "hls", + "-hls_time", + "2", + "-hls_list_size", + "6", + "-hls_flags", + "independent_segments+delete_segments", + "-hls_segment_type", + "fmp4", + "-master_pl_name", + "master.m3u8", + "-var_stream_map", + "v:0,agroup:audio,name:720 v:1,agroup:audio,name:144 a:0,agroup:audio,name:audio", + "-hls_segment_filename", + &format!("{}/v%v/segment_%09d.m4s", out_dir.display()), + &format!("{}/v%v/stream.m3u8", out_dir.display()), + ]) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + // Wait for master playlist + println!(">>> Waiting for HLS playlist generation..."); + let master_path = out_dir.join("master.m3u8"); + for _ in 0..60 { + if file_exists(&master_path) { + break; + } + std::thread::sleep(std::time::Duration::from_millis(500)); + } + + if !file_exists(&master_path) { + ffmpeg.kill().ok(); + return 
Err("master.m3u8 not generated in time".to_string()); + } + + // Wait for variant playlists + println!(">>> Waiting for variant playlists..."); + std::thread::sleep(std::time::Duration::from_secs(2)); + + // Run moq to ingest from local files + println!(">>> Running with --passthrough flag"); + let moq_result = run( + &cargo(), + &[ + "run", + "--bin", + "moq", + "--", + "publish", + "--url", + relay, + "--name", + name, + "hls", + "--playlist", + &format!("{}/master.m3u8", out_dir.display()), + "--passthrough", + ], + ); + + ffmpeg.kill().ok(); + ffmpeg.wait().ok(); + + moq_result } fn pub_h264(name: &str, url: &str, args: &[String]) -> Result<(), String> { - download(name)?; - run(&cargo(), &["build", "--bin", "moq"])?; - - let root = project_root(); - let input = root.join(format!("dev/{name}.fmp4")); - - let mut ffmpeg = Command::new("ffmpeg") - .args([ - "-hide_banner", - "-v", - "quiet", - "-stream_loop", - "-1", - "-re", - "-i", - input.to_str().unwrap(), - "-c:v", - "copy", - "-an", - "-bsf:v", - "h264_mp4toannexb", - "-f", - "h264", - "-", - ]) - .current_dir(&root) - .stdout(Stdio::piped()) - .spawn() - .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; - - let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); - - let mut moq_args = vec![ - "run".to_string(), - "--bin".to_string(), - "moq".to_string(), - "--".to_string(), - ]; - moq_args.extend(args.iter().cloned()); - moq_args.extend([ - "publish".to_string(), - "--url".to_string(), - url.to_string(), - "--name".to_string(), - name.to_string(), - "--format".to_string(), - "annex-b".to_string(), - ]); - - let status = Command::new(cargo()) - .args(&moq_args) - .current_dir(&root) - .stdin(ffmpeg_stdout) - .status() - .map_err(|e| format!("Failed to run moq: {e}"))?; - - ffmpeg.wait().ok(); - - if status.success() { - Ok(()) - } else { - Err(format!("moq failed with status: {status}")) - } + download(name)?; + run(&cargo(), &["build", "--bin", "moq"])?; + + let root = project_root(); + let input = 
root.join(format!("dev/{name}.fmp4")); + + let mut ffmpeg = Command::new("ffmpeg") + .args([ + "-hide_banner", + "-v", + "quiet", + "-stream_loop", + "-1", + "-re", + "-i", + input.to_str().unwrap(), + "-c:v", + "copy", + "-an", + "-bsf:v", + "h264_mp4toannexb", + "-f", + "h264", + "-", + ]) + .current_dir(&root) + .stdout(Stdio::piped()) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); + + let mut moq_args = vec![ + "run".to_string(), + "--bin".to_string(), + "moq".to_string(), + "--".to_string(), + ]; + moq_args.extend(args.iter().cloned()); + moq_args.extend([ + "publish".to_string(), + "--url".to_string(), + url.to_string(), + "--name".to_string(), + name.to_string(), + "--format".to_string(), + "annex-b".to_string(), + ]); + + let status = Command::new(cargo()) + .args(&moq_args) + .current_dir(&root) + .stdin(ffmpeg_stdout) + .status() + .map_err(|e| format!("Failed to run moq: {e}"))?; + + ffmpeg.wait().ok(); + + if status.success() { + Ok(()) + } else { + Err(format!("moq failed with status: {status}")) + } } fn pub_iroh(name: &str, url: &str, prefix: &str) -> Result<(), String> { - download(name)?; - run(&cargo(), &["build", "--bin", "moq"])?; - - let root = project_root(); - let input = root.join(format!("dev/{name}.fmp4")); - let broadcast_name = format!("{prefix}{name}"); - - let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) - .spawn() - .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; - - let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); - - let status = Command::new(cargo()) - .args([ - "run", - "--bin", - "moq", - "--", - "--iroh-enabled", - "publish", - "--url", - url, - "--name", - &broadcast_name, - "fmp4", - ]) - .current_dir(&root) - .stdin(ffmpeg_stdout) - .status() - .map_err(|e| format!("Failed to run moq: {e}"))?; - - ffmpeg.wait().ok(); - - if status.success() { - Ok(()) - } else { - Err(format!("moq failed with status: {status}")) - } + download(name)?; + 
run(&cargo(), &["build", "--bin", "moq"])?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.fmp4")); + let broadcast_name = format!("{prefix}{name}"); + + let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); + + let status = Command::new(cargo()) + .args([ + "run", + "--bin", + "moq", + "--", + "--iroh-enabled", + "publish", + "--url", + url, + "--name", + &broadcast_name, + "fmp4", + ]) + .current_dir(&root) + .stdin(ffmpeg_stdout) + .status() + .map_err(|e| format!("Failed to run moq: {e}"))?; + + ffmpeg.wait().ok(); + + if status.success() { + Ok(()) + } else { + Err(format!("moq failed with status: {status}")) + } } fn pub_gst() -> Result<(), String> { - println!("GStreamer plugin has moved to: https://github.com/moq-dev/gstreamer"); - println!("Install and use hang-gst directly for GStreamer functionality"); - Ok(()) + println!("GStreamer plugin has moved to: https://github.com/moq-dev/gstreamer"); + println!("Install and use hang-gst directly for GStreamer functionality"); + Ok(()) } fn serve_fmp4(name: &str, args: &[String]) -> Result<(), String> { - download(name)?; - run(&cargo(), &["build", "--bin", "moq"])?; - - let root = project_root(); - let input = root.join(format!("dev/{name}.fmp4")); - - let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) - .spawn() - .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; - - let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); - - let mut moq_args = vec![ - "run".to_string(), - "--bin".to_string(), - "moq".to_string(), - "--".to_string(), - ]; - moq_args.extend(args.iter().cloned()); - moq_args.extend([ - "serve".to_string(), - "--listen".to_string(), - "[::]:4443".to_string(), - "--tls-generate".to_string(), - "localhost".to_string(), - "--name".to_string(), - name.to_string(), - "fmp4".to_string(), - ]); - - let status = Command::new(cargo()) - .args(&moq_args) - 
.current_dir(&root) - .stdin(ffmpeg_stdout) - .status() - .map_err(|e| format!("Failed to run moq: {e}"))?; - - ffmpeg.wait().ok(); - - if status.success() { - Ok(()) - } else { - Err(format!("moq failed with status: {status}")) - } + download(name)?; + run(&cargo(), &["build", "--bin", "moq"])?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.fmp4")); + + let mut ffmpeg = ffmpeg_cmaf(input.to_str().unwrap()) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + let ffmpeg_stdout = ffmpeg.stdout.take().unwrap(); + + let mut moq_args = vec![ + "run".to_string(), + "--bin".to_string(), + "moq".to_string(), + "--".to_string(), + ]; + moq_args.extend(args.iter().cloned()); + moq_args.extend([ + "serve".to_string(), + "--listen".to_string(), + "[::]:4443".to_string(), + "--tls-generate".to_string(), + "localhost".to_string(), + "--name".to_string(), + name.to_string(), + "fmp4".to_string(), + ]); + + let status = Command::new(cargo()) + .args(&moq_args) + .current_dir(&root) + .stdin(ffmpeg_stdout) + .status() + .map_err(|e| format!("Failed to run moq: {e}"))?; + + ffmpeg.wait().ok(); + + if status.success() { + Ok(()) + } else { + Err(format!("moq failed with status: {status}")) + } } fn serve_hls(name: &str, port: &str) -> Result<(), String> { - download(name)?; - - let root = project_root(); - let input = root.join(format!("dev/{name}.mp4")); - let out_dir = root.join(format!("dev/{name}")); - - // Clean and create output directory - if out_dir.exists() { - fs::remove_dir_all(&out_dir).map_err(|e| e.to_string())?; - } - fs::create_dir_all(&out_dir).map_err(|e| e.to_string())?; - - println!(">>> Starting HLS stream generation..."); - println!(">>> Master playlist: http://localhost:{port}/master.m3u8"); - - // Start ffmpeg - let mut ffmpeg = Command::new("ffmpeg") - .args([ - "-loglevel", - "warning", - "-re", - "-stream_loop", - "-1", - "-i", - input.to_str().unwrap(), - "-map", - "0:v:0", - "-map", - "0:v:0", - "-map", - 
"0:a:0", - "-r", - "25", - "-preset", - "veryfast", - "-g", - "50", - "-keyint_min", - "50", - "-sc_threshold", - "0", - "-c:v:0", - "libx264", - "-profile:v:0", - "high", - "-level:v:0", - "4.1", - "-pix_fmt:v:0", - "yuv420p", - "-tag:v:0", - "avc1", - "-bsf:v:0", - "dump_extra", - "-b:v:0", - "4M", - "-vf:0", - "scale=1920:-2", - "-c:v:1", - "libx264", - "-profile:v:1", - "high", - "-level:v:1", - "4.1", - "-pix_fmt:v:1", - "yuv420p", - "-tag:v:1", - "avc1", - "-bsf:v:1", - "dump_extra", - "-b:v:1", - "300k", - "-vf:1", - "scale=256:-2", - "-c:a", - "aac", - "-b:a", - "128k", - "-f", - "hls", - "-hls_time", - "2", - "-hls_list_size", - "12", - "-hls_flags", - "independent_segments+delete_segments", - "-hls_segment_type", - "fmp4", - "-master_pl_name", - "master.m3u8", - "-var_stream_map", - "v:0,agroup:audio v:1,agroup:audio a:0,agroup:audio", - "-hls_segment_filename", - &format!("{}/v%v/segment_%09d.m4s", out_dir.display()), - &format!("{}/v%v/stream.m3u8", out_dir.display()), - ]) - .spawn() - .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; - - std::thread::sleep(std::time::Duration::from_secs(2)); - println!(">>> HTTP server: http://localhost:{port}/"); - - let http_result = run_in_dir( - "python3", - &["-m", "http.server", port], - &out_dir, - ); - - ffmpeg.kill().ok(); - ffmpeg.wait().ok(); - - http_result + download(name)?; + + let root = project_root(); + let input = root.join(format!("dev/{name}.mp4")); + let out_dir = root.join(format!("dev/{name}")); + + // Clean and create output directory + if out_dir.exists() { + fs::remove_dir_all(&out_dir).map_err(|e| e.to_string())?; + } + fs::create_dir_all(&out_dir).map_err(|e| e.to_string())?; + + println!(">>> Starting HLS stream generation..."); + println!(">>> Master playlist: http://localhost:{port}/master.m3u8"); + + // Start ffmpeg + let mut ffmpeg = Command::new("ffmpeg") + .args([ + "-loglevel", + "warning", + "-re", + "-stream_loop", + "-1", + "-i", + input.to_str().unwrap(), + "-map", + 
"0:v:0", + "-map", + "0:v:0", + "-map", + "0:a:0", + "-r", + "25", + "-preset", + "veryfast", + "-g", + "50", + "-keyint_min", + "50", + "-sc_threshold", + "0", + "-c:v:0", + "libx264", + "-profile:v:0", + "high", + "-level:v:0", + "4.1", + "-pix_fmt:v:0", + "yuv420p", + "-tag:v:0", + "avc1", + "-bsf:v:0", + "dump_extra", + "-b:v:0", + "4M", + "-vf:0", + "scale=1920:-2", + "-c:v:1", + "libx264", + "-profile:v:1", + "high", + "-level:v:1", + "4.1", + "-pix_fmt:v:1", + "yuv420p", + "-tag:v:1", + "avc1", + "-bsf:v:1", + "dump_extra", + "-b:v:1", + "300k", + "-vf:1", + "scale=256:-2", + "-c:a", + "aac", + "-b:a", + "128k", + "-f", + "hls", + "-hls_time", + "2", + "-hls_list_size", + "12", + "-hls_flags", + "independent_segments+delete_segments", + "-hls_segment_type", + "fmp4", + "-master_pl_name", + "master.m3u8", + "-var_stream_map", + "v:0,agroup:audio v:1,agroup:audio a:0,agroup:audio", + "-hls_segment_filename", + &format!("{}/v%v/segment_%09d.m4s", out_dir.display()), + &format!("{}/v%v/stream.m3u8", out_dir.display()), + ]) + .spawn() + .map_err(|e| format!("Failed to start ffmpeg: {e}"))?; + + std::thread::sleep(std::time::Duration::from_secs(2)); + println!(">>> HTTP server: http://localhost:{port}/"); + + let http_result = run_in_dir("python3", &["-m", "http.server", port], &out_dir); + + ffmpeg.kill().ok(); + ffmpeg.wait().ok(); + + http_result } fn web(url: &str) -> Result<(), String> { - let root = project_root(); - let demo_dir = root.join("js/hang-demo"); + let root = project_root(); + let demo_dir = root.join("js/hang-demo"); - let mut cmd = Command::new("bun"); - cmd.args(["run", "dev"]) - .current_dir(&demo_dir) - .env("VITE_RELAY_URL", url); + let mut cmd = Command::new("bun"); + cmd.args(["run", "dev"]) + .current_dir(&demo_dir) + .env("VITE_RELAY_URL", url); - let status = cmd.status().map_err(|e| format!("Failed to run bun: {e}"))?; + let status = cmd.status().map_err(|e| format!("Failed to run bun: {e}"))?; - if status.success() { - Ok(()) - } 
else { - Err(format!("bun failed with status: {status}")) - } + if status.success() { + Ok(()) + } else { + Err(format!("bun failed with status: {status}")) + } } fn clock(action: &str, url: &str, args: &[String]) -> Result<(), String> { - if action != "publish" && action != "subscribe" { - return Err(format!( - "action must be 'publish' or 'subscribe', got '{action}'" - )); - } + if action != "publish" && action != "subscribe" { + return Err(format!("action must be 'publish' or 'subscribe', got '{action}'")); + } - let mut cmd_args = vec![ - "run", - "--bin", - "moq-clock", - "--", - "--url", - url, - "--broadcast", - "clock", - ]; - let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect(); - cmd_args.extend(args_refs); - cmd_args.push(action); - - run(&cargo(), &cmd_args) + let mut cmd_args = vec!["run", "--bin", "moq-clock", "--", "--url", url, "--broadcast", "clock"]; + let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect(); + cmd_args.extend(args_refs); + cmd_args.push(action); + + run(&cargo(), &cmd_args) } fn check() -> Result<(), String> { - // JS checks - run("bun", &["install", "--frozen-lockfile"])?; - if is_tty() { - run("bun", &["run", "--filter=*", "--elide-lines=0", "check"])?; - } else { - run("bun", &["run", "--filter=*", "check"])?; - } - run("bun", &["biome", "check"])?; - - // Rust checks - run(&cargo(), &["check", "--all-targets", "--all-features"])?; - run( - &cargo(), - &[ - "clippy", - "--all-targets", - "--all-features", - "--", - "-D", - "warnings", - ], - )?; - run(&cargo(), &["fmt", "--all", "--check"])?; - - // Documentation warnings - run_with_env( - &cargo(), - &["doc", "--no-deps", "--workspace"], - &[("RUSTDOCFLAGS", "-D warnings")], - )?; - - // cargo-shear - run(&cargo(), &["shear"])?; - - // cargo-sort - run(&cargo(), &["sort", "--workspace", "--check"])?; - - // tofu checks (if available) - if command_exists("tofu") { - let root = project_root(); - run_in_dir("tofu", &["fmt", "-check", "-recursive"], 
&root.join("cdn"))?; - } - - // nix checks (if available) - if command_exists("nix") { - run("nix", &["flake", "check"])?; - } - - Ok(()) + // JS checks + run("bun", &["install", "--frozen-lockfile"])?; + if is_tty() { + run("bun", &["run", "--filter=*", "--elide-lines=0", "check"])?; + } else { + run("bun", &["run", "--filter=*", "check"])?; + } + run("bun", &["biome", "check"])?; + + // Rust checks + run(&cargo(), &["check", "--all-targets", "--all-features"])?; + run( + &cargo(), + &["clippy", "--all-targets", "--all-features", "--", "-D", "warnings"], + )?; + run(&cargo(), &["fmt", "--all", "--check"])?; + + // Documentation warnings + run_with_env( + &cargo(), + &["doc", "--no-deps", "--workspace"], + &[("RUSTDOCFLAGS", "-D warnings")], + )?; + + // cargo-shear + run(&cargo(), &["shear"])?; + + // cargo-sort + run(&cargo(), &["sort", "--workspace", "--check"])?; + + // tofu checks (if available) + if command_exists("tofu") { + let root = project_root(); + run_in_dir("tofu", &["fmt", "-check", "-recursive"], &root.join("cdn"))?; + } + + // nix checks (if available) + if command_exists("nix") { + run("nix", &["flake", "check"])?; + } + + Ok(()) } fn check_all() -> Result<(), String> { - check()?; - - println!("Checking all feature combinations for hang..."); - run( - &cargo(), - &[ - "hack", - "check", - "--package", - "hang", - "--each-feature", - "--no-dev-deps", - ], - ) + check()?; + + println!("Checking all feature combinations for hang..."); + run( + &cargo(), + &["hack", "check", "--package", "hang", "--each-feature", "--no-dev-deps"], + ) } fn test() -> Result<(), String> { - // JS tests - run("bun", &["install", "--frozen-lockfile"])?; - if is_tty() { - run("bun", &["run", "--filter=*", "--elide-lines=0", "test"])?; - } else { - run("bun", &["run", "--filter=*", "test"])?; - } - - // Rust tests - run(&cargo(), &["test", "--all-targets", "--all-features"]) + // JS tests + run("bun", &["install", "--frozen-lockfile"])?; + if is_tty() { + run("bun", 
&["run", "--filter=*", "--elide-lines=0", "test"])?; + } else { + run("bun", &["run", "--filter=*", "test"])?; + } + + // Rust tests + run(&cargo(), &["test", "--all-targets", "--all-features"]) } fn test_all() -> Result<(), String> { - test()?; + test()?; - println!("Testing all feature combinations for hang..."); - run( - &cargo(), - &["hack", "test", "--package", "hang", "--each-feature"], - ) + println!("Testing all feature combinations for hang..."); + run(&cargo(), &["hack", "test", "--package", "hang", "--each-feature"]) } fn fix() -> Result<(), String> { - // JS fixes - run("bun", &["install"])?; - run("bun", &["biome", "check", "--write"])?; - - // Rust fixes - run( - &cargo(), - &[ - "clippy", - "--fix", - "--allow-staged", - "--allow-dirty", - "--all-targets", - "--all-features", - ], - )?; - run(&cargo(), &["fmt", "--all"])?; - - // cargo-shear - run(&cargo(), &["shear", "--fix"])?; - - // cargo-sort - run(&cargo(), &["sort", "--workspace"])?; - - // tofu fixes (if available) - if command_exists("tofu") { - let root = project_root(); - run_in_dir("tofu", &["fmt", "-recursive"], &root.join("cdn"))?; - } - - Ok(()) + // JS fixes + run("bun", &["install"])?; + run("bun", &["biome", "check", "--write"])?; + + // Rust fixes + run( + &cargo(), + &[ + "clippy", + "--fix", + "--allow-staged", + "--allow-dirty", + "--all-targets", + "--all-features", + ], + )?; + run(&cargo(), &["fmt", "--all"])?; + + // cargo-shear + run(&cargo(), &["shear", "--fix"])?; + + // cargo-sort + run(&cargo(), &["sort", "--workspace"])?; + + // tofu fixes (if available) + if command_exists("tofu") { + let root = project_root(); + run_in_dir("tofu", &["fmt", "-recursive"], &root.join("cdn"))?; + } + + Ok(()) } fn build() -> Result<(), String> { - run("bun", &["run", "--filter=*", "build"])?; - run(&cargo(), &["build"]) + run("bun", &["run", "--filter=*", "build"])?; + run(&cargo(), &["build"]) } fn update() -> Result<(), String> { - run("bun", &["update"])?; - run("bun", 
&["outdated"])?; + run("bun", &["update"])?; + run("bun", &["outdated"])?; - // Update patch versions - run(&cargo(), &["update"])?; + // Update patch versions + run(&cargo(), &["update"])?; - // Update incompatible versions - run(&cargo(), &["upgrade", "--incompatible"])?; + // Update incompatible versions + run(&cargo(), &["upgrade", "--incompatible"])?; - // Update nix flake - run("nix", &["flake", "update"]) + // Update nix flake + run("nix", &["flake", "update"]) } fn console_relay() -> Result<(), String> { - run("tokio-console", &["http://127.0.0.1:6680"]) + run("tokio-console", &["http://127.0.0.1:6680"]) } fn console_pub() -> Result<(), String> { - run("tokio-console", &["http://127.0.0.1:6681"]) + run("tokio-console", &["http://127.0.0.1:6681"]) } fn doc() -> Result<(), String> { - let root = project_root(); - run_in_dir("bun", &["run", "dev"], &root.join("doc")) + let root = project_root(); + run_in_dir("bun", &["run", "dev"], &root.join("doc")) } fn throttle() -> Result<(), String> { - run("dev/throttle", &[]) + run("dev/throttle", &[]) } From 6e304add06768337e831c376d690d858f4c124b0 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Thu, 5 Feb 2026 12:01:48 -0800 Subject: [PATCH 4/5] Update docs. 
--- .github/workflows/check.yml | 2 +- CLAUDE.md | 18 +- README.md | 35 ++- doc/app/cli.md | 8 +- doc/index.md | 2 +- doc/js/@moq/hang-demo.md | 2 +- doc/setup/dev.md | 46 ++-- doc/setup/index.md | 13 +- flake.nix | 1 - justfile | 480 ------------------------------------ 10 files changed, 62 insertions(+), 545 deletions(-) delete mode 100644 justfile diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index 291aaec53..27cd4a555 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -28,4 +28,4 @@ jobs: cache-on-failure: true # Run checks with cached dependencies - - run: nix develop --command just check-all + - run: nix develop --command ./x check all diff --git a/CLAUDE.md b/CLAUDE.md index d21dd42ce..1edbbd4b5 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -10,9 +10,9 @@ MoQ (Media over QUIC) is a next-generation live media delivery protocol providin ```bash # Code quality and testing -just check # Run all tests and linting -just fix # Auto-fix linting issues -just build # Build all packages +./x check # Run all tests and linting +./x fix # Auto-fix linting issues +./x build # Build all packages ``` ## Architecture @@ -53,26 +53,26 @@ Key architectural rule: The CDN/relay does not know anything about media. Anythi ## Development Tips -1. The project uses `just` as the task runner - check `justfile` for all available commands +1. The project uses `./x` (or `cargo x`) as the task runner - check `rs/x/src/main.rs` for all available commands 2. For Rust development, the workspace is configured in the `rs/Cargo.toml` 3. 
For JS/TS development, bun workspaces are used with configuration in `js/package.json` ## Tooling - **TypeScript**: Always use `bun` for all package management and script execution (not npm, yarn, or pnpm) -- **Common**: Use `just` for common development tasks +- **Common**: Use `./x` (or `cargo x`) for common development tasks - **Rust**: Use `cargo` for Rust-specific operations ## Testing Approach -- Run `just check` to execute all tests and linting. -- Run `just fix` to automatically fix formating and easy things. +- Run `./x check` to execute all tests and linting. +- Run `./x fix` to automatically fix formating and easy things. - Rust tests are integrated within source files ## Workflow When making changes to the codebase: 1. Make your code changes -2. Run `just fix` to auto-format and fix linting issues -3. Run `just check` to verify everything passes +2. Run `./x fix` to auto-format and fix linting issues +3. Run `./x check` to verify everything passes 4. Commit and push changes diff --git a/README.md b/README.md index ab56f86a8..e98ca2b91 100644 --- a/README.md +++ b/README.md @@ -38,20 +38,19 @@ In production, you'll need a proper domain and a matching TLS certificate via [L ```sh # Runs a relay, demo media, and the web server -nix develop -c just dev +nix develop -c ./x dev ``` Then visit [https://localhost:8080](https://localhost:8080) to see the demo. Note that this uses an insecure HTTP fetch for local development only; in production you'll need a proper domain + TLS certificate. -*TIP:* If you've installed [nix-direnv](https://github.com/nix-community/nix-direnv), then only `just dev` is required. +*TIP:* If you've installed [nix-direnv](https://github.com/nix-community/nix-direnv), then only `./x dev` is required. 
### Full Setup If you don't like Nix, then you can install dependencies manually: **Requirements:** -- [Just](https://github.com/casey/just) - [Rust](https://www.rust-lang.org/tools/install) - [Bun](https://bun.sh/) - [FFmpeg](https://ffmpeg.org/download.html) @@ -60,10 +59,10 @@ If you don't like Nix, then you can install dependencies manually: **Run it:** ```sh # Install some more dependencies -just install +./x install # Runs a relay, demo media, and the web server -just dev +./x dev ``` Then visit [http://localhost:5173](http://localhost:5173) to see the demo. @@ -147,24 +146,24 @@ Read the specifications: ## Development ```sh # See all available commands -just +./x --help # Build everything -just build +./x build # Run tests and linting -just check +./x check # Automatically fix some linting errors -just fix +./x fix # Run the demo manually -just relay # Terminal 1: Start relay server -just pub tos # Terminal 2: Publish a demo video using ffmpeg -just web # Terminal 3: Start web server +./x relay # Terminal 1: Start relay server +./x pub fmp4 tos # Terminal 2: Publish a demo video using ffmpeg +./x web # Terminal 3: Start web server ``` -There are more commands: check out the [justfile](justfile), [rs/justfile](rs/justfile), and [js/justfile](js/justfile). +The task runner is implemented in [rs/x](rs/x) and can be invoked via `./x` or `cargo x`. ## Iroh support @@ -183,7 +182,7 @@ You can run a demo like this: ```sh # Terminal 1: Start a relay server -just relay --iroh-enabled +./x relay --iroh-enabled # Copy the endpoint id printed at "iroh listening" # Terminal 2: Publish via moq-lite over raw iroh QUIC @@ -193,18 +192,18 @@ just relay --iroh-enabled # We set an `anon/` prefix to match the broadcast name the web ui expects # Because moq-lite does not have headers if using raw QUIC, only the hostname # in the URL can be used. 
-just pub-iroh bbb iroh://ENDPOINT_ID anon/ +./x pub iroh bbb iroh://ENDPOINT_ID anon/ # Alternatively you can use WebTransport over HTTP/3 over iroh, # which allows to set a path prefix in the URL: -just pub-iroh bbb h3+iroh://ENDPOINT_ID/anon +./x pub iroh bbb h3+iroh://ENDPOINT_ID/anon # Terminal 3: Start web server -just web +./x web ``` Then open [localhost:5173](http://localhost:5173) and watch BBB, pushed from terminal 1 via iroh to the relay running in terminal 2, from where the browser fetches it over regular WebTransport. -`just serve` serves a video via iroh alongside regular QUIC (it enables the `iroh` feature). This repo currently does not provide a native viewer, so you can't subscribe to it directly. However, you can use the [watch example from iroh-live](https://github.com/n0-computer/iroh-live/blob/main/iroh-live/examples/watch.rs) to view a video published via `moq-native`. +`./x serve fmp4` serves a video via iroh alongside regular QUIC (it enables the `iroh` feature). This repo currently does not provide a native viewer, so you can't subscribe to it directly. However, you can use the [watch example from iroh-live](https://github.com/n0-computer/iroh-live/blob/main/iroh-live/examples/watch.rs) to view a video published via `moq-native`. 
## License diff --git a/doc/app/cli.md b/doc/app/cli.md index b0d9a6f81..6b61c4465 100644 --- a/doc/app/cli.md +++ b/doc/app/cli.md @@ -114,10 +114,10 @@ The repository includes helper commands for test content: ```bash # Publish Big Buck Bunny -just pub bbb https://relay.example.com/anon +./x pub fmp4 bbb https://relay.example.com/anon # Publish Tears of Steel -just pub tos https://relay.example.com/anon +./x pub fmp4 tos https://relay.example.com/anon ``` ## Clock Synchronization @@ -126,10 +126,10 @@ Publish and subscribe to clock broadcasts for testing: ```bash # Publish a clock -just clock publish https://relay.example.com/anon +./x clock publish https://relay.example.com/anon # Subscribe to a clock -just clock subscribe https://relay.example.com/anon +./x clock subscribe https://relay.example.com/anon ``` ## Debugging diff --git a/doc/index.md b/doc/index.md index 71803b57a..a229b6f66 100644 --- a/doc/index.md +++ b/doc/index.md @@ -86,7 +86,7 @@ Get up and running in seconds with [Nix](https://nixos.org/download.html), or be ```bash # Runs a relay, media publisher, and the web server -nix develop -c just dev +nix develop -c ./x dev ``` If everything works, a browser window will pop up demoing how to both publish and watch content via the web. diff --git a/doc/js/@moq/hang-demo.md b/doc/js/@moq/hang-demo.md index 7a1e1c682..fc0d2936b 100644 --- a/doc/js/@moq/hang-demo.md +++ b/doc/js/@moq/hang-demo.md @@ -12,7 +12,7 @@ Follow the [Quick Start](/setup/) guide to get started. You can target a remote relay instead of a local one with the command: ```bash -just web https://cdn.moq.dev/anon +./x web https://cdn.moq.dev/anon ``` ## Watch Demo diff --git a/doc/setup/dev.md b/doc/setup/dev.md index 2cb85cc7f..c90ce01ef 100644 --- a/doc/setup/dev.md +++ b/doc/setup/dev.md @@ -8,37 +8,37 @@ Still here? You must be a Big Buck Bunny fan. This guide covers the rest of the stuff you can run locally. 
-## Just -We use [Just](https://github.com/casey/just) to run helper commands. -It's *just* a fancier `Makefile` so you don't have to remember all the commands. +## Task Runner +We use a Rust-based task runner ([rs/x](https://github.com/moq-dev/moq/blob/main/rs/x)) to run helper commands. +Invoke it via `./x` or `cargo x`. ### Common Commands ```bash # List all available commands -just +./x --help # Run the demo -just dev +./x dev # This is equivalent to 3 terminal tabs: -# just relay -# just web -# just pub bbb +# ./x relay +# ./x web +# ./x pub fmp4 bbb # Make sure the code compiles and passes linting -just check +./x check # Auto-fix linting errors -just fix +./x fix # Run the tests -just test +./x test # Publish a HLS broadcast (CMAF) over MoQ -just pub-hls tos +./x pub hls tos ``` -Want more? See the [justfile](https://github.com/moq-dev/moq/blob/main/justfile) for all commands. +Want more? Run `./x --help` or see [rs/x/src/main.rs](https://github.com/moq-dev/moq/blob/main/rs/x/src/main.rs) for all commands. ### The Internet Most of the commands default to `http://localhost:4443/anon`. @@ -56,19 +56,19 @@ Anything you publish is public and discoverable... so be careful and don't abuse ```bash # Run the web server, pointing to the public relay # NOTE: The `bbb` demo on moq.dev uses a different path so it won't show up. 
-just web https://cdn.moq.dev/anon +./x web https://cdn.moq.dev/anon # Publish Tears of Steel, watch it via https://moq.dev/watch?name=tos -just pub tos https://cdn.moq.dev/anon +./x pub fmp4 tos https://cdn.moq.dev/anon # Publish a clock broadcast -just clock publish https://cdn.moq.dev/anon +./x clock publish https://cdn.moq.dev/anon # Subscribe to said clock broadcast (different tab) -just clock subscribe https://cdn.moq.dev/anon +./x clock subscribe https://cdn.moq.dev/anon # Publish an authentication broadcast -just pub av1 https://cdn.moq.dev/?jwt=not_a_real_token_ask_for_one +./x pub fmp4 av1 https://cdn.moq.dev/?jwt=not_a_real_token_ask_for_one ``` ## Debugging @@ -78,14 +78,14 @@ You can set the logging level with the `RUST_LOG` environment variable. ```bash # Print the most verbose logs -RUST_LOG=trace just dev +RUST_LOG=trace ./x dev ``` If you're getting a panic, use `RUST_BACKTRACE=1` to get a backtrace. ```bash # Print a backtrace on panic. -RUST_BACKTRACE=1 just dev +RUST_BACKTRACE=1 ./x dev ``` @@ -103,15 +103,15 @@ Recommended extensions: ## Contributing -Run `just fix` before pushing your changes, otherwise CI will yell at you. -It runs `just check` so that's the easiest way to debug any issues. +Run `./x fix` before pushing your changes, otherwise CI will yell at you. +It runs `./x check` so that's the easiest way to debug any issues. Please don't submit a vibe coded PR unless you understand it. `You're absolutely right!` is not always good enough. ## Onwards -`just dev` runs three processes that normally, should run on separate hosts. +`./x dev` runs three processes that normally, should run on separate hosts. Learn how to run them [in production](/setup/prod). 
Or take a detour and: diff --git a/doc/setup/index.md b/doc/setup/index.md index ec33788d8..a39e83065 100644 --- a/doc/setup/index.md +++ b/doc/setup/index.md @@ -29,13 +29,13 @@ Install the following: Then run the demo: ```bash # Runs the demo using pinned dependencies -nix develop -c just dev +nix develop -c ./x dev ``` If you install `direnv`, then the Nix shell will be loaded whenever you `cd` into the repo: ```bash -# Run the demo... in 9 keystrokes -just dev +# Run the demo... in 7 keystrokes +./x dev ``` @@ -43,7 +43,6 @@ just dev If you don't like Nix or enjoy suffering with Windows, then you can manually install the dependencies: -- [Just](https://github.com/casey/just) - [Rust](https://www.rust-lang.org/tools/install) - [Bun](https://bun.sh/) - [FFmpeg](https://ffmpeg.org/download.html) @@ -52,16 +51,16 @@ If you don't like Nix or enjoy suffering with Windows, then you can manually ins Then run: ```bash # Install additional dependencies, usually linters -just install +./x install # Run the demo -just dev +./x dev ``` When in doubt, check the [Nix Flake](https://github.com/moq-dev/moq/blob/main/flake.nix) for the full list of dependencies. ## What's Happening? -The `just dev` command starts three components: +The `./x dev` command starts three components: - [moq-relay](/app/relay/): A server that routes live data between publishers and subscribers. - [moq-cli](/app/cli): A CLI that publishes video content piped from `ffmpeg`. diff --git a/flake.nix b/flake.nix index fa389c7df..0aefbbd8f 100644 --- a/flake.nix +++ b/flake.nix @@ -49,7 +49,6 @@ # Rust dependencies rustDeps = with pkgs; [ rust-toolchain - just pkg-config glib libressl diff --git a/justfile b/justfile deleted file mode 100644 index 94dbfb821..000000000 --- a/justfile +++ /dev/null @@ -1,480 +0,0 @@ -#!/usr/bin/env just --justfile - -# Using Just: https://github.com/casey/just?tab=readme-ov-file#installation - -set quiet - -# List all of the available commands. 
-default: - just --list - -# Install any dependencies. -install: - bun install - cargo install --locked cargo-shear cargo-sort cargo-upgrades cargo-edit cargo-hack - -# Alias for dev. -all: dev - -# Run the relay, web server, and publish bbb. -dev: - # Install any JS dependencies. - bun install - - # Build the rust packages so `cargo run` has a head start. - cargo build - - # Then run the relay with a slight head start. - # It doesn't matter if the web beats BBB because we support automatic reloading. - bun run concurrently --kill-others --names srv,bbb,web --prefix-colors auto \ - "just relay" \ - "sleep 1 && just pub bbb http://localhost:4443/anon" \ - "sleep 2 && just web http://localhost:4443/anon" - - -# Run a localhost relay server without authentication. -relay *args: - # Run the relay server overriding the provided configuration file. - TOKIO_CONSOLE_BIND=127.0.0.1:6680 cargo run --bin moq-relay -- dev/relay.toml {{args}} - -# Run a cluster of relay servers -cluster: - # Install any JS dependencies. - bun install - - # Generate auth tokens if needed - @just auth-token - - # Build the Rust packages so `cargo run` has a head start. - cargo build --bin moq-relay - - # Then run a BOATLOAD of services to make sure they all work correctly. - # Publish the funny bunny to the root node. - # Publish the robot fanfic to the leaf node. - bun run concurrently --kill-others --names root,leaf,bbb,tos,web --prefix-colors auto \ - "just root" \ - "sleep 1 && just leaf" \ - "sleep 2 && just pub bbb http://localhost:4444/demo?jwt=$(cat dev/demo-cli.jwt)" \ - "sleep 3 && just pub tos http://localhost:4443/demo?jwt=$(cat dev/demo-cli.jwt)" \ - "sleep 4 && just web http://localhost:4443/demo?jwt=$(cat dev/demo-web.jwt)" - -# Run a localhost root server, accepting connections from leaf nodes. -root: auth-key - # Run the root server with a special configuration file. - cargo run --bin moq-relay -- dev/root.toml - -# Run a localhost leaf server, connecting to the root server. 
-leaf: auth-token - # Run the leaf server with a special configuration file. - cargo run --bin moq-relay -- dev/leaf.toml - -# Generate a random secret key for authentication. -# By default, this uses HMAC-SHA256, so it's symmetric. -# If some one wants to contribute, public/private key pairs would be nice. -auth-key: - @if [ ! -f "dev/root.jwk" ]; then \ - rm -f dev/*.jwt; \ - cargo run --bin moq-token -- --key "dev/root.jwk" generate; \ - fi - -# Generate authentication tokens for local development -# demo-web.jwt - allows publishing to demo/me/* and subscribing to demo/* -# demo-cli.jwt - allows publishing to demo/* but no subscribing -# root.jwt - allows publishing and subscribing to all paths -auth-token: auth-key - @if [ ! -f "dev/demo-web.jwt" ]; then \ - cargo run --quiet --bin moq-token -- --key "dev/root.jwk" sign \ - --root "demo" \ - --subscribe "" \ - --publish "me" \ - > dev/demo-web.jwt ; \ - fi - - @if [ ! -f "dev/demo-cli.jwt" ]; then \ - cargo run --quiet --bin moq-token -- --key "dev/root.jwk" sign \ - --root "demo" \ - --publish "" \ - > dev/demo-cli.jwt ; \ - fi - - @if [ ! -f "dev/root.jwt" ]; then \ - cargo run --quiet --bin moq-token -- --key "dev/root.jwk" sign \ - --root "" \ - --subscribe "" \ - --publish "" \ - --cluster \ - > dev/root.jwt ; \ - fi - -# Download the video and convert it to a fragmented MP4 that we can stream -download name: - @if [ ! -f "dev/{{name}}.mp4" ]; then \ - curl -fsSL $(just download-url {{name}}) -o "dev/{{name}}.mp4"; \ - fi - - @if [ ! -f "dev/{{name}}.fmp4" ]; then \ - ffmpeg -loglevel error -i "dev/{{name}}.mp4" \ - -c:v copy \ - -f mp4 -movflags cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame \ - "dev/{{name}}.fmp4"; \ - fi - -# Returns the URL for a test video. 
-download-url name: - @case {{name}} in \ - bbb) echo "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4" ;; \ - tos) echo "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/TearsOfSteel.mp4" ;; \ - av1) echo "http://download.opencontent.netflix.com.s3.amazonaws.com/AV1/Sparks/Sparks-5994fps-AV1-10bit-1920x1080-2194kbps.mp4" ;; \ - hevc) echo "https://test-videos.co.uk/vids/jellyfish/mp4/h265/1080/Jellyfish_1080_10s_30MB.mp4" ;; \ - *) echo "unknown" && exit 1 ;; \ - esac - -# Convert an h264 input file to CMAF (fmp4) format to stdout. -ffmpeg-cmaf input output='-' *args: - ffmpeg -hide_banner -v quiet \ - -stream_loop -1 -re \ - -i "{{input}}" \ - -c copy \ - -f mp4 -movflags cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame {{args}} {{output}} - -# Publish a video using ffmpeg to the localhost relay server -# NOTE: The `http` means that we perform insecure certificate verification. -# Switch it to `https` when you're ready to use a real certificate. -pub name url="http://localhost:4443/anon" *args: - # Download the sample media. - just download "{{name}}" - # Pre-build the binary so we don't queue media while compiling. - cargo build --bin moq - # Publish the media with the moq cli. - just ffmpeg-cmaf "dev/{{name}}.fmp4" |\ - cargo run --bin moq -- \ - {{args}} publish --url "{{url}}" --name "{{name}}" fmp4 - -pub-iroh name url prefix="": - # Download the sample media. - just download "{{name}}" - # Pre-build the binary so we don't queue media while compiling. - cargo build --bin moq - # Publish the media with the moq cli. - just ffmpeg-cmaf "dev/{{name}}.fmp4" |\ - cargo run --bin moq -- \ - --iroh-enabled publish --url "{{url}}" --name "{{prefix}}{{name}}" fmp4 - -# Generate and ingest an HLS stream from a video file. 
-pub-hls name relay="http://localhost:4443/anon": - #!/usr/bin/env bash - set -euo pipefail - - just download "{{name}}" - - INPUT="dev/{{name}}.mp4" - OUT_DIR="dev/{{name}}" - - rm -rf "$OUT_DIR" - mkdir -p "$OUT_DIR" - - echo ">>> Generating HLS stream to disk (1280x720 + 256x144)..." - - # Start ffmpeg in the background to generate HLS - ffmpeg -hide_banner -loglevel warning -re -stream_loop -1 -i "$INPUT" \ - -filter_complex "\ - [0:v]split=2[v0][v1]; \ - [v0]scale=-2:720[v720]; \ - [v1]scale=-2:144[v144]" \ - -map "[v720]" -map "[v144]" -map 0:a:0 \ - -r 25 -preset veryfast -g 50 -keyint_min 50 -sc_threshold 0 \ - -c:v:0 libx264 -profile:v:0 high -level:v:0 4.1 -pix_fmt:v:0 yuv420p -tag:v:0 avc1 \ - -b:v:0 4M -maxrate:v:0 4.4M -bufsize:v:0 8M \ - -c:v:1 libx264 -profile:v:1 high -level:v:1 4.1 -pix_fmt:v:1 yuv420p -tag:v:1 avc1 \ - -b:v:1 300k -maxrate:v:1 330k -bufsize:v:1 600k \ - -c:a aac -b:a 128k \ - -f hls -hls_time 2 -hls_list_size 6 \ - -hls_flags independent_segments+delete_segments \ - -hls_segment_type fmp4 \ - -master_pl_name master.m3u8 \ - -var_stream_map "v:0,agroup:audio,name:720 v:1,agroup:audio,name:144 a:0,agroup:audio,name:audio" \ - -hls_segment_filename "$OUT_DIR/v%v/segment_%09d.m4s" \ - "$OUT_DIR/v%v/stream.m3u8" & - - - FFMPEG_PID=$! - - # Wait for master playlist to be generated - echo ">>> Waiting for HLS playlist generation..." - for i in {1..30}; do - if [ -f "$OUT_DIR/master.m3u8" ]; then - break - fi - sleep 0.5 - done - - if [ ! -f "$OUT_DIR/master.m3u8" ]; then - kill $FFMPEG_PID 2>/dev/null || true - echo "Error: master.m3u8 not generated in time" - exit 1 - fi - - # Wait for individual playlists to be generated (they're referenced in master.m3u8) - # Give ffmpeg a bit more time to generate the variant playlists - echo ">>> Waiting for variant playlists..." 
- sleep 2 - for i in {1..20}; do - # Check if at least one variant playlist exists - if [ -f "$OUT_DIR/v0/stream.m3u8" ] || [ -f "$OUT_DIR/v720/stream.m3u8" ] || [ -f "$OUT_DIR/v144/stream.m3u8" ] || [ -f "$OUT_DIR/vaudio/stream.m3u8" ]; then - break - fi - sleep 0.5 - done - - # Trap to clean up ffmpeg on exit - CLEANUP_CALLED=false - cleanup() { - if [ "$CLEANUP_CALLED" = "true" ]; then - return - fi - CLEANUP_CALLED=true - echo "Shutting down..." - kill $FFMPEG_PID 2>/dev/null || true - # Wait a bit for ffmpeg to finish - sleep 0.5 - # Force kill if still running - kill -9 $FFMPEG_PID 2>/dev/null || true - } - trap cleanup SIGINT SIGTERM EXIT - - # Run moq to ingest from local files - echo ">>> Running with --passthrough flag" - cargo run --bin moq -- publish --url "{{relay}}" --name "{{name}}" hls --playlist "$OUT_DIR/master.m3u8" --passthrough - EXIT_CODE=$? - - # Cleanup after cargo run completes (success or failure) - cleanup - - # Exit with the same code as cargo run - exit $EXIT_CODE - -# Publish a video using H.264 Annex B format to the localhost relay server -pub-h264 name url="http://localhost:4443/anon" *args: - # Download the sample media. - just download "{{name}}" - - # Pre-build the binary so we don't queue media while compiling. 
- cargo build --bin moq - - # Run ffmpeg and pipe H.264 Annex B output to moq - ffmpeg -hide_banner -v quiet \ - -stream_loop -1 -re \ - -i "dev/{{name}}.fmp4" \ - -c:v copy -an \ - -bsf:v h264_mp4toannexb \ - -f h264 \ - - | cargo run --bin moq -- publish --url "{{url}}" --name "{{name}}" --format annex-b {{args}} - -# Publish/subscribe using gstreamer - see https://github.com/moq-dev/gstreamer -pub-gst name url='http://localhost:4443/anon': - @echo "GStreamer plugin has moved to: https://github.com/moq-dev/gstreamer" - @echo "Install and use hang-gst directly for GStreamer functionality" - -# Subscribe to a video using gstreamer - see https://github.com/moq-dev/gstreamer -sub name url='http://localhost:4443/anon': - @echo "GStreamer plugin has moved to: https://github.com/moq-dev/gstreamer" - @echo "Install and use hang-gst directly for GStreamer functionality" - -# Publish a video using ffmpeg directly from moq to the localhost -# To also serve via iroh, pass --iroh-enabled as last argument. -serve name *args: - # Download the sample media. - just download "{{name}}" - - # Pre-build the binary so we don't queue media while compiling. 
- cargo build --bin moq - - # Run ffmpeg and pipe the output to moq - just ffmpeg-cmaf "dev/{{name}}.fmp4" |\ - cargo run --bin moq -- \ - {{args}} serve --listen "[::]:4443" --tls-generate "localhost" \ - --name "{{name}}" fmp4 - -# Run the web server -web url='http://localhost:4443/anon': - cd js/hang-demo && VITE_RELAY_URL="{{url}}" bun run dev - -# Publish the clock broadcast -# `action` is either `publish` or `subscribe` -clock action url="http://localhost:4443/anon" *args: - @if [ "{{action}}" != "publish" ] && [ "{{action}}" != "subscribe" ]; then \ - echo "Error: action must be 'publish' or 'subscribe', got '{{action}}'" >&2; \ - exit 1; \ - fi - - cargo run --bin moq-clock -- --url "{{url}}" --broadcast "clock" {{args}} {{action}} - -# Run the CI checks -check: - #!/usr/bin/env bash - set -euo pipefail - - # Run the Javascript checks. - bun install --frozen-lockfile - if tty -s; then - bun run --filter='*' --elide-lines=0 check - else - bun run --filter='*' check - fi - bun biome check - - # Run the (slower) Rust checks. - cargo check --all-targets --all-features - cargo clippy --all-targets --all-features -- -D warnings - cargo fmt --all --check - - # Check documentation warnings (only workspace crates, not dependencies) - RUSTDOCFLAGS="-D warnings" cargo doc --no-deps --workspace - - # requires: cargo install cargo-shear - cargo shear - - # requires: cargo install cargo-sort - cargo sort --workspace --check - - # Only run the tofu checks if tofu is installed. - if command -v tofu &> /dev/null; then (cd cdn && just check); fi - - # Only run the nix checks if nix is installed. 
- if command -v nix &> /dev/null; then nix flake check; fi - -# Run comprehensive CI checks including all feature combinations (requires cargo-hack) -check-all: - #!/usr/bin/env bash - set -euo pipefail - - # Run the standard checks first - just check - - # Check all feature combinations for the hang crate - # requires: cargo install cargo-hack - echo "Checking all feature combinations for hang..." - cargo hack check --package hang --each-feature --no-dev-deps - -# Run the unit tests -test: - #!/usr/bin/env bash - set -euo pipefail - - # Run the Javascript tests. - bun install --frozen-lockfile - if tty -s; then - bun run --filter='*' --elide-lines=0 test - else - bun run --filter='*' test - fi - - cargo test --all-targets --all-features - -# Run comprehensive tests including all feature combinations (requires cargo-hack) -test-all: - #!/usr/bin/env bash - set -euo pipefail - - # Run the standard tests first - just test - - # Test all feature combinations for the hang crate - # requires: cargo install cargo-hack - echo "Testing all feature combinations for hang..." - cargo hack test --package hang --each-feature - -# Automatically fix some issues. -fix: - # Fix the Javascript dependencies. - bun install - bun biome check --write - - # Fix the Rust issues. - cargo clippy --fix --allow-staged --allow-dirty --all-targets --all-features - cargo fmt --all - - # requires: cargo install cargo-shear - cargo shear --fix - - # requires: cargo install cargo-sort - cargo sort --workspace - - if command -v tofu &> /dev/null; then (cd cdn && just fix); fi - -# Upgrade any tooling -update: - bun update - bun outdated - - # Update any patch versions - cargo update - - # Requires: cargo install cargo-upgrades cargo-edit - cargo upgrade --incompatible - - # Update the Nix flake. 
- nix flake update - -# Build the packages -build: - bun run --filter='*' build - cargo build - -# Generate and serve an HLS stream from a video for testing pub-hls -serve-hls name port="8000": - #!/usr/bin/env bash - set -euo pipefail - - just download "{{name}}" - - INPUT="dev/{{name}}.mp4" - OUT_DIR="dev/{{name}}" - - rm -rf "$OUT_DIR" - mkdir -p "$OUT_DIR" - - echo ">>> Starting HLS stream generation..." - echo ">>> Master playlist: http://localhost:{{port}}/master.m3u8" - - cleanup() { - echo "Shutting down..." - kill $(jobs -p) 2>/dev/null || true - exit 0 - } - trap cleanup SIGINT SIGTERM - - ffmpeg -loglevel warning -re -stream_loop -1 -i "$INPUT" \ - -map 0:v:0 -map 0:v:0 -map 0:a:0 \ - -r 25 -preset veryfast -g 50 -keyint_min 50 -sc_threshold 0 \ - -c:v:0 libx264 -profile:v:0 high -level:v:0 4.1 -pix_fmt:v:0 yuv420p -tag:v:0 avc1 -bsf:v:0 dump_extra -b:v:0 4M -vf:0 "scale=1920:-2" \ - -c:v:1 libx264 -profile:v:1 high -level:v:1 4.1 -pix_fmt:v:1 yuv420p -tag:v:1 avc1 -bsf:v:1 dump_extra -b:v:1 300k -vf:1 "scale=256:-2" \ - -c:a aac -b:a 128k \ - -f hls \ - -hls_time 2 -hls_list_size 12 \ - -hls_flags independent_segments+delete_segments \ - -hls_segment_type fmp4 \ - -master_pl_name master.m3u8 \ - -var_stream_map "v:0,agroup:audio v:1,agroup:audio a:0,agroup:audio" \ - -hls_segment_filename "$OUT_DIR/v%v/segment_%09d.m4s" \ - "$OUT_DIR/v%v/stream.m3u8" & - - sleep 2 - echo ">>> HTTP server: http://localhost:{{port}}/" - cd "$OUT_DIR" && python3 -m http.server {{port}} - -# Connect tokio-console to the relay server (port 6680) -relay-console: - tokio-console http://127.0.0.1:6680 - -# Connect tokio-console to the publisher (port 6681) -pub-console: - tokio-console http://127.0.0.1:6681 - -# Serve the documentation locally. 
-doc: - cd doc && bun run dev - -# Throttle UDP traffic for testing (macOS only, requires sudo) -throttle: - dev/throttle From f3ef5c61836bfb45082116feb9bb5fdc9aa84cf1 Mon Sep 17 00:00:00 2001 From: Luke Curley Date: Thu, 5 Feb 2026 12:03:43 -0800 Subject: [PATCH 5/5] Add a dummy justfile. --- justfile | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 justfile diff --git a/justfile b/justfile new file mode 100644 index 000000000..e68d25de7 --- /dev/null +++ b/justfile @@ -0,0 +1,7 @@ +# DEPRECATED: Use `./x` or `cargo x` instead. +# This justfile forwards all commands to the new task runner. + +[positional-arguments] +[no-cd] +@_default *args: + cargo x {{args}}