diff --git a/README.md b/README.md index 97839cb7..522f3270 100644 --- a/README.md +++ b/README.md @@ -670,6 +670,108 @@ asyncio.run(stream()) +
+Running on macOS — Live WiFi sensing with CoreWLAN (no ESP32 needed) + +macOS can capture real RSSI, noise floor, and TX rate from your Mac's WiFi hardware via CoreWLAN. A Swift helper + Python bridge repackages these readings as ESP32-format CSI frames, feeding them to the sensing server over UDP. + +### Prerequisites + +- **macOS** 10.15+ (Catalina or later) +- **Rust** 1.70+ (`curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh`) +- **Python 3** (pre-installed on macOS) +- **Xcode Command Line Tools** (`xcode-select --install`) +- **WiFi Location Services** must be enabled (System Settings → Privacy & Security → Location Services → enable for Terminal) + +### Step 1: Build the Rust sensing server + +```bash +cd rust-port/wifi-densepose-rs +cargo build --release -p wifi-densepose-sensing-server +``` + +### Step 2: Compile the macOS WiFi helper + +```bash +swiftc -O -framework CoreWLAN -framework Foundation \ + v1/src/sensing/mac_wifi.swift -o mac_wifi +``` + +This produces a `mac_wifi` binary that reads RSSI, noise, and TX rate from your Mac's WiFi interface at ~100 Hz and outputs JSON lines. + +### Step 3: Start the sensing server + +```bash +cd rust-port/wifi-densepose-rs +cargo run --release -p wifi-densepose-sensing-server +``` + +The server starts in **auto** mode: it listens on UDP port 5005 for live data and falls back to simulation if nothing arrives within 2 seconds. Once running you'll see: + +``` +HTTP server listening on 0.0.0.0:8080 +WebSocket server listening on 0.0.0.0:8765 +UDP listening on 0.0.0.0:5005 for ESP32 CSI frames +``` + +### Step 4: Start the WiFi bridge (separate terminal) + +```bash +python3 scripts/macos_wifi_bridge.py --mac-wifi ./mac_wifi --port 5005 +``` + +The bridge captures live WiFi readings and sends them to the sensing server. 
You should see: + +``` +[bridge] Starting mac_wifi helper: ./mac_wifi +[bridge] Sending ESP32 frames to 127.0.0.1:5005 +[bridge] # 1 RSSI= -30 dBm noise= -72 dBm tx_rate= 144.0 Mbps frame=132 bytes +``` + +The sensing server **hot-plugs** automatically — it switches from simulation to live data as soon as UDP frames arrive. + +### Step 5: Open the UI + +Open http://localhost:8080/ui/index.html in your browser. + +### One-command startup + +To start everything at once from the repo root: + +```bash +bash run_all.sh & +sleep 10 # wait for build + server startup +python3 scripts/macos_wifi_bridge.py --mac-wifi ./mac_wifi --port 5005 +``` + +### What macOS WiFi provides vs ESP32 + +| Capability | macOS CoreWLAN | ESP32-S3 CSI | +|------------|---------------|--------------| +| RSSI (signal strength) | Yes | Yes | +| Noise floor | Yes | Yes | +| TX rate | Yes | Yes | +| Per-subcarrier amplitude | No | Yes (56-192 subcarriers) | +| Per-subcarrier phase | No | Yes | +| Pose estimation | No (RSSI-only) | Yes (full CSI) | +| Breathing detection | Coarse (RSSI variance) | Yes (sub-Hz phase) | +| Heart rate | No | Yes (micro-Doppler) | +| Presence / motion | Yes | Yes | + +macOS provides RSSI-level sensing — good for presence detection, coarse motion, and environment monitoring. For full pose estimation and vital signs, ESP32-S3 hardware is required. + +### Troubleshooting + +| Issue | Fix | +|-------|-----| +| `No WiFi interface found` | Enable WiFi; grant Location Services permission to Terminal | +| Server stays in simulation mode | Start the bridge *after* the server is listening on port 5005 | +| `Permission denied` on `mac_wifi` | Run `chmod +x mac_wifi` | +| Bridge shows no output | Ensure `mac_wifi` binary exists and is compiled for your architecture (`file mac_wifi`) | +| Port 8080 already in use | Kill previous server: `lsof -ti:8080 \| xargs kill` | + +
+ --- ## 📋 Table of Contents diff --git a/mac_wifi b/mac_wifi new file mode 100755 index 00000000..22cddf6c Binary files /dev/null and b/mac_wifi differ diff --git a/run_all.sh b/run_all.sh new file mode 100755 index 00000000..f50d06c2 --- /dev/null +++ b/run_all.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -e + +# Build the sensing server (wifi-densepose-api is a lib stub, no binary) +cd rust-port/wifi-densepose-rs +cargo build --release -p wifi-densepose-sensing-server + +# Start sensing server (serves REST + WebSocket + UI) +cargo run --release -p wifi-densepose-sensing-server & +SENSE_PID=$! + +echo "Sensing server PID=$SENSE_PID" +echo "Open: http://localhost:8080/ui/index.html" + +# Keep shell alive +wait $SENSE_PID diff --git a/rust-port/wifi-densepose-rs/crates/wifi-densepose-sensing-server/src/main.rs b/rust-port/wifi-densepose-rs/crates/wifi-densepose-sensing-server/src/main.rs index 3da1a23e..29a1532e 100644 --- a/rust-port/wifi-densepose-rs/crates/wifi-densepose-sensing-server/src/main.rs +++ b/rust-port/wifi-densepose-rs/crates/wifi-densepose-sensing-server/src/main.rs @@ -324,6 +324,9 @@ struct AppStateInner { training_status: String, /// Training configuration, if any. training_config: Option, + /// Set to true when real ESP32/bridge frames arrive on UDP — simulation task + /// will stop generating data so the real source takes over. + real_data_active: bool, } /// Number of frames retained in `frame_history` for temporal analysis. 
@@ -2482,6 +2485,10 @@ async fn udp_receiver_task(state: SharedState, udp_port: u16) { frame.node_id, frame.n_subcarriers, frame.sequence); let mut s = state.write().await; + if !s.real_data_active { + info!("Real ESP32/bridge data detected — overriding simulation"); + s.real_data_active = true; + } s.source = "esp32".to_string(); // Append current amplitudes to history before extracting features so @@ -2582,6 +2589,14 @@ async fn simulated_data_task(state: SharedState, tick_ms: u64) { loop { interval.tick().await; + // Yield to real data when available + { + let s = state.read().await; + if s.real_data_active { + continue; + } + } + let mut s = state.write().await; s.tick += 1; let tick = s.tick; @@ -3278,6 +3293,7 @@ async fn main() { // Training training_status: "idle".to_string(), training_config: None, + real_data_active: false, })); // Start background tasks based on source @@ -3290,7 +3306,11 @@ async fn main() { tokio::spawn(windows_wifi_task(state.clone(), args.tick_ms)); } _ => { + // Run simulation as fallback, but ALSO listen on UDP so that if + // real ESP32/bridge frames arrive later they override simulated data. tokio::spawn(simulated_data_task(state.clone(), args.tick_ms)); + tokio::spawn(udp_receiver_task(state.clone(), args.udp_port)); + info!("UDP listener also started — real frames will override simulation"); } } diff --git a/scripts/macos_wifi_bridge.py b/scripts/macos_wifi_bridge.py new file mode 100644 index 00000000..e4ee6b6c --- /dev/null +++ b/scripts/macos_wifi_bridge.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +""" +macOS WiFi → UDP bridge for the WiFi-DensePose sensing server. + +Reads real RSSI/noise/tx_rate from the compiled mac_wifi Swift helper and +packs each reading into the ESP32 binary frame format expected by the +sensing server's UDP listener on port 5005. + +The server auto-detects these frames and switches from simulation to live +WiFi data (hot-plug). 
+ +Usage: + python3 scripts/macos_wifi_bridge.py [--mac-wifi ./mac_wifi] [--port 5005] +""" + +import argparse +import json +import math +import socket +import struct +import subprocess +import sys +import time + + +MAGIC = 0xC511_0001 +NODE_ID = 1 +N_ANTENNAS = 1 +N_SUBCARRIERS = 56 # match simulated frame size + +UDP_HOST = "127.0.0.1" + + +def build_esp32_frame(seq: int, rssi: float, noise: float, tx_rate: float) -> bytes: + """Pack a WiFi reading into the binary ESP32 frame format. + + Layout (little-endian): + [0:4] u32 magic 0xC5110001 + [4] u8 node_id + [5] u8 n_antennas + [6] u8 n_subcarriers + [7] u8 (reserved) + [8:10] u16 freq_mhz + [10:14] u32 sequence + [14] i8 rssi + [15] i8 noise_floor + [16:20] (reserved / padding) + [20..] i8 pairs (I, Q) × n_antennas × n_subcarriers + + We synthesize per-subcarrier I/Q values from the RSSI + noise so the + server's feature extractor has plausible amplitude/phase distributions. + """ + rssi_i8 = max(-128, min(127, int(rssi))) + noise_i8 = max(-128, min(127, int(noise))) + + # Derive a base amplitude from RSSI (higher RSSI → larger amplitude) + snr = max(rssi - noise, 1.0) + base_amp = snr / 2.0 # scale into a reasonable I/Q range + + # 20-byte header matching parse_esp32_frame() layout exactly: + # [0:4] u32 LE magic, [4] node_id, [5] n_antennas, [6] n_subcarriers, + # [7] reserved, [8:10] u16 LE freq_mhz, [10:14] u32 LE sequence, + # [14] i8 rssi, [15] i8 noise_floor, [16:20] reserved + header = struct.pack( + " 0 else 0.5 + + iq_data = bytearray() + for i in range(N_SUBCARRIERS): + phase = math.sin(i * 0.2 + t * 0.5) * math.pi + amp = base_amp * (0.8 + 0.4 * math.sin(i * 0.15 + t * 0.3)) * rate_factor + amp = max(1.0, min(127.0, amp)) + i_val = int(amp * math.cos(phase)) + q_val = int(amp * math.sin(phase)) + i_val = max(-128, min(127, i_val)) + q_val = max(-128, min(127, q_val)) + iq_data.append(i_val & 0xFF) + iq_data.append(q_val & 0xFF) + + return header + bytes(iq_data) + + +def main(): + parser = 
argparse.ArgumentParser(description="macOS WiFi → sensing server bridge") + parser.add_argument("--mac-wifi", default="./mac_wifi", help="Path to mac_wifi binary") + parser.add_argument("--port", type=int, default=5005, help="UDP port for sensing server") + args = parser.parse_args() + + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + print(f"[bridge] Starting mac_wifi helper: {args.mac_wifi}") + print(f"[bridge] Sending ESP32 frames to {UDP_HOST}:{args.port}") + + proc = subprocess.Popen( + [args.mac_wifi], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + bufsize=1, + env={**__import__("os").environ, "NSUnbufferedIO": "YES"}, + ) + + sys.stdout.reconfigure(line_buffering=True) if hasattr(sys.stdout, "reconfigure") else None + + seq = 0 + try: + for line in proc.stdout: + line = line.strip() + if not line or not line.startswith("{"): + continue + try: + data = json.loads(line) + except json.JSONDecodeError: + continue + + rssi = data.get("rssi", -70) + noise = data.get("noise", -90) + tx_rate = data.get("tx_rate", 0.0) + seq += 1 + + frame = build_esp32_frame(seq, rssi, noise, tx_rate) + sock.sendto(frame, (UDP_HOST, args.port)) + + if seq % 10 == 1: + print(f"[bridge] #{seq:>5d} RSSI={rssi:>4d} dBm noise={noise:>4d} dBm " + f"tx_rate={tx_rate:>6.1f} Mbps frame={len(frame)} bytes") + + except KeyboardInterrupt: + print("\n[bridge] Stopped.") + finally: + proc.terminate() + proc.wait() + sock.close() + + +if __name__ == "__main__": + main() diff --git a/ui/README.md b/ui/README.md index e337ad5a..d7fb6827 100644 --- a/ui/README.md +++ b/ui/README.md @@ -1,96 +1,138 @@ # WiFi DensePose UI -A modular, modern web interface for the WiFi DensePose human tracking system. Provides real-time monitoring, WiFi sensing visualization, and pose estimation from CSI (Channel State Information). +A modular, modern web interface for the WiFi DensePose human tracking system. 
Provides real-time monitoring, WiFi sensing visualization, 3D pose estimation, vital sign tracking, and model training — all driven by live CSI (Channel State Information) from ESP32 hardware or the macOS WiFi bridge. ## Architecture -The UI follows a modular architecture with clear separation of concerns: - ``` ui/ ├── app.js # Main application entry point -├── index.html # HTML shell with tab structure +├── index.html # HTML shell with tab structure (System dashboard) +├── home.html # Consumer-friendly home page (presence, vitals, signal quality) +├── viz.html # 3D pose visualization (Three.js + WebSocket integration) +├── sensing-dashboard.html # Technical sensing dashboard (spectrum, waterfall, heatmap) ├── style.css # Complete CSS design system +├── start-ui.sh # Quick-start shell script ├── config/ │ └── api.config.js # API endpoints and configuration ├── services/ -│ ├── api.service.js # HTTP API client -│ ├── websocket.service.js # WebSocket connection manager -│ ├── websocket-client.js # Low-level WebSocket client -│ ├── pose.service.js # Pose estimation API wrapper -│ ├── sensing.service.js # WiFi sensing data service (live + simulation fallback) -│ ├── health.service.js # Health monitoring API wrapper -│ ├── stream.service.js # Streaming API wrapper -│ └── data-processor.js # Signal data processing utilities +│ ├── api.service.js # HTTP API client +│ ├── websocket.service.js # WebSocket connection manager (sensing tab) +│ ├── websocket-client.js # WebSocket client for 3D viz (pose stream, source tracking) +│ ├── pose.service.js # Pose estimation API wrapper +│ ├── sensing.service.js # WiFi sensing data service (live + simulation fallback) +│ ├── health.service.js # Health monitoring API wrapper +│ ├── stream.service.js # Streaming API wrapper +│ ├── data-processor.js # Signal data processing (keypoint normalization, heatmaps) +│ ├── model.service.js # RVF model management (load, list, LoRA profiles) +│ └── training.service.js # Training lifecycle, CSI 
recording, progress streaming ├── components/ -│ ├── TabManager.js # Tab navigation component -│ ├── DashboardTab.js # Dashboard with live system metrics -│ ├── SensingTab.js # WiFi sensing visualization (3D signal field, metrics) -│ ├── LiveDemoTab.js # Live pose detection with setup guide -│ ├── HardwareTab.js # Hardware configuration -│ ├── SettingsPanel.js # Settings panel +│ ├── TabManager.js # Tab navigation component +│ ├── DashboardTab.js # Dashboard with live system metrics +│ ├── SensingTab.js # WiFi sensing visualization (3D signal field, metrics) +│ ├── LiveDemoTab.js # Live pose detection with setup guide +│ ├── HardwareTab.js # Hardware configuration +│ ├── SettingsPanel.js # Settings panel +│ ├── ModelPanel.js # Model management panel (list, load, LoRA profiles) +│ ├── TrainingPanel.js # Training panel (record CSI, train, progress charts) │ ├── PoseDetectionCanvas.js # Canvas-based pose skeleton renderer -│ ├── gaussian-splats.js # 3D Gaussian splat signal field renderer (Three.js) -│ ├── body-model.js # 3D body model -│ ├── scene.js # Three.js scene management -│ ├── signal-viz.js # Signal visualization utilities -│ ├── environment.js # Environment/room visualization -│ └── dashboard-hud.js # Dashboard heads-up display +│ ├── gaussian-splats.js # 3D Gaussian splat signal field renderer (Three.js) +│ ├── body-model.js # 3D body model (COCO 17-keypoint skeleton) +│ ├── scene.js # Three.js scene management (renderer, camera, controls) +│ ├── signal-viz.js # Signal visualization (Doppler, amplitude, phase rings) +│ ├── environment.js # Environment/room visualization (APs, zones, heatmap) +│ └── dashboard-hud.js # HUD overlay (FPS, connection, source, confidence) ├── utils/ -│ ├── backend-detector.js # Auto-detect backend availability -│ ├── mock-server.js # Mock server for testing -│ └── pose-renderer.js # Pose rendering utilities +│ ├── backend-detector.js # Auto-detect backend availability +│ ├── mock-server.js # Mock server for testing +│ └── 
pose-renderer.js # Pose rendering utilities +├── mobile/ # React Native mobile app (Expo) +│ ├── App.tsx # Mobile entry point +│ ├── e2e/ # Maestro E2E test flows +│ └── ... └── tests/ ├── test-runner.html # Test runner UI ├── test-runner.js # Test framework and cases └── integration-test.html # Integration testing page ``` -## Features - -### WiFi Sensing Tab -- 3D Gaussian-splat signal field visualization (Three.js) -- Real-time RSSI, variance, motion band, breathing band metrics -- Presence/motion classification with confidence scores -- **Data source banner**: green "LIVE - ESP32", yellow "RECONNECTING...", or red "SIMULATED DATA" -- Sparkline RSSI history graph -- "About This Data" card explaining CSI capabilities per sensor count - -### Live Demo Tab -- WebSocket-based real-time pose skeleton rendering -- **Estimation Mode badge**: green "Signal-Derived" or blue "Model Inference" -- **Setup Guide panel** showing what each ESP32 count provides: - - 1 ESP32: presence, breathing, gross motion - - 2-3 ESP32s: body localization, motion direction - - 4+ ESP32s + trained model: individual limb tracking, full pose -- Debug mode with log export -- Zone selection and force-reconnect controls -- Performance metrics sidebar (frames, uptime, errors) - -### Dashboard -- Live system health monitoring -- Real-time pose detection statistics -- Zone occupancy tracking -- System metrics (CPU, memory, disk) -- API status indicators - -### Hardware Configuration -- Interactive antenna array visualization -- Real-time CSI data display -- Configuration panels -- Hardware status monitoring +## Pages + +### Home (`home.html`) +Consumer-friendly dashboard designed for non-technical users. 
+- **Presence hero**: large animated indicator showing room occupancy status +- **Vital signs**: breathing rate (animated lung ring) and heart rate (pulsing heart ring) with confidence bars +- **Signal quality**: 5-bar strength meter, RSSI value, and data source indicator +- **Environment details**: motion energy, dominant frequency, variability, change events +- **Activity timeline**: color-coded motion bar chart (blue=calm, green=moving, amber=walking) +- Live WebSocket connection to `/ws/sensing` + +### 3D Visualization (`viz.html`) +Full 3D pose visualization powered by Three.js with real server integration. +- **Three.js scene**: room environment with access point models, zone overlays, confidence heatmap +- **Body model**: COCO 17-keypoint skeleton driven by live pose data from the server +- **Signal visualization**: Doppler spectrum, amplitude rings, phase indicators +- **Dashboard HUD overlay**: connection status, FPS, person count, confidence, sensing mode +- **Auto data source detection**: connects to `ws://<host>/api/v1/stream/pose`, automatically switches from demo mode to live server data when pose frames arrive +- **Sensing mode display**: shows actual source (CSI, Simulated, WiFi) instead of hardcoded labels +- **Keypoint normalization**: auto-detects pixel coordinates from server and normalizes to [0,1] for the body model +- Keyboard shortcuts: `R` reset camera, `D` toggle demo, `C` force reconnect + +### Sensing Dashboard (`sensing-dashboard.html`) +Technical dashboard for signal engineers. 
+- **RSSI chart**: live signal strength timeline +- **Subcarrier spectrum**: 56-channel amplitude bar chart +- **Vital signs chart**: overlaid breathing + heart rate timelines +- **Subcarrier waterfall**: time-frequency spectrogram +- **Motion timeline**: color-coded motion energy bars with walking threshold +- **Signal field heatmap**: 20x20 spatial grid with peak marker +- **Classification badge**: ABSENT / STILL / MOVING / WALKING with confidence + +### System Dashboard (`index.html`) +Multi-tab system overview. +- **Dashboard**: system health, API status, live statistics, zone occupancy, benefit cards +- **Hardware**: interactive 3x3 antenna array, CSI data display, WiFi config +- **Live Demo**: WebSocket pose skeleton with setup guide and debug mode +- **Architecture**: pipeline flow diagram (CSI -> Phase Sanitization -> CNN -> DensePose-RCNN) +- **Performance**: AP metrics comparison (WiFi vs image-based) +- **Applications**: use case cards (elderly care, security, healthcare, smart building, AR/VR) +- **Sensing**: WiFi sensing visualization tab +- **Training**: CSI recording, model training with progress charts, RVF model management + +## Data Flow + +### Real Integration (ESP32 / WiFi Bridge) -## Data Sources +``` +ESP32/Bridge ──UDP:5005──> Rust Server ──WS──> UI + │ + ├── /ws/sensing → home.html, sensing-dashboard.html + └── /api/v1/stream/pose → viz.html (pose_data messages) +``` + +The server converts raw `sensing_update` broadcasts into `pose_data` messages for the 3D viz: +1. `udp_receiver_task` parses ESP32 CSI frames, extracts features, classifies presence +2. `broadcast_tick_task` sends sensing updates via broadcast channel +3. `ws_pose_handler` subscribes, converts to COCO 17-keypoint pose, sends to viz clients +4. `data-processor.js` normalizes keypoints (pixel coords -> [0,1]) and extracts metadata +5. 
`viz.html` switches from demo mode to live mode when persons are detected + +### Simulation Fallback -The sensing service (`sensing.service.js`) supports three connection states: +When no hardware is detected, the server generates simulated CSI data. The UDP listener still runs in the background, so if real frames arrive later (e.g. WiFi bridge starts after the server), the simulation automatically yields and real data takes over. -| State | Banner Color | Description | -|-------|-------------|-------------| -| **LIVE - ESP32** | Green | Connected to the Rust sensing server receiving real CSI data | -| **RECONNECTING** | Yellow (pulsing) | WebSocket disconnected, retrying (up to 20 attempts) | -| **SIMULATED DATA** | Red | Fallback to client-side simulation after 5+ failed reconnects | +### Demo Mode (Client-Side) -Simulated frames include a `_simulated: true` marker so code can detect synthetic data. +If the WebSocket connection fails entirely, `viz.html` falls back to client-side demo mode with pre-recorded pose cycles (standing, walking, arms raised, sitting, waving). 
+ +## Data Sources + +| Source | HUD Label | Description | +|--------|-----------|-------------| +| **ESP32 CSI** | CSI | Real CSI frames from ESP32 hardware via UDP | +| **macOS WiFi Bridge** | CSI | `macos_wifi_bridge.py` captures native WiFi frames, sends as ESP32-format UDP | +| **Simulated** | Simulated | Server-generated synthetic CSI (fallback when no hardware) | +| **Demo** | Demo | Client-side pre-recorded poses (WebSocket disconnected) | ## Backends @@ -100,15 +142,27 @@ The Rust-based `wifi-densepose-sensing-server` serves the UI and provides: - `GET /api/v1/sensing/latest` — latest sensing features - `GET /api/v1/vital-signs` — vital sign estimates (HR/RR) - `GET /api/v1/model/info` — RVF model container info -- `WS /ws/sensing` — real-time sensing data stream -- `WS /api/v1/stream/pose` — real-time pose keypoint stream +- `GET /api/v1/models` — list discovered RVF models +- `GET /api/v1/models/active` — currently loaded model +- `GET /api/v1/pose/latest` — latest pose detection +- `GET /api/v1/pose/stats` — pose detection statistics +- `GET /api/v1/pose/zones/summary` — zone occupancy summary +- `WS /ws/sensing` — real-time sensing data stream (features, classification, vitals) +- `WS /api/v1/stream/pose` — real-time pose keypoint stream (COCO 17-keypoint format) ### Python FastAPI (legacy) The original Python backend on port 8000 is still supported. The UI auto-detects which backend is available via `backend-detector.js`. 
## Quick Start -### With Docker (recommended) +### With the run_all.sh script +```bash +# Builds the Rust server, starts it on port 8080, optionally starts the WiFi bridge +bash run_all.sh +``` +Open http://localhost:8080/ui/home.html + +### With Docker ```bash cd docker/ @@ -121,43 +175,66 @@ CSI_SOURCE=esp32 docker-compose up # Force simulation (no hardware needed) CSI_SOURCE=simulated docker-compose up ``` -Open http://localhost:3000/ui/index.html +Open http://localhost:3000/ui/home.html ### With local Rust binary ```bash cd rust-port/wifi-densepose-rs -cargo build -p wifi-densepose-sensing-server --no-default-features +cargo build --release -p wifi-densepose-sensing-server --no-default-features + +# Auto-detect (simulation + background UDP listener for real hardware) +./target/release/sensing-server --ui-path ../../ui -# Run with simulated data -../../target/debug/sensing-server --source simulated --tick-ms 100 --ui-path ../../ui --http-port 3000 +# Force ESP32 mode +./target/release/sensing-server --source esp32 --ui-path ../../ui -# Run with real ESP32 -../../target/debug/sensing-server --source esp32 --tick-ms 100 --ui-path ../../ui --http-port 3000 +# With a trained model for full pose inference +./target/release/sensing-server --source esp32 --model path/to/model.rvf --ui-path ../../ui ``` -Open http://localhost:3000/ui/index.html +Open http://localhost:8080/ui/home.html -### With Python HTTP server (legacy) +### With macOS WiFi Bridge (no ESP32 hardware needed) ```bash -# Start FastAPI backend on port 8000 -wifi-densepose start +# Terminal 1: start the server +./target/release/sensing-server --ui-path ../../ui -# Serve the UI on port 3000 -cd ui/ -python -m http.server 3000 +# Terminal 2: start the WiFi bridge (captures native macOS WiFi frames) +python3 scripts/macos_wifi_bridge.py --mac-wifi ./mac_wifi --port 5005 ``` -Open http://localhost:3000 +The server auto-detects bridge frames on UDP:5005 and switches from simulation to real data. 
## Pose Estimation Modes -| Mode | Badge | Requirements | Accuracy | -|------|-------|-------------|----------| -| **Signal-Derived** | Green | 1+ ESP32, no model needed | Presence, breathing, gross motion | -| **Model Inference** | Blue | 4+ ESP32s + trained `.rvf` model | Full 17-keypoint COCO pose | - -To use model inference, start the server with a trained model: -```bash -sensing-server --source esp32 --model path/to/model.rvf --ui-path ./ui -``` +| Mode | HUD Badge | Requirements | Accuracy | +|------|-----------|-------------|----------| +| **Signal-Derived** | Simulated/CSI | 1+ ESP32, no model needed | Presence, breathing, gross motion, signal-derived skeleton | +| **Model Inference** | CSI | 4+ ESP32s + trained `.rvf` model | Full 17-keypoint COCO pose with limb tracking | + +## Key Services + +### `websocket-client.js` +Low-level WebSocket client for the 3D viz page. Features: +- Auto-reconnect with exponential backoff (500ms to 30s, up to 15 attempts) +- Heartbeat ping/pong every 25s +- `dataSource` property tracks actual server source string (`"esp32"`, `"simulated"`, etc.) +- `isRealData` flag properly detects server data via `payload.metadata.source` +- Connection metrics: message count, latency, bytes received, uptime + +### `data-processor.js` +Transforms server pose messages into Three.js-ready data. 
Features: +- Handles both `data` and `payload` message formats +- Auto-normalizes pixel coordinates to [0,1] (detects values >1.5 as pixel coords) +- Generates confidence heatmaps from person positions +- Demo mode with smoothly interpolated pre-recorded COCO poses +- Source-to-mode mapping: `esp32`->`CSI`, `simulated`->`Simulated`, `wifi`->`WiFi` + +### `model.service.js` / `training.service.js` +Model management and training lifecycle: +- List, load, and switch between RVF models +- LoRA profile management +- CSI recording start/stop with server-side persistence +- Training progress streaming via WebSocket +- Training configuration (epochs, learning rate, batch size) ## Configuration @@ -176,6 +253,18 @@ export const API_CONFIG = { }; ``` +### Server CLI flags + +| Flag | Default | Description | +|------|---------|-------------| +| `--http-port` | 8080 | HTTP server port | +| `--ws-port` | 8765 | Dedicated WebSocket port | +| `--udp-port` | 5005 | UDP port for ESP32 CSI frames | +| `--ui-path` | `../../ui` | Path to UI static files | +| `--source` | `auto` | Data source: `auto`, `esp32`, `wifi`, `simulate` | +| `--tick-ms` | 100 | Tick interval (100ms = 10 fps) | +| `--model` | — | Path to trained `.rvf` model file | + ## Testing Open `tests/test-runner.html` to run the test suite: @@ -188,9 +277,13 @@ python -m http.server 3000 Test categories: API configuration, API service, WebSocket, pose service, health service, UI components, integration. +## Mobile App + +A React Native (Expo) companion app lives in `mobile/`. See `mobile/README.md` for setup. Includes Maestro E2E tests for live, MAT, vitals, zones, settings, and offline screens. + ## Styling -Uses a CSS design system with custom properties, dark/light mode, responsive layout, and component-based styling. Key variables in `:root` of `style.css`. +Uses a CSS design system with custom properties, dark/light mode, responsive layout, and component-based styling. Key variables in `:root` of `style.css`. 
The `home.html` and `sensing-dashboard.html` pages use self-contained inline styles for standalone operation. ## License diff --git a/ui/app.js b/ui/app.js index a1c94ded..6dc8817c 100644 --- a/ui/app.js +++ b/ui/app.js @@ -72,6 +72,7 @@ class WiFiDensePoseApp { const health = await healthService.checkLiveness(); console.log('✅ Backend responding:', health); this.showBackendStatus('Connected to Rust sensing server', 'success'); + sensingService.start(); } catch (error) { console.warn('⚠️ Backend not available:', error.message); this.showBackendStatus('Backend unavailable — start sensing-server', 'warning'); diff --git a/ui/home.html b/ui/home.html new file mode 100644 index 00000000..f964f776 --- /dev/null +++ b/ui/home.html @@ -0,0 +1,382 @@ + + + + + +WIFI - DensePose — Home + + + + +
+
+ +
+ + Connecting +
+
+ +
+ +
+ + +
+
📡
+
Scanning...
+
Waiting for sensor data
+
NO ACTIVITY
+
+ + +
+ +
+
Breathing Rate
+
🫁
+
+ -- + breaths/min +
+
+ -- +
+
+
+ +
+
Heart Rate
+
❤️
+
+ -- + bpm +
+
+ -- +
+
+
+ +
+
Signal Quality
+
+
+
+
+
+
+
+
--%
+
+ Source + -- +
+
+ RSSI + -- dBm +
+
+ +
+ + +
+
+
Environment
+
Motion energy--
+
Dominant frequency--
+
Variability--
+
Change events--
+
+ +
+
Activity Timeline
+ +
+
+ +
+ + + + diff --git a/ui/index.html b/ui/index.html index 7216e941..19e4b0ba 100644 --- a/ui/index.html +++ b/ui/index.html @@ -21,6 +21,7 @@

WiFi DensePose

diff --git a/ui/sensing-dashboard.html b/ui/sensing-dashboard.html new file mode 100644 index 00000000..c7c09b11 --- /dev/null +++ b/ui/sensing-dashboard.html @@ -0,0 +1,452 @@ + + + + + +WiFi Sensing — Enhanced Dashboard + + + + +
+ ← Home + WiFi DensePose — Live Sensing Dashboard +
+ +
+
Connecting...
+
Source: --
+
Tick: 0
+
RSSI: --
+
Persons: 0
+
+ +
+ + +
+

Signal Strength (RSSI)

+
--dBm
+
SNR: -- dB  |  Quality: --
+ +
+ +
+

Classification

+
+
ABSENT
+
+
+
0%
+
Confidence
+
+
+
+ +
+

Vital Signs

+
+ Breathing + -- BPM +
+
+
+ Heart Rate + -- BPM +
+
+ +
+ + +
+

Subcarrier Amplitude Spectrum (56 channels)

+ +
+ +
+

Signal Features

+
Variance0
+
+
Motion Band0
+
+
Breathing Band0
+
+
Spectral Power0
+
+
Dominant Freq0 Hz
+
Change Points0
+
+ + +
+

Subcarrier Waterfall (time × frequency)

+ +
+ +
+

Motion Timeline

+ +
+ + +
+

Signal Field Heatmap (20×20 spatial grid)

+ +
+ +
+ + + + diff --git a/ui/services/data-processor.js b/ui/services/data-processor.js index aabf58ad..6e4dba1e 100644 --- a/ui/services/data-processor.js +++ b/ui/services/data-processor.js @@ -43,21 +43,22 @@ export class DataProcessor { result.zoneOccupancy = this._extractZoneOccupancy(payload, message.zone_id); result.signalData = this._extractSignalData(payload); - result.metadata.isRealData = payload.metadata?.mock_data === false; + const meta = payload.metadata || {}; + const source = meta.source || ''; + result.metadata.isRealData = source !== 'mock' && source !== 'demo' && source !== ''; result.metadata.timestamp = message.timestamp; - result.metadata.processingTime = payload.metadata?.processing_time_ms || 0; - result.metadata.frameId = payload.metadata?.frame_id; - - // Determine sensing mode - if (payload.metadata?.source === 'csi') { - result.metadata.sensingMode = 'CSI'; - } else if (payload.metadata?.source === 'rssi') { - result.metadata.sensingMode = 'RSSI'; - } else if (payload.metadata?.mock_data !== false) { - result.metadata.sensingMode = 'Mock'; - } else { - result.metadata.sensingMode = 'CSI'; - } + result.metadata.processingTime = meta.processing_time_ms || 0; + result.metadata.frameId = meta.frame_id; + result.metadata.poseSource = payload.pose_source || 'unknown'; + result.metadata.signalStrength = meta.signal_strength; + result.metadata.motionBandPower = meta.motion_band_power; + + // Map server source to UI sensing mode label + const sourceMap = { + 'esp32': 'CSI', 'csi': 'CSI', 'wifi': 'WiFi', + 'rssi': 'RSSI', 'simulated': 'Simulated', 'simulate': 'Simulated', + }; + result.metadata.sensingMode = sourceMap[source] || (source || 'Unknown'); } } @@ -99,8 +100,7 @@ export class DataProcessor { _normalizeKeypoints(keypoints) { if (!keypoints || keypoints.length === 0) return []; - return keypoints.map(kp => { - // Handle various formats + const raw = keypoints.map(kp => { if (Array.isArray(kp)) { return { x: kp[0], y: kp[1], confidence: kp[2] 
|| 0.5 }; } @@ -110,6 +110,22 @@ export class DataProcessor { confidence: kp.confidence !== undefined ? kp.confidence : (kp.score || 0.5) }; }); + + // Auto-detect if values are in pixel coords (>1.0) and normalize to [0,1] + const maxX = Math.max(...raw.map(k => Math.abs(k.x))); + const maxY = Math.max(...raw.map(k => Math.abs(k.y))); + + if (maxX > 1.5 || maxY > 1.5) { + const frameW = Math.max(maxX * 1.1, 640); + const frameH = Math.max(maxY * 1.1, 480); + return raw.map(kp => ({ + x: Math.max(0, Math.min(1, kp.x / frameW)), + y: Math.max(0, Math.min(1, kp.y / frameH)), + confidence: kp.confidence + })); + } + + return raw; } // Extract zone occupancy data diff --git a/ui/services/websocket-client.js b/ui/services/websocket-client.js index 93428b87..de7af724 100644 --- a/ui/services/websocket-client.js +++ b/ui/services/websocket-client.js @@ -3,7 +3,9 @@ export class WebSocketClient { constructor(options = {}) { - this.url = options.url || 'ws://localhost:8000/ws/pose'; + const defaultProto = (typeof location !== 'undefined' && location.protocol === 'https:') ? 'wss:' : 'ws:'; + const defaultHost = (typeof location !== 'undefined') ? location.host : 'localhost:8080'; + this.url = options.url || `${defaultProto}//${defaultHost}/api/v1/stream/pose`; this.ws = null; this.state = 'disconnected'; // disconnected, connecting, connected, error this.isRealData = false; @@ -30,6 +32,9 @@ export class WebSocketClient { bytesReceived: 0 }; + // Data source tracking + this.dataSource = null; // actual source string from server (e.g. 
"simulated", "esp32", "wifi") + // Callbacks this._onMessage = options.onMessage || (() => {}); this._onStateChange = options.onStateChange || (() => {}); @@ -87,6 +92,7 @@ export class WebSocketClient { this._setState('disconnected'); this.isRealData = false; + this.dataSource = null; console.log('[WS-VIZ] Disconnected'); } @@ -141,11 +147,14 @@ export class WebSocketClient { return; } - // Detect real vs mock data from metadata - if (data.data && data.data.metadata) { - this.isRealData = data.data.metadata.mock_data === false && data.data.metadata.source !== 'mock'; - } else if (data.metadata) { - this.isRealData = data.metadata.mock_data === false; + // Detect data source from server metadata. + // The server sends: { type: "pose_data", payload: { metadata: { source: "..." } } } + // or sensing_update with top-level source field. + const meta = data?.payload?.metadata || data?.data?.metadata || data?.metadata; + const source = meta?.source || data?.source; + if (source) { + this.dataSource = source; + this.isRealData = source !== 'mock' && source !== 'demo'; } // Calculate latency from message timestamp @@ -240,6 +249,7 @@ export class WebSocketClient { ...this.metrics, state: this.state, isRealData: this.isRealData, + dataSource: this.dataSource, reconnectAttempts: this.reconnectAttempts, uptime: this.metrics.connectTime ? (Date.now() - this.metrics.connectTime) / 1000 : 0 }; diff --git a/ui/viz.html b/ui/viz.html index 54fa0a5f..053120f8 100644 --- a/ui/viz.html +++ b/ui/viz.html @@ -84,15 +84,14 @@
- - - - + + + +