-
Notifications
You must be signed in to change notification settings - Fork 26
API Reference
Technical documentation for the Quest WebRTC AI Video Processing System components.
The system consists of four main subsystems:
- Video Pipeline - Quest camera access and Unity integration
- WebRTC System - Real-time video streaming and signaling
- AI Integration - Decart AI service communication
- VR Interface - Quest UI and controller integration
File: Assets/Samples/DecartAI-Quest/Scripts/WebCamTextureManager.cs
Main component for Quest camera access via Unity's WebCamTexture API.
public PassthroughCameraEye Eye { get; set; }
// Camera eye selection (Left/Right)
// Default: PassthroughCameraEye.Left
public Vector2Int RequestedResolution { get; set; }
// Target camera resolution
// Default: 1280x704
public WebCamTexture WebCamTexture { get; private set; }
// Unity WebCamTexture instance for camera access
public async void StartCamera()
// Initiates camera discovery and WebCamTexture creation
// Handles permissions and device validation
public void StopCamera()
// Stops camera capture and releases resources
// Call in OnDestroy() for proper cleanup
var cameraManager = GetComponent<WebCamTextureManager>();
cameraManager.Eye = PassthroughCameraEye.Left;
cameraManager.RequestedResolution = new Vector2Int(1280, 704);
cameraManager.StartCamera();
File: Assets/PassthroughCameraApiSamples/Scripts/PassthroughCameraUtils.cs
Low-level Android Camera2 API integration for Quest hardware.
public static bool IsSupported { get; }
// Returns true if passthrough cameras are available
// Quest 3/3S with Horizon OS v74+ only
public static bool ArePermissionsGranted { get; }
// Checks both Android and Horizon OS camera permissions
public static PassthroughCameraDevice[] GetAvailableDevices()
// Returns array of available Quest cameras
// Includes metadata like eye position and resolution capabilities
public static PassthroughCameraDevice GetDevice(PassthroughCameraEye eye)
// Gets specific camera by eye position (Left/Right)
public struct PassthroughCameraDevice
{
public string Id; // Android Camera2 device ID
public PassthroughCameraEye Eye; // Left/Right eye position
public CameraSource Source; // Camera type (Passthrough)
public Vector2Int MaxResolution; // Maximum supported resolution
public string DeviceName; // Human-readable device name
}
File: Assets/PassthroughCameraApiSamples/Scripts/PassthroughCameraPermissions.cs
Runtime permission management for Quest camera access.
public static readonly string[] CameraPermissions = {
"android.permission.CAMERA", // Standard Android camera
"horizonos.permission.HEADSET_CAMERA" // Quest-specific (v74+)
};
public static void RequestCameraPermissions()
// Requests all required camera permissions
// Shows system permission dialog
public static bool HasCameraPermissions()
// Returns true if all camera permissions granted
// Check before attempting camera access
File: Assets/Samples/DecartAI-Quest/Scripts/WebRTCConnection.cs
Unity MonoBehaviour managing WebRTC lifecycle and video streaming.
[Header("Connection Settings")]
public string WebSocketServerAddress =
"wss://bouncer.mirage.decart.ai/ws?model=decart-v2v-v2.0-704p";
[Header("Video Settings")]
public Camera StreamingCamera; // Unity Camera for video capture
public Vector2Int VideoResolution = new Vector2Int(1280, 704);
public bool UseImmersiveSetup = false; // Alternative video capture mode
public void StartVideoTransmission()
// Initiates video streaming to AI service
// Configures VP8 encoding and bitrate
public void StopVideoTransmission()
// Stops video streaming and cleans up resources
public IEnumerator CreateOffer()
// Creates WebRTC offer with optimized VP8 settings
// Configures bitrate (4Mbps startup, 2Mbps sustained)
// Optimal settings for Quest + AI processing
var parameters = transceiver.Sender.GetParameters();
foreach (var encoding in parameters.encodings) {
encoding.maxBitrate = 4000000UL; // 4Mbps startup
encoding.minBitrate = 1000000UL; // 1Mbps minimum
encoding.maxFramerate = 16U; // 16fps target
encoding.scaleResolutionDownBy = 1.0;
}
File: Assets/SimpleWebRTC/Scripts/WebRTCManager.cs
Core WebRTC logic, signaling protocol, and AI prompt management.
public async void Connect(string webSocketUrl, bool isVideoSender, bool isVideoReceiver)
// Establishes WebSocket connection to AI service
// Configures peer connection for bidirectional video
public void Disconnect()
// Closes WebSocket and peer connections
// Performs cleanup of all WebRTC resources
The manager includes 152 predefined AI style prompts:
public Dictionary<string, string> prompts = new Dictionary<string, string> {
{"Frozen", "Realistic 3D video game environment characterized by..."},
{"Cyberpunk", "Hyper-realistic cyberpunk cityscape with neon lighting..."},
{"Anime", "Japanese animation style with vibrant colors..."},
// ... 149 more prompts
};
// Session initialization
var initMessage = new outboundInitMessage {
type = "initialize_session",
fps = 16,
session_id = System.Guid.NewGuid().ToString(),
product = "miragevr",
prompt = selectedPrompt
};
// Prompt updates
var promptMessage = new outboundPromptMessage {
type = "queue_prompt",
prompt = customPrompt,
should_enrich = true // AI enhancement for custom prompts
};
pc.OnIceCandidate = candidate => {
var candidateMessage = new outboundCandidateInitMessage {
type = "ice-candidate",
sdpMid = candidate.SdpMid,
sdpMLineIndex = candidate.SdpMLineIndex ?? 0,
candidate = candidate.Candidate
};
ws.SendText(JsonUtility.ToJson(candidateMessage));
};
File: Assets/Samples/DecartAI-Quest/Scripts/WebRTCController.cs
User interface and input handling for Quest controllers.
[Header("UI Display Components")]
public RawImage canvasRawImage; // Local camera preview
public RawImage receivedVideoImage; // AI-processed video display
public TMP_Text promptNameText; // Current style name display
void Update() {
// A Button - Previous style
if (OVRInput.GetDown(OVRInput.Button.One)) {
webRTCManager.SendPrompt(GetPreviousPrompt());
}
// B Button - Next style
if (OVRInput.GetDown(OVRInput.Button.Two)) {
webRTCManager.SendPrompt(GetNextPrompt());
}
}
private IEnumerator FindReceivedVideo() {
yield return new WaitForSeconds(0.5f);
var receivingObjects = GameObject.FindObjectsByType<RawImage>(FindObjectsSortMode.None);
foreach (var rawImage in receivingObjects) {
if (rawImage.name.Contains("Receiving-RawImage") && rawImage.texture != null) {
receivedVideoImage.texture = rawImage.texture;
break;
}
}
}
File: Assets/Samples/DecartAI-Quest/Scripts/Voice/VoiceManager.cs
Integration with Meta Voice SDK for natural language commands.
[SerializeField] private AppVoiceExperience appVoiceExperience;
private void Start() {
// Full transcription (when user releases trigger)
appVoiceExperience.VoiceEvents.OnFullTranscription.AddListener(OnFullTranscription);
// Partial transcription (live while speaking)
appVoiceExperience.VoiceEvents.OnPartialTranscription.AddListener(OnPartialTranscription);
}
private void OnFullTranscription(string transcription) {
webRTCController.QueueCustomPrompt(transcription);
}
public void QueueCustomPrompt(string prompt) {
// Send user's voice command as AI style prompt
webRTCManager.SendCustomPrompt(prompt, shouldEnrich: true);
promptNameText.text = $"Custom: {prompt}";
}
WebSocket communication uses JSON message formats:
// Outbound Messages (Client → AI Service)
// Sent once after the signaling WebSocket opens to start an AI processing session.
// Serialized with JsonUtility: public field names and their order define the wire
// format — do not rename or reorder without updating the service contract.
public class outboundInitMessage {
public string type = "initialize_session"; // message discriminator (fixed value)
public int fps; // requested processing frame rate (16 elsewhere in this system)
public string session_id; // unique per-session identifier (GUID string)
public string product; // client product tag, e.g. "miragevr"
public string prompt; // initial AI style prompt text
}
// Queues a new style prompt for the active session (JsonUtility wire format;
// field names/order are the contract).
public class outboundPromptMessage {
public string type = "queue_prompt"; // message discriminator (fixed value)
public string prompt; // style prompt text (predefined style or voice-derived)
public bool should_enrich; // true asks the service to AI-enhance custom prompts
}
// Carries the locally-created WebRTC SDP offer to the AI service.
public class outboundOfferMessage {
public string type = "offer"; // message discriminator (fixed value)
public string sdp; // raw SDP offer payload
}
// Inbound Messages (AI Service → Client)
// SDP answer returned by the AI service in response to the local offer.
public class inboundAnswerMessage {
public string type = "answer"; // message discriminator (fixed value)
public string sdp; // raw SDP answer payload
}
// Remote ICE candidate pushed by the AI service during connection negotiation.
public class inboundIceCandidateMessage {
public string type = "ice-candidate"; // message discriminator (fixed value)
public string sdpMid; // media stream identification tag from the SDP
public int sdpMLineIndex; // index of the m= line this candidate applies to
public string candidate; // raw ICE candidate string
}
// Video Configuration
Resolution: 1280×704 (balanced quality/performance)
Frame Rate: 16fps (matches AI processing speed)
Codec: VP8 with adaptive bitrate
// Network Configuration
Startup Bitrate: 4Mbps (fast quality establishment)
Sustained Bitrate: 2Mbps (efficient bandwidth usage)
Minimum Bitrate: 1Mbps (fallback for poor connections)
// Unity Settings
Rendering Pipeline: URP (Universal Render Pipeline)
Graphics API: OpenGLES3 (Vulkan disabled)
Scripting Backend: IL2CPP
Target Architecture: ARM64
// Proper cleanup patterns
// Unity teardown hook: releases camera, networking, and event resources so that
// scene unloads / object destruction do not leak the hardware camera, sockets,
// or listener references. Order matters: stop local capture before tearing down
// the connection that consumes it.
void OnDestroy() {
// WebCamTexture cleanup — Stop() releases the hardware camera before the
// managed reference is dropped.
if (webCamTexture != null) {
webCamTexture.Stop();
webCamTexture = null;
}
// WebRTC cleanup — Disconnect() closes the WebSocket and peer connections.
webRTCManager?.Disconnect();
// Event unsubscription — prevents the Voice SDK from invoking a destroyed object.
if (appVoiceExperience != null) {
appVoiceExperience.VoiceEvents.OnFullTranscription.RemoveListener(OnFullTranscription);
}
}
// Camera initialization errors
try {
cameraManager.StartCamera();
} catch (System.Exception e) {
Debug.LogError($"Camera initialization failed: {e.Message}");
// Fallback: Show error UI, disable camera features
}
// WebRTC connection errors
webRTCManager.OnConnectionStateChanged += (state) => {
switch (state) {
case RTCIceConnectionState.Failed:
Debug.LogError("WebRTC connection failed - retrying...");
StartCoroutine(RetryConnection());
break;
case RTCIceConnectionState.Disconnected:
Debug.LogWarning("WebRTC connection lost - attempting reconnection...");
break;
}
};
// Permission handling
if (!PassthroughCameraPermissions.HasCameraPermissions()) {
PassthroughCameraPermissions.RequestCameraPermissions();
// Wait for permission callback before proceeding
}
public class MyQuestApp : MonoBehaviour {
[SerializeField] private WebCamTextureManager cameraManager;
[SerializeField] private WebRTCConnection webrtcConnection;
[SerializeField] private WebRTCController webrtcController;
// Unity entry point: validates device support and permissions, then brings up the
// camera and WebRTC streaming in sequence.
// NOTE(review): async void is tolerable only because Start() is a Unity message
// method; exceptions thrown after an await are unobservable here.
async void Start() {
// 1. Check device compatibility (passthrough cameras are Quest-specific)
if (!PassthroughCameraUtils.IsSupported) {
Debug.LogError("Quest passthrough not supported");
return;
}
// 2. Request permissions — returns immediately after showing the dialog;
// initialization is aborted and must be re-entered once the user grants access.
if (!PassthroughCameraPermissions.HasCameraPermissions()) {
PassthroughCameraPermissions.RequestCameraPermissions();
return;
}
// 3. Initialize camera
cameraManager.StartCamera();
// 4. Wait for camera ready (polls until the WebCamTexture is playing)
await WaitForCamera();
// 5. Start WebRTC connection
webrtcConnection.StartVideoTransmission();
}
// Polls every 100 ms until the camera manager exposes a WebCamTexture that is
// actively playing, then completes.
private async Task WaitForCamera() {
for (;;) {
var texture = cameraManager.WebCamTexture;
bool cameraReady = texture != null && texture.isPlaying;
if (cameraReady) {
return;
}
await Task.Delay(100);
}
}
}
public void SendCustomStyle(string styleDescription) {
var webRTCManager = FindObjectOfType<WebRTCManager>();
if (webRTCManager != null) {
webRTCManager.SendCustomPrompt(styleDescription, shouldEnrich: true);
}
}
// Example usage
SendCustomStyle("Transform this into a medieval castle with stone walls and torch lighting");
See Also:
- Setup Guide - Installation instructions
- Troubleshooting - Common issues and solutions
- FAQ - Frequently asked questions