diff --git a/.gitignore b/.gitignore
index 0bab26c22a..be283a97a6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,6 +4,7 @@ example/pubspec.lock
 pubspec.lock
 example/ios/Podfile.lock
+example/macos/Podfile.lock
 GeneratedPluginRegistrant.java
 example/android/.gradle
 WorkspaceSettings.xcsettings
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2a1b4f9d9f..5622dfa68c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,785 +1,19 @@
 # Changelog
 --------------------------------------------
-[0.11.1] - 2024-06-17
+[0.0.6] - 2025.03.04
+- Get remote participant's track from the stream id.
 
-* [macOS] Downgrade macOS system dependencies to 10.14.
+[0.0.5] - 2025.02.24
+- Dependencies updated to latest version.
 
-[0.11.0] - 2024-06-17
+[0.0.4] - 2024.10.18
+- Dependencies updated to latest version.
 
-* [Native] upgrade libwebrtc to m125.6422.
+[0.0.3] - 2024.07.17
+[0.0.2] - 2024.07.17
 
-[0.10.8] - 2024-06-05
+- Windows support fix.
 
-* [iOS] fix(platform_view): fit cover works wrong (#1593)
-* [iOS/macOS] fix: Fix the issue that the video is not displayed when using 'video': true (#1592)
-* [Web] bump dart_webrtc to 1.4.6.
+[0.0.1] - 2024.07.17
 
-[0.10.7] - 2024-05-30
-
-* [iOS] feat: add PlatformView Renderer for iOS. (#1569)
-* [iOS] fix: audio session control for iOS. (#1590)
-
-[0.10.6] - 2024-05-13
-
-* [Web] Some important fixes for web.
-
-[0.10.5] - 2024-05-13
-
-* [Android] fix: make MediaDeviceInfo (Audio deviceId, label, groupId) consistent. (#1583)
-
-[0.10.4] - 2024-05-06
-
-* [iOS/macOS] chore: update swift webrtc sdks to 114.5735.10 (#1576)
-* [Android] fix: actually call selectAudioOutput in enableSpeakerButPreferBluetooth
-* [iOS] fix: remember speakerphone mode for ensureAudioSession (#1568)
-* [Windows/Linux] Fix handling of unimplemented method (#1563)
-
-[0.10.3] - 2024-04-09
-
-* [iOS/macOS] Fix compilation warning for iOS/macOS.
-
-[0.10.2] - 2024-04-08
-
-* [Native/Web] feat: add keyRingSize/discardFrameWhenCryptorNotReady to KeyProviderOptions.
-
-[0.10.1] - 2024-04-08
-
-* [Web] fix renderer issue for web.
-
-[0.10.0] - 2024-04-08
-
-* [Web] move to package:web.
-
-[0.9.48+hotfix.1] - 2024-02-05
-
-* [Android] bump version for libwebrtc.
-
-[0.9.48] - 2024-02-05
-
-* [Android] bump version for libwebrtc.
-* [iOS] Supports ensureAudioSsession method for iOS only. (#1514)
-* [Android] fix android wrong display size. (#1508).
-
-[0.9.47] - 2023-11-29
-
-* [Windows/Linux] fix: Check the invalid value of candidate and session description. (#1484)
-* [Windows/Linux/macOS] fix: screen sharing issue for desktop.
-* [Web] fix: platformViewRegistry getter is deprecated (#1485)
-* [Dart] Throw exception for set src object (#1491).
-
-[0.9.46] - 2023-10-25
-
-* [iOS/macOS] fix: Crop video output size to target settings. (#1472)
-* [Android] fix: Fix bluetooth sco not stopping after room disconnect (#1475)
-
-[0.9.45] - 2023-09-27
-
-* [iOS/macOS] fix: send message on non-platform thread.
-* [Windows] fix: fix setSrcObj with trackId for Windows.
-* [Windows] fix: fix "unlock of unowned mutex" error when call "captureFrame()" func on windows.
-
-[0.9.44] - 2023-09-25
-
-* [Windows] fix: fix Renderer bug for Windows.
-* [Native] fix: Use independent threads to process frame encryption/decryption
-* [Native] fix: Correct handle SIF frame
-* [Native] fix: Fix a fault tolerance judgment failure
-
-[0.9.43] - 2023-09-20
-
-* [Native] fix: send frame cryptor events from signaling thread.
-* [Native] fix: h264 freeze when using E2EE.
-
-[0.9.42+hotfix.1] - 2023-09-15
-
-* [Windows/Linux] fix: fix cannot start vp8/h264 encoder correctly.
-
-[0.9.42] - 2023-09-15
-
-* [Dart/Native] feat: add more framcryptor api (#1444)
-* [Dart/Native] feat: support scalability mode (#1442)
-* [Android] fix: Turn off audio routing in non communication modes (#1438)
-
-* [Android] feat: Add more control over android audio options.
-
-[0.9.41] - 2023-08-30
-
-* [Android] feat: Add more control over android audio options.
-
-[0.9.40] - 2023-08-16
-
-* [Windows/Linux] fix: nullptr checking for sender/receiver for getStats.
-
-[0.9.39] - 2023-08-14
-
-* [Dart/Native] feat: add async methods for getting pc states.
-
-[0.9.38] - 2023-08-11
-
-* [Android] fix: Expose helper to clearCommunicationDevice on AudioManager.AUDIOFOCUS_LOSS
-* [Android] feat: support force SW codec list for android, and disable HW codec for VP9 by default.
-* [Android] fix: issue for audio device switch (#1417)
-* [Android/iOS] feat: Added setZoom method to support camera zooming while streaming. (#1412).
-
-[0.9.37] - 2023-08-07
-
-* [Native] fix: Skip set_sdp_fmtp_line if sdpFmtpLine is empty.
-* [Android] fix: fix android earpiece not being replaced after wired headset is disconnected.
-* [Dart] fix: partially rebuild RTCVideoView when renderVideo value changes.
-* [Android] feat: expose android audio modes.
-* [Android] feat: support forceSWCodec for Android.
-* [Linux] fix: add $ORIGIN to rpath.
-
-[0.9.36] - 2023-07-13
-
-* [Native] upgrade libwebrtc to m114.5735.02.
-* [Windows/Linux] Add implementation to MediaStreamTrack.captureFrame() for linux/windows.
-* [Darwin/Android] Support to ignore network adapters used for ICE on Android, iOS and macOS.
-
-[0.9.35] - 2023-06-30
-
-* [iOS] feat: expose audio mode for ios.
-* [Darwin] fix: compiler warning for Darwin.
-* [Dart] Fix setMicrophoneMute() not awaitable.
-* [Native] Update libwebrtc to m114.
-* [Dart/Web] Separate frame cryptor to dart-webrtc.
-
-[0.9.34] - 2023-06-14
-
-* [Web] fix facingMode for flutter web mobile.
-
-[0.9.33] - 2023-06-08
-
-* [Android] fix frame drops for android.
-
-[0.9.32] - 2023-05-30
-
-* [Android] fix issue for get user audio.
-* [Android] fix getStats throw LinkedHasMap exception.
-
-[0.9.31] - 2023-05-23
-
-* [Darwin] Improve iOS/macOS H264 encoder (Upgrade to WebRTC-SDK M104.5112.17).
-
-[0.9.30+hotfix.2] - 2023-05-18
-
-* [Windows/Linux] fix bug for eventchannel proxy.
-* [Windows/Linux] fix: crash for pc.close/dispose on win/linux. (#1360)
-
-[0.9.30+hotfix.1] - 2023-05-17
-
-* [Windows/Linux] Fix compiler error.
-
-[0.9.30] - 2023-05-16
-
-* [Darwin] Handle exceptions for frame rate settings for darinw. (#1351)
-* [Android] Fix bluetooth device enumerate. (#1349)
-* [Darwin/Android/Windows/Linux] Added maxIPv6Networks configuration (#1350)
-* [iOS] Fix: broadcast extension not found fallback logic (#1347)
-* [Android] Move the call of capturer.stopCapture() outside the main thread to avoid blocking of flutter method call.
-* [Windows/Linux] Fix the crash issue of video room (#1343)
-
-[0.9.29+hotfix.1] - 2023-05-08
-
-* [Android] fix: application context null when app is terminated.
-* [Android/iOS] feat: add way to enable speaker but prefer bluetooth.
-
-[0.9.28] - 2023-05-08
-
-* [Windows/Linux] fix: use the correct transceiver id.
-* [Windows/Linux] fix: Support restart camera for Windows/Linux.
-
-[0.9.27] - 2023-04-27
-
-* [Darwin/Android/Windows/Linux] feat: framecryptor.
-* [Windows/Linux] Fix the type/code mistake.
-* [Windows/Linux] Fix uneffective RTPTransceiver::GetCurrentDirection.
-* [Windows/Linux] RTPtransceiver::getCurrentDirection returns correct value.
-
-[0.9.26] - 2023-04-16
-
-* [iOS/macOS] motify h264 profile-level-id to support high resolution.
-* [Dawrin/Android/Windows] feat: add RTCDegradationPreference to RTCRtpParameters.
-
-[0.9.25] - 2023-04-10
-
-* [Dawrin/Android/Windows] Add `addStreams` to `RTCRtpSender`
-* [Android] fix: label for Wired Headset. (#1305)
-* [Dawrin/Android/Windows] Feat/media stream track get settings (#1294)
-* [Android/iOS] Fix track lookup in the platform specific code for Android and iOS (#1289)
-* [iOS] fix: ICE Connectivity doesn't establish with DualSIM iPhones.
-* [Android] Switch to webrtc hosted on maven central (#1288)
-
-[0.9.24] - 2023-03-07
-
-* [iOS] avaudiosession mode changed to AVAudioSessionModeVideoChat (#1285)
-* [macOS] fix memory leak for screen capture.
-
-[0.9.23] - 2023-02-17
-
-* [Windows/Linux] Updated libwebrtc binary for windows/linux to fix two crashes.
-
-[0.9.22] - 2023-02-14
-
-* [iOS] fix: Without any setActive for rtc session, libwebrtc manages the session counter by itself. (#1266)
-* [dart] fix: remove rtpsender.dispose.
-* [web] fix video renderer issue for safari.
-* [macOS] Fixed macOS desktop capture crash with simulcast enabled.
-* [macOS] Fix the crash when setting the fps of the virtual camera.
-
-[0.9.21] - 2023-02-10
-
-* [Web] Fix: RTCRtpParameters.fromJsObject for Firefox.
-* [Web] Add bufferedamountlow.
-* [Android] Fixed frame capturer returning images with wrong colors (#1258).
-* [Windows] bug fix.
-
-[0.9.20] - 2023-02-03
-
-* [Dawrin/Android/Windows] Add getCapabilities/setCodecPreferences methods
-* [Darwin] buffered amount
-* [Linux] Fixed audio device name buffer size
-* [Android] Start audioswitch and only activate it when needed
-* [Darwin] Fix typo which broke GcmCryptoSuites
-
-[0.9.19] - 2023-01-10
-
-* [Dart] Fix getStats: change 'track' to 'trackId' (#1199)
-* [Android] keep the audio switch after stopping (#1202)
-* [Dart] Enhance RTC video view with placeholder builder property (#1206)
-* [Android] Use forked version of audio switch to avoid BLUETOOTH_CONNECT permission (#1218)
-
-[0.9.18] - 2022-12-12
-
-* [Web] Bump dart_webrtc to 1.0.12, Convert iceconnectionstate to connectionstate for Firefox.
-* [Android] Start AudioSwitchManager only when audio track added (fix #1163) (#1196)
-* [iOS] Implement detachFromEngineForRegistrar (#1192)
-* [iOS] Handle Platform Exception on addCandidate (#1190)
-* [Native] Code format with clang-format.
-
-[0.9.17] - 2022-11-28
-
-* [Android] Update android webrtc version to 104.5112.05
-* [iOS] Update WebRTC.xframework version to 104.5112.07
-
-[0.9.16] - 2022-11-14
-
-* [Linux] Fixed compiler error for flutter 3.3.8.
-* [Linux] Remove 32-bit precompiled binaries.
-* [Linux] Supports linux-x64 and linux-arm64.
-
-[0.9.15] - 2022-11-13
-
-* [Linux] Add Linux Support.
-
-[0.9.14] - 2022-11-12
-
-* [iOS] Fix setSpeakerOn has no effect after change AVAudioSession mode to playback.
-
-[0.9.13] - 2022-11-12
-
-* [Dart] Change MediaStream.clone to async.
-* [iOS] Fixed the bug that the mic indicator light was still on when mic recording was stopped.
-* [iOS/macOS/Android/Windows] Allow sdpMLineIndex to be null when addCandidate.
-* [macOS] Frame capture support for MacOS.
-* [Android] Add enableCpuOveruseDetection configuration (#1165).
-* [Android] Update comments (#1164).
-
-[0.9.12] - 2022-11-02
-
-* [iOS] Fixed the problem that iOS earphones and speakers do not switch.
-* [Windows] fix bug for rtpSender->RemoveTrack/pc->getStats.
-* [iOS] Return groupId.
-* [Web] MediaRecorder.startWeb() should expose the timeslice parameter.
-* [iOS] Implement RTCPeerConnectionDelegate didRemoveIceCandidates method.
-* [iOS] fix disposing Broadcast Sharing stream.
-
-[0.9.11] - 2022-10-16
-
-* [iOS] fix audio route/setSpeakerphoneOn issues.
-* [Windows] fix: Have same remote streams id then found wrong MediaStream.
-* [Dart] feat: RTCVideoRenderer supports specific trackId when setting MediaStream.
-
-[0.9.9+hotfix.1] - 2022-10-12
-
-* [Darwin] Fix getStats for darwin when trackId is NSNull.
-
-[0.9.9] - 2022-10-12
-
-* [Darwin/Android/Windows] Support getStats for RtpSender/RtpReceiver (Migrate from Legacy to Standard Stats for getStats).
-* [Android] Dispose streams and connections.
-* [Android] Support rtp transceiver direction type 4.
-* [Web] Update dart_webrtc dependendency.
-
-[0.9.8] - 2022-09-30
-
-* [Android] fix: Make sure local stream/track dispose correctly.
-* [Android] Remove bluetooth permission on peerConnectionInit.
-* [iOS] Fix system sound interruption on iOS (#1099).
-* [Android] Fix: call mode on app start (#1097).
-* [Dart] Avoid renderer initialization multiple times (#1067).
-
-[0.9.7] - 2022-09-13
-
-* [Windows] Support sendDtmf.
-* [Windows] Fixed getStats.
-
-[0.9.6] - 2022-09-06
-
-* [Dart] The dc created by didOpenDataChannel needs to set state to open.
-* [Dart] Added callback onFirstFrameRendered.
-
-[0.9.5] - 2022-08-30
-
-* [Android] fix: Fix crash when using multiple renderers.
-* [Android] fix bug with track dispose cannot close video
-* [Andorid/iOS/macOS/Windows] Fix bug of missing events in data-channel.
-
-[0.9.4] - 2022-08-22
-
-* [Andorid/iOS/macOS/Windows] New audio input/output selection API, ondevicechange event is used to monitor audio device changes.
-
-[0.9.3] - 2022-08-15
-
-* [Windows/macOS] Fix UI freeze when getting thumbnails.
-
-[0.9.2] - 2022-08-09
-
-* [Android] update libwebrtc to com.github.webrtc-sdk:android:104.5112.01.
-* [iOS/macOS] update WebRTC-SDK to 104.5112.02.
-* [Windows] update libwebrtc.dll to 104.5112.02.
-
-[0.9.1] - 2022-08-01
-
-* [iOS] fix : iOS app could not change camera resolutions cause by wrong datatype in the video Contraints.
-* [Darwin] bump version for .podspec.
-
-[0.9.0] - 2022-07-27
-
-* [macOS] Added screen-sharing support for macOS
-* [Windows] Added screen-sharing support for Windows
-* [iOS/macOS] fix: Fix compile warning for Darwin
-* [Darwin/Android/Windows] fix: Fix typo peerConnectoinEvent -> peerConnectionEvent for EventChannel name (#1019)
-
-[0.8.12] - 2022-07-15
-
-* [Darwin]: fix: camera release.
-
-[0.8.11] - 2022-07-11
-
-* [Windows] Fix variant exception of findLongInt. (#990)
-* [Windows] fix unable to get username/credential when parsing iceServers containing urls
-* [iOS] Fix RTCAudioSession properties set with libwebrtc m97, Fixes #987.
-
-[0.8.10] - 2022-06-28
-
-* [iOS] IPC Broadcast Upload Extension support for Screenshare
-
-[0.8.9] - 2022-06-08
-
-* [Android] Fixes DataChannel issue described in #974
-* [iOS] Fixes DataChannel issue described in #974
-* [Dawrin/Android/Windows] Split data channel's webrtc id from our internal id (#961)
-* [Windows] Update to m97.
-* [Windows] Add PeerConnectionState
-* [Windows] Fix can't open mic alone when built-in AEC is enabled.
-
-[0.8.8] - 2022-05-31
-
-* [Android] Added onBufferedAmountChange callback which will return currentBuffer and changedBuffer and implemented bufferedAmount.
-* [Android] Added onBufferedAmountLow callback which will return currentBuffer ans will be called if bufferedAmountLowThreshold is set a value.
-
-[0.8.7] - 2022-05-18
-
-* [iOS/macOS] fix: Use RTCYUVHelper instead of external libyuv library (#954).
-* [iOS/macOS] Flutter 3.0 crash fixes, setStreamHandler on main thread (#953)
-* [Android] Use mavenCentral() instead of jcenter() (#952)
-* [Windows] Use uint8_t* instead of string in DataChannel::Send method, fix binary send bug.
-* [Android] fix: "Reply already submitted" error and setVolume() not working on remote streams.
-
-[0.8.6] - 2022-05-08
-
-* [Web/Android/iOS/macOS] Support null tracks in replaceTrack/setTrack.
-* [macOS] Remove absolute path from resolved spec to make checksum stable.
-* [Android] Android 12 bluetooth permissions.
-* [Dart] fix wrong id type for data-channel.
-* [Android] Release i420 Buffer in FrameCapturer.
-
-[0.8.5] - 2022-04-01
-
-* [Dart] Expose RTCDataChannel.id (#898)
-* [Android] Enable H264 high profile for SimulcastVideoEncoderFactoryWrapper (#890)
-
-[0.8.4] - 2022-03-28
-
-* [Android] Fix simulcast factory not sending back EncoderInfo (#891)
-* [Android] fix: correct misspell in method screenRequestPermissions (#876)
-
-[0.8.3] - 2022-03-01
-
-* [Android/iOS] Update android/ios webrtc native sdk versions.
-* [Windows] Feature of selecting i/o audio devices by passing sourceId and/or deviceId constraints (#851).
-
-[0.8.2] - 2022-02-08
-
-* [Android/iOS/macOS/Web] Add restartIce.
-
-[0.8.1] - 2021-12-29
-
-* [Android/iOS] Bump webrtc-sdk version to 93.4577.01.
-
-[0.8.0] - 2021-12-05
-
-* [Dart] Refactor: Use webrtc interface. (#777)
-* [iOS] Fix crashes for FlutterRPScreenRecorder stop.
-* [Web] Don't stop tracks when disposing MediaStream (#760)
-* [Windows] Add the necessary parameters for onRemoveTrack (#763)
-* [Example] Properly start foreground service in example (#764)
-* [Android] Fix crash for Android, close #757 and #734.
-* [Dart] Fix typo in deprecated annotations.
-* [iOS] Fix IOS captureFrame and add support for remote stream captureFrame (#778)
-* [Windows] Fix parsing stun configuration (#789)
-* [Windows] Fix mute (#792)
-* [iOS/Android/Windows] New video constraints syntax (#790)
-
-[0.7.1] - 2021-11-04
-
-* [iOS/macOS] Update framework.
-* [Android] Update framework.
-* [Windows] Implement mediaStreamTrackSetEnable (#756).
-* [iOS/macOS] Enable audio capture when acquiring track.
-* [Android] Call stopCaptureWithCompletionHandler instead (#748)
-* [Windows] Fix bug for windows.
-
-[0.7.0+hotfix.2] - 2021-10-21
-
-* [iOS/macOS] Update .podspec for Darwin.
-
-[0.7.0+hotfix.1] - 2021-10-21
-
-* [Android] Fix bug for createDataChannel.
-
-[0.7.0] - 2021-10-20
-
-* [Android] Enable Android simulcast (#731)
-* [macOS] Use pre-compiled WebRTC for macOS. (#717)
-* [iOS/macOS] Fix the correct return value of createDataChannel under darwin.
-* [Windows] Fix using the wrong id to listen datachannel events.
-* [Dart] Fix(mediaStreamTrackSetEnable): remote track is unavaiable (#723).
-
-[0.6.10+hotfix.1] - 2021-10-01
-
-* [Web] Fix compiler errors for web.
-
-[0.6.10] - 2021-10-01
-
-* [iOS] Fix bug for RtpTransceiver.getCurrentDirection.
-* [Dart] Improve MethodChannel calling.
-
-[0.6.9] - 2021-10-01
-
-* [iOS] Update WebRTC build (#707).
-* [Windows] Add Unified-Plan support for windows. (#688)
-* [iOS] Improve audio handling on iOS (#705)
-
-[0.6.8] - 2021-09-27
-
-* [Android] Use ApplicationContext to verify permissions when activity is null.
-* [iOS] Add support for lightning microphone. (#693)
-* [Windows] Fix FlutterMediaStream::GetSources.
-* [Web] Fix Flutter 2.5.0 RTCVideoRendererWeb bug (#681)
-* [Web] Bug fix (#679)
-
-[0.6.7] - 2021-09-08
-
-* [Android] upgrade webrtc sdk to m92.92.4515.
-* [Web] `addTransceiver` bug fix (#675)
-* [Web] Use low-level jsutil to call createOffer/createrAnswer to solve the issue on safari/firefox.
-* [Dart] Fix currentDirection/direction implementation confusion.
-
-[0.6.6] - 2021.09.01
-
-* [Sponsorship] Thanks for LiveKit sponsorship.
-* [Web] Avoid removing all audio elements when stopping a single video renderer (#667)
-* [Web] Properly cleanup srcObject to avoid accidental dispose
-* [Dart] Removed warnings (#647)
-* [Web] Switch transferFromImageBitmap to be invoked using js.callMethod (#631)
-* [Web] Fix sending binary data over DataChannel in web implementation. (#634)
-* [Darwin] Nullable return for GetLocalDescription/GetRemoteDiscription
-* [Darwin] Fix incorrect argument name at RTCRtpSender (#600)
-
-[0.6.5] - 2021.06.18
-
-* [Android] Falling back to the first available camera fix #580
-* [Android] Fix application exit null-pointer exception (#582)
-* [Dart] Add label getter to DataChannel Interface (#585)
-* [Dart] Fix exception raised at RTCPeerConnection.removeTrack and RTCRtpSender.setParameters (#588)
-* [Dart] Fix: null check (#595)
-* [Dart] Fix: null check for RTCRtpTransceiverNative.fromMap
-
-[0.6.4] - 2021.05.02
-
-* [Android] Fix getting screen capture on Huawei only successful in the first time. (#523)
-* [Android] Add configuration "cryptoOptions" in parseRTCConfiguration().
-* [Dart] Change getLocalDescription,getRemoteDescription,RTCRtpSenderWeb.track returns to nullable.
-* [Dart] Fixed bug in RTCPeerConnectionWeb.removeTrack.
-* [Dart] Change MediaStreamTrack.captureFrame returns to ByteBuffer to compatible with web API.
-* [Dart] Do null safety check in onRemoveStream,onRemoveTrack and MediaStream.getTrackById.
-* [Android] Add reStartCamera method when the camera is preempted by other apps.
-* [Web] Refactored RTCVideoRendererWeb and RTCVideoViewWeb, using video and audio HTML tags to render audio and video streams separately.
-
-[0.6.3] - 2021.04.03
-
-* [Dart] Change RTCRtpSender.track to nullable.
-* [Web] Fix RTCVideoView/Renderer pauses when changing child in IndexedStack.
-
-[0.6.2] - 2021.04.02
-
-* [Dart] Use enumerateDevices instead of getSources.
-* [Android] Use flutter_background to fix screen capture example.
-
-[0.6.1] - 2021.04.02
-
-* [Darwin] Fixed getting crash when call setLocalDescription multiple time.
-* [Dart] Get more pub scores.
-
-[0.6.0] - 2021.04.01
-
-* [Sponsorship] Thanks for Stream sponsorship (#475)
-* [Android] Fixed a crash when switching cameras on Huawei devices.
-* [Windows] Correct signalingState & iceConnectionState event name on Windows. (#502)
-* [Dart] Clip behaviour. (#511)
-* [Dart] null-safety (@wer-mathurin Thanks for the hard work).
-* [Dart] Fix setMicrophoneMute (#466)
-* [Web] Fix pc.addTransceiver method, fix RTCRtpMediaType to string, fix (#437)
-* [Android] fix sdpSemantics issue (#478)
-
-[0.6.0-nullsafety.0] - 2021.03.22
-
-* [Dart] null-safety (@wer-mathurin Thanks for the hard work).
-
-[0.5.8] - 2021.01.26
-
-* [Web] Support selecting audio output.
-* [Web] Fix issue for getDisplayMedia with audio.
-* [Windows] Add Windows Support.
-* [macOS] Fix compile error for macos.
-* [Dart] Add FilterQuality to RTCVideoView.
-* [iOS/Android] Unified plan gettracks.
-* [iOS/Android] Bluetooth switching enabled when switching `enableSpeakerphone` value (if they are connected). #201 (#435)
-* [Android] Increase necessary Android min SDK version after add Unified-Plan API.
-
-[0.5.7] - 2020.11.21
-
-* [Web] Fix events callback for peerconnection.
-
-[0.5.6] - 2020.11.21
-
-* [Android/Darwin/Web] Add onResize event for RTCVideoRenderer.
-
-[0.5.5] - 2020.11.21
-
-* [Android/Darwin] Fix Simulcast issue.
-
-[0.5.4] - 2020.11.21
-
-* [Native/Web] Add videoWidth/videoHeight getter for RTCVideoRenderer.
-* [Web] Add optional parameter track to call getStats.
-
-[0.5.3] - 2020.11.21
-
-* Fix bug.
-
-[0.5.2] - 2020.11.19
-
-* Improve web code
-
-[0.5.1] - 2020.11.19
-
-* Improve unfied-plan API for web.
-* Add getTransceivers,getSenders, getReceivers methods.
-
-[0.5.0+1] - 2020.11.18
-
-* Remove dart-webrtc and reuse the code in dart:html
-  because the code generated by package:js cannot be run in dart2js.
-
-[0.5.0] - 2020.11.15
-
-* [Web] Add Unified-Plan for Flutter Web.
-* [Web] Add video frame mirror support for web.
-* [Web] Support Simulcast for web.
-* [Web] Use dart-webrtc as flutter web plugin.
-* [Android/Darwin] Fix crash when unset streamIds in RtpTransceiverInit.
-* [Dart]Change the constraints of createOffer/createAnswer as optional.
-* [iOS]Fix adding track to stream igal committed (#413)
-
-[0.4.1] - 2020.11.11
-
-* Add transceiver to onTrack events.
-* Remove unnecessary log printing.
-* Fixed a crash caused by using GetTransceivers under non-unified-plan,
-  close #389.
-* FIX - Invalid type inference (#392)
-* [Web]Add onEnded and onMuted for Web (#387)
-* [Darwin]Fix PeerConnectionState for darwin.
-* [Darwin] Fix compilation warning under darwin.
-* [Android] Fixed 'Sender is null' issue when removing track. (#401)
-* [iOS] fix removeTrack methodChannel response, onTrack's `stream` and `track` not being registered in native.
-* [Darwin/Android] `RtpSender` `setParameters` functionality.
-
-[0.4.0] - 2020.10.14
-
-* Support Unified-Plan for Android/iOS/macOS.
-* Add PeerConnectionState and add RTCTrackEvent..
-* [Android] Upgrade GoogleWebRTC@android to 1.0.32006.
-* [iOS] Upgrade GoogleWebRTC@ios to 1.1.31999.
-* Api standardization across implementation (#364), thanks @wer-mathurin.
-
-[0.3.3] - 2020.09.14
-
-* Add RTCDTMFSender for mobile, web and macOS.
-* Improve RenegotiationNeededCallback.
-* Refactor RTCVideoView for web and solve the resize problem.
-* Reduce code size.
-
-[0.3.2] - 2020.09.11
-
-* Reorganize the directory structure.
-* Replace class name navigator to MediaDevices.
-* Downgrade pedantic version to 1.9.0.
-
-[0.3.1] - 2020.09.11
-
-* [Dart] Apply pedantic linter and more rigorous analysis options.
-
-[0.3.0+1] - 2020.09.06
-
-* [Dart] FIX - missing null check onIceGatheringState (web)
-
-[0.3.0] - 2020.09.05
-
-* [Dart] Improve RTCVideoView.
-* [Android] Refactors Android plugin alongside the embedding V2 migration.
-* [Dart] Fix .ownerTag not defined for web.
-* [Dart] Added label as read only property.
-* [macOS] Updated WebRTC framework to work with AppStoreConnect.
-* [Dart] Make 'constraints' argument optional.
-* [Dart] Make createOffer constraints optional.
-* [iOS/Android/Web] Adding createLocalMediaStream method to PeerConnectionFactory.
-* [Web] Fixing multiple video renderers on the same HTML page for Flutter Web.
-* [iOS] Add peerConnectionId to data channel EventChannel.
-* [Android] Add library module ProGuard configuration file.
-* [iOS] Fix typo in render size change check condition
-* [README] Add missed Android usage hint.
-
-[0.2.8] - 2020.04.22
-
-* [macOS/iOS] Fix typo in render size change check condition.
-* [macOS] Fix hot restart videoCapturer crash.
-* [Android] Fix Android crash when getUserVideo.
-
-[0.2.7] - 2020.03.15
-
-* [macOS] Fix crash with H264 HW Encoder.
-* [Web] Add addTransceiver API.
-* [Android] Removed duplicate method that was causing compilation error.
-* [iOS] Use MrAlek Libyuv pod fixing incompatibility with FirebaseFirestore.
-* [iOS] Upgrade GoogleWebRTC dependency to 1.1.29400.
-
-[0.2.6] - 2020.02.03
-
-* Fixed the interruption of the Bluetooth headset that was playing music after the plugin started.
-
-[0.2.4] - 2020.02.03
-
-* Fixed bug.
-
-[0.2.3] - 2020.02.03
-
-* Fixed bug for enableSpeakerphone (Android/iOS).
-* Fix RtcVideoView not rebuild when setState called and renderer is changed.
-* Fix Android frame orientation.
-
-[0.2.2] - 2019.12.13
-
-* Removed the soft link of WebRTC.framework to fix compile errors of macos version when third-party flutter app depends on plugins
-
-[0.2.1] - 2019.12.12
-
-* Code format.
-* Remove unused files.
-
-[0.2.0] - 2019.12.12
-
-* Add support for macOS (channel dev).
-* Add support for Flutter Web (channel dev).
-* Add hasTorch support for Android (Camera2 API) and iOS.
-* Fix(PeerConnections) split dispose and close
-* Add microphone mute support for Android/iOS.
-* Add enable speakerphone support for Android/iOS.
-* Fix 'createIceServer' method Invalid value error (Android).
-* Store SignalingState/IceGatheringState/IceConnectionState in RTCPeerConnection.
-* Fixed rendering issues caused by remote MediaStream using the same msid/label when using multiple PeerConntions.
-
-[0.1.7] - 2019.05.16
-
-* Add RTCDataChannelMessage for data channel and remove base64 type.
-* Add streaming API for datachannel messages and state changes.
-* Remove cloudwebrtc prefix in the plugin method/event channel name.
-* Other bug fixes.
-
-[0.1.6] - 2019.03.31
-
-* Add getConfiguration/setConfiguration methods for Peerconnection.
-* Add object fit for RTCVideoView.
-
-[0.1.5] - 2019.03.27
-
-* Remove unnecessary parameter for getLocalDescription method.
-
-[0.1.4] - 2019.03.26
-
-* switchCamera method now returns future with isFrontCamera as result
-* Fix camera stuck in rare cases
-* Add getLocalDescription/getRemoteDescription methods
-
-[0.1.3] - 2019.03.25
-
-* Add horizontal flip (mirror) function for RTCVideoView.
-* Fixed ScreenCapture preview aspect ratio for Android.
-
-[0.1.2] - 2019.03.24
-
-* Fix compilation failure caused by invalid code.
-
-[0.1.1] - 2019.03.24
-
-* Migrated to AndroidX using Refactoring from Andoid Studio
-* Fix mediaStreamTrackSetEnable not working.
-* Fix iOS can't render video when resolution changes.
-* Some code style changes.
-
-[0.1.0] - 2019.01.21
-
-* Fix camera switch issues.
-* Support data channel, please use the latest demo to test.
-* Support screen sharing, but the work is not perfect, there is a problem with the local preview.
-
-[0.0.3] - 2018.12.20
-
-* Update WebRTC to 1.0.25821.
-* Implemented MediaStreamTrack.setVolume().
-* Add public getter for texture id.
-* Fixed getUserMedia does not work for capturing audio only.
-
-[0.0.2] - 2018.11.04
-
-* Add 'enabled' method for MediaStreamTrack.
-* Implement switch camera.
-* Add arm64-v8a and x86_64 architecture support for android.
-
-[0.0.1] - 2018.05.30
-
-* Initial release.
+- Initial release.
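The `[0.0.6]` entry above maps to the track accessors this patch adds to `FlutterWebRTCPlugin` further down (`getTrackForId`, `getRemoteTrack`, and the `sharedSingleton` instance). A minimal sketch of how native Android code might resolve a remote participant's track once the plugin is attached — the wrapper class and the id values are illustrative, not part of this diff:

```java
// Illustrative only: resolve a remote track via the new plugin accessors.
import org.webrtc.MediaStreamTrack;

import live.videosdk.webrtc.FlutterWebRTCPlugin;

public class RemoteTrackLookup {
    public static MediaStreamTrack find(String trackId, String peerConnectionId) {
        // sharedSingleton is assigned in the plugin constructor when the
        // Flutter engine instantiates the plugin.
        FlutterWebRTCPlugin plugin = FlutterWebRTCPlugin.sharedSingleton;
        if (plugin == null) {
            return null; // plugin not attached yet
        }
        // Prefer the tracks owned by the given peer connection, then fall
        // back to the plugin-wide remote track lookup.
        MediaStreamTrack track = plugin.getTrackForId(trackId, peerConnectionId);
        return track != null ? track : plugin.getRemoteTrack(trackId);
    }
}
```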
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index bbab725aab..0000000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# Contributing
-
-We love contributions from everyone, whether it's raising an issue, reporting a bug, adding a feature, or helping improve a document.
-Maintaining the flutter-webrtc plugin for all platforms is not an easy task, so everything you do is support for the project.
-
-# Pull Request
-We recommend that you create a related issue before PR so that others can find the answers they want in the issues.
diff --git a/README.md b/README.md
index f8387d1160..2436fa121a 100644
--- a/README.md
+++ b/README.md
@@ -1,48 +1,20 @@
-# Flutter-WebRTC
-
-[![Financial Contributors on Open Collective](https://opencollective.com/flutter-webrtc/all/badge.svg?label=financial+contributors)](https://opencollective.com/flutter-webrtc) [![pub package](https://img.shields.io/pub/v/flutter_webrtc.svg)](https://pub.dartlang.org/packages/flutter_webrtc) [![Gitter](https://badges.gitter.im/flutter-webrtc/Lobby.svg)](https://gitter.im/flutter-webrtc/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![slack](https://img.shields.io/badge/join-us%20on%20slack-gray.svg?longCache=true&logo=slack&colorB=brightgreen)](https://join.slack.com/t/flutterwebrtc/shared_invite/zt-q83o7y1s-FExGLWEvtkPKM8ku_F8cEQ)
+# VideoSDK-WebRTC
 
 WebRTC plugin for Flutter Mobile/Desktop/Web
 
-Sponsored with 💖   by
-Stream Chat
-Enterprise Grade APIs for Feeds, Chat, & Video. Try the Flutter Video tutorial 💬
-LiveKit
-LiveKit - Open source WebRTC infrastructure
-
 ## Functionality
 
 | Feature | Android | iOS | [Web](https://flutter.dev/web) | macOS | Windows | Linux | [Embedded](https://github.com/sony/flutter-elinux) | [Fuchsia](https://fuchsia.dev/) |
 | :-------------: | :-------------:| :-----: | :-----: | :-----: | :-----: | :-----: | :-----: | :-----: |
 | Audio/Video | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | |
 | Data Channel | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | |
-| Screen Capture | :heavy_check_mark: | [:heavy_check_mark:(*)](https://github.com/flutter-webrtc/flutter-webrtc/wiki/iOS-Screen-Sharing) | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | |
+| Screen Capture | :heavy_check_mark: | [:heavy_check_mark:(*)](https://github.com/VideoSDK-webrtc/VideoSDK-webrtc/wiki/iOS-Screen-Sharing) | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | |
 | Unified-Plan | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | |
 | Simulcast | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | [WIP] | |
 | MediaRecorder | :warning: | :warning: | :heavy_check_mark: | | | | | |
-| End to End Encryption | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | |
+| SFrame/FrameCryptor | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | |
 | Insertable Streams | | | | | | | | |
 
-Additional platform/OS support from the other community
-
-- flutter-tizen: https://github.com/flutter-tizen/plugins/tree/master/packages/flutter_webrtc
-- flutter-elinux(WIP): https://github.com/sony/flutter-elinux-plugins/issues/7
-
-Add `flutter_webrtc` as a [dependency in your pubspec.yaml file](https://flutter.io/using-packages/).
-
 ### iOS
 
 Add the following entry to your _Info.plist_ file, located in `/ios/Runner/Info.plist`:
@@ -109,53 +81,3 @@ android {
 ```
 
 If necessary, in the same `build.gradle` you will need to increase `minSdkVersion` of `defaultConfig` up to `23` (currently default Flutter generator set it to `16`).
-
-### Important reminder
-When you compile the release apk, you need to add the following operations,
-[Setup Proguard Rules](https://github.com/flutter-webrtc/flutter-webrtc/commit/d32dab13b5a0bed80dd9d0f98990f107b9b514f4)
-
-## Contributing
-
-The project is inseparable from the contributors of the community.
-
-- [CloudWebRTC](https://github.com/cloudwebrtc) - Original Author
-- [RainwayApp](https://github.com/rainwayapp) - Sponsor
-- [亢少军](https://github.com/kangshaojun) - Sponsor
-- [ION](https://github.com/pion/ion) - Sponsor
-- [reSipWebRTC](https://github.com/reSipWebRTC) - Sponsor
-- [沃德米科技](https://github.com/woodemi)-[36记手写板](https://www.36notes.com) - Sponsor
-- [阿斯特网络科技有限公司](https://www.astgo.net/) - Sponsor
-
-### Example
-
-For more examples, please refer to [flutter-webrtc-demo](https://github.com/cloudwebrtc/flutter-webrtc-demo/).
-
-## Contributors
-
-### Code Contributors
-
-This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)].
-
-### Financial Contributors
-
-Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/flutter-webrtc/contribute)]
-
-#### Individuals
-
-#### Organizations
-
-Support this project with your organization. Your logo will show up here with a link to your website. [[Contribute](https://opencollective.com/flutter-webrtc/contribute)]
diff --git a/android/build.gradle b/android/build.gradle
index a94e70c0a7..80bae1cce0 100644
--- a/android/build.gradle
+++ b/android/build.gradle
@@ -1,8 +1,8 @@
-group 'com.cloudwebrtc.webrtc'
+group 'live.videosdk.webrtc'
 version '1.0-SNAPSHOT'
 
 buildscript {
-    ext.kotlin_version = '1.6.10'
+    ext.kotlin_version = '1.7.10'
     repositories {
         google()
         mavenCentral()
@@ -27,7 +27,7 @@ apply plugin: 'kotlin-android'
 
 android {
     if (project.android.hasProperty("namespace")) {
-        namespace 'com.cloudwebrtc.webrtc'
+        namespace 'live.videosdk.webrtc'
     }
 
     compileSdkVersion 31
@@ -52,7 +52,7 @@ android {
 }
 
 dependencies {
-    implementation 'io.github.webrtc-sdk:android:125.6422.02'
+    implementation 'io.github.webrtc-sdk:android:125.6422.03'
     implementation 'com.github.davidliu:audioswitch:89582c47c9a04c62f90aa5e57251af4800a62c9a'
     implementation 'androidx.annotation:annotation:1.1.0'
     implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
diff --git a/android/gradle/wrapper/gradle-wrapper.jar b/android/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000000..ccebba7710
Binary files /dev/null and b/android/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/android/gradle/wrapper/gradle-wrapper.properties b/android/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000000..42defcc94b
--- /dev/null
+++ b/android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
+networkTimeout=10000
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/android/gradlew b/android/gradlew
new file mode 100755
index 0000000000..79a61d421c
--- /dev/null
+++ b/android/gradlew
@@ -0,0 +1,244 @@
+#!/bin/sh
+
+#
+# Copyright © 2015-2021 the original authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+#
+#   Gradle start up script for POSIX generated by Gradle.
+#
+#   Important for running:
+#
+#   (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
+#       noncompliant, but you have some other compliant shell such as ksh or
+#       bash, then to run this script, type that shell name before the whole
+#       command line, like:
+#
+#           ksh Gradle
+#
+#       Busybox and similar reduced shells will NOT work, because this script
+#       requires all of these POSIX shell features:
+#         * functions;
+#         * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
+#           «${var#prefix}», «${var%suffix}», and «$( cmd )»;
+#         * compound commands having a testable exit status, especially «case»;
+#         * various built-in commands including «command», «set», and «ulimit».
+#
+#   Important for patching:
+#
+#   (2) This script targets any POSIX shell, so it avoids extensions provided
+#       by Bash, Ksh, etc; in particular arrays are avoided.
+#
+#       The "traditional" practice of packing multiple parameters into a
+#       space-separated string is a well documented source of bugs and security
+#       problems, so this is (mostly) avoided, by progressively accumulating
+#       options in "$@", and eventually passing that to Java.
+#
+#       Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
+#       and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
+#       see the in-line comments for details.
+#
+#       There are tweaks for specific operating systems such as AIX, CygWin,
+#       Darwin, MinGW, and NonStop.
+#
+#   (3) This script is generated from the Groovy template
+#       https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+#       within the Gradle project.
+#
+#       You can find Gradle at https://github.com/gradle/gradle/.
+#
+##############################################################################
+
+# Attempt to set APP_HOME
+
+# Resolve links: $0 may be a link
+app_path=$0
+
+# Need this for daisy-chained symlinks.
+while
+    APP_HOME=${app_path%"${app_path##*/}"}  # leaves a trailing /; empty if no leading path
+    [ -h "$app_path" ]
+do
+    ls=$( ls -ld "$app_path" )
+    link=${ls#*' -> '}
+    case $link in             #(
+      /*)   app_path=$link ;; #(
+      *)    app_path=$APP_HOME$link ;;
+    esac
+done
+
+# This is normally unused
+# shellcheck disable=SC2034
+APP_BASE_NAME=${0##*/}
+APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD=maximum
+
+warn () {
+    echo "$*"
+} >&2
+
+die () {
+    echo
+    echo "$*"
+    echo
+    exit 1
+} >&2
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "$( uname )" in                #(
+  CYGWIN* )         cygwin=true  ;; #(
+  Darwin* )         darwin=true  ;; #(
+  MSYS* | MINGW* )  msys=true    ;; #(
+  NONSTOP* )        nonstop=true ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD=$JAVA_HOME/jre/sh/java
+    else
+        JAVACMD=$JAVA_HOME/bin/java
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD=java
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
+    case $MAX_FD in #(
+      max*)
+        # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
+        # shellcheck disable=SC3045
+        MAX_FD=$( ulimit -H -n ) ||
+            warn "Could not query maximum file descriptor limit"
+    esac
+    case $MAX_FD in  #(
+      '' | soft) :;; #(
+      *)
+        # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
+        # shellcheck disable=SC3045
+        ulimit -n "$MAX_FD" ||
+            warn "Could not set maximum file descriptor limit to $MAX_FD"
+    esac
+fi
+
+# Collect all arguments for the java command, stacking in reverse order:
+#   * args from the command line
+#   * the main class name
+#   * -classpath
+#   * -D...appname settings
+#   * --module-path (only if needed)
+#   * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if "$cygwin" || "$msys" ; then
+    APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
+    CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
+
+    JAVACMD=$( cygpath --unix "$JAVACMD" )
+
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    for arg do
+        if
+            case $arg in                                #(
+              -*)   false ;;                            # don't mess with options #(
+              /?*)  t=${arg#/} t=/${t%%/*}              # looks like a POSIX filepath
+                    [ -e "$t" ] ;;                      #(
+              *)    false ;;
+            esac
+        then
+            arg=$( cygpath --path --ignore --mixed "$arg" )
+        fi
+        # Roll the args list around exactly as many times as the number of
+        # args, so each arg winds up back in the position where it started, but
+        # possibly modified.
+        #
+        # NB: a `for` loop captures its iteration list before it begins, so
+        # changing the positional parameters here affects neither the number of
+        # iterations, nor the values presented in `arg`.
+        shift                   # remove old arg
+        set -- "$@" "$arg"      # push replacement arg
+    done
+fi
+
+# Collect all arguments for the java command;
+#   * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
+#     shell script including quotes and variable substitutions, so put them in
+#     double quotes to make sure that they get re-expanded; and
+#   * put everything else in single quotes, so that it's not re-expanded.
+
+set -- \
+        "-Dorg.gradle.appname=$APP_BASE_NAME" \
+        -classpath "$CLASSPATH" \
+        org.gradle.wrapper.GradleWrapperMain \
+        "$@"
+
+# Stop when "xargs" is not available.
+if ! command -v xargs >/dev/null 2>&1
+then
+    die "xargs is not available"
+fi
+
+# Use "xargs" to parse quoted args.
+#
+# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
+#
+# In Bash we could simply go:
+#
+#   readarray ARGS < <( xargs -n1 <<<"$var" ) &&
+#   set -- "${ARGS[@]}" "$@"
+#
+# but POSIX shell has neither arrays nor command substitution, so instead we
+# post-process each arg (as a line of input to sed) to backslash-escape any
+# character that might be a shell metacharacter, then use eval to reverse
+# that process (while maintaining the separation between arguments), and wrap
+# the whole thing up as a single "set" statement.
+#
+# This will of course break if any of these variables contains a newline or
+# an unmatched quote.
+#
+
+eval "set -- $(
+        printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
+        xargs -n1 |
+        sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
+        tr '\n' ' '
+    )" '"$@"'
+
+exec "$JAVACMD" "$@"
diff --git a/android/gradlew.bat b/android/gradlew.bat
new file mode 100644
index 0000000000..6689b85bee
--- /dev/null
+++ b/android/gradlew.bat
@@ -0,0 +1,92 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem      https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%"=="" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%"=="" set DIRNAME=.
+@rem This is normally unused
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if %ERRORLEVEL% equ 0 goto execute
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if %ERRORLEVEL% equ 0 goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+set EXIT_CODE=%ERRORLEVEL%
+if %EXIT_CODE% equ 0 set EXIT_CODE=1
+if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
+exit /b %EXIT_CODE%
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/android/local.properties b/android/local.properties
new file mode 100644
index 0000000000..832c3e0adf
--- /dev/null
+++ b/android/local.properties
@@ -0,0 +1,8 @@
+## This file must *NOT* be checked into Version Control Systems,
+# as it contains information specific to your local configuration.
+#
+# Location of the SDK. This is only used by Gradle.
+# For customization when using a Version Control System, please read the
+# header note.
+#Wed Jul 17 14:41:04 IST 2024
+sdk.dir=/Users/halimarajwani/Library/Android/sdk
diff --git a/android/proguard-rules.pro b/android/proguard-rules.pro
index 6ce9896196..21aa43f343 100644
--- a/android/proguard-rules.pro
+++ b/android/proguard-rules.pro
@@ -1,3 +1,3 @@
 # Flutter WebRTC
--keep class com.cloudwebrtc.webrtc.** { *; }
+-keep class live.videosdk.webrtc.** { *; }
 -keep class org.webrtc.** { *; }
diff --git a/android/src/main/AndroidManifest.xml b/android/src/main/AndroidManifest.xml
index 903cbff16e..2e10918fa2 100644
--- a/android/src/main/AndroidManifest.xml
+++ b/android/src/main/AndroidManifest.xml
@@ -1,3 +1,3 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
-    package="com.cloudwebrtc.webrtc">
+    package="live.videosdk.webrtc">
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java b/android/src/main/java/live/videosdk/webrtc/CameraEventsHandler.java
similarity index 57%
rename from android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java
rename to android/src/main/java/live/videosdk/webrtc/CameraEventsHandler.java
index e355b0c953..f8b888b1a4 100755
--- a/android/src/main/java/com/cloudwebrtc/webrtc/CameraEventsHandler.java
+++ b/android/src/main/java/live/videosdk/webrtc/CameraEventsHandler.java
@@ -1,46 +1,89 @@
-package com.cloudwebrtc.webrtc;
+package live.videosdk.webrtc;
 
 import android.util.Log;
 
 import org.webrtc.CameraVideoCapturer;
 
 class CameraEventsHandler implements CameraVideoCapturer.CameraEventsHandler {
+
+
+    public enum CameraState {
+        NEW,
+        OPENING,
+        OPENED,
+        CLOSED,
+        DISCONNECTED,
+        ERROR,
+        FREEZED
+    }
+
     private final static String TAG = FlutterWebRTCPlugin.TAG;
 
     // Camera error handler - invoked when camera can not be opened
     // or any camera exception happens on camera thread.
+    private CameraState state = CameraState.NEW;
+
+    public void waitForCameraOpen() {
+        Log.d(TAG, "CameraEventsHandler.waitForCameraOpen");
+        while (state != CameraState.OPENED && state != CameraState.ERROR) {
+            try {
+                Thread.sleep(1);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+    public void waitForCameraClosed() {
+        Log.d(TAG, "CameraEventsHandler.waitForCameraClosed");
+        while (state != CameraState.CLOSED && state != CameraState.ERROR) {
+            try {
+                Thread.sleep(1);
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
     @Override
     public void onCameraError(String errorDescription) {
         Log.d(TAG, String.format("CameraEventsHandler.onCameraError: errorDescription=%s", errorDescription));
+        state = CameraState.ERROR;
     }
 
     // Called when camera is disconnected.
     @Override
     public void onCameraDisconnected() {
         Log.d(TAG, "CameraEventsHandler.onCameraDisconnected");
+        state = CameraState.DISCONNECTED;
     }
 
     // Invoked when camera stops receiving frames
     @Override
     public void onCameraFreezed(String errorDescription) {
         Log.d(TAG, String.format("CameraEventsHandler.onCameraFreezed: errorDescription=%s", errorDescription));
+        state = CameraState.FREEZED;
     }
 
     // Callback invoked when camera is opening.
     @Override
     public void onCameraOpening(String cameraName) {
         Log.d(TAG, String.format("CameraEventsHandler.onCameraOpening: cameraName=%s", cameraName));
+        state = CameraState.OPENING;
    }
 
     // Callback invoked when first camera frame is available after camera is opened.
     @Override
     public void onFirstFrameAvailable() {
         Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable");
+        state = CameraState.OPENED;
     }
 
     // Callback invoked when camera closed.
@Override public void onCameraClosed() { Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable"); + state = CameraState.CLOSED; + } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java b/android/src/main/java/live/videosdk/webrtc/DataChannelObserver.java similarity index 96% rename from android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java rename to android/src/main/java/live/videosdk/webrtc/DataChannelObserver.java index 83f316a036..b09569959b 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/DataChannelObserver.java +++ b/android/src/main/java/live/videosdk/webrtc/DataChannelObserver.java @@ -1,7 +1,7 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; -import com.cloudwebrtc.webrtc.utils.AnyThreadSink; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import live.videosdk.webrtc.utils.AnyThreadSink; +import live.videosdk.webrtc.utils.ConstraintsMap; import org.webrtc.DataChannel; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java b/android/src/main/java/live/videosdk/webrtc/FlutterRTCFrameCryptor.java similarity index 98% rename from android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java rename to android/src/main/java/live/videosdk/webrtc/FlutterRTCFrameCryptor.java index 1592cc2cfe..b5294594a5 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCFrameCryptor.java +++ b/android/src/main/java/live/videosdk/webrtc/FlutterRTCFrameCryptor.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; import android.util.Log; @@ -22,9 +22,9 @@ import io.flutter.plugin.common.MethodChannel.MethodCallHandler; import io.flutter.plugin.common.MethodChannel.Result; -import com.cloudwebrtc.webrtc.utils.AnyThreadSink; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; -import com.cloudwebrtc.webrtc.utils.ConstraintsArray; +import live.videosdk.webrtc.utils.AnyThreadSink; +import live.videosdk.webrtc.utils.ConstraintsMap; +import live.videosdk.webrtc.utils.ConstraintsArray; public class FlutterRTCFrameCryptor { @@ -152,8 +152,8 @@ private FrameCryptorAlgorithm frameCryptorAlgorithmFromInt(int algorithm) { switch (algorithm) { case 0: return FrameCryptorAlgorithm.AES_GCM; - case 1: - return FrameCryptorAlgorithm.AES_CBC; + // case 1: + // return FrameCryptorAlgorithm.AES_CBC; default: return FrameCryptorAlgorithm.AES_GCM; } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java b/android/src/main/java/live/videosdk/webrtc/FlutterRTCVideoRenderer.java similarity index 97% rename from android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java rename to android/src/main/java/live/videosdk/webrtc/FlutterRTCVideoRenderer.java index 61500ac844..b82328d67b 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterRTCVideoRenderer.java +++ b/android/src/main/java/live/videosdk/webrtc/FlutterRTCVideoRenderer.java @@ -1,11 +1,11 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; import android.util.Log; import android.graphics.SurfaceTexture; -import com.cloudwebrtc.webrtc.utils.AnyThreadSink; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; -import com.cloudwebrtc.webrtc.utils.EglUtils; +import live.videosdk.webrtc.utils.AnyThreadSink; +import live.videosdk.webrtc.utils.ConstraintsMap; +import live.videosdk.webrtc.utils.EglUtils; import java.util.List; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java 
b/android/src/main/java/live/videosdk/webrtc/FlutterWebRTCPlugin.java similarity index 85% rename from android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java rename to android/src/main/java/live/videosdk/webrtc/FlutterWebRTCPlugin.java index b2458a06de..b2133789d3 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java +++ b/android/src/main/java/live/videosdk/webrtc/FlutterWebRTCPlugin.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; import android.app.Activity; import android.app.Application; @@ -11,9 +11,14 @@ import androidx.lifecycle.Lifecycle; import androidx.lifecycle.LifecycleOwner; -import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; -import com.cloudwebrtc.webrtc.utils.AnyThreadSink; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; +import live.videosdk.webrtc.audio.AudioSwitchManager; +import live.videosdk.webrtc.utils.AnyThreadSink; +import live.videosdk.webrtc.utils.ConstraintsMap; + +import live.videosdk.webrtc.audio.AudioProcessingController; + +import org.webrtc.ExternalAudioProcessingFactory; +import org.webrtc.MediaStreamTrack; import io.flutter.embedding.engine.plugins.FlutterPlugin; import io.flutter.embedding.engine.plugins.activity.ActivityAware; @@ -22,7 +27,7 @@ import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; import io.flutter.plugin.common.MethodChannel; -import io.flutter.plugin.common.PluginRegistry.Registrar; + import io.flutter.view.TextureRegistry; /** @@ -41,28 +46,29 @@ public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventC public EventChannel.EventSink eventSink; public FlutterWebRTCPlugin() { + sharedSingleton = this; } - /** - * Plugin registration. - */ - public static void registerWith(Registrar registrar) { - final FlutterWebRTCPlugin plugin = new FlutterWebRTCPlugin(); + public static FlutterWebRTCPlugin sharedSingleton; - plugin.startListening(registrar.context(), registrar.messenger(), registrar.textures()); + public AudioProcessingController getAudioProcessingController() { + return methodCallHandler.audioProcessingController; + } - if (registrar.activeContext() instanceof Activity) { - plugin.methodCallHandler.setActivity((Activity) registrar.activeContext()); - } - application = ((Application) registrar.context().getApplicationContext()); - application.registerActivityLifecycleCallbacks(plugin.observer); + public MediaStreamTrack getTrackForId(String trackId, String peerConnectionId) { + return methodCallHandler.getTrackForId(trackId, peerConnectionId); + } + + public LocalTrack getLocalTrack(String trackId) { + return methodCallHandler.getLocalTrack(trackId); + } - registrar.addViewDestroyListener(view -> { - plugin.stopListening(); - return false; - }); + public MediaStreamTrack getRemoteTrack(String trackId) { + return methodCallHandler.getRemoteTrack(trackId); } + + @Override public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) { startListening(binding.getApplicationContext(), binding.getBinaryMessenger(), diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java b/android/src/main/java/live/videosdk/webrtc/GetUserMediaImpl.java similarity index 65% rename from android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java rename to android/src/main/java/live/videosdk/webrtc/GetUserMediaImpl.java index d48e73d305..059605eae5 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/GetUserMediaImpl.java +++ 
b/android/src/main/java/live/videosdk/webrtc/GetUserMediaImpl.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; import android.Manifest; import android.app.Activity; @@ -9,15 +9,9 @@ import android.content.Intent; import android.content.pm.PackageManager; import android.graphics.Point; -import android.graphics.Rect; -import android.hardware.Camera; -import android.hardware.Camera.Parameters; -import android.hardware.camera2.CameraAccessException; -import android.hardware.camera2.CameraCaptureSession; -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CameraDevice; + import android.hardware.camera2.CameraManager; -import android.hardware.camera2.CaptureRequest; + import android.media.AudioDeviceInfo; import android.media.projection.MediaProjection; import android.media.projection.MediaProjectionManager; @@ -29,38 +23,39 @@ import android.os.Looper; import android.os.ResultReceiver; import android.provider.MediaStore; -import android.util.DisplayMetrics; + import android.util.Log; -import android.util.Range; +import android.util.Pair; import android.util.SparseArray; import android.view.Display; -import android.view.Surface; + import android.view.WindowManager; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; -import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; -import com.cloudwebrtc.webrtc.audio.AudioUtils; -import com.cloudwebrtc.webrtc.record.AudioChannel; -import com.cloudwebrtc.webrtc.record.AudioSamplesInterceptor; -import com.cloudwebrtc.webrtc.record.MediaRecorderImpl; -import com.cloudwebrtc.webrtc.record.OutputAudioSamplesInterceptor; -import com.cloudwebrtc.webrtc.utils.Callback; -import com.cloudwebrtc.webrtc.utils.ConstraintsArray; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; -import com.cloudwebrtc.webrtc.utils.EglUtils; -import com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils; -import com.cloudwebrtc.webrtc.utils.ObjectType; -import com.cloudwebrtc.webrtc.utils.PermissionUtils; - +import live.videosdk.webrtc.audio.AudioSwitchManager; +import live.videosdk.webrtc.record.AudioChannel; +import live.videosdk.webrtc.record.AudioSamplesInterceptor; +import live.videosdk.webrtc.record.MediaRecorderImpl; +import live.videosdk.webrtc.record.OutputAudioSamplesInterceptor; +import live.videosdk.webrtc.utils.Callback; +import live.videosdk.webrtc.utils.ConstraintsArray; +import live.videosdk.webrtc.utils.ConstraintsMap; +import live.videosdk.webrtc.audio.LocalAudioTrack; +import live.videosdk.webrtc.utils.EglUtils; +import live.videosdk.webrtc.utils.MediaConstraintsUtils; +import live.videosdk.webrtc.utils.ObjectType; +import live.videosdk.webrtc.utils.PermissionUtils; +import live.videosdk.webrtc.video.LocalVideoTrack; +import live.videosdk.webrtc.video.VideoCapturerInfo; import org.webrtc.AudioSource; import org.webrtc.AudioTrack; import org.webrtc.Camera1Capturer; import org.webrtc.Camera1Enumerator; import org.webrtc.Camera2Capturer; import org.webrtc.Camera2Enumerator; -import org.webrtc.CameraEnumerationAndroid.CaptureFormat; +import org.webrtc.Camera2Helper; import org.webrtc.CameraEnumerator; import org.webrtc.CameraVideoCapturer; import org.webrtc.MediaConstraints; @@ -72,9 +67,11 @@ import org.webrtc.VideoSource; import org.webrtc.VideoTrack; import org.webrtc.audio.JavaAudioDeviceModule; - +import org.webrtc.Camera1Helper; +import org.webrtc.VideoFrame; +import org.webrtc.CapturerObserver; +import org.webrtc.Size; import java.io.File; -import 
java.lang.reflect.Field; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -82,11 +79,12 @@ import io.flutter.plugin.common.MethodChannel.Result; + /** * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce * complexity and to (somewhat) separate concerns. */ -class GetUserMediaImpl { +public class GetUserMediaImpl { private static final int DEFAULT_WIDTH = 1280; private static final int DEFAULT_HEIGHT = 720; private static final int DEFAULT_FPS = 30; @@ -103,7 +101,7 @@ class GetUserMediaImpl { static final String TAG = FlutterWebRTCPlugin.TAG; - private final Map<String, VideoCapturerInfo> mVideoCapturers = new HashMap<>(); + private final Map<String, VideoCapturerInfoEx> mVideoCapturers = new HashMap<>(); private final Map<String, SurfaceTextureHelper> mSurfaceTextureHelpers = new HashMap<>(); private final StateProvider stateProvider; private final Context applicationContext; @@ -266,23 +264,22 @@ private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { * @param isFacing 'user' mapped with 'front' is true (default) 'environment' mapped with 'back' * is false * @param sourceId (String) use this sourceId and ignore facing mode if specified. - * @return VideoCapturer can invoke with startCapture/stopCapture null + * @return Pair of device name and VideoCapturer, which can invoke startCapture/stopCapture, or null * if no camera matches the specified facing mode. */ - private Map<String, VideoCapturer> createVideoCapturer( - CameraEnumerator enumerator, boolean isFacing, String sourceId) { - VideoCapturer videoCapturer = null; - Map<String, VideoCapturer> result = new HashMap<String, VideoCapturer>(); + private Pair<String, VideoCapturer> createVideoCapturer( + CameraEnumerator enumerator, boolean isFacing, String sourceId, CameraEventsHandler cameraEventsHandler) { + VideoCapturer videoCapturer; // if sourceId given, use specified sourceId first final String[] deviceNames = enumerator.getDeviceNames(); if (sourceId != null && !sourceId.equals("")) { for (String name : deviceNames) { if (name.equals(sourceId)) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + videoCapturer = enumerator.createCapturer(name, cameraEventsHandler); if (videoCapturer != null) { Log.d(TAG, "create user specified camera " + name + " succeeded"); - result.put(name, videoCapturer); - return result; + + return new Pair<>(name, videoCapturer); } else { Log.d(TAG, "create user specified camera " + name + " failed"); break; // fallback to facing mode @@ -295,12 +292,11 @@ private Map<String, VideoCapturer> createVideoCapturer( String facingStr = isFacing ? 
"front" : "back"; for (String name : deviceNames) { if (enumerator.isFrontFacing(name) == isFacing) { - videoCapturer = enumerator.createCapturer(name, new CameraEventsHandler()); + videoCapturer = enumerator.createCapturer(name, cameraEventsHandler); if (videoCapturer != null) { Log.d(TAG, "Create " + facingStr + " camera " + name + " succeeded"); - result.put(name, videoCapturer); - return result; + return new Pair<>(name, videoCapturer); } else { Log.e(TAG, "Create " + facingStr + " camera " + name + " failed"); } @@ -308,13 +304,13 @@ private Map createVideoCapturer( } // falling back to the first available camera - if (videoCapturer == null && deviceNames.length > 0) { - videoCapturer = enumerator.createCapturer(deviceNames[0], new CameraEventsHandler()); + if (deviceNames.length > 0) { + videoCapturer = enumerator.createCapturer(deviceNames[0], cameraEventsHandler); Log.d(TAG, "Falling back to the first available camera"); - result.put(deviceNames[0], videoCapturer); + return new Pair<>(deviceNames[0], videoCapturer); } - return result; + return null; } /** @@ -376,7 +372,11 @@ private ConstraintsMap getUserAudio(ConstraintsMap constraints, MediaStream stre if (deviceId != null) { try { - setPreferredInputDevice(deviceId); + + + if (VERSION.SDK_INT >= VERSION_CODES.M) { + setPreferredInputDevice(Integer.parseInt(deviceId)); + } } catch (Exception e) { Log.e(TAG, "setPreferredInputDevice failed", e); } @@ -385,7 +385,7 @@ private ConstraintsMap getUserAudio(ConstraintsMap constraints, MediaStream stre AudioTrack track = pcFactory.createAudioTrack(trackId, audioSource); stream.addTrack(track); - stateProvider.putLocalTrack(track.id(), track); + stateProvider.putLocalTrack(track.id(), new LocalAudioTrack(track)); ConstraintsMap trackParams = new ConstraintsMap(); trackParams.putBoolean("enabled", track.enabled()); @@ -396,7 +396,9 @@ private ConstraintsMap getUserAudio(ConstraintsMap constraints, MediaStream stre trackParams.putBoolean("remote", false); if (deviceId == null) { - deviceId = "" + getPreferredInputDevice(preferredInput); + if (VERSION.SDK_INT >= VERSION_CODES.M) { + deviceId = "" + getPreferredInputDevice(preferredInput); + } } ConstraintsMap settings = new ConstraintsMap(); @@ -515,7 +517,7 @@ protected void onReceiveResult(int requestCode, Bundle resultData) { private void getDisplayMedia(final Result result, final MediaStream mediaStream, final Intent mediaProjectionData) { /* Create ScreenCapture */ - MediaStreamTrack[] tracks = new MediaStreamTrack[1]; + VideoTrack displayTrack = null; VideoCapturer videoCapturer = null; videoCapturer = new OrientationAwareScreenCapturer( @@ -550,7 +552,7 @@ public void onStop() { Point size = new Point(); display.getRealSize(size); - VideoCapturerInfo info = new VideoCapturerInfo(); + VideoCapturerInfoEx info = new VideoCapturerInfoEx(); info.width = size.x; info.height = size.y; info.fps = DEFAULT_FPS; @@ -563,41 +565,32 @@ public void onStop() { String trackId = stateProvider.getNextTrackUUID(); mVideoCapturers.put(trackId, info); - tracks[0] = pcFactory.createVideoTrack(trackId, videoSource); + displayTrack = pcFactory.createVideoTrack(trackId, videoSource); ConstraintsArray audioTracks = new ConstraintsArray(); ConstraintsArray videoTracks = new ConstraintsArray(); ConstraintsMap successResult = new ConstraintsMap(); - for (MediaStreamTrack track : tracks) { - if (track == null) { - continue; - } + if (displayTrack != null) { + String id = displayTrack.id(); - String id = track.id(); + LocalVideoTrack displayLocalVideoTrack = new 
LocalVideoTrack(displayTrack); + videoSource.setVideoProcessor(displayLocalVideoTrack); - if (track instanceof AudioTrack) { - mediaStream.addTrack((AudioTrack) track); - } else { - mediaStream.addTrack((VideoTrack) track); - } - stateProvider.putLocalTrack(id, track); + stateProvider.putLocalTrack(id, displayLocalVideoTrack); ConstraintsMap track_ = new ConstraintsMap(); - String kind = track.kind(); + String kind = displayTrack.kind(); - track_.putBoolean("enabled", track.enabled()); + track_.putBoolean("enabled", displayTrack.enabled()); track_.putString("id", id); track_.putString("kind", kind); track_.putString("label", kind); - track_.putString("readyState", track.state().toString()); + track_.putString("readyState", displayTrack.state().toString()); track_.putBoolean("remote", false); - if (track instanceof AudioTrack) { - audioTracks.pushMap(track_); - } else { - videoTracks.pushMap(track_); - } + videoTracks.pushMap(track_); + mediaStream.addTrack(displayTrack); } String streamId = mediaStream.getId(); @@ -742,61 +735,130 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi isFacing = facingMode == null || !facingMode.equals("environment"); String deviceId = getSourceIdConstraint(videoConstraintsMap); - Map result = createVideoCapturer(cameraEnumerator, isFacing, deviceId); + CameraEventsHandler cameraEventsHandler = new CameraEventsHandler(); + Pair result = createVideoCapturer(cameraEnumerator, isFacing, deviceId, cameraEventsHandler); if (result == null) { return null; } - if (deviceId == null) { - deviceId = result.keySet().iterator().next(); - } + deviceId = result.first; + VideoCapturer videoCapturer = result.second; - VideoCapturer videoCapturer = result.get(deviceId); + if (facingMode == null && cameraEnumerator.isFrontFacing(deviceId)) { + facingMode = "user"; + } else if (facingMode == null && cameraEnumerator.isBackFacing(deviceId)) { + facingMode = "environment"; + } + // else, leave facingMode as it was PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory(); VideoSource videoSource = pcFactory.createVideoSource(false); String threadName = Thread.currentThread().getName() + "_texture_camera_thread"; SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(threadName, EglUtils.getRootEglBaseContext()); - videoCapturer.initialize( - surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); + if (surfaceTextureHelper == null) { + Log.e(TAG, "surfaceTextureHelper is null"); + return null; + } + CapturerObserver customCapturerObserver = new CapturerObserver() { + WebRTCService webRTCService = WebRTCService.getInstance(); + @Override + public void onCapturerStarted(boolean success) { + Log.d(TAG, "Capturer started: " + success); + videoSource.getCapturerObserver().onCapturerStarted(success); + } + + @Override + public void onCapturerStopped() { + Log.d(TAG, "Capturer stopped"); + videoSource.getCapturerObserver().onCapturerStopped(); + } + + @Override + public void onFrameCaptured(VideoFrame frame) { + try{ + if(webRTCService.getVideoProcessor() != null && frame != null){ + VideoFrame processedFrame = webRTCService.getVideoProcessor().onFrameReceived(frame); + if(processedFrame != null){ + videoSource.getCapturerObserver().onFrameCaptured(processedFrame); + }else{ + videoSource.getCapturerObserver().onFrameCaptured(frame); + } + } else{ + videoSource.getCapturerObserver().onFrameCaptured(frame); + } + + } catch(Exception e){ + videoSource.getCapturerObserver().onFrameCaptured(frame); + } + 
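The custom CapturerObserver here (its closing braces follow just below) is the pipeline hook at the center of this fork: every captured frame is offered to whatever VideoProcessor is registered on the WebRTCService singleton (both types are added near the end of this diff), and the processed frame, or the original frame when the processor returns null or throws, is forwarded to the VideoSource. Installing a processor is then a small piece of wiring, sketched next; the pass-through body is a placeholder for real per-frame work:

import org.webrtc.VideoFrame;
import live.videosdk.webrtc.VideoProcessor;
import live.videosdk.webrtc.WebRTCService;

final class ProcessorWiring { // hypothetical wiring, not part of this patch
  static void install() {
    WebRTCService.getInstance().setVideoProcessor(new VideoProcessor() {
      @Override
      public VideoFrame onFrameReceived(VideoFrame frame) {
        // Transform and return the frame here; returning null makes the
        // capturer observer above fall back to the unmodified frame.
        return frame;
      }
    });
  }
}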
} + }; + + videoCapturer.initialize(surfaceTextureHelper, applicationContext, customCapturerObserver); + // videoCapturer.initialize( + // surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver()); - VideoCapturerInfo info = new VideoCapturerInfo(); + VideoCapturerInfoEx info = new VideoCapturerInfoEx(); Integer videoWidth = getConstrainInt(videoConstraintsMap, "width"); - info.width = videoWidth != null + int targetWidth = videoWidth != null ? videoWidth : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minWidth") ? videoConstraintsMandatory.getInt("minWidth") : DEFAULT_WIDTH; Integer videoHeight = getConstrainInt(videoConstraintsMap, "height"); - info.height = videoHeight != null + int targetHeight = videoHeight != null ? videoHeight : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minHeight") ? videoConstraintsMandatory.getInt("minHeight") : DEFAULT_HEIGHT; Integer videoFrameRate = getConstrainInt(videoConstraintsMap, "frameRate"); - info.fps = videoFrameRate != null + int targetFps = videoFrameRate != null ? videoFrameRate : videoConstraintsMandatory != null && videoConstraintsMandatory.hasKey("minFrameRate") ? videoConstraintsMandatory.getInt("minFrameRate") : DEFAULT_FPS; + info.width = targetWidth; + info.height = targetHeight; + info.fps = targetFps; info.capturer = videoCapturer; - videoCapturer.startCapture(info.width, info.height, info.fps); + info.cameraName = deviceId; + + // Find actual capture format. + Size actualSize = null; + if (videoCapturer instanceof Camera1Capturer) { + int cameraId = Camera1Helper.getCameraId(deviceId); + actualSize = Camera1Helper.findClosestCaptureFormat(cameraId, targetWidth, targetHeight); + } else if (videoCapturer instanceof Camera2Capturer) { + CameraManager cameraManager = (CameraManager) applicationContext.getSystemService(Context.CAMERA_SERVICE); + actualSize = Camera2Helper.findClosestCaptureFormat(cameraManager, deviceId, targetWidth, targetHeight); + } + + if (actualSize != null) { + info.width = actualSize.width; + info.height = actualSize.height; + } + + info.cameraEventsHandler = cameraEventsHandler; + videoCapturer.startCapture(targetWidth, targetHeight, targetFps); + + cameraEventsHandler.waitForCameraOpen(); String trackId = stateProvider.getNextTrackUUID(); mVideoCapturers.put(trackId, info); mSurfaceTextureHelpers.put(trackId, surfaceTextureHelper); - Log.d(TAG, "changeCaptureFormat: " + info.width + "x" + info.height + "@" + info.fps); - videoSource.adaptOutputFormat(info.width, info.height, info.fps); + Log.d(TAG, "Target: " + targetWidth + "x" + targetHeight + "@" + targetFps + ", Actual: " + info.width + "x" + info.height + "@" + info.fps); VideoTrack track = pcFactory.createVideoTrack(trackId, videoSource); mediaStream.addTrack(track); - stateProvider.putLocalTrack(track.id(), track); + LocalVideoTrack localVideoTrack = new LocalVideoTrack(track); + videoSource.setVideoProcessor(localVideoTrack); + + stateProvider.putLocalTrack(track.id(),localVideoTrack); ConstraintsMap trackParams = new ConstraintsMap(); @@ -819,33 +881,30 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi return trackParams; } - void removeVideoCapturerSync(String id) { - synchronized (mVideoCapturers) { - VideoCapturerInfo info = mVideoCapturers.get(id); - if (info != null) { - try { - info.capturer.stopCapture(); - } catch (InterruptedException e) { - Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); - } finally { - 
info.capturer.dispose(); - mVideoCapturers.remove(id); - SurfaceTextureHelper helper = mSurfaceTextureHelpers.get(id); - if (helper != null) { - helper.stopListening(); - helper.dispose(); - mSurfaceTextureHelpers.remove(id); - } + void removeVideoCapturer(String id) { + VideoCapturerInfoEx info = mVideoCapturers.get(id); + if (info != null) { + try { + info.capturer.stopCapture(); + if (info.cameraEventsHandler != null) { + info.cameraEventsHandler.waitForCameraClosed(); + } + } catch (InterruptedException e) { + Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); + } finally { + info.capturer.dispose(); + mVideoCapturers.remove(id); + SurfaceTextureHelper helper = mSurfaceTextureHelpers.get(id); + if (helper != null) { + helper.stopListening(); + helper.dispose(); + mSurfaceTextureHelpers.remove(id); } } } } - void removeVideoCapturer(String id) { - new Thread(() -> { - removeVideoCapturerSync(id); - }).start(); - } + @RequiresApi(api = VERSION_CODES.M) private void requestPermissions( @@ -915,7 +974,7 @@ void switchCamera(String id, Result result) { @Override public void onCameraSwitchDone(boolean b) { isFacing = !isFacing; - isTorchOn = false; + result.success(b); } @@ -973,302 +1032,13 @@ void stopRecording(Integer id) { } } - void hasTorch(String trackId, Result result) { - VideoCapturerInfo info = mVideoCapturers.get(trackId); - if (info == null) { - resultError("hasTorch", "Video capturer not found for id: " + trackId, result); - return; - } - - if (VERSION.SDK_INT >= VERSION_CODES.LOLLIPOP && info.capturer instanceof Camera2Capturer) { - CameraManager manager; - CameraDevice cameraDevice; + - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); - manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - boolean flashIsAvailable; - try { - CameraCharacteristics characteristics = - manager.getCameraCharacteristics(cameraDevice.getId()); - flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - result.success(flashIsAvailable); - return; - } - - if (info.capturer instanceof Camera1Capturer) { - Camera camera; - - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - Parameters params = camera.getParameters(); - List supportedModes = params.getSupportedFlashModes(); - - result.success( - supportedModes != null && supportedModes.contains(Parameters.FLASH_MODE_TORCH)); - return; - } - - resultError("hasTorch", "[TORCH] Video capturer not compatible", result); - } - - @RequiresApi(api = VERSION_CODES.LOLLIPOP) - void setZoom(String trackId, double zoomLevel, Result result) { - 
VideoCapturerInfo info = mVideoCapturers.get(trackId); - if (info == null) { - resultError("setZoom", "Video capturer not found for id: " + trackId, result); - return; - } - - if (info.capturer instanceof Camera2Capturer) { - CameraCaptureSession captureSession; - CameraDevice cameraDevice; - CaptureFormat captureFormat; - int fpsUnitFactor; - Surface surface; - Handler cameraThreadHandler; - CameraManager manager; - - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); - manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); - captureSession = - (CameraCaptureSession) - getPrivateProperty(session.getClass(), session, "captureSession"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - captureFormat = - (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); - fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); - surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); - cameraThreadHandler = - (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - try { - final CaptureRequest.Builder captureRequestBuilder = - cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); - - final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); - final Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); - final double maxZoomLevel = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); - - final double desiredZoomLevel = Math.max(1.0, Math.min(zoomLevel, maxZoomLevel)); - - float ratio = 1.0f / (float)desiredZoomLevel; - - if (rect != null) { - int croppedWidth = rect.width() - Math.round((float) rect.width() * ratio); - int croppedHeight = rect.height() - Math.round((float) rect.height() * ratio); - final Rect desiredRegion = new Rect(croppedWidth / 2, croppedHeight / 2, rect.width() - croppedWidth / 2, rect.height() - croppedHeight / 2); - captureRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, desiredRegion); - } - - captureRequestBuilder.set( - CaptureRequest.FLASH_MODE, - isTorchOn ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, - new Range<>( - captureFormat.framerate.min / fpsUnitFactor, - captureFormat.framerate.max / fpsUnitFactor)); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); - captureRequestBuilder.addTarget(surface); - captureSession.setRepeatingRequest( - captureRequestBuilder.build(), null, cameraThreadHandler); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - - - result.success(null); - return; - } - - if (info.capturer instanceof Camera1Capturer) { - Camera camera; - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - Camera.Parameters params = camera.getParameters(); - params.setFlashMode( - isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); - if(params.isZoomSupported()) { - int maxZoom = params.getMaxZoom(); - double desiredZoom = Math.max(0, Math.min(zoomLevel, maxZoom)); - params.setZoom((int)desiredZoom); - result.success(null); - return; - } - } - resultError("setZoom", "[ZOOM] Video capturer not compatible", result); - } - - @RequiresApi(api = VERSION_CODES.LOLLIPOP) - void setTorch(String trackId, boolean torch, Result result) { - VideoCapturerInfo info = mVideoCapturers.get(trackId); - if (info == null) { - resultError("setTorch", "Video capturer not found for id: " + trackId, result); - return; - } - - if (info.capturer instanceof Camera2Capturer) { - CameraCaptureSession captureSession; - CameraDevice cameraDevice; - CaptureFormat captureFormat; - int fpsUnitFactor; - Surface surface; - Handler cameraThreadHandler; - - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); - CameraManager manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); - captureSession = - (CameraCaptureSession) - getPrivateProperty(session.getClass(), session, "captureSession"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - captureFormat = - (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); - fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); - surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); - cameraThreadHandler = - (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - try { - final CaptureRequest.Builder captureRequestBuilder = - cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); - captureRequestBuilder.set( - CaptureRequest.FLASH_MODE, - torch ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, - new Range<>( - captureFormat.framerate.min / fpsUnitFactor, - captureFormat.framerate.max / fpsUnitFactor)); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); - captureRequestBuilder.addTarget(surface); - captureSession.setRepeatingRequest( - captureRequestBuilder.build(), null, cameraThreadHandler); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - - result.success(null); - isTorchOn = torch; - return; - } - - if (info.capturer instanceof Camera1Capturer) { - Camera camera; - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - Camera.Parameters params = camera.getParameters(); - params.setFlashMode( - torch ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); - camera.setParameters(params); - - result.success(null); - isTorchOn = torch; - return; - } - resultError("setTorch", "[TORCH] Video capturer not compatible", result); - } - - private Object getPrivateProperty(Class klass, Object object, String fieldName) - throws NoSuchFieldWithNameException { - try { - Field field = klass.getDeclaredField(fieldName); - field.setAccessible(true); - return field.get(object); - } catch (NoSuchFieldException e) { - throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); - } catch (IllegalAccessException e) { - // Should never happen since we are calling `setAccessible(true)` - throw new RuntimeException(e); - } - } - - private class NoSuchFieldWithNameException extends NoSuchFieldException { - - String className; - String fieldName; - - NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { - super(e.getMessage()); - this.className = className; - this.fieldName = fieldName; - } - } public void reStartCamera(IsCameraEnabled getCameraId) { - for (Map.Entry item : mVideoCapturers.entrySet()) { + for (Map.Entry item : mVideoCapturers.entrySet()) { if (!item.getValue().isScreenCapture && getCameraId.isEnabled(item.getKey())) { item.getValue().capturer.startCapture( item.getValue().width, @@ -1283,27 +1053,21 @@ public interface IsCameraEnabled { boolean isEnabled(String id); } - public class VideoCapturerInfo { - VideoCapturer capturer; - int width; - int height; - int fps; - boolean isScreenCapture = false; + public static class VideoCapturerInfoEx extends VideoCapturerInfo { + public CameraEventsHandler cameraEventsHandler; + } + + public VideoCapturerInfoEx getCapturerInfo(String trackId) { + return mVideoCapturers.get(trackId); } @RequiresApi(api = VERSION_CODES.M) - void setPreferredInputDevice(String deviceId) { + void setPreferredInputDevice(int i) { android.media.AudioManager audioManager = ((android.media.AudioManager) applicationContext.getSystemService(Context.AUDIO_SERVICE)); final AudioDeviceInfo[] devices = audioManager.getDevices(android.media.AudioManager.GET_DEVICES_INPUTS); - if 
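As the setPreferredInputDevice hunk around this point shows, the method now takes an index into AudioManager.getDevices(GET_DEVICES_INPUTS) instead of a device-id string, which matches the Integer.parseInt(deviceId) call sites elsewhere in this diff and the index-valued deviceId entries that getSources emits further down. A minimal sketch of that convention, with the helper class invented for illustration and API 23+ assumed:

import android.content.Context;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;

final class InputDeviceResolver { // hypothetical helper, not part of this patch
  /** Resolves a Dart-side "deviceId" (a list index) to an input device. */
  static AudioDeviceInfo resolve(Context context, String deviceId) {
    AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    AudioDeviceInfo[] inputs = am.getDevices(AudioManager.GET_DEVICES_INPUTS);
    int index = Integer.parseInt(deviceId);
    return (index >= 0 && index < inputs.length) ? inputs[index] : null;
  }
}

The trade-off is that the device list is re-enumerated on every call, so an index can point at a different device after a headset connects or disconnects.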
(devices.length > 0) { - for (int i = 0; i < devices.length; i++) { - AudioDeviceInfo device = devices[i]; - if(deviceId.equals(AudioUtils.getAudioDeviceId(device))) { - preferredInput = device; - audioDeviceModule.setPreferredInputDevice(preferredInput); - return; - } - } + if (devices.length > i) { + preferredInput = devices[i]; + audioDeviceModule.setPreferredInputDevice(preferredInput); } } diff --git a/android/src/main/java/live/videosdk/webrtc/LocalTrack.java b/android/src/main/java/live/videosdk/webrtc/LocalTrack.java new file mode 100644 index 0000000000..4cc5b0cb78 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/LocalTrack.java @@ -0,0 +1,31 @@ +package live.videosdk.webrtc; + +import org.webrtc.MediaStreamTrack; + +public class LocalTrack { + public LocalTrack(MediaStreamTrack track) { + this.track = track; + } + + public MediaStreamTrack track; + + public void dispose() { + track.dispose(); + } + + public boolean enabled() { + return track.enabled(); + } + + public void setEnabled(boolean enabled) { + track.setEnabled(enabled); + } + + public String id() { + return track.id(); + } + + public String kind() { + return track.kind(); + } +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/live/videosdk/webrtc/MethodCallHandlerImpl.java similarity index 89% rename from android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java rename to android/src/main/java/live/videosdk/webrtc/MethodCallHandlerImpl.java index ee46a609b8..0c002206ce 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java +++ b/android/src/main/java/live/videosdk/webrtc/MethodCallHandlerImpl.java @@ -1,6 +1,6 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; -import static com.cloudwebrtc.webrtc.utils.MediaConstraintsUtils.parseMediaConstraints; +import static live.videosdk.webrtc.utils.MediaConstraintsUtils.parseMediaConstraints; import android.app.Activity; import android.content.Context; @@ -8,6 +8,7 @@ import android.graphics.SurfaceTexture; import android.hardware.Camera; import android.hardware.Camera.CameraInfo; +import android.media.MediaRecorder; import android.media.AudioAttributes; import android.media.AudioDeviceInfo; import android.os.Build; @@ -17,21 +18,29 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; - -import com.cloudwebrtc.webrtc.audio.AudioDeviceKind; -import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; -import com.cloudwebrtc.webrtc.audio.AudioUtils; -import com.cloudwebrtc.webrtc.record.AudioChannel; -import com.cloudwebrtc.webrtc.record.FrameCapturer; -import com.cloudwebrtc.webrtc.utils.AnyThreadResult; -import com.cloudwebrtc.webrtc.utils.Callback; -import com.cloudwebrtc.webrtc.utils.ConstraintsArray; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; -import com.cloudwebrtc.webrtc.utils.EglUtils; -import com.cloudwebrtc.webrtc.utils.ObjectType; -import com.cloudwebrtc.webrtc.utils.PermissionUtils; -import com.cloudwebrtc.webrtc.utils.Utils; +import live.videosdk.webrtc.audio.AudioProcessingController; +import live.videosdk.webrtc.audio.AudioDeviceKind; +import live.videosdk.webrtc.audio.AudioSwitchManager; +import live.videosdk.webrtc.audio.LocalAudioTrack; +import live.videosdk.webrtc.audio.PlaybackSamplesReadyCallbackAdapter; +import live.videosdk.webrtc.audio.RecordSamplesReadyCallbackAdapter; +import live.videosdk.webrtc.audio.AudioUtils; +import 
live.videosdk.webrtc.record.AudioChannel; +import live.videosdk.webrtc.record.FrameCapturer; +import live.videosdk.webrtc.utils.AnyThreadResult; +import live.videosdk.webrtc.utils.Callback; +import live.videosdk.webrtc.utils.ConstraintsArray; +import live.videosdk.webrtc.utils.ConstraintsMap; +import live.videosdk.webrtc.utils.EglUtils; +import live.videosdk.webrtc.utils.ObjectType; +import live.videosdk.webrtc.utils.PermissionUtils; +import live.videosdk.webrtc.utils.Utils; +import live.videosdk.webrtc.video.VideoCapturerInfo; +import live.videosdk.webrtc.video.camera.CameraUtils; +import live.videosdk.webrtc.video.camera.Point; +import live.videosdk.webrtc.video.LocalVideoTrack; import com.twilio.audioswitch.AudioDevice; +import live.videosdk.webrtc.LocalTrack; import org.webrtc.AudioTrack; import org.webrtc.CryptoOptions; @@ -96,14 +105,18 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider { private final TextureRegistry textures; private PeerConnectionFactory mFactory; private final Map<String, MediaStream> localStreams = new HashMap<>(); - private final Map<String, MediaStreamTrack> localTracks = new HashMap<>(); + private final Map<String, LocalTrack> localTracks = new HashMap<>(); private final LongSparseArray<FlutterRTCVideoRenderer> renders = new LongSparseArray<>(); + public RecordSamplesReadyCallbackAdapter recordSamplesReadyCallbackAdapter; + + public PlaybackSamplesReadyCallbackAdapter playbackSamplesReadyCallbackAdapter; /** * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce * complexity and to (somewhat) separate concerns. */ private GetUserMediaImpl getUserMediaImpl; + private CameraUtils cameraUtils; private AudioDeviceModule audioDeviceModule; @@ -115,6 +128,8 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider { private CustomVideoDecoderFactory videoDecoderFactory; + public AudioProcessingController audioProcessingController; + MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry) { this.context = context; this.textures = textureRegistry; @@ -133,7 +148,7 @@ void dispose() { mediaStream.dispose(); } localStreams.clear(); - for (final MediaStreamTrack track : localTracks.values()) { + for (final LocalTrack track : localTracks.values()) { track.dispose(); } localTracks.clear(); @@ -142,7 +157,7 @@ void dispose() { } mPeerConnectionObservers.clear(); } - private void initialize(int networkIgnoreMask, boolean forceSWCodec, List<String> forceSWCodecList, + private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List<String> forceSWCodecList, @Nullable ConstraintsMap androidAudioConfiguration) { if (mFactory != null) { return; @@ -155,6 +170,8 @@ private void initialize(int networkIgnoreMask, boolean forceSWCodec, List= Build.VERSION_CODES.Q; + boolean useLowLatency = Build.VERSION.SDK_INT >= Build.VERSION_CODES.O; + audioDeviceModuleBuilder.setUseHardwareAcousticEchoCanceler(useHardwareAudioProcessing) + .setUseLowLatency(useLowLatency) + .setUseHardwareNoiseSuppressor(useHardwareAudioProcessing); + } + audioDeviceModuleBuilder.setSamplesReadyCallback(recordSamplesReadyCallbackAdapter); + audioDeviceModuleBuilder.setPlaybackSamplesReadyCallback(playbackSamplesReadyCallbackAdapter); + + recordSamplesReadyCallbackAdapter.addCallback(getUserMediaImpl.inputSamplesInterceptor); - JavaAudioDeviceModule.Builder audioDeviceModuleBuilder = JavaAudioDeviceModule.builder(context) - .setUseHardwareAcousticEchoCanceler(true) - .setUseHardwareNoiseSuppressor(true) - 
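The hunk continuing below replaces the single setSamplesReadyCallback consumer with recordSamplesReadyCallbackAdapter, which fans the recorded-audio callback out to several consumers at once: the recording interceptor plus every registered LocalAudioTrack. The adapter classes are not included in this diff, so the following is only a guess at their shape; the class and field names are assumptions:

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import org.webrtc.audio.JavaAudioDeviceModule;

// Assumed shape of RecordSamplesReadyCallbackAdapter: one callback in,
// many callbacks out, safe to mutate while audio is flowing.
class SamplesFanOutAdapter implements JavaAudioDeviceModule.SamplesReadyCallback {
  private final List<JavaAudioDeviceModule.SamplesReadyCallback> callbacks =
      new CopyOnWriteArrayList<>();

  void addCallback(JavaAudioDeviceModule.SamplesReadyCallback callback) {
    callbacks.add(callback);
  }

  @Override
  public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) {
    // Deliver the same samples to every consumer registered via addCallback.
    for (JavaAudioDeviceModule.SamplesReadyCallback callback : callbacks) {
      callback.onWebRtcAudioRecordSamplesReady(samples);
    }
  }
}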
.setSamplesReadyCallback(getUserMediaImpl.inputSamplesInterceptor); + recordSamplesReadyCallbackAdapter.addCallback(new JavaAudioDeviceModule.SamplesReadyCallback() { + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + for(LocalTrack track : localTracks.values()) { + if (track instanceof LocalAudioTrack) { + ((LocalAudioTrack) track).onWebRtcAudioRecordSamplesReady(audioSamples); + } + } + } + }); if (audioAttributes != null) { audioDeviceModuleBuilder.setAudioAttributes(audioAttributes); } audioDeviceModule = audioDeviceModuleBuilder.createAudioDeviceModule(); - + if(!bypassVoiceProcessing) { + if(JavaAudioDeviceModule.isBuiltInNoiseSuppressorSupported()) { + audioDeviceModule.setNoiseSuppressorEnabled(true); + } + } getUserMediaImpl.audioDeviceModule = (JavaAudioDeviceModule) audioDeviceModule; final Options options = new Options(); @@ -210,6 +259,9 @@ private void initialize(int networkIgnoreMask, boolean forceSWCodec, List audioTracks = new ArrayList<>(); List videoTracks = new ArrayList<>(); for (AudioTrack track : stream.audioTracks) { - localTracks.put(track.id(), track); + localTracks.put(track.id(), new LocalAudioTrack(track)); Map trackMap = new HashMap<>(); trackMap.put("enabled", track.enabled()); trackMap.put("id", track.id()); @@ -335,7 +391,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { audioTracks.add(trackMap); } for (VideoTrack track : stream.videoTracks) { - localTracks.put(track.id(), track); + localTracks.put(track.id(), new LocalVideoTrack(track)); Map trackMap = new HashMap<>(); trackMap.put("enabled", track.enabled()); trackMap.put("id", track.id()); @@ -386,7 +442,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { RtpSender audioSender = null; for (RtpSender sender : peerConnection.getSenders()) { - if (sender.track().kind().equals("audio")) { + if (sender != null && sender.track() != null && sender.track().kind().equals("audio")) { audioSender = sender; } } @@ -463,7 +519,10 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { for (int i = 0; i < renders.size(); i++) { FlutterRTCVideoRenderer renderer = renders.valueAt(i); if (renderer.checkMediaStream(streamId, "local")) { - renderer.setVideoTrack((VideoTrack) localTracks.get(trackId)); + LocalTrack track = localTracks.get(trackId); + if(track != null) { + renderer.setVideoTrack((VideoTrack) track.track); + } } } break; @@ -557,19 +616,51 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } case "mediaStreamTrackHasTorch": { String trackId = call.argument("trackId"); - getUserMediaImpl.hasTorch(trackId, result); + cameraUtils.hasTorch(trackId, result); break; } case "mediaStreamTrackSetTorch": { String trackId = call.argument("trackId"); boolean torch = call.argument("torch"); - getUserMediaImpl.setTorch(trackId, torch, result); + cameraUtils.setTorch(trackId, torch, result); break; } case "mediaStreamTrackSetZoom": { String trackId = call.argument("trackId"); double zoomLevel = call.argument("zoomLevel"); - getUserMediaImpl.setZoom(trackId, zoomLevel, result); + cameraUtils.setZoom(trackId, zoomLevel, result); + break; + } + case "mediaStreamTrackSetFocusMode": { + cameraUtils.setFocusMode(call, result); + break; + } + case "mediaStreamTrackSetFocusPoint":{ + Map focusPoint = call.argument("focusPoint"); + Boolean reset = (Boolean)focusPoint.get("reset"); + Double x = null; + Double y = null; + if (reset == null || !reset) { + x = 
(Double)focusPoint.get("x"); + y = (Double)focusPoint.get("y"); + } + cameraUtils.setFocusPoint(call, new Point(x, y), result); + break; + } + case "mediaStreamTrackSetExposureMode": { + cameraUtils.setExposureMode(call, result); + break; + } + case "mediaStreamTrackSetExposurePoint": { + Map exposurePoint = call.argument("exposurePoint"); + Boolean reset = (Boolean)exposurePoint.get("reset"); + Double x = null; + Double y = null; + if (reset == null || !reset) { + x = (Double)exposurePoint.get("x"); + y = (Double)exposurePoint.get("y"); + } + cameraUtils.setExposurePoint(call, new Point(x, y), result); break; } case "mediaStreamTrackSwitchCamera": { @@ -603,7 +694,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { case "selectAudioInput": if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1) { String deviceId = call.argument("deviceId"); - getUserMediaImpl.setPreferredInputDevice(deviceId); + getUserMediaImpl.setPreferredInputDevice(Integer.parseInt(deviceId)); result.success(null); } else { result.notImplemented(); @@ -828,7 +919,7 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { case "setPreferredInputDevice": { if (Build.VERSION.SDK_INT > Build.VERSION_CODES.LOLLIPOP_MR1) { String deviceId = call.argument("deviceId"); - getUserMediaImpl.setPreferredInputDevice(deviceId); + getUserMediaImpl.setPreferredInputDevice(Integer.parseInt(deviceId)); result.success(null); } else { result.notImplemented(); @@ -1264,16 +1355,31 @@ public boolean putLocalStream(String streamId, MediaStream stream) { } @Override - public boolean putLocalTrack(String trackId, MediaStreamTrack track) { + public boolean putLocalTrack(String trackId, LocalTrack track) { localTracks.put(trackId, track); return true; } @Override - public MediaStreamTrack getLocalTrack(String trackId) { + public LocalTrack getLocalTrack(String trackId) { return localTracks.get(trackId); } + public MediaStreamTrack getRemoteTrack(String trackId) { + for (Entry entry : mPeerConnectionObservers.entrySet()) { + PeerConnectionObserver pco = entry.getValue(); + MediaStreamTrack track = pco.remoteTracks.get(trackId); + if (track == null) { + track = pco.getTransceiversTrack(trackId); + } + if (track != null) { + return track; + } + } + return null; + } + + @Override public String getNextStreamUUID() { String uuid; @@ -1347,28 +1453,30 @@ MediaStream getStreamForId(String id, String peerConnectionId) { return stream; } - private MediaStreamTrack getTrackForId(String trackId, String peerConnectionId) { - MediaStreamTrack track = localTracks.get(trackId); - - if (track == null) { + public MediaStreamTrack getTrackForId(String trackId, String peerConnectionId) { + LocalTrack localTrack = localTracks.get(trackId); + MediaStreamTrack mediaStreamTrack = null; + if (localTrack == null) { for (Entry entry : mPeerConnectionObservers.entrySet()) { if (peerConnectionId != null && entry.getKey().compareTo(peerConnectionId) != 0) continue; PeerConnectionObserver pco = entry.getValue(); - track = pco.remoteTracks.get(trackId); + mediaStreamTrack = pco.remoteTracks.get(trackId); - if (track == null) { - track = pco.getTransceiversTrack(trackId); + if (mediaStreamTrack == null) { + mediaStreamTrack = pco.getTransceiversTrack(trackId); } - if (track != null) { + if (mediaStreamTrack != null) { break; } } + } else { + mediaStreamTrack = localTrack.track; } - return track; + return mediaStreamTrack; } @@ -1419,8 +1527,8 @@ public void getSources(Result result) { ConstraintsMap audio = new 
ConstraintsMap(); audio.putString("label", "Audio"); audio.putString("deviceId", "audio-1"); + audio.putString("facing", ""); audio.putString("kind", "audioinput"); - audio.putString("groupId", "microphone"); array.pushMap(audio); } else { android.media.AudioManager audioManager = ((android.media.AudioManager) context @@ -1430,10 +1538,27 @@ public void getSources(Result result) { AudioDeviceInfo device = devices[i]; if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC || device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO || device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + int type = (device.getType() & 0xFF); + String label = device.getProductName().toString(); + String address = Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? String.valueOf(i) : device.getAddress(); + + if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { + label = "Built-in Microphone (" + address + ")"; + } + + if(device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { + label = "Wired Headset"; + } + + if(device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { + label = "Bluetooth SCO (" + device.getProductName().toString() + ")"; + } + ConstraintsMap audio = new ConstraintsMap(); - audio.putString("label", AudioUtils.getAudioDeviceLabel(device)); - audio.putString("deviceId", AudioUtils.getAudioDeviceId(device)); - audio.putString("groupId", AudioUtils.getAudioGroupId(device)); + audio.putString("label", label); + audio.putString("deviceId", String.valueOf(i)); + audio.putString("groupId", "" + type); + audio.putString("facing", ""); audio.putString("kind", "audioinput"); array.pushMap(audio); } @@ -1446,7 +1571,7 @@ public void getSources(Result result) { ConstraintsMap audioOutputMap = new ConstraintsMap(); audioOutputMap.putString("label", audioOutput.getName()); audioOutputMap.putString("deviceId", AudioDeviceKind.fromAudioDevice(audioOutput).typeName); - audioOutputMap.putString("groupId", "" + AudioDeviceKind.fromAudioDevice(audioOutput).typeName); + audioOutputMap.putString("facing", ""); audioOutputMap.putString("kind", "audiooutput"); array.pushMap(audioOutputMap); } @@ -1472,14 +1597,14 @@ private void createLocalMediaStream(Result result) { } public void trackDispose(final String trackId) { - MediaStreamTrack track = localTracks.get(trackId); + LocalTrack track = localTracks.get(trackId); if (track == null) { Log.d(TAG, "trackDispose() track is null"); return; } removeTrackForRendererById(trackId); track.setEnabled(false); - if (track.kind().equals("video")) { + if (track instanceof LocalVideoTrack) { getUserMediaImpl.removeVideoCapturer(trackId); } localTracks.remove(trackId); @@ -1537,14 +1662,14 @@ public void mediaStreamAddTrack(final String streamId, final String trackId, Res public void mediaStreamRemoveTrack(final String streamId, final String trackId, Result result) { MediaStream mediaStream = localStreams.get(streamId); if (mediaStream != null) { - MediaStreamTrack track = localTracks.get(trackId); + LocalTrack track = localTracks.get(trackId); if (track != null) { String kind = track.kind(); if (kind.equals("audio")) { - mediaStream.removeTrack((AudioTrack) track); + mediaStream.removeTrack((AudioTrack) track.track); result.success(null); } else if (kind.equals("video")) { - mediaStream.removeTrack((VideoTrack) track); + mediaStream.removeTrack((VideoTrack) track.track); result.success(null); } else { resultError("mediaStreamRemoveTrack", "mediaStreamRemoveTrack() track [" + trackId + "] has unsupported type: " + kind, result); @@ -1563,7 +1688,7 @@ public void 
mediaStreamTrackRelease(final String streamId, final String _trackId Log.d(TAG, "mediaStreamTrackRelease() stream is null"); return; } - MediaStreamTrack track = localTracks.get(_trackId); + LocalTrack track = localTracks.get(_trackId); if (track == null) { Log.d(TAG, "mediaStreamTrackRelease() track is null"); return; @@ -1571,9 +1696,9 @@ public void mediaStreamTrackRelease(final String streamId, final String _trackId track.setEnabled(false); // should we do this? localTracks.remove(_trackId); if (track.kind().equals("audio")) { - stream.removeTrack((AudioTrack) track); + stream.removeTrack((AudioTrack) track.track); } else if (track.kind().equals("video")) { - stream.removeTrack((VideoTrack) track); + stream.removeTrack((VideoTrack) track.track); getUserMediaImpl.removeVideoCapturer(_trackId); } } @@ -1594,7 +1719,6 @@ public ConstraintsMap getCameraInfo(int index) { params.putString("deviceId", "" + index); params.putString("facing", facing); params.putString("kind", "videoinput"); - params.putString("groupId", "camera"); return params; } @@ -1944,7 +2068,7 @@ public void setActivity(Activity activity) { public void addTrack(String peerConnectionId, String trackId, List streamIds, Result result) { PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); - MediaStreamTrack track = localTracks.get(trackId); + LocalTrack track = localTracks.get(trackId); if (track == null) { resultError("addTrack", "track is null", result); return; @@ -1952,7 +2076,7 @@ public void addTrack(String peerConnectionId, String trackId, List strea if (pco == null || pco.getPeerConnection() == null) { resultError("addTrack", "peerConnection is null", result); } else { - pco.addTrack(track, streamIds, result); + pco.addTrack(track.track, streamIds, result); } } @@ -1968,7 +2092,7 @@ public void removeTrack(String peerConnectionId, String senderId, Result result) public void addTransceiver(String peerConnectionId, String trackId, Map transceiverInit, Result result) { PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); - MediaStreamTrack track = localTracks.get(trackId); + LocalTrack track = localTracks.get(trackId); if (track == null) { resultError("addTransceiver", "track is null", result); return; @@ -1976,7 +2100,7 @@ public void addTransceiver(String peerConnectionId, String trackId, Map 0) { - track = localTracks.get(trackId); + if (track == null) { resultError("rtpSenderSetTrack", "track is null", result); return; } } - pco.rtpSenderSetTrack(rtpSenderId, track, result, replace); + + if(track != null) { + mediaStreamTrack = track.track; + } + pco.rtpSenderSetTrack(rtpSenderId, mediaStreamTrack, result, replace); } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java b/android/src/main/java/live/videosdk/webrtc/OrientationAwareScreenCapturer.java similarity index 74% rename from android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java rename to android/src/main/java/live/videosdk/webrtc/OrientationAwareScreenCapturer.java index 13a46c4964..151c954bae 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/OrientationAwareScreenCapturer.java +++ b/android/src/main/java/live/videosdk/webrtc/OrientationAwareScreenCapturer.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; import org.webrtc.SurfaceTextureHelper; import org.webrtc.CapturerObserver; @@ -17,10 +17,6 @@ import android.hardware.display.DisplayManager; import android.hardware.display.VirtualDisplay; import 
android.media.projection.MediaProjectionManager; -import android.os.Looper; -import android.os.Handler; -import android.os.Build; -import android.view.Display; /** * A copy of ScreenCapturerAndroid to capture the screen content while being aware of device orientation * @@ -35,8 +31,6 @@ public class OrientationAwareScreenCapturer implements VideoCapturer, VideoSink { private final MediaProjection.Callback mediaProjectionCallback; private int width; private int height; - private int oldWidth; - private int oldHeight; private VirtualDisplay virtualDisplay; private SurfaceTextureHelper surfaceTextureHelper; private CapturerObserver capturerObserver; @@ -64,13 +58,15 @@ public OrientationAwareScreenCapturer(Intent mediaProjectionPermissionResultData public void onFrame(VideoFrame frame) { checkNotDisposed(); - this.isPortrait = isDeviceOrientationPortrait(); - final int max = Math.max(this.height, this.width); - final int min = Math.min(this.height, this.width); - if (this.isPortrait) { - changeCaptureFormat(min, max, 15); - } else { - changeCaptureFormat(max, min, 15); + final boolean isOrientationPortrait = isDeviceOrientationPortrait(); + if (isOrientationPortrait != this.isPortrait) { + this.isPortrait = isOrientationPortrait; + + if (this.isPortrait) { + changeCaptureFormat(this.width, this.height, 15); + } else { + changeCaptureFormat(this.height, this.width, 15); + } } capturerObserver.onFrameCaptured(frame); } @@ -87,7 +83,6 @@ private void checkNotDisposed() { throw new RuntimeException("capturer is disposed."); } } - public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext, final CapturerObserver capturerObserver) { checkNotDisposed(); @@ -105,7 +100,6 @@ public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHel this.mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService( Context.MEDIA_PROJECTION_SERVICE); } - @Override public synchronized void startCapture( final int width, final int height, final int ignoredFramerate) { @@ -130,7 +124,6 @@ public synchronized void startCapture( capturerObserver.onCapturerStarted(true); surfaceTextureHelper.startListening(this); } - @Override public synchronized void stopCapture() { checkNotDisposed(); @@ -153,62 +146,37 @@ public void run() { } }); } - @Override public synchronized void dispose() { isDisposed = true; } - /** * Changes output video format. This method can be used to scale the output * video, or to change orientation when the captured screen is rotated for example. 
* - * @param width new output video width - * @param height new output video height + * @param width new output video width + * @param height new output video height * @param ignoredFramerate ignored */ @Override public synchronized void changeCaptureFormat( final int width, final int height, final int ignoredFramerate) { checkNotDisposed(); - if (this.oldWidth != width || this.oldHeight != height) { - this.oldWidth = width; - this.oldHeight = height; - - if (oldHeight > oldWidth) { - ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { - @Override - public void run() { - if (virtualDisplay != null && surfaceTextureHelper != null) { - virtualDisplay.setSurface(new Surface(surfaceTextureHelper.getSurfaceTexture())); - surfaceTextureHelper.setTextureSize(oldWidth, oldHeight); - virtualDisplay.resize(oldWidth, oldHeight, VIRTUAL_DISPLAY_DPI); - } - } - }); - } + this.width = width; + this.height = height; + if (virtualDisplay == null) { + // Capturer is stopped, the virtual display will be created in startCapture(). + return; + } - if (oldWidth > oldHeight) { - surfaceTextureHelper.setTextureSize(oldWidth, oldHeight); - virtualDisplay.setSurface(new Surface(surfaceTextureHelper.getSurfaceTexture())); - final Handler handler = new Handler(Looper.getMainLooper()); - handler.postDelayed(new Runnable() { - @Override - public void run() { - ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { - @Override - public void run() { - if (virtualDisplay != null && surfaceTextureHelper != null) { - virtualDisplay.resize(oldWidth, oldHeight, VIRTUAL_DISPLAY_DPI); - } - } - }); - } - }, 700); + ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() { + @Override + public void run() { + surfaceTextureHelper.setTextureSize(width, height); + virtualDisplay.resize(width, height, VIRTUAL_DISPLAY_DPI); } - } + }); } - private void createVirtualDisplay() { surfaceTextureHelper.setTextureSize(width, height); surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height); diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java b/android/src/main/java/live/videosdk/webrtc/PeerConnectionObserver.java similarity index 99% rename from android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java rename to android/src/main/java/live/videosdk/webrtc/PeerConnectionObserver.java index a4e8736268..38488c4e5a 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/PeerConnectionObserver.java +++ b/android/src/main/java/live/videosdk/webrtc/PeerConnectionObserver.java @@ -1,14 +1,14 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; import android.util.Log; import androidx.annotation.Nullable; -import com.cloudwebrtc.webrtc.audio.AudioSwitchManager; -import com.cloudwebrtc.webrtc.utils.AnyThreadSink; -import com.cloudwebrtc.webrtc.utils.ConstraintsArray; -import com.cloudwebrtc.webrtc.utils.ConstraintsMap; -import com.cloudwebrtc.webrtc.utils.Utils; +import live.videosdk.webrtc.audio.AudioSwitchManager; +import live.videosdk.webrtc.utils.AnyThreadSink; +import live.videosdk.webrtc.utils.ConstraintsArray; +import live.videosdk.webrtc.utils.ConstraintsMap; +import live.videosdk.webrtc.utils.Utils; import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; @@ -511,7 +511,6 @@ public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) { String transceiverId = transceiver.getMid(); if (null == 
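The changeCaptureFormat rewrite above replaces the old oldWidth/oldHeight bookkeeping and 700 ms main-thread delay with a single resize performed on the SurfaceTextureHelper thread; together with the onFrame change earlier, a device rotation reduces to one call with swapped dimensions. Roughly, for a capture started in portrait (the numbers and the helper below are illustrative only):

import live.videosdk.webrtc.OrientationAwareScreenCapturer;

final class RotationExample { // hypothetical, not part of this patch
  // After startCapture(1080, 1920, 15) in portrait, a flip to landscape is
  // detected in onFrame(), which then effectively performs:
  static void onRotatedToLandscape(OrientationAwareScreenCapturer capturer) {
    capturer.changeCaptureFormat(1920, 1080, 15); // width/height swapped
  }
}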
transceiverId) { transceiverId = stateProvider.getNextStreamUUID(); - this.transceivers.put(transceiverId,transceiver); } params.putMap("transceiver", transceiverToMap(transceiverId, transceiver)); } @@ -1107,7 +1106,6 @@ public void getTransceivers(Result result) { String transceiverId = transceiver.getMid(); if (null == transceiverId) { transceiverId = stateProvider.getNextStreamUUID(); - this.transceivers.put(transceiverId,transceiver); } transceiversParams.pushMap(new ConstraintsMap(transceiverToMap(transceiverId, transceiver))); } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt b/android/src/main/java/live/videosdk/webrtc/SimulcastVideoEncoderFactoryWrapper.kt similarity index 99% rename from android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt rename to android/src/main/java/live/videosdk/webrtc/SimulcastVideoEncoderFactoryWrapper.kt index 7ad366d387..474ad98a73 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/SimulcastVideoEncoderFactoryWrapper.kt +++ b/android/src/main/java/live/videosdk/webrtc/SimulcastVideoEncoderFactoryWrapper.kt @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc +package live.videosdk.webrtc import org.webrtc.* import java.util.concurrent.Callable @@ -168,11 +168,12 @@ internal class SimulcastVideoEncoderFactoryWrapper( return future.get() } - override fun createNative(webrtcEnvRef: Long): Long { + override fun createNative(webrtcEnvRef: Long): Long { val future = executor.submit(Callable { return@Callable encoder.createNative(webrtcEnvRef) }) return future.get() } + override fun isHardwareEncoder(): Boolean { val future = executor.submit(Callable { return@Callable encoder.isHardwareEncoder }) return future.get() diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java b/android/src/main/java/live/videosdk/webrtc/StateProvider.java similarity index 85% rename from android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java rename to android/src/main/java/live/videosdk/webrtc/StateProvider.java index 83dbd7fa8d..f215d3b037 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/StateProvider.java +++ b/android/src/main/java/live/videosdk/webrtc/StateProvider.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; import android.app.Activity; import android.content.Context; @@ -20,9 +20,9 @@ public interface StateProvider { boolean putLocalStream(String streamId, MediaStream stream); - boolean putLocalTrack(String trackId, MediaStreamTrack track); + boolean putLocalTrack(String trackId, LocalTrack track); - MediaStreamTrack getLocalTrack(String trackId); + LocalTrack getLocalTrack(String trackId); String getNextStreamUUID(); diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java b/android/src/main/java/live/videosdk/webrtc/SurfaceTextureRenderer.java similarity index 96% rename from android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java rename to android/src/main/java/live/videosdk/webrtc/SurfaceTextureRenderer.java index 32b4745b75..2bd0ecac01 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/SurfaceTextureRenderer.java +++ b/android/src/main/java/live/videosdk/webrtc/SurfaceTextureRenderer.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc; +package live.videosdk.webrtc; import android.graphics.SurfaceTexture; @@ -95,6 +95,10 @@ public void pauseVideo() { // VideoSink interface. 
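The SimulcastVideoEncoderFactoryWrapper hunk above funnels every encoder call onto a single executor thread and blocks on the returned Future, keeping the wrapped encoder thread-confined. A minimal Java sketch of that pattern (Wrapped is a hypothetical stand-in for the encoder type):

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final class ThreadConfinedEncoderProxy {
    interface Wrapped { long createNative(long webrtcEnvRef); } // hypothetical

    private final ExecutorService executor = Executors.newSingleThreadExecutor();
    private final Wrapped encoder;

    ThreadConfinedEncoderProxy(Wrapped encoder) { this.encoder = encoder; }

    long createNative(final long webrtcEnvRef) throws Exception {
        // Hop onto the dedicated thread, then block until the call completes,
        // mirroring executor.submit(...).get() in the Kotlin wrapper above.
        return executor.submit((Callable<Long>) () -> encoder.createNative(webrtcEnvRef)).get();
    }
}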
@Override public void onFrame(VideoFrame frame) { + if(!isFirstFrameRendered) { + texture.setDefaultBufferSize(frame.getRotatedWidth(), frame.getRotatedHeight()); + createEglSurface(texture); + } updateFrameDimensionsAndReportEvents(frame); super.onFrame(frame); } @@ -104,7 +108,7 @@ public void onFrame(VideoFrame frame) { public void surfaceCreated(final SurfaceTexture texture) { ThreadUtils.checkIsOnMainThread(); this.texture = texture; - createEglSurface(texture); + } public void surfaceDestroyed() { diff --git a/android/src/main/java/live/videosdk/webrtc/VideoProcessor.java b/android/src/main/java/live/videosdk/webrtc/VideoProcessor.java new file mode 100644 index 0000000000..769381e1de --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/VideoProcessor.java @@ -0,0 +1,5 @@ +package live.videosdk.webrtc; +import org.webrtc.VideoFrame; +public interface VideoProcessor { + VideoFrame onFrameReceived(VideoFrame frame); +} diff --git a/android/src/main/java/live/videosdk/webrtc/WebRTCService.java b/android/src/main/java/live/videosdk/webrtc/WebRTCService.java new file mode 100644 index 0000000000..8974255c0f --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/WebRTCService.java @@ -0,0 +1,37 @@ +package live.videosdk.webrtc; + +import live.videosdk.webrtc.VideoProcessor; +import android.util.Log; + +public class WebRTCService { + + private static final String TAG = "WebRTCService"; + private static WebRTCService instance; + + private VideoProcessor videoProcessor; + + // Private constructor to prevent instantiation from outside + private WebRTCService() { + // Initialization logic if any + } + + // Static method to get the singleton instance + public static synchronized WebRTCService getInstance() { + if (instance == null) { + instance = new WebRTCService(); + } + return instance; + } + + // Method to set the VideoProcessor + public void setVideoProcessor(VideoProcessor videoProcessor) { + this.videoProcessor = videoProcessor; + } + + // Method to get the current VideoProcessor + public VideoProcessor getVideoProcessor() { + return videoProcessor; + } + + // Other methods related to WebRTC service can be added here +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java b/android/src/main/java/live/videosdk/webrtc/audio/AudioDeviceKind.java similarity index 91% rename from android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java rename to android/src/main/java/live/videosdk/webrtc/audio/AudioDeviceKind.java index df6a7cd7c8..7b15a912d1 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioDeviceKind.java +++ b/android/src/main/java/live/videosdk/webrtc/audio/AudioDeviceKind.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.audio; +package live.videosdk.webrtc.audio; import androidx.annotation.Nullable; @@ -6,7 +6,7 @@ public enum AudioDeviceKind { BLUETOOTH("bluetooth", AudioDevice.BluetoothHeadset.class), - WIRED_HEADSET("wired-headset", AudioDevice.WiredHeadset.class), + WIRED_HEADSET("headset", AudioDevice.WiredHeadset.class), SPEAKER("speaker", AudioDevice.Speakerphone.class), EARPIECE("earpiece", AudioDevice.Earpiece.class); diff --git a/android/src/main/java/live/videosdk/webrtc/audio/AudioProcessingAdapter.java b/android/src/main/java/live/videosdk/webrtc/audio/AudioProcessingAdapter.java new file mode 100644 index 0000000000..95fdc8e3b0 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/audio/AudioProcessingAdapter.java @@ -0,0 +1,59 @@ +package live.videosdk.webrtc.audio; + +import 
org.webrtc.ExternalAudioProcessingFactory; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +public class AudioProcessingAdapter implements ExternalAudioProcessingFactory.AudioProcessing { + public interface ExternalAudioFrameProcessing { + void initialize(int sampleRateHz, int numChannels); + + void reset(int newRate); + + void process(int numBands, int numFrames, ByteBuffer buffer); + } + + public AudioProcessingAdapter() {} + List<ExternalAudioFrameProcessing> audioProcessors = new ArrayList<>(); + + public void addProcessor(ExternalAudioFrameProcessing audioProcessor) { + synchronized (audioProcessors) { + audioProcessors.add(audioProcessor); + } + } + + public void removeProcessor(ExternalAudioFrameProcessing audioProcessor) { + synchronized (audioProcessors) { + audioProcessors.remove(audioProcessor); + } + } + + @Override + public void initialize(int sampleRateHz, int numChannels) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.initialize(sampleRateHz, numChannels); + } + } + } + + @Override + public void reset(int newRate) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.reset(newRate); + } + } + } + + @Override + public void process(int numBands, int numFrames, ByteBuffer buffer) { + synchronized (audioProcessors) { + for (ExternalAudioFrameProcessing audioProcessor : audioProcessors) { + audioProcessor.process(numBands, numFrames, buffer); + } + } + } +} \ No newline at end of file diff --git a/android/src/main/java/live/videosdk/webrtc/audio/AudioProcessingController.java b/android/src/main/java/live/videosdk/webrtc/audio/AudioProcessingController.java new file mode 100644 index 0000000000..4638464a72 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/audio/AudioProcessingController.java @@ -0,0 +1,24 @@ +package live.videosdk.webrtc.audio; + +import org.webrtc.ExternalAudioProcessingFactory; + +public class AudioProcessingController { + /** + * This is the audio processing module that will be applied to the audio stream after it is captured from the microphone. + * This is useful for adding echo cancellation, noise suppression, etc. + */ + public final AudioProcessingAdapter capturePostProcessing = new AudioProcessingAdapter(); + /** + * This is the audio processing module that will be applied to the audio stream before it is rendered to the speaker.
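A usage sketch for the adapter and controller defined here (hedged: the meter processor is hypothetical; addProcessor and the initialize/reset/process contract come from the interface above):

// Hypothetical tap that inspects captured audio before it is encoded.
AudioProcessingAdapter.ExternalAudioFrameProcessing meter =
    new AudioProcessingAdapter.ExternalAudioFrameProcessing() {
      @Override public void initialize(int sampleRateHz, int numChannels) {}
      @Override public void reset(int newRate) {}
      @Override public void process(int numBands, int numFrames, java.nio.ByteBuffer buffer) {
        // Read (or rewrite) the audio frame in-place here.
      }
    };
AudioProcessingController controller = new AudioProcessingController();
controller.capturePostProcessing.addProcessor(meter); // runs after microphone capture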
+ */ + public final AudioProcessingAdapter renderPreProcessing = new AudioProcessingAdapter(); + + public ExternalAudioProcessingFactory externalAudioProcessingFactory; + + public AudioProcessingController() { + this.externalAudioProcessingFactory = new ExternalAudioProcessingFactory(); + this.externalAudioProcessingFactory.setCapturePostProcessing(capturePostProcessing); + this.externalAudioProcessingFactory.setRenderPreProcessing(renderPreProcessing); + } + +} \ No newline at end of file diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java b/android/src/main/java/live/videosdk/webrtc/audio/AudioSwitchManager.java similarity index 99% rename from android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java rename to android/src/main/java/live/videosdk/webrtc/audio/AudioSwitchManager.java index a2da4c088e..bfd4fc3b2f 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioSwitchManager.java +++ b/android/src/main/java/live/videosdk/webrtc/audio/AudioSwitchManager.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.audio; +package live.videosdk.webrtc.audio; import android.annotation.SuppressLint; import android.content.Context; @@ -269,8 +269,6 @@ public void enableSpeakerButPreferBluetooth() { if (audioDevice == null) { selectAudioOutput(AudioDevice.Speakerphone.class); - } else { - selectAudioOutput(audioDevice.getClass()); } } diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java b/android/src/main/java/live/videosdk/webrtc/audio/AudioUtils.java similarity index 73% rename from android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java rename to android/src/main/java/live/videosdk/webrtc/audio/AudioUtils.java index 13dd4ba233..aac79d221c 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/audio/AudioUtils.java +++ b/android/src/main/java/live/videosdk/webrtc/audio/AudioUtils.java @@ -1,9 +1,7 @@ -package com.cloudwebrtc.webrtc.audio; +package live.videosdk.webrtc.audio; import android.media.AudioAttributes; -import android.media.AudioDeviceInfo; import android.media.AudioManager; -import android.os.Build; import android.util.Log; import androidx.annotation.Nullable; @@ -202,63 +200,4 @@ public static Integer getAudioAttributesContentTypeFromString(@Nullable String c return contentType; } - - static public String getAudioDeviceId(AudioDeviceInfo device) { - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { - return "audio-1"; - } else { - - String address = Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? 
"" : device.getAddress(); - String deviceId = "" + device.getId(); - if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { - deviceId = "microphone-" + address; - } - if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { - deviceId = "wired-headset"; - } - if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { - deviceId = "bluetooth"; - } - return deviceId; - } - } - - static public String getAudioGroupId(AudioDeviceInfo device) { - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { - return "microphone"; - } else { - String groupId = "" + device.getType(); - if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { - groupId = "microphone"; - } - if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { - groupId = "wired-headset"; - } - if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { - groupId = "bluetooth"; - } - return groupId; - } - } - - static public String getAudioDeviceLabel(AudioDeviceInfo device) { - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) { - return "Audio"; - } else { - String address = Build.VERSION.SDK_INT < Build.VERSION_CODES.P ? "" : device.getAddress(); - String label = device.getProductName().toString(); - if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_MIC) { - label = "Built-in Microphone (" + address + ")"; - } - - if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) { - label = "Wired Headset Microphone"; - } - - if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) { - label = device.getProductName().toString(); - } - return label; - } - } } \ No newline at end of file diff --git a/android/src/main/java/live/videosdk/webrtc/audio/LocalAudioTrack.java b/android/src/main/java/live/videosdk/webrtc/audio/LocalAudioTrack.java new file mode 100644 index 0000000000..a784afed00 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/audio/LocalAudioTrack.java @@ -0,0 +1,73 @@ +package live.videosdk.webrtc.audio; + +import android.media.AudioFormat; +import android.os.SystemClock; + +import live.videosdk.webrtc.LocalTrack; + +import org.webrtc.AudioTrack; +import org.webrtc.AudioTrackSink; +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +/** + * LocalAudioTrack represents an audio track that is sourced from local audio capture. + */ +public class LocalAudioTrack + extends LocalTrack implements JavaAudioDeviceModule.SamplesReadyCallback { + public LocalAudioTrack(AudioTrack audioTrack) { + super(audioTrack); + } + + final List sinks = new ArrayList<>(); + + /** + * Add a sink to receive audio data from this track. + */ + public void addSink(AudioTrackSink sink) { + synchronized (sinks) { + sinks.add(sink); + } + } + + /** + * Remove a sink for this track. 
+ */ + public void removeSink(AudioTrackSink sink) { + synchronized (sinks) { + sinks.remove(sink); + } + } + + private int getBytesPerSample(int audioFormat) { + switch (audioFormat) { + case AudioFormat.ENCODING_PCM_8BIT: + return 1; + case AudioFormat.ENCODING_PCM_16BIT: + case AudioFormat.ENCODING_IEC61937: + case AudioFormat.ENCODING_DEFAULT: + return 2; + case AudioFormat.ENCODING_PCM_FLOAT: + return 4; + default: + throw new IllegalArgumentException("Bad audio format " + audioFormat); + } + } + + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + int bitsPerSample = getBytesPerSample(audioSamples.getAudioFormat()) * 8; + int numFrames = audioSamples.getSampleRate() / 100; + long timestamp = SystemClock.elapsedRealtime(); + synchronized (sinks) { + for (AudioTrackSink sink : sinks) { + ByteBuffer byteBuffer = ByteBuffer.wrap(audioSamples.getData()); + sink.onData(byteBuffer, bitsPerSample, audioSamples.getSampleRate(), + audioSamples.getChannelCount(), numFrames, timestamp); + } + } + } +} \ No newline at end of file diff --git a/android/src/main/java/live/videosdk/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java b/android/src/main/java/live/videosdk/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java new file mode 100644 index 0000000000..bfd33410e0 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/audio/PlaybackSamplesReadyCallbackAdapter.java @@ -0,0 +1,32 @@ +package live.videosdk.webrtc.audio; + +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.util.ArrayList; +import java.util.List; + +public class PlaybackSamplesReadyCallbackAdapter + implements JavaAudioDeviceModule.PlaybackSamplesReadyCallback { + public PlaybackSamplesReadyCallbackAdapter() {} + + List<JavaAudioDeviceModule.PlaybackSamplesReadyCallback> callbacks = new ArrayList<>(); + + public void addCallback(JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.add(callback); + } + } + + public void removeCallback(JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.remove(callback); + } + } + + @Override + public void onWebRtcAudioTrackSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + synchronized (callbacks) { + for (JavaAudioDeviceModule.PlaybackSamplesReadyCallback callback : callbacks) { + callback.onWebRtcAudioTrackSamplesReady(audioSamples); + } + } + } +} \ No newline at end of file diff --git a/android/src/main/java/live/videosdk/webrtc/audio/RecordSamplesReadyCallbackAdapter.java b/android/src/main/java/live/videosdk/webrtc/audio/RecordSamplesReadyCallbackAdapter.java new file mode 100644 index 0000000000..2a27867873 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/audio/RecordSamplesReadyCallbackAdapter.java @@ -0,0 +1,34 @@ +package live.videosdk.webrtc.audio; + +import org.webrtc.audio.JavaAudioDeviceModule; + +import java.util.ArrayList; +import java.util.List; + +public class RecordSamplesReadyCallbackAdapter + implements JavaAudioDeviceModule.SamplesReadyCallback { + public RecordSamplesReadyCallbackAdapter() {} + + List<JavaAudioDeviceModule.SamplesReadyCallback> callbacks = new ArrayList<>(); + + public void addCallback(JavaAudioDeviceModule.SamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.add(callback); + } + } + + public void removeCallback(JavaAudioDeviceModule.SamplesReadyCallback callback) { + synchronized (callbacks) { + callbacks.remove(callback); + } + } + + @Override + public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples audioSamples) { + synchronized
(callbacks) { + for (JavaAudioDeviceModule.SamplesReadyCallback callback : callbacks) { + callback.onWebRtcAudioRecordSamplesReady(audioSamples); + } + } + } +} diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioChannel.java b/android/src/main/java/live/videosdk/webrtc/record/AudioChannel.java similarity index 56% rename from android/src/main/java/com/cloudwebrtc/webrtc/record/AudioChannel.java rename to android/src/main/java/live/videosdk/webrtc/record/AudioChannel.java index 5b9a033476..9dc9de8f1f 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioChannel.java +++ b/android/src/main/java/live/videosdk/webrtc/record/AudioChannel.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.record; +package live.videosdk.webrtc.record; public enum AudioChannel { INPUT, diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java b/android/src/main/java/live/videosdk/webrtc/record/AudioSamplesInterceptor.java similarity index 96% rename from android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java rename to android/src/main/java/live/videosdk/webrtc/record/AudioSamplesInterceptor.java index ddc4d1ff2f..9cc5ff27d7 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioSamplesInterceptor.java +++ b/android/src/main/java/live/videosdk/webrtc/record/AudioSamplesInterceptor.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.record; +package live.videosdk.webrtc.record; import android.annotation.SuppressLint; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioTrackInterceptor.java b/android/src/main/java/live/videosdk/webrtc/record/AudioTrackInterceptor.java similarity index 98% rename from android/src/main/java/com/cloudwebrtc/webrtc/record/AudioTrackInterceptor.java rename to android/src/main/java/live/videosdk/webrtc/record/AudioTrackInterceptor.java index 4f71082100..37a1586241 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/AudioTrackInterceptor.java +++ b/android/src/main/java/live/videosdk/webrtc/record/AudioTrackInterceptor.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.record; +package live.videosdk.webrtc.record; import android.annotation.TargetApi; import android.media.AudioFormat; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java b/android/src/main/java/live/videosdk/webrtc/record/FrameCapturer.java similarity index 99% rename from android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java rename to android/src/main/java/live/videosdk/webrtc/record/FrameCapturer.java index fb48c68a15..be8d0c2c4a 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/FrameCapturer.java +++ b/android/src/main/java/live/videosdk/webrtc/record/FrameCapturer.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.record; +package live.videosdk.webrtc.record; import android.graphics.Bitmap; import android.graphics.BitmapFactory; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java b/android/src/main/java/live/videosdk/webrtc/record/MediaRecorderImpl.java similarity index 96% rename from android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java rename to android/src/main/java/live/videosdk/webrtc/record/MediaRecorderImpl.java index f1c45357bc..52a03d5c58 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/MediaRecorderImpl.java +++ b/android/src/main/java/live/videosdk/webrtc/record/MediaRecorderImpl.java @@ -1,9 +1,9 @@ -package 
com.cloudwebrtc.webrtc.record; +package live.videosdk.webrtc.record; import androidx.annotation.Nullable; import android.util.Log; -import com.cloudwebrtc.webrtc.utils.EglUtils; +import live.videosdk.webrtc.utils.EglUtils; import org.webrtc.VideoTrack; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/OutputAudioSamplesInterceptor.java b/android/src/main/java/live/videosdk/webrtc/record/OutputAudioSamplesInterceptor.java similarity index 95% rename from android/src/main/java/com/cloudwebrtc/webrtc/record/OutputAudioSamplesInterceptor.java rename to android/src/main/java/live/videosdk/webrtc/record/OutputAudioSamplesInterceptor.java index 7628a096be..18c0f06eac 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/OutputAudioSamplesInterceptor.java +++ b/android/src/main/java/live/videosdk/webrtc/record/OutputAudioSamplesInterceptor.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.record; +package live.videosdk.webrtc.record; import org.webrtc.audio.JavaAudioDeviceModule; import org.webrtc.audio.WebRtcAudioTrackUtils; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java b/android/src/main/java/live/videosdk/webrtc/record/VideoFileRenderer.java similarity index 99% rename from android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java rename to android/src/main/java/live/videosdk/webrtc/record/VideoFileRenderer.java index f2a0de795c..92a832d374 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java +++ b/android/src/main/java/live/videosdk/webrtc/record/VideoFileRenderer.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.record; +package live.videosdk.webrtc.record; import android.media.MediaCodec; import android.media.MediaCodecInfo; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadResult.java b/android/src/main/java/live/videosdk/webrtc/utils/AnyThreadResult.java similarity index 95% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadResult.java rename to android/src/main/java/live/videosdk/webrtc/utils/AnyThreadResult.java index e1de0e46c1..a213d44cd6 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadResult.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/AnyThreadResult.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; import android.os.Looper; import android.os.Handler; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadSink.java b/android/src/main/java/live/videosdk/webrtc/utils/AnyThreadSink.java similarity index 95% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadSink.java rename to android/src/main/java/live/videosdk/webrtc/utils/AnyThreadSink.java index ad0bce2fb4..34d9121287 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/AnyThreadSink.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/AnyThreadSink.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; import android.os.Handler; import android.os.Looper; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/Callback.java b/android/src/main/java/live/videosdk/webrtc/utils/Callback.java similarity index 63% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/Callback.java rename to android/src/main/java/live/videosdk/webrtc/utils/Callback.java index a74409a3df..1b90c08cf7 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/Callback.java +++ 
b/android/src/main/java/live/videosdk/webrtc/utils/Callback.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; public interface Callback { diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsArray.java b/android/src/main/java/live/videosdk/webrtc/utils/ConstraintsArray.java similarity index 98% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsArray.java rename to android/src/main/java/live/videosdk/webrtc/utils/ConstraintsArray.java index c9dd0089df..ecc45dbdcc 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsArray.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/ConstraintsArray.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; import java.util.ArrayList; import java.util.Map; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java b/android/src/main/java/live/videosdk/webrtc/utils/ConstraintsMap.java similarity index 98% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java rename to android/src/main/java/live/videosdk/webrtc/utils/ConstraintsMap.java index 5a6d8b6d3b..64de6d3e98 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ConstraintsMap.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/ConstraintsMap.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; import java.util.ArrayList; import java.util.HashMap; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/EglUtils.java b/android/src/main/java/live/videosdk/webrtc/utils/EglUtils.java similarity index 96% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/EglUtils.java rename to android/src/main/java/live/videosdk/webrtc/utils/EglUtils.java index 8291e97fc0..f38f92e3b2 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/EglUtils.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/EglUtils.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; import android.os.Build; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java b/android/src/main/java/live/videosdk/webrtc/utils/MediaConstraintsUtils.java similarity index 98% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java rename to android/src/main/java/live/videosdk/webrtc/utils/MediaConstraintsUtils.java index 3ba4ae9824..77b6a9feed 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/MediaConstraintsUtils.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/MediaConstraintsUtils.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; import android.util.Log; import java.util.List; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ObjectType.java b/android/src/main/java/live/videosdk/webrtc/utils/ObjectType.java similarity index 73% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/ObjectType.java rename to android/src/main/java/live/videosdk/webrtc/utils/ObjectType.java index 481603d775..1078a0125a 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/ObjectType.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/ObjectType.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; public enum ObjectType { Null, diff --git 
a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java b/android/src/main/java/live/videosdk/webrtc/utils/PermissionUtils.java similarity index 99% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java rename to android/src/main/java/live/videosdk/webrtc/utils/PermissionUtils.java index 5e9c8f6033..46c018a57d 100755 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/PermissionUtils.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/PermissionUtils.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; import android.app.Activity; import android.app.Fragment; diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java b/android/src/main/java/live/videosdk/webrtc/utils/Utils.java similarity index 98% rename from android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java rename to android/src/main/java/live/videosdk/webrtc/utils/Utils.java index b990ca4e24..549551c0ba 100644 --- a/android/src/main/java/com/cloudwebrtc/webrtc/utils/Utils.java +++ b/android/src/main/java/live/videosdk/webrtc/utils/Utils.java @@ -1,4 +1,4 @@ -package com.cloudwebrtc.webrtc.utils; +package live.videosdk.webrtc.utils; import androidx.annotation.Nullable; diff --git a/android/src/main/java/live/videosdk/webrtc/video/LocalVideoTrack.java b/android/src/main/java/live/videosdk/webrtc/video/LocalVideoTrack.java new file mode 100644 index 0000000000..a575d3bc2f --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/video/LocalVideoTrack.java @@ -0,0 +1,67 @@ +package live.videosdk.webrtc.video; + +import androidx.annotation.Nullable; + +import live.videosdk.webrtc.LocalTrack; + +import org.webrtc.VideoFrame; +import org.webrtc.VideoProcessor; +import org.webrtc.VideoSink; +import org.webrtc.VideoTrack; + +import java.util.ArrayList; +import java.util.List; + +public class LocalVideoTrack extends LocalTrack implements VideoProcessor { + public interface ExternalVideoFrameProcessing { + /** + * Process a video frame. + * @param frame + * @return The processed video frame. 
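A sketch of plugging a processor into this track (hedged: a pass-through example; only addProcessor and the onFrame contract declared just below come from this file):

// Install a no-op frame tap; a real processor would return a new, modified VideoFrame.
static void installPassThrough(LocalVideoTrack track) {
  track.addProcessor(frame -> frame);
}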
+ */ + public abstract VideoFrame onFrame(VideoFrame frame); + } + + public LocalVideoTrack(VideoTrack videoTrack) { + super(videoTrack); + } + + List<ExternalVideoFrameProcessing> processors = new ArrayList<>(); + + public void addProcessor(ExternalVideoFrameProcessing processor) { + synchronized (processors) { + processors.add(processor); + } + } + + public void removeProcessor(ExternalVideoFrameProcessing processor) { + synchronized (processors) { + processors.remove(processor); + } + } + + private VideoSink sink = null; + + @Override + public void setSink(@Nullable VideoSink videoSink) { + sink = videoSink; + } + + @Override + public void onCapturerStarted(boolean b) {} + + @Override + public void onCapturerStopped() {} + + @Override + public void onFrameCaptured(VideoFrame videoFrame) { + if (sink != null) { + synchronized (processors) { + for (ExternalVideoFrameProcessing processor : processors) { + videoFrame = processor.onFrame(videoFrame); + } + } + sink.onFrame(videoFrame); + } + } +} \ No newline at end of file diff --git a/android/src/main/java/live/videosdk/webrtc/video/VideoCapturerInfo.java b/android/src/main/java/live/videosdk/webrtc/video/VideoCapturerInfo.java new file mode 100644 index 0000000000..321cb17fd6 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/video/VideoCapturerInfo.java @@ -0,0 +1,12 @@ +package live.videosdk.webrtc.video; + +import org.webrtc.VideoCapturer; + +public class VideoCapturerInfo { + public VideoCapturer capturer; + public int width; + public int height; + public int fps; + public boolean isScreenCapture = false; + public String cameraName; +} \ No newline at end of file diff --git a/android/src/main/java/live/videosdk/webrtc/video/camera/CameraRegionUtils.java b/android/src/main/java/live/videosdk/webrtc/video/camera/CameraRegionUtils.java new file mode 100644 index 0000000000..0254a3cf1c --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/video/camera/CameraRegionUtils.java @@ -0,0 +1,201 @@ +package live.videosdk.webrtc.video.camera; + +import android.annotation.TargetApi; +import android.graphics.Rect; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; +import android.util.Size; +import androidx.annotation.NonNull; +import androidx.annotation.VisibleForTesting; +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import java.util.Arrays; + +/** + * Utility class offering functions to calculate values regarding the camera boundaries. + * + *

The functions are used to calculate focus and exposure settings. + */ +public final class CameraRegionUtils { + + @NonNull + public static Size getCameraBoundaries( + @NonNull CameraCharacteristics cameraCharacteristics, @NonNull CaptureRequest.Builder requestBuilder) { + if (SdkCapabilityChecker.supportsDistortionCorrection() + && supportsDistortionCorrection(cameraCharacteristics)) { + // Get the current distortion correction mode. + Integer distortionCorrectionMode = + requestBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE); + + // Return the correct boundaries depending on the mode. + android.graphics.Rect rect; + if (distortionCorrectionMode == null + || distortionCorrectionMode == CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) { + rect = getSensorInfoPreCorrectionActiveArraySize(cameraCharacteristics); + } else { + rect = getSensorInfoActiveArraySize(cameraCharacteristics); + } + + return SizeFactory.create(rect.width(), rect.height()); + } else { + // No distortion correction support. + return getSensorInfoPixelArraySize(cameraCharacteristics); + } + } + + @TargetApi(Build.VERSION_CODES.P) + private static boolean supportsDistortionCorrection(CameraCharacteristics cameraCharacteristics) { + int[] availableDistortionCorrectionModes = getDistortionCorrectionAvailableModes(cameraCharacteristics); + if (availableDistortionCorrectionModes == null) { + availableDistortionCorrectionModes = new int[0]; + } + long nonOffModesSupported = + Arrays.stream(availableDistortionCorrectionModes) + .filter((value) -> value != CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) + .count(); + return nonOffModesSupported > 0; + } + + static public int[] getDistortionCorrectionAvailableModes(CameraCharacteristics cameraCharacteristics) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + return cameraCharacteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES); + } + return null; + } + + public static Rect getSensorInfoActiveArraySize(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + } + + public static Size getSensorInfoPixelArraySize(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE); + } + + @NonNull + public static Rect getSensorInfoPreCorrectionActiveArraySize(CameraCharacteristics cameraCharacteristics) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return cameraCharacteristics.get( + CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); + } + return getSensorInfoActiveArraySize(cameraCharacteristics); + } + + public static Integer getControlMaxRegionsAutoExposure(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE); + } + + /** + * Converts a point into a {@link MeteringRectangle} with the supplied coordinates as the center + * point. + * + *

Since the Camera API (due to cross-platform constraints) only accepts a point when + * configuring a specific focus or exposure area and Android requires a rectangle to configure + * these settings there is a need to convert the point into a rectangle. This method will create + * the required rectangle with an arbitrarily size that is a 10th of the current viewport and the + * coordinates as the center point. + * + * @param boundaries - The camera boundaries to calculate the metering rectangle for. + * @param x x - 1 >= coordinate >= 0. + * @param y y - 1 >= coordinate >= 0. + * @return The dimensions of the metering rectangle based on the supplied coordinates and + * boundaries. + */ + @NonNull + public static MeteringRectangle convertPointToMeteringRectangle( + @NonNull Size boundaries, + double x, + double y, + @NonNull PlatformChannel.DeviceOrientation orientation) { + assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0); + assert (x >= 0 && x <= 1); + assert (y >= 0 && y <= 1); + // Rotate the coordinates to match the device orientation. + double oldX = x, oldY = y; + switch (orientation) { + case PORTRAIT_UP: // 90 ccw. + y = 1 - oldX; + x = oldY; + break; + case PORTRAIT_DOWN: // 90 cw. + x = 1 - oldY; + y = oldX; + break; + case LANDSCAPE_LEFT: + // No rotation required. + break; + case LANDSCAPE_RIGHT: // 180. + x = 1 - x; + y = 1 - y; + break; + } + // Interpolate the target coordinate. + int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1))); + int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1))); + // Determine the dimensions of the metering rectangle (10th of the viewport). + int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d); + int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d); + // Adjust target coordinate to represent top-left corner of metering rectangle. + targetX -= targetWidth / 2; + targetY -= targetHeight / 2; + // Adjust target coordinate as to not fall out of bounds. + if (targetX < 0) { + targetX = 0; + } + if (targetY < 0) { + targetY = 0; + } + int maxTargetX = boundaries.getWidth() - 1 - targetWidth; + int maxTargetY = boundaries.getHeight() - 1 - targetHeight; + if (targetX > maxTargetX) { + targetX = maxTargetX; + } + if (targetY > maxTargetY) { + targetY = maxTargetY; + } + // Build the metering rectangle. + return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1); + } + + /** Factory class that assists in creating a {@link MeteringRectangle} instance. */ + static class MeteringRectangleFactory { + /** + * Creates a new instance of the {@link MeteringRectangle} class. + * + *

This method is visible for testing purposes only and should never be used outside this * + * class. + * + * @param x coordinate >= 0. + * @param y coordinate >= 0. + * @param width width >= 0. + * @param height height >= 0. + * @param meteringWeight weight between {@value MeteringRectangle#METERING_WEIGHT_MIN} and + * {@value MeteringRectangle#METERING_WEIGHT_MAX} inclusively. + * @return new instance of the {@link MeteringRectangle} class. + * @throws IllegalArgumentException if any of the parameters were negative. + */ + @VisibleForTesting + public static MeteringRectangle create( + int x, int y, int width, int height, int meteringWeight) { + return new MeteringRectangle(x, y, width, height, meteringWeight); + } + } + + /** Factory class that assists in creating a {@link Size} instance. */ + static class SizeFactory { + /** + * Creates a new instance of the {@link Size} class. + * + *

This method is visible for testing purposes only and should never be used outside this * + * class. + * + * @param width width >= 0. + * @param height height >= 0. + * @return new instance of the {@link Size} class. + */ + @VisibleForTesting + public static Size create(int width, int height) { + return new Size(width, height); + } + } +} \ No newline at end of file diff --git a/android/src/main/java/live/videosdk/webrtc/video/camera/CameraUtils.java b/android/src/main/java/live/videosdk/webrtc/video/camera/CameraUtils.java new file mode 100644 index 0000000000..5ad47ce4fa --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/video/camera/CameraUtils.java @@ -0,0 +1,722 @@ +package live.videosdk.webrtc.video.camera; + +import android.app.Activity; +import android.graphics.Rect; +import android.hardware.Camera; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; +import android.os.Handler; +import android.util.Log; +import android.util.Range; +import android.util.Size; +import android.view.Surface; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import live.videosdk.webrtc.GetUserMediaImpl; +import live.videosdk.webrtc.utils.AnyThreadResult; +import live.videosdk.webrtc.video.VideoCapturerInfo; + +import org.webrtc.Camera1Capturer; +import org.webrtc.Camera2Capturer; +import org.webrtc.CameraEnumerationAndroid; + +import java.lang.reflect.Field; +import java.util.List; + +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel; + +public class CameraUtils { + private static final String TAG = "CameraUtils"; + Activity activity; + private GetUserMediaImpl getUserMediaImpl; + private boolean isTorchOn = false; + private DeviceOrientationManager deviceOrientationManager; + public CameraUtils(GetUserMediaImpl getUserMediaImpl, Activity activity) { + this.getUserMediaImpl = getUserMediaImpl; + this.activity = activity; + this.deviceOrientationManager = new DeviceOrientationManager(activity, 0); + this.deviceOrientationManager.start(); + } + + public void setFocusMode(MethodCall call, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setFocusMode", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + 
(CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + switch (mode) { + case "locked": + // When locking the auto-focus the camera device should do a one-time focus and afterwards + // set the auto-focus to idle. This is accomplished by setting the CONTROL_AF_MODE to + // CONTROL_AF_MODE_AUTO. + captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO); + break; + case "auto": + captureRequestBuilder.set( + CaptureRequest.CONTROL_AF_MODE, + CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO); + break; + default: + break; + } + + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + isTorchOn ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + + //captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + if(!params.getSupportedFocusModes().isEmpty()) { + switch (mode) { + case "locked": + params.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED); + break; + case "auto": + params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); + break; + default: + break; + } + result.success(null); + return; + } + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void setFocusPoint(MethodCall call, Point focusPoint, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setFocusMode", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + MeteringRectangle focusRectangle = null; + Size cameraBoundaries = CameraRegionUtils.getCameraBoundaries(cameraCharacteristics, captureRequestBuilder); + PlatformChannel.DeviceOrientation orientation = deviceOrientationManager.getLastUIOrientation(); + focusRectangle = + convertPointToMeteringRectangle(cameraBoundaries, focusPoint.x, focusPoint.y, orientation); + + captureRequestBuilder.set( + CaptureRequest.CONTROL_AF_REGIONS, + captureRequestBuilder == null ? 
null : new MeteringRectangle[] {focusRectangle}); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + params.setFocusAreas(null); + + result.success(null); + return; + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void setExposureMode(MethodCall call, AnyThreadResult result) {} + + public void setExposurePoint(MethodCall call,Point exposurePoint, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setExposurePoint", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setExposurePoint", "[setExposurePoint] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + if(CameraRegionUtils.getControlMaxRegionsAutoExposure(cameraCharacteristics) <= 0) { + resultError("setExposurePoint", "[setExposurePoint] Camera does not support auto exposure", result); + return; + } + + MeteringRectangle exposureRectangle = null; + Size 
cameraBoundaries = CameraRegionUtils.getCameraBoundaries(cameraCharacteristics, captureRequestBuilder); + PlatformChannel.DeviceOrientation orientation = deviceOrientationManager.getLastUIOrientation(); + exposureRectangle = + convertPointToMeteringRectangle(cameraBoundaries, exposurePoint.x, exposurePoint.y, orientation); + if (exposureRectangle != null) { + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {exposureRectangle}); + } else { + MeteringRectangle[] defaultRegions = captureRequestBuilder.get(CaptureRequest.CONTROL_AE_REGIONS); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, defaultRegions); + } + + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + params.setFocusAreas(null); + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void hasTorch(String trackId, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("hasTorch", "Video capturer not found for id: " + trackId, result); + return; + } + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && info.capturer instanceof Camera2Capturer) { + CameraManager manager; + CameraDevice cameraDevice; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + boolean flashIsAvailable; + try { + CameraCharacteristics characteristics = + manager.getCameraCharacteristics(cameraDevice.getId()); + flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(flashIsAvailable); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream 
Camera1Capturer class have changed + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + List supportedModes = params.getSupportedFlashModes(); + + result.success( + supportedModes != null && supportedModes.contains(Camera.Parameters.FLASH_MODE_TORCH)); + return; + } + + resultError("hasTorch", "[TORCH] Video capturer not compatible", result); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public void setZoom(String trackId, double zoomLevel, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setZoom", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + final double maxZoomLevel = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); + + final double desiredZoomLevel = Math.max(1.0, Math.min(zoomLevel, maxZoomLevel)); + + float ratio = 1.0f / (float)desiredZoomLevel; + + if (rect != null) { + int croppedWidth = rect.width() - Math.round((float) rect.width() * ratio); + int croppedHeight = rect.height() - Math.round((float) rect.height() * ratio); + final Rect desiredRegion = new Rect(croppedWidth / 2, croppedHeight / 2, rect.width() - croppedWidth / 2, rect.height() - croppedHeight / 2); + captureRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, desiredRegion); + } + + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + isTorchOn ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + if(params.isZoomSupported()) { + int maxZoom = params.getMaxZoom(); + double desiredZoom = Math.max(0, Math.min(zoomLevel, maxZoom)); + params.setZoom((int)desiredZoom); + result.success(null); + return; + } + } + resultError("setZoom", "[ZOOM] Video capturer not compatible", result); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public void setTorch(String trackId, boolean torch, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setTorch", "Video capturer not found for id: " + trackId, result); + return; + } + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + CameraManager manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + captureRequestBuilder.set( + 
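
As the setTorch implementation here shows, Camera2 has no one-shot torch toggle while a capture session is live: FLASH_MODE only holds while a repeating request carries it, so the session's preview request is rebuilt and re-submitted. Reduced to its core (a sketch; the error handling and FPS-range plumbing from the surrounding code are omitted):

    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCaptureSession;
    import android.hardware.camera2.CameraDevice;
    import android.hardware.camera2.CaptureRequest;
    import android.os.Handler;
    import android.view.Surface;

    static void applyTorch(CameraDevice device, CameraCaptureSession session, Surface surface,
                           Handler cameraHandler, boolean on) throws CameraAccessException {
        CaptureRequest.Builder builder = device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        builder.set(CaptureRequest.FLASH_MODE,
            on ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF);
        builder.addTarget(surface); // keep feeding the same capture surface
        // The torch state persists only as long as this request keeps repeating.
        session.setRepeatingRequest(builder.build(), null, cameraHandler);
    }
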
CaptureRequest.FLASH_MODE, + torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(null); + isTorchOn = torch; + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + torch ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + camera.setParameters(params); + + result.success(null); + isTorchOn = torch; + return; + } + resultError("setTorch", "[TORCH] Video capturer not compatible", result); + } + + + private class NoSuchFieldWithNameException extends NoSuchFieldException { + + String className; + String fieldName; + + NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { + super(e.getMessage()); + this.className = className; + this.fieldName = fieldName; + } + } + static private void resultError(String method, String error, MethodChannel.Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + private Object getPrivateProperty(Class klass, Object object, String fieldName) + throws NoSuchFieldWithNameException { + try { + Field field = klass.getDeclaredField(fieldName); + field.setAccessible(true); + return field.get(object); + } catch (NoSuchFieldException e) { + throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); + } catch (IllegalAccessException e) { + // Should never happen since we are calling `setAccessible(true)` + throw new RuntimeException(e); + } + } + @NonNull + public static MeteringRectangle convertPointToMeteringRectangle( + @NonNull Size boundaries, + double x, + double y, + @NonNull PlatformChannel.DeviceOrientation orientation) { + assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0); + assert (x >= 0 && x <= 1); + assert (y >= 0 && y <= 1); + // Rotate the coordinates to match the device orientation. + double oldX = x, oldY = y; + switch (orientation) { + case PORTRAIT_UP: // 90 ccw. + y = 1 - oldX; + x = oldY; + break; + case PORTRAIT_DOWN: // 90 cw. + x = 1 - oldY; + y = oldX; + break; + case LANDSCAPE_LEFT: + // No rotation required. + break; + case LANDSCAPE_RIGHT: // 180. + x = 1 - x; + y = 1 - y; + break; + } + // Interpolate the target coordinate. 
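
Before that interpolation runs, the switch above has already rotated the normalized tap point from UI space into sensor space; for example PORTRAIT_UP means the UI is rotated 90 degrees counter-clockwise relative to the sensor, so (x, y) maps to (y, 1 - x). The mapping as a pure function (a sketch with the same cases as the switch):

    import io.flutter.embedding.engine.systemchannels.PlatformChannel;

    static double[] rotatePoint(double x, double y, PlatformChannel.DeviceOrientation orientation) {
        switch (orientation) {
            case PORTRAIT_UP:     return new double[] {y, 1 - x};     // 90 degrees ccw
            case PORTRAIT_DOWN:   return new double[] {1 - y, x};     // 90 degrees cw
            case LANDSCAPE_RIGHT: return new double[] {1 - x, 1 - y}; // 180 degrees
            case LANDSCAPE_LEFT:
            default:              return new double[] {x, y};         // sensor-aligned, no rotation
        }
    }
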
+ int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1))); + int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1))); + // Determine the dimensions of the metering rectangle (10th of the viewport). + int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d); + int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d); + // Adjust target coordinate to represent top-left corner of metering rectangle. + targetX -= targetWidth / 2; + targetY -= targetHeight / 2; + // Adjust target coordinate as to not fall out of bounds. + if (targetX < 0) { + targetX = 0; + } + if (targetY < 0) { + targetY = 0; + } + int maxTargetX = boundaries.getWidth() - 1 - targetWidth; + int maxTargetY = boundaries.getHeight() - 1 - targetHeight; + if (targetX > maxTargetX) { + targetX = maxTargetX; + } + if (targetY > maxTargetY) { + targetY = maxTargetY; + } + // Build the metering rectangle. + return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1); + } + + static class MeteringRectangleFactory { + public static MeteringRectangle create( + int x, int y, int width, int height, int meteringWeight) { + return new MeteringRectangle(x, y, width, height, meteringWeight); + } + } +} diff --git a/android/src/main/java/live/videosdk/webrtc/video/camera/DeviceOrientationManager.java b/android/src/main/java/live/videosdk/webrtc/video/camera/DeviceOrientationManager.java new file mode 100644 index 0000000000..f1a09a8169 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/video/camera/DeviceOrientationManager.java @@ -0,0 +1,189 @@ + +package live.videosdk.webrtc.video.camera; + +import android.app.Activity; +import android.content.BroadcastReceiver; +import android.content.Context; +import android.content.Intent; +import android.content.IntentFilter; +import android.content.res.Configuration; +import android.view.Display; +import android.view.Surface; +import android.view.WindowManager; +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.VisibleForTesting; +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation; + +/** + * Support class to help to determine the media orientation based on the orientation of the device. + */ +public class DeviceOrientationManager { + + private static final IntentFilter orientationIntentFilter = + new IntentFilter(Intent.ACTION_CONFIGURATION_CHANGED); + + private final Activity activity; + private final int sensorOrientation; + private PlatformChannel.DeviceOrientation lastOrientation; + private BroadcastReceiver broadcastReceiver; + + /** Factory method to create a device orientation manager. 
*/ + @NonNull + public static DeviceOrientationManager create( + @NonNull Activity activity, + int sensorOrientation) { + return new DeviceOrientationManager(activity, sensorOrientation); + } + + DeviceOrientationManager( + @NonNull Activity activity, + int sensorOrientation) { + this.activity = activity; + this.sensorOrientation = sensorOrientation; + } + + public void start() { + if (broadcastReceiver != null) { + return; + } + broadcastReceiver = + new BroadcastReceiver() { + @Override + public void onReceive(Context context, Intent intent) { + handleUIOrientationChange(); + } + }; + activity.registerReceiver(broadcastReceiver, orientationIntentFilter); + broadcastReceiver.onReceive(activity, null); + } + + /** Stops listening for orientation updates. */ + public void stop() { + if (broadcastReceiver == null) { + return; + } + activity.unregisterReceiver(broadcastReceiver); + broadcastReceiver = null; + } + + + /** @return the last received UI orientation. */ + @Nullable + public PlatformChannel.DeviceOrientation getLastUIOrientation() { + return this.lastOrientation; + } + + /** + * Handles orientation changes based on change events triggered by the OrientationIntentFilter. + * + *
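
Usage-wise the manager is deliberately small: create it once, `start()` registers a configuration-change receiver (and primes the state with an immediate synthetic `onReceive`), `stop()` unregisters it, and callers poll `getLastUIOrientation()` when they need the current orientation. A typical wiring, sketched (the `activity` reference and the sensor-orientation value are assumed to come from the host plugin):

    DeviceOrientationManager orientationManager =
        DeviceOrientationManager.create(activity, /* sensorOrientation= */ 90);
    orientationManager.start(); // begins tracking Intent.ACTION_CONFIGURATION_CHANGED

    // Later, e.g. when building a metering rectangle:
    PlatformChannel.DeviceOrientation ui = orientationManager.getLastUIOrientation();

    // On dispose:
    orientationManager.stop(); // unregisters the broadcast receiver
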
<p>
This method is visible for testing purposes only and should never be used outside this + * class. + */ + @VisibleForTesting + void handleUIOrientationChange() { + PlatformChannel.DeviceOrientation orientation = getUIOrientation(); + handleOrientationChange(orientation, lastOrientation); + lastOrientation = orientation; + } + @VisibleForTesting + static void handleOrientationChange( + DeviceOrientation newOrientation, + DeviceOrientation previousOrientation) { + } + + @SuppressWarnings("deprecation") + @VisibleForTesting + PlatformChannel.DeviceOrientation getUIOrientation() { + final int rotation = getDisplay().getRotation(); + final int orientation = activity.getResources().getConfiguration().orientation; + + switch (orientation) { + case Configuration.ORIENTATION_PORTRAIT: + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) { + return PlatformChannel.DeviceOrientation.PORTRAIT_UP; + } else { + return PlatformChannel.DeviceOrientation.PORTRAIT_DOWN; + } + case Configuration.ORIENTATION_LANDSCAPE: + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) { + return PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT; + } else { + return PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT; + } + case Configuration.ORIENTATION_SQUARE: + case Configuration.ORIENTATION_UNDEFINED: + default: + return PlatformChannel.DeviceOrientation.PORTRAIT_UP; + } + } + + /** + * Calculates the sensor orientation based on the supplied angle. + * + *
<p>
This method is visible for testing purposes only and should never be used outside this + * class. + * + * @param angle Orientation angle. + * @return The sensor orientation based on the supplied angle. + */ + @VisibleForTesting + PlatformChannel.DeviceOrientation calculateSensorOrientation(int angle) { + final int tolerance = 45; + angle += tolerance; + + // Orientation is 0 in the default orientation mode. This is portrait-mode for phones + // and landscape for tablets. We have to compensate for this by calculating the default + // orientation, and apply an offset accordingly. + int defaultDeviceOrientation = getDeviceDefaultOrientation(); + if (defaultDeviceOrientation == Configuration.ORIENTATION_LANDSCAPE) { + angle += 90; + } + // Determine the orientation + angle = angle % 360; + return new PlatformChannel.DeviceOrientation[] { + PlatformChannel.DeviceOrientation.PORTRAIT_UP, + PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT, + PlatformChannel.DeviceOrientation.PORTRAIT_DOWN, + PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT, + } + [angle / 90]; + } + + /** + * Gets the default orientation of the device. + * + *
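
The arithmetic in `calculateSensorOrientation` is quadrant bucketing with a 45-degree tolerance: after adding 45 (plus an extra 90 on landscape-default devices) and reducing modulo 360, integer division by 90 selects one of the four orientations. Two worked values (plain Java, mirroring the method):

    // Portrait-default device (no extra 90-degree offset):
    int a = (80 + 45) % 360;   // 125 -> 125 / 90 == 1 -> LANDSCAPE_LEFT
    int b = (350 + 45) % 360;  //  35 ->  35 / 90 == 0 -> PORTRAIT_UP
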
<p>
This method is visible for testing purposes only and should never be used outside this + * class. + * + * @return The default orientation of the device. + */ + @VisibleForTesting + int getDeviceDefaultOrientation() { + Configuration config = activity.getResources().getConfiguration(); + int rotation = getDisplay().getRotation(); + if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) + && config.orientation == Configuration.ORIENTATION_LANDSCAPE) + || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) + && config.orientation == Configuration.ORIENTATION_PORTRAIT)) { + return Configuration.ORIENTATION_LANDSCAPE; + } else { + return Configuration.ORIENTATION_PORTRAIT; + } + } + + /** + * Gets an instance of the Android {@link android.view.Display}. + * + *
<p>
This method is visible for testing purposes only and should never be used outside this + * class. + * + * @return An instance of the Android {@link android.view.Display}. + */ + @SuppressWarnings("deprecation") + @VisibleForTesting + Display getDisplay() { + return ((WindowManager) activity.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); + } +} diff --git a/android/src/main/java/live/videosdk/webrtc/video/camera/Point.java b/android/src/main/java/live/videosdk/webrtc/video/camera/Point.java new file mode 100644 index 0000000000..6b7831d127 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/video/camera/Point.java @@ -0,0 +1,14 @@ +package live.videosdk.webrtc.video.camera; + +import androidx.annotation.Nullable; + +/** Represents a point on an x/y axis. */ +public class Point { + public final Double x; + public final Double y; + + public Point(@Nullable Double x, @Nullable Double y) { + this.x = x; + this.y = y; + } +} \ No newline at end of file diff --git a/android/src/main/java/live/videosdk/webrtc/video/camera/SdkCapabilityChecker.java b/android/src/main/java/live/videosdk/webrtc/video/camera/SdkCapabilityChecker.java new file mode 100644 index 0000000000..15a79090d7 --- /dev/null +++ b/android/src/main/java/live/videosdk/webrtc/video/camera/SdkCapabilityChecker.java @@ -0,0 +1,56 @@ +package live.videosdk.webrtc.video.camera; + +import android.annotation.SuppressLint; +import android.os.Build; +import androidx.annotation.ChecksSdkIntAtLeast; +import androidx.annotation.VisibleForTesting; + +/** Abstracts SDK version checks, and allows overriding them in unit tests. */ +public class SdkCapabilityChecker { + /** The current SDK version, overridable for testing. */ + @SuppressLint("AnnotateVersionCheck") + @VisibleForTesting + public static int SDK_VERSION = Build.VERSION.SDK_INT; + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.P) + public static boolean supportsDistortionCorrection() { + // See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#DISTORTION_CORRECTION_AVAILABLE_MODES + return SDK_VERSION >= Build.VERSION_CODES.P; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.O) + public static boolean supportsEglRecordableAndroid() { + // See https://developer.android.com/reference/android/opengl/EGLExt#EGL_RECORDABLE_ANDROID + return SDK_VERSION >= Build.VERSION_CODES.O; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.S) + public static boolean supportsEncoderProfiles() { + // See https://developer.android.com/reference/android/media/EncoderProfiles + return SDK_VERSION >= Build.VERSION_CODES.S; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.M) + public static boolean supportsMarshmallowNoiseReductionModes() { + // See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES + return SDK_VERSION >= Build.VERSION_CODES.M; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.P) + public static boolean supportsSessionConfiguration() { + // See https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration + return SDK_VERSION >= Build.VERSION_CODES.P; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.N) + public static boolean supportsVideoPause() { + // See https://developer.android.com/reference/androidx/camera/video/VideoRecordEvent.Pause + return SDK_VERSION >= Build.VERSION_CODES.N; + } + + @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.R) + public static boolean supportsZoomRatio() { 
+ // See https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#CONTROL_ZOOM_RATIO + return SDK_VERSION >= Build.VERSION_CODES.R; + } +} \ No newline at end of file diff --git a/android/src/main/java/org/webrtc/Camera1Helper.java b/android/src/main/java/org/webrtc/Camera1Helper.java new file mode 100644 index 0000000000..35d8066013 --- /dev/null +++ b/android/src/main/java/org/webrtc/Camera1Helper.java @@ -0,0 +1,39 @@ +package org.webrtc; + +import androidx.annotation.Nullable; + +import java.util.ArrayList; +import java.util.List; + +/** + * A helper to access package-protected methods used in [Camera2Session] + *
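
SdkCapabilityChecker (just above) funnels every Build.VERSION comparison through one overridable point, so call sites read as capability checks and unit tests can fake SDK_VERSION. A hypothetical call site for the zoom check (CONTROL_ZOOM_RATIO is the real API 30 key; the crop-region fallback mirrors the setZoom code earlier):

    if (SdkCapabilityChecker.supportsZoomRatio()) {
        // API 30+: a single float drives zoom, no manual crop-region math needed.
        captureRequestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, 2.0f);
    } else {
        captureRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION,
            cropRegionForZoom(activeArray, 2.0, maxZoom)); // helper sketched earlier
    }
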
<p>
+ * Note: cameraId as used in the Camera1XXX classes refers to the index within the list of cameras. + * + * @suppress + */ + +public class Camera1Helper { + + public static int getCameraId(String deviceName) { + return Camera1Enumerator.getCameraIndex(deviceName); + } + + @Nullable + public static List getSupportedFormats(int cameraId) { + return Camera1Enumerator.getSupportedFormats(cameraId); + } + + public static Size findClosestCaptureFormat(int cameraId, int width, int height) { + List formats = getSupportedFormats(cameraId); + + List sizes = new ArrayList<>(); + if (formats != null) { + for (CameraEnumerationAndroid.CaptureFormat format : formats) { + sizes.add(new Size(format.width, format.height)); + } + } + + return CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height); + } +} \ No newline at end of file diff --git a/android/src/main/java/org/webrtc/Camera2Helper.java b/android/src/main/java/org/webrtc/Camera2Helper.java new file mode 100644 index 0000000000..0b7b1ffbae --- /dev/null +++ b/android/src/main/java/org/webrtc/Camera2Helper.java @@ -0,0 +1,36 @@ + +package org.webrtc; + +import android.hardware.camera2.CameraManager; + +import androidx.annotation.Nullable; + +import java.util.ArrayList; +import java.util.List; + +/** + * A helper to access package-protected methods used in [Camera2Session] + *
<p>
+ * Note: cameraId as used in the Camera2XXX classes refers to the id returned + * by [CameraManager.getCameraIdList]. + */ +public class Camera2Helper { + + @Nullable + public static List getSupportedFormats(CameraManager cameraManager, @Nullable String cameraId) { + return Camera2Enumerator.getSupportedFormats(cameraManager, cameraId); + } + + public static Size findClosestCaptureFormat(CameraManager cameraManager, @Nullable String cameraId, int width, int height) { + List formats = getSupportedFormats(cameraManager, cameraId); + + List sizes = new ArrayList<>(); + if (formats != null) { + for (CameraEnumerationAndroid.CaptureFormat format : formats) { + sizes.add(new Size(format.width, format.height)); + } + } + + return CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height); + } +} \ No newline at end of file diff --git a/android/src/main/java/org/webrtc/audio/WebRtcAudioTrackUtils.java b/android/src/main/java/org/webrtc/audio/WebRtcAudioTrackUtils.java index 4aba9d7a0e..80e23ccdb7 100644 --- a/android/src/main/java/org/webrtc/audio/WebRtcAudioTrackUtils.java +++ b/android/src/main/java/org/webrtc/audio/WebRtcAudioTrackUtils.java @@ -3,7 +3,7 @@ import android.media.AudioTrack; import android.util.Log; -import com.cloudwebrtc.webrtc.record.AudioTrackInterceptor; +import live.videosdk.webrtc.record.AudioTrackInterceptor; import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback; diff --git a/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java b/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java index 772b3f936c..e4304fa268 100644 --- a/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java +++ b/android/src/main/java/org/webrtc/video/CustomVideoEncoderFactory.java @@ -2,7 +2,7 @@ import androidx.annotation.Nullable; -import com.cloudwebrtc.webrtc.SimulcastVideoEncoderFactoryWrapper; +import live.videosdk.webrtc.SimulcastVideoEncoderFactoryWrapper; import org.webrtc.EglBase; import org.webrtc.SoftwareVideoEncoderFactory; diff --git a/common/cpp/flutter_webrtc_plugin.cc b/common/cpp/flutter_webrtc_plugin.cc index c0bf7e402c..27425c02e0 100644 --- a/common/cpp/flutter_webrtc_plugin.cc +++ b/common/cpp/flutter_webrtc_plugin.cc @@ -1,4 +1,4 @@ -#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" +#include "videosdk_webrtc/flutter_web_r_t_c_plugin.h" #include "flutter_common.h" #include "flutter_webrtc.h" @@ -7,7 +7,7 @@ const char* kChannelName = "FlutterWebRTC.Method"; //#if defined(_WINDOWS) -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { // A webrtc plugin for windows/linux. 
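
Camera1Helper and Camera2Helper exist mainly to re-export package-private enumeration calls; the interesting part is the selection they delegate to, which scores every supported size against the request and keeps the closest. The flavor of that search, sketched with android.util.Size (the real getClosestSupportedSize in CameraEnumerationAndroid uses a tuned closeness function rather than this plain distance):

    import android.util.Size;
    import java.util.List;

    static Size closestSize(List<Size> supported, int wantWidth, int wantHeight) {
        Size best = null;
        int bestScore = Integer.MAX_VALUE;
        for (Size s : supported) {
            // Manhattan distance between requested and candidate resolutions.
            int score = Math.abs(wantWidth - s.getWidth()) + Math.abs(wantHeight - s.getHeight());
            if (score < bestScore) {
                bestScore = score;
                best = s;
            }
        }
        return best; // null only if the supported list is empty
    }
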
class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { @@ -63,16 +63,15 @@ class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { TextureRegistrar* textures_; }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin + -#if defined(_WINDOWS) void FlutterWebRTCPluginRegisterWithRegistrar( FlutterDesktopPluginRegistrarRef registrar) { -#else -void flutter_web_r_t_c_plugin_register_with_registrar( - FlPluginRegistrar* registrar) { -#endif + static auto* plugin_registrar = new flutter::PluginRegistrar(registrar); - flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + videosdk_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( plugin_registrar); -} \ No newline at end of file + + } + \ No newline at end of file diff --git a/common/cpp/include/flutter_data_channel.h b/common/cpp/include/flutter_data_channel.h index ccffa511cd..05ec312f7b 100644 --- a/common/cpp/include/flutter_data_channel.h +++ b/common/cpp/include/flutter_data_channel.h @@ -4,7 +4,7 @@ #include "flutter_common.h" #include "flutter_webrtc_base.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { class FlutterRTCDataChannelObserver : public RTCDataChannelObserver { public: @@ -49,6 +49,6 @@ class FlutterDataChannel { FlutterWebRTCBase* base_; }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // !FLUTTER_WEBRTC_RTC_DATA_CHANNEL_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_frame_capturer.h b/common/cpp/include/flutter_frame_capturer.h index 41e9a6556e..b0d90ea0e2 100644 --- a/common/cpp/include/flutter_frame_capturer.h +++ b/common/cpp/include/flutter_frame_capturer.h @@ -9,7 +9,7 @@ #include -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { using namespace libwebrtc; @@ -32,6 +32,6 @@ class FlutterFrameCapturer bool SaveFrame(); }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // !FLUTTER_WEBRTC_RTC_FRAME_CAPTURER_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_frame_cryptor.h b/common/cpp/include/flutter_frame_cryptor.h index 111b2f6abf..69c50e68d4 100644 --- a/common/cpp/include/flutter_frame_cryptor.h +++ b/common/cpp/include/flutter_frame_cryptor.h @@ -6,7 +6,7 @@ #include "rtc_frame_cryptor.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { class FlutterFrameCryptorObserver : public libwebrtc::RTCFrameCryptorObserver { public: @@ -23,11 +23,9 @@ class FlutterFrameCryptor { public: FlutterFrameCryptor(FlutterWebRTCBase* base) : base_(base) {} - // Since this takes ownership of result, ownership will be passed back to 'outResult' if this function fails bool HandleFrameCryptorMethodCall( const MethodCallProxy& method_call, - std::unique_ptr result, - std::unique_ptr *outResult); + std::unique_ptr result); void FrameCryptorFactoryCreateFrameCryptor( const EncodableMap& constraints, @@ -98,6 +96,6 @@ class FlutterFrameCryptor { std::map> key_providers_; }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX diff --git a/common/cpp/include/flutter_media_stream.h b/common/cpp/include/flutter_media_stream.h index 8139a56174..7ed5ab071c 100644 --- a/common/cpp/include/flutter_media_stream.h +++ b/common/cpp/include/flutter_media_stream.h @@ -4,7 +4,7 @@ #include "flutter_common.h" #include "flutter_webrtc_base.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { class FlutterMediaStream { 
public: @@ -52,6 +52,6 @@ class FlutterMediaStream { FlutterWebRTCBase* base_; }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // !FLUTTER_WEBRTC_RTC_GET_USERMEDIA_HXX diff --git a/common/cpp/include/flutter_peerconnection.h b/common/cpp/include/flutter_peerconnection.h index 5efd1e5a45..100cc143ce 100644 --- a/common/cpp/include/flutter_peerconnection.h +++ b/common/cpp/include/flutter_peerconnection.h @@ -4,7 +4,7 @@ #include "flutter_common.h" #include "flutter_webrtc_base.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { class FlutterPeerConnectionObserver : public RTCPeerConnectionObserver { public: @@ -203,6 +203,6 @@ const char* peerConnectionStateString(RTCPeerConnectionState state); const char* iceGatheringStateString(RTCIceGatheringState state); -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // !FLUTTER_WEBRTC_RTC_PEER_CONNECTION_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_screen_capture.h b/common/cpp/include/flutter_screen_capture.h index 07b4501e5e..dc38bf098e 100644 --- a/common/cpp/include/flutter_screen_capture.h +++ b/common/cpp/include/flutter_screen_capture.h @@ -7,7 +7,7 @@ #include "rtc_desktop_capturer.h" #include "rtc_desktop_media_list.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { class FlutterScreenCapture : public MediaListObserver, public DesktopCapturerObserver { @@ -55,6 +55,6 @@ class FlutterScreenCapture : public MediaListObserver, std::vector> sources_; }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // FLUTTER_SCRREN_CAPTURE_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_video_renderer.h b/common/cpp/include/flutter_video_renderer.h index 41bec0c4de..6927076d20 100644 --- a/common/cpp/include/flutter_video_renderer.h +++ b/common/cpp/include/flutter_video_renderer.h @@ -9,7 +9,7 @@ #include -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { using namespace libwebrtc; @@ -78,6 +78,6 @@ class FlutterVideoRendererManager { std::map> renderers_; }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // !FLUTTER_WEBRTC_RTC_VIDEO_RENDERER_HXX \ No newline at end of file diff --git a/common/cpp/include/flutter_webrtc.h b/common/cpp/include/flutter_webrtc.h index 5886a67dff..492ad0858c 100644 --- a/common/cpp/include/flutter_webrtc.h +++ b/common/cpp/include/flutter_webrtc.h @@ -12,7 +12,7 @@ #include "libwebrtc.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { using namespace libwebrtc; @@ -38,6 +38,6 @@ class FlutterWebRTC : public FlutterWebRTCBase, std::unique_ptr result); }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // PLUGINS_FLUTTER_WEBRTC_HXX diff --git a/common/cpp/include/flutter_webrtc_base.h b/common/cpp/include/flutter_webrtc_base.h index 9d24455df7..955130a4f2 100644 --- a/common/cpp/include/flutter_webrtc_base.h +++ b/common/cpp/include/flutter_webrtc_base.h @@ -23,7 +23,7 @@ #include "uuidxx.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { using namespace libwebrtc; @@ -126,6 +126,6 @@ class FlutterWebRTCBase { std::unique_ptr event_channel_; }; -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin #endif // !FLUTTER_WEBRTC_BASE_HXX diff --git a/common/cpp/include/flutter_webrtc/flutter_web_r_t_c_plugin.h b/common/cpp/include/videosdk_webrtc/flutter_web_r_t_c_plugin.h 
similarity index 60% rename from common/cpp/include/flutter_webrtc/flutter_web_r_t_c_plugin.h rename to common/cpp/include/videosdk_webrtc/flutter_web_r_t_c_plugin.h index 4ffb80aa4f..98880d9757 100644 --- a/common/cpp/include/flutter_webrtc/flutter_web_r_t_c_plugin.h +++ b/common/cpp/include/videosdk_webrtc/flutter_web_r_t_c_plugin.h @@ -1,27 +1,6 @@ #ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ #define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ -#if defined(_WINDOWS) - -#include -#ifdef FLUTTER_PLUGIN_IMPL -#define FLUTTER_PLUGIN_EXPORT __declspec(dllexport) -#else -#define FLUTTER_PLUGIN_EXPORT __declspec(dllimport) -#endif - -#if defined(__cplusplus) -extern "C" { -#endif - -FLUTTER_PLUGIN_EXPORT void FlutterWebRTCPluginRegisterWithRegistrar( - FlutterDesktopPluginRegistrarRef registrar); - -#if defined(__cplusplus) -} // extern "C" -#endif - -#else #include G_BEGIN_DECLS diff --git a/common/cpp/src/flutter_data_channel.cc b/common/cpp/src/flutter_data_channel.cc index f333d42a6f..900495d15e 100644 --- a/common/cpp/src/flutter_data_channel.cc +++ b/common/cpp/src/flutter_data_channel.cc @@ -2,7 +2,7 @@ #include -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { FlutterRTCDataChannelObserver::FlutterRTCDataChannelObserver( scoped_refptr data_channel, @@ -147,4 +147,4 @@ void FlutterRTCDataChannelObserver::OnMessage(const char* buffer, auto data = EncodableValue(params); event_channel_->Success(data); } -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin diff --git a/common/cpp/src/flutter_frame_capturer.cc b/common/cpp/src/flutter_frame_capturer.cc index 4d0026d74f..5e0fc713c8 100644 --- a/common/cpp/src/flutter_frame_capturer.cc +++ b/common/cpp/src/flutter_frame_capturer.cc @@ -7,7 +7,7 @@ #include #include "svpng.hpp" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { FlutterFrameCapturer::FlutterFrameCapturer(RTCVideoTrack* track, std::string path) { @@ -73,4 +73,4 @@ bool FlutterFrameCapturer::SaveFrame() { return true; } -} // namespace flutter_webrtc_plugin \ No newline at end of file +} // namespace videosdk_webrtc_plugin \ No newline at end of file diff --git a/common/cpp/src/flutter_frame_cryptor.cc b/common/cpp/src/flutter_frame_cryptor.cc index 975f400751..695e5e02d6 100644 --- a/common/cpp/src/flutter_frame_cryptor.cc +++ b/common/cpp/src/flutter_frame_cryptor.cc @@ -2,7 +2,7 @@ #include "base/scoped_ref_ptr.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { libwebrtc::Algorithm AlgorithmFromInt(int algorithm) { switch (algorithm) { @@ -48,8 +48,7 @@ void FlutterFrameCryptorObserver::OnFrameCryptionStateChanged( bool FlutterFrameCryptor::HandleFrameCryptorMethodCall( const MethodCallProxy& method_call, - std::unique_ptr result, - std::unique_ptr *outResult) { + std::unique_ptr result) { const std::string& method_name = method_call.method_name(); if (!method_call.arguments()) { result->Error("Bad Arguments", "Null arguments received"); @@ -103,8 +102,7 @@ bool FlutterFrameCryptor::HandleFrameCryptorMethodCall( KeyProviderDispose(params, std::move(result)); return true; } - - *outResult = std::move(result); + return false; } @@ -605,4 +603,4 @@ void FlutterFrameCryptor::KeyProviderDispose( result->Success(EncodableValue(params)); } -} // namespace flutter_webrtc_plugin \ No newline at end of file +} // namespace videosdk_webrtc_plugin \ No newline at end of file diff --git a/common/cpp/src/flutter_media_stream.cc b/common/cpp/src/flutter_media_stream.cc index 324dbdc4ce..e8036824b5 100644 --- 
a/common/cpp/src/flutter_media_stream.cc +++ b/common/cpp/src/flutter_media_stream.cc @@ -4,7 +4,7 @@ #define DEFAULT_HEIGHT 720 #define DEFAULT_FPS 30 -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { FlutterMediaStream::FlutterMediaStream(FlutterWebRTCBase* base) : base_(base) { base_->audio_device_->OnDeviceChange([&] { @@ -552,4 +552,4 @@ void FlutterMediaStream::MediaStreamTrackDispose( base_->RemoveMediaTrackForId(track_id); result->Success(); } -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin diff --git a/common/cpp/src/flutter_peerconnection.cc b/common/cpp/src/flutter_peerconnection.cc index 0115dc2173..a8fba820d0 100644 --- a/common/cpp/src/flutter_peerconnection.cc +++ b/common/cpp/src/flutter_peerconnection.cc @@ -6,7 +6,7 @@ #include "rtc_dtmf_sender.h" #include "rtc_rtp_parameters.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { std::string RTCMediaTypeToString(RTCMediaType type) { switch (type) { @@ -936,9 +936,6 @@ EncodableMap statsToMap(const scoped_refptr& stats) { auto members = stats->Members(); for (int i = 0; i < members.size(); i++) { auto member = members[i]; - if (!member->IsDefined()) { - continue; - } switch (member->GetType()) { case RTCStatsMember::Type::kBool: values[EncodableValue(member->GetName().std_string())] = @@ -1384,4 +1381,4 @@ void FlutterPeerConnectionObserver::RemoveStreamForId(const std::string& id) { remote_streams_.erase(it); } -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin diff --git a/common/cpp/src/flutter_screen_capture.cc b/common/cpp/src/flutter_screen_capture.cc index df660daf9b..10d859b722 100644 --- a/common/cpp/src/flutter_screen_capture.cc +++ b/common/cpp/src/flutter_screen_capture.cc @@ -1,6 +1,6 @@ #include "flutter_screen_capture.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { FlutterScreenCapture::FlutterScreenCapture(FlutterWebRTCBase* base) : base_(base) {} @@ -279,4 +279,4 @@ void FlutterScreenCapture::GetDisplayMedia( result->Success(EncodableValue(params)); } -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin diff --git a/common/cpp/src/flutter_video_renderer.cc b/common/cpp/src/flutter_video_renderer.cc index e7e1774a39..3f6f4ca2a1 100644 --- a/common/cpp/src/flutter_video_renderer.cc +++ b/common/cpp/src/flutter_video_renderer.cc @@ -1,6 +1,6 @@ #include "flutter_video_renderer.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { FlutterVideoRenderer::~FlutterVideoRenderer() {} @@ -181,4 +181,4 @@ void FlutterVideoRendererManager::VideoRendererDispose( "VideoRendererDispose() texture not found!"); } -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin diff --git a/common/cpp/src/flutter_webrtc.cc b/common/cpp/src/flutter_webrtc.cc index 77cddc1bd4..3ba9a0501d 100644 --- a/common/cpp/src/flutter_webrtc.cc +++ b/common/cpp/src/flutter_webrtc.cc @@ -1,8 +1,8 @@ #include "flutter_webrtc.h" -#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" +#include "videosdk_webrtc/flutter_web_r_t_c_plugin.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { FlutterWebRTC::FlutterWebRTC(FlutterWebRTCPlugin* plugin) : FlutterWebRTCBase::FlutterWebRTCBase(plugin->messenger(), @@ -1241,13 +1241,11 @@ void FlutterWebRTC::HandleMethodCall( state[EncodableValue("state")] = peerConnectionStateString(pc->peer_connection_state()); result->Success(EncodableValue(state)); + } else if (HandleFrameCryptorMethodCall(method_call, 
std::move(result))) { + // Do nothing } else { - if (HandleFrameCryptorMethodCall(method_call, std::move(result), &result)) { - return; - } else { - result->NotImplemented(); - } + result->NotImplemented(); } } -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin diff --git a/common/cpp/src/flutter_webrtc_base.cc b/common/cpp/src/flutter_webrtc_base.cc index 901ba384eb..f265248885 100644 --- a/common/cpp/src/flutter_webrtc_base.cc +++ b/common/cpp/src/flutter_webrtc_base.cc @@ -3,7 +3,7 @@ #include "flutter_data_channel.h" #include "flutter_peerconnection.h" -namespace flutter_webrtc_plugin { +namespace videosdk_webrtc_plugin { const char* kEventChannelName = "FlutterWebRTC.Event"; @@ -359,4 +359,4 @@ FlutterWebRTCBase::GetRtpReceiverById(RTCPeerConnection* pc, return result; } -} // namespace flutter_webrtc_plugin +} // namespace videosdk_webrtc_plugin diff --git a/common/darwin/Classes/AudioManager.h b/common/darwin/Classes/AudioManager.h new file mode 100644 index 0000000000..211f3f1e4c --- /dev/null +++ b/common/darwin/Classes/AudioManager.h @@ -0,0 +1,20 @@ +#import +#import +#import "AudioProcessingAdapter.h" + +@interface AudioManager : NSObject + +@property(nonatomic, strong) RTCDefaultAudioProcessingModule* _Nonnull audioProcessingModule; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull capturePostProcessingAdapter; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull renderPreProcessingAdapter; + ++ (_Nonnull instancetype)sharedInstance; + +- (void)addLocalAudioRenderer:(nonnull id)renderer; + +- (void)removeLocalAudioRenderer:(nonnull id)renderer; + +@end + diff --git a/common/darwin/Classes/AudioManager.m b/common/darwin/Classes/AudioManager.m new file mode 100644 index 0000000000..efc3a8741e --- /dev/null +++ b/common/darwin/Classes/AudioManager.m @@ -0,0 +1,50 @@ +#import "AudioManager.h" +#import "AudioProcessingAdapter.h" + +@implementation AudioManager { + RTCDefaultAudioProcessingModule* _audioProcessingModule; + AudioProcessingAdapter* _capturePostProcessingAdapter; + AudioProcessingAdapter* _renderPreProcessingAdapter; +} + +@synthesize capturePostProcessingAdapter = _capturePostProcessingAdapter; +@synthesize renderPreProcessingAdapter = _renderPreProcessingAdapter; +@synthesize audioProcessingModule = _audioProcessingModule; + ++ (instancetype)sharedInstance { + static dispatch_once_t onceToken; + static AudioManager* sharedInstance = nil; + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + return sharedInstance; +} + +- (instancetype)init { + if (self = [super init]) { + _audioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init]; + _capturePostProcessingAdapter = [[AudioProcessingAdapter alloc] init]; + _renderPreProcessingAdapter = [[AudioProcessingAdapter alloc] init]; + _audioProcessingModule.capturePostProcessingDelegate = _capturePostProcessingAdapter; + _audioProcessingModule.renderPreProcessingDelegate = _renderPreProcessingAdapter; + } + return self; +} + +- (void)addLocalAudioRenderer:(nonnull id)renderer { + [_capturePostProcessingAdapter addAudioRenderer:renderer]; +} + +- (void)removeLocalAudioRenderer:(nonnull id)renderer { + [_capturePostProcessingAdapter removeAudioRenderer:renderer]; +} + +- (void)addRemoteAudioSink:(nonnull id)sink { + [_renderPreProcessingAdapter addAudioRenderer:sink]; +} + +- (void)removeRemoteAudioSink:(nonnull id)sink { + [_renderPreProcessingAdapter removeAudioRenderer:sink]; +} + +@end \ No newline at end of file diff --git 
a/common/darwin/Classes/AudioProcessingAdapter.h b/common/darwin/Classes/AudioProcessingAdapter.h new file mode 100644 index 0000000000..91498b45d8 --- /dev/null +++ b/common/darwin/Classes/AudioProcessingAdapter.h @@ -0,0 +1,26 @@ +#import +#import + +@protocol ExternalAudioProcessingDelegate + +- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels; + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) * _Nonnull)audioBuffer; + +- (void)audioProcessingRelease; + +@end + +@interface AudioProcessingAdapter : NSObject + +- (nonnull instancetype)init; + +- (void)addProcessing:(id _Nonnull)processor; + +- (void)removeProcessing:(id _Nonnull)processor; + +- (void)addAudioRenderer:(nonnull id)renderer; + +- (void)removeAudioRenderer:(nonnull id)renderer; + +@end \ No newline at end of file diff --git a/common/darwin/Classes/AudioProcessingAdapter.m b/common/darwin/Classes/AudioProcessingAdapter.m new file mode 100644 index 0000000000..73fa3dda1f --- /dev/null +++ b/common/darwin/Classes/AudioProcessingAdapter.m @@ -0,0 +1,105 @@ +#import "AudioProcessingAdapter.h" +#import +#import + +@implementation AudioProcessingAdapter { + NSMutableArray>* _renderers; + NSMutableArray>* _processors; + os_unfair_lock _lock; +} + +- (instancetype)init { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _renderers = [[NSMutableArray> alloc] init]; + _processors = [[NSMutableArray> alloc] init]; + } + return self; +} + +- (void)addProcessing:(id _Nonnull)processor { + os_unfair_lock_lock(&_lock); + [_processors addObject:processor]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeProcessing:(id _Nonnull)processor { + os_unfair_lock_lock(&_lock); + _processors = [[_processors + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != processor; + }]] mutableCopy]; + os_unfair_lock_unlock(&_lock); +} + +- (void)addAudioRenderer:(nonnull id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers addObject:renderer]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeAudioRenderer:(nonnull id)renderer { + os_unfair_lock_lock(&_lock); + _renderers = [[_renderers + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != renderer; + }]] mutableCopy]; + os_unfair_lock_unlock(&_lock); +} + +- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingInitializeWithSampleRate:sampleRateHz channels:channels]; + } + os_unfair_lock_unlock(&_lock); +} + +- (AVAudioPCMBuffer*)toPCMBuffer:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + AVAudioFormat* format = + [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16 + sampleRate:audioBuffer.frames * 100.0 + channels:(AVAudioChannelCount)audioBuffer.channels + interleaved:NO]; + AVAudioPCMBuffer* pcmBuffer = + [[AVAudioPCMBuffer alloc] initWithPCMFormat:format + frameCapacity:(AVAudioFrameCount)audioBuffer.frames]; + if (!pcmBuffer) { + NSLog(@"Failed to create AVAudioPCMBuffer"); + return nil; + } + pcmBuffer.frameLength = (AVAudioFrameCount)audioBuffer.frames; + for (int i = 0; i < audioBuffer.channels; i++) { + float* sourceBuffer = [audioBuffer rawBufferForChannel:i]; + int16_t* targetBuffer = (int16_t*)pcmBuffer.int16ChannelData[i]; + for (int frame = 0; frame < audioBuffer.frames; 
frame++) { + targetBuffer[frame] = sourceBuffer[frame]; + } + } + return pcmBuffer; +} + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingProcess:audioBuffer]; + } + + for (id renderer in _renderers) { + [renderer renderPCMBuffer:[self toPCMBuffer:audioBuffer]]; + } + os_unfair_lock_unlock(&_lock); +} + +- (void)audioProcessingRelease { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingRelease]; + } + os_unfair_lock_unlock(&_lock); +} + +@end \ No newline at end of file diff --git a/common/darwin/Classes/AudioUtils.m b/common/darwin/Classes/AudioUtils.m index 5a48a7daae..43378adde4 100644 --- a/common/darwin/Classes/AudioUtils.m +++ b/common/darwin/Classes/AudioUtils.m @@ -14,9 +14,8 @@ + (void)ensureAudioSessionWithRecording:(BOOL)recording { session.category != AVAudioSessionCategoryMultiRoute) { config.category = AVAudioSessionCategoryPlayAndRecord; config.categoryOptions = - AVAudioSessionCategoryOptionAllowBluetooth | - AVAudioSessionCategoryOptionAllowBluetoothA2DP | - AVAudioSessionCategoryOptionAllowAirPlay; + AVAudioSessionCategoryOptionAllowBluetooth | AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowAirPlay; [session lockForConfiguration]; NSError* error = nil; @@ -93,7 +92,7 @@ + (void)setSpeakerphoneOn:(BOOL)enable { AVAudioSessionCategoryOptionAllowBluetooth error:&error]; - success = [session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_Speaker + success = [session overrideOutputAudioPort:kAudioSessionProperty_OverrideAudioRoute error:&error]; if (!success) NSLog(@"setSpeakerphoneOn: Port override failed due to: %@", error); diff --git a/common/darwin/Classes/CameraUtils.h b/common/darwin/Classes/CameraUtils.h new file mode 100644 index 0000000000..efe9ae87aa --- /dev/null +++ b/common/darwin/Classes/CameraUtils.h @@ -0,0 +1,43 @@ +#import +#import "FlutterWebRTCPlugin.h" + +@interface FlutterWebRTCPlugin (CameraUtils) + +- (void)mediaStreamTrackHasTorch:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetTorch:(nonnull RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetZoom:(nonnull RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track + focusMode:(nonnull NSString*)focusMode + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track + focusPoint:(nonnull NSDictionary*)focusPoint + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track + exposureMode:(nonnull NSString*)exposureMode + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track + exposurePoint:(nonnull NSDictionary*)exposurePoint + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSwitchCamera:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; + +- (NSInteger)selectFpsForFormat:(nonnull AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps; + +- (nullable AVCaptureDeviceFormat*)selectFormatForDevice:(nonnull AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight; + +- (nullable 
AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position; + + +@end \ No newline at end of file diff --git a/common/darwin/Classes/CameraUtils.m b/common/darwin/Classes/CameraUtils.m new file mode 100644 index 0000000000..fea72a6b8e --- /dev/null +++ b/common/darwin/Classes/CameraUtils.m @@ -0,0 +1,350 @@ +#import "CameraUtils.h" + +@implementation FlutterWebRTCPlugin (CameraUtils) + +-(AVCaptureDevice*) currentDevice { + if (!self.videoCapturer) { + return nil; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + return nil; + } + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + return deviceInput.device; +} + +- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. Can't check torch"); + result(@NO); + return; + } + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +#else + NSLog(@"Not supported on macOS. Can't check torch"); + result(@NO); +#endif +} + +- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(FlutterResult)result { + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device is nil" details:nil]); + return; + } + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device does not support torch" details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:error.localizedDescription details:nil]); + return; + } + + device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"device is nil" details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:error.localizedDescription details:nil]); + return; + } + + CGFloat desiredZoomFactor = (CGFloat)zoomLevel; + device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + [device unlockForConfiguration]; + + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)applyFocusMode:(NSString*)focusMode onDevice:(AVCaptureDevice *)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if([@"locked" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } else if([@"auto" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; + } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track + focusMode:(nonnull NSString*)focusMode + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"device is nil" details:nil]); + return; + } + self.focusMode = focusMode; + [self applyFocusMode:focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't set focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track + focusPoint:(nonnull NSDictionary*)focusPoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"device is nil" details:nil]); + return; + } + BOOL reset = ((NSNumber *)focusPoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber *)focusPoint[@"x"]).doubleValue; + y = ((NSNumber *)focusPoint[@"y"]).doubleValue; + } + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Focus point of interest is not supported" details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + [device setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation + x:x + y:y]]; + [device unlockForConfiguration]; + + [self applyFocusMode:self.focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void) applyExposureMode:(NSString*)exposureMode onDevice:(AVCaptureDevice *)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if([@"locked" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } else if([@"auto" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) { + [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure]; + } else if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track + exposureMode:(nonnull NSString*)exposureMode + result:(nonnull FlutterResult)result{ +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"device is nil" details:nil]); + return; + } + self.exposureMode = exposureMode; + [self applyExposureMode:exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +#if TARGET_OS_IPHONE +- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation + x:(double)x + y:(double)y { + double oldX = x, oldY = y; + switch (orientation) { + case UIDeviceOrientationPortrait: // 90 ccw + y = 1 - oldX; + x = oldY; + break; + case UIDeviceOrientationPortraitUpsideDown: // 90 cw + x = 1 - oldY; + y = oldX; + break; + case UIDeviceOrientationLandscapeRight: // 180 + x = 1 - x; + y = 1 - y; + break; + case UIDeviceOrientationLandscapeLeft: + default: + // No rotation required + break; + } + return CGPointMake(x, y); +} +#endif + +- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track + exposurePoint:(nonnull NSDictionary*)exposurePoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"device is nil" details:nil]); + return; + } + + BOOL reset = ((NSNumber *)exposurePoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber *)exposurePoint[@"x"]).doubleValue; + y = ((NSNumber *)exposurePoint[@"y"]).doubleValue; + } + if (!device.isExposurePointOfInterestSupported) { + NSLog(@"Exposure point of interest is not supported. 
Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Exposure point of interest is not supported" details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + [device setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation + x:x + y:y]]; + [device unlockForConfiguration]; + + [self applyExposureMode:self.exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result { + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't switch camera"); + return; + } +#if TARGET_OS_IPHONE + [self.videoCapturer stopCapture]; +#endif + self._usingFrontCamera = !self._usingFrontCamera; + AVCaptureDevicePosition position = + self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; + AVCaptureDevice* videoDevice = [self findDeviceForPosition:position]; + AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice + targetWidth:self._lastTargetWidth + targetHeight:self._lastTargetHeight]; + [self.videoCapturer startCaptureWithDevice:videoDevice + format:selectedFormat + fps:[self selectFpsForFormat:selectedFormat + targetFps:self._lastTargetFps] + completionHandler:^(NSError* error) { + if (error != nil) { + result([FlutterError errorWithCode:@"Error while switching camera" + message:@"Error while switching camera" + details:error]); + } else { + result([NSNumber numberWithBool:self._usingFrontCamera]); + } + }]; +} + + +- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position { + if (position == AVCaptureDevicePositionUnspecified) { + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + NSArray* captureDevices = [RTCCameraVideoCapturer captureDevices]; + for (AVCaptureDevice* device in captureDevices) { + if (device.position == position) { + return device; + } + } + return captureDevices[0]; +} + +- (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight { + NSArray* formats = + [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + AVCaptureDeviceFormat* selectedFormat = nil; + long currentDiff = INT_MAX; + for (AVCaptureDeviceFormat* format in formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); + //NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d", format.videoSupportedFrameRateRanges, dimension.width, dimension.height); + long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height); + if (diff < currentDiff) { + selectedFormat = format; + currentDiff = diff; + } else if (diff == currentDiff && + pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, 
+- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps {
+  Float64 maxSupportedFramerate = 0;
+  for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) {
+    maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
+  }
+  return fmin(maxSupportedFramerate, targetFps);
+}
+
+@end
\ No newline at end of file
diff --git a/common/darwin/Classes/CustomCapturerDelegate.h b/common/darwin/Classes/CustomCapturerDelegate.h
new file mode 100644
index 0000000000..cc2adedaba
--- /dev/null
+++ b/common/darwin/Classes/CustomCapturerDelegate.h
@@ -0,0 +1,10 @@
+#import <Foundation/Foundation.h>
+#import <WebRTC/WebRTC.h>
+
+@interface CustomCapturerDelegate : NSObject <RTCVideoCapturerDelegate>
+
+@property(nonatomic, strong) RTCVideoSource* videoSource;
+
+- (instancetype)initWithVideoSource:(RTCVideoSource*)videoSource;
+
+@end
\ No newline at end of file
diff --git a/common/darwin/Classes/FlutterRTCDesktopCapturer.m b/common/darwin/Classes/FlutterRTCDesktopCapturer.m
index 2d54dd91c0..af778607e3 100644
--- a/common/darwin/Classes/FlutterRTCDesktopCapturer.m
+++ b/common/darwin/Classes/FlutterRTCDesktopCapturer.m
@@ -7,6 +7,8 @@
 #import "FlutterBroadcastScreenCapturer.h"
 #import "FlutterRPScreenRecorder.h"
 #endif
+#import "VideoProcessingAdapter.h"
+#import "LocalVideoTrack.h"
 
 #if TARGET_OS_OSX
 RTCDesktopMediaList* _screen = nil;
@@ -20,8 +22,9 @@ - (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result
   NSString* mediaStreamId = [[NSUUID UUID] UUIDString];
   RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
   RTCVideoSource* videoSource = [self.peerConnectionFactory videoSourceForScreenCast:YES];
+  NSString* trackUUID = [[NSUUID UUID] UUIDString];
-
+  VideoProcessingAdapter* videoProcessingAdapter = [[VideoProcessingAdapter alloc] initWithRTCVideoSource:videoSource];
 #if TARGET_OS_IPHONE
   BOOL useBroadcastExtension = false;
   id videoConstraints = constraints[@"video"];
@@ -34,9 +37,9 @@ - (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result
   id screenCapturer;
 
   if (useBroadcastExtension) {
-    screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoSource];
+    screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoProcessingAdapter];
   } else {
-    screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource];
+    screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:[videoProcessingAdapter source]];
   }
 
   [screenCapturer startCapture];
@@ -110,7 +113,7 @@ - (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result
   RTCDesktopSource* source = nil;
   if (useDefaultScreen) {
     desktopCapturer = [[RTCDesktopCapturer alloc] initWithDefaultScreen:self
-                                                        captureDelegate:videoSource];
+                                                        captureDelegate:videoProcessingAdapter];
   } else {
     source = [self getSourceById:sourceId];
     if (source == nil) {
@@ -119,7 +122,7 @@ - (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result
     }
     desktopCapturer = [[RTCDesktopCapturer alloc] initWithSource:source
                                                         delegate:self
-                                                 captureDelegate:videoSource];
+                                                 captureDelegate:videoProcessingAdapter];
   }
   [desktopCapturer startCaptureWithFPS:fps];
   NSLog(@"start desktop capture: sourceId: %@, type: %@, fps: %lu", sourceId,
@@ -132,12 +135,14 @@ - (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result
     handler();
   };
 #endif
-
+  RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
   [mediaStream addVideoTrack:videoTrack];
-  [self.localTracks setObject:videoTrack forKey:trackUUID];
+  LocalVideoTrack* localVideoTrack = [[LocalVideoTrack alloc] initWithTrack:videoTrack videoProcessing:videoProcessingAdapter];
+
+  [self.localTracks setObject:localVideoTrack forKey:trackUUID];
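(Editor's note: because the screen-share track is now wrapped in a LocalVideoTrack carrying the processing adapter, frame processors can be attached or detached at runtime. A minimal sketch; MyWatermarkProcessor is a hypothetical class conforming to ExternalVideoProcessingDelegate:)

LocalVideoTrack* shareTrack = [self.localTracks objectForKey:trackUUID];
id<ExternalVideoProcessingDelegate> watermark = [MyWatermarkProcessor new];  // hypothetical
[shareTrack addProcessing:watermark];   // captured frames now pass through -onFrame: first
// ... later:
[shareTrack removeProcessing:watermark];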
NSMutableArray* audioTracks = [NSMutableArray array]; NSMutableArray* videoTracks = [NSMutableArray array]; diff --git a/common/darwin/Classes/FlutterRTCFrameCapturer.m b/common/darwin/Classes/FlutterRTCFrameCapturer.m index baf12de1e0..4c31f89daa 100644 --- a/common/darwin/Classes/FlutterRTCFrameCapturer.m +++ b/common/darwin/Classes/FlutterRTCFrameCapturer.m @@ -137,7 +137,7 @@ - (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame { dstUV:dstUV dstStrideUV:(int)dstUVStride width:i420Buffer.width - height:i420Buffer.height]; + height:i420Buffer.height]; } else { uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); diff --git a/common/darwin/Classes/FlutterRTCFrameCryptor.m b/common/darwin/Classes/FlutterRTCFrameCryptor.m index 748f677395..1c8ae09c63 100644 --- a/common/darwin/Classes/FlutterRTCFrameCryptor.m +++ b/common/darwin/Classes/FlutterRTCFrameCryptor.m @@ -77,14 +77,13 @@ - (void)handleFrameCryptorMethodCall:(nonnull FlutterMethodCall*)call } } -- (RTCCryptorAlgorithm)getAlgorithm:(NSNumber*)algorithm { +- (RTCCryptorAlgorithm) getAlgorithm:(NSNumber*)algorithm { switch ([algorithm intValue]) { case 0: - return RTCCryptorAlgorithmAesGcm; - case 1: - return RTCCryptorAlgorithmAesCbc; + return RTCCryptorAlgorithmAesGcm; + default: - return RTCCryptorAlgorithmAesGcm; + return RTCCryptorAlgorithmAesGcm; } } diff --git a/common/darwin/Classes/FlutterRTCMediaStream.m b/common/darwin/Classes/FlutterRTCMediaStream.m index 485fa739fe..41ac44defc 100644 --- a/common/darwin/Classes/FlutterRTCMediaStream.m +++ b/common/darwin/Classes/FlutterRTCMediaStream.m @@ -1,8 +1,56 @@ #import #import "AudioUtils.h" +#import "CameraUtils.h" #import "FlutterRTCFrameCapturer.h" #import "FlutterRTCMediaStream.h" #import "FlutterRTCPeerConnection.h" +#import +#import "CustomCapturerDelegate.h" +#import "WebRTCService.h" +#import "VideoProcessor.h" +#import "LocalVideoTrack.h" +#import "LocalAudioTrack.h" +#import "VideoProcessingAdapter.h" + +@implementation CustomCapturerDelegate + +- (instancetype)initWithVideoSource:(RTCVideoSource *)videoSource { + self = [super init]; + if (self) { + _videoSource = videoSource; + } + return self; +} + +- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame { + WebRTCService *webrtcService = [WebRTCService sharedInstance]; + @try { + if ([webrtcService getVideoProcessor]) { + // Process the frame using your Processor instance + RTCVideoFrame *processedFrame = [[webrtcService getVideoProcessor] onFrameReceived:frame]; + // Pass the processed frame to the video source, or the original frame if processedFrame is nil + if (self.videoSource) { + if (processedFrame) { + [self.videoSource capturer:capturer didCaptureVideoFrame:processedFrame]; + } else { + [self.videoSource capturer:capturer didCaptureVideoFrame:frame]; + } + } + } else { + if (self.videoSource) { + [self.videoSource capturer:capturer didCaptureVideoFrame:frame]; + } + } + } + @catch (NSException *exception) { + if (self.videoSource) { + [self.videoSource capturer:capturer didCaptureVideoFrame:frame]; + } + } +} + + +@end @implementation RTCMediaStreamTrack (Flutter) @@ -37,15 +85,21 @@ @implementation FlutterWebRTCPlugin (RTCMediaStream) * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} */ typedef void (^NavigatorUserMediaErrorCallback)(NSString* errorType, NSString* errorMessage); - /** * {@link 
https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback}
  */
 typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream* mediaStream);
-
 - (NSDictionary*)defaultVideoConstraints {
   return @{@"minWidth" : @"1280", @"minHeight" : @"720", @"minFrameRate" : @"30"};
 }
 
+- (NSDictionary*)defaultAudioConstraints {
+  return @{};
+}
+
 - (RTCMediaConstraints*)defaultMediaStreamConstraints {
   RTCMediaConstraints* constraints =
@@ -54,6 +108,40 @@ - (RTCMediaConstraints*)defaultMediaStreamConstraints {
   return constraints;
 }
 
+- (NSArray<AVCaptureDevice*>*)captureDevices {
+  if (@available(iOS 13.0, macOS 10.15, macCatalyst 14.0, tvOS 17.0, *)) {
+    NSArray* deviceTypes = @[
+#if TARGET_OS_IPHONE
+      AVCaptureDeviceTypeBuiltInTripleCamera,
+      AVCaptureDeviceTypeBuiltInDualCamera,
+      AVCaptureDeviceTypeBuiltInDualWideCamera,
+      AVCaptureDeviceTypeBuiltInWideAngleCamera,
+      AVCaptureDeviceTypeBuiltInTelephotoCamera,
+      AVCaptureDeviceTypeBuiltInUltraWideCamera,
+#else
+      AVCaptureDeviceTypeBuiltInWideAngleCamera,
+#endif
+    ];
+
+#if !TARGET_OS_IPHONE
+    if (@available(macOS 13.0, *)) {
+      deviceTypes = [deviceTypes arrayByAddingObject:AVCaptureDeviceTypeDeskViewCamera];
+    }
+#endif
+
+    if (@available(iOS 17.0, macOS 14.0, tvOS 17.0, *)) {
+      deviceTypes = [deviceTypes arrayByAddingObjectsFromArray:@[
+        AVCaptureDeviceTypeContinuityCamera,
+        AVCaptureDeviceTypeExternal,
+      ]];
+    }
+
+    return [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
+                                                                  mediaType:AVMediaTypeVideo
+                                                                   position:AVCaptureDevicePositionUnspecified].devices;
+  }
+  return @[];
+}
 /**
  * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints,
  * adds it to a specific {@link RTCMediaStream}, and reports success to a
@@ -77,7 +165,7 @@ - (void)getUserAudio:(NSDictionary*)constraints
                mediaStream:(RTCMediaStream*)mediaStream {
   id audioConstraints = constraints[@"audio"];
   NSString* audioDeviceId = @"";
-
+  RTCMediaConstraints* rtcConstraints;
   if ([audioConstraints isKindOfClass:[NSDictionary class]]) {
     // constraints.audio.deviceId
     NSString* deviceId = audioConstraints[@"deviceId"];
@@ -85,7 +173,7 @@ - (void)getUserAudio:(NSDictionary*)constraints
     if (deviceId) {
       audioDeviceId = deviceId;
     }
-
+    rtcConstraints = [self parseMediaConstraints:audioConstraints];
     // constraints.audio.optional.sourceId
     id optionalVideoConstraints = audioConstraints[@"optional"];
     if (optionalVideoConstraints && [optionalVideoConstraints isKindOfClass:[NSArray class]] &&
@@ -100,7 +188,10 @@ - (void)getUserAudio:(NSDictionary*)constraints
       }
     }
   }
-  }
+  } else {
+    rtcConstraints = [self parseMediaConstraints:[self defaultAudioConstraints]];
+  }
+
 #if !defined(TARGET_OS_IPHONE)
   if (audioDeviceId != nil) {
@@ -108,8 +199,11 @@ - (void)getUserAudio:(NSDictionary*)constraints
   }
 #endif
 
+
   NSString* trackId = [[NSUUID UUID] UUIDString];
-  RTCAudioTrack* audioTrack = [self.peerConnectionFactory audioTrackWithTrackId:trackId];
+  RTCAudioSource* audioSource = [self.peerConnectionFactory audioSourceWithConstraints:rtcConstraints];
+  RTCAudioTrack* audioTrack = [self.peerConnectionFactory audioTrackWithSource:audioSource trackId:trackId];
+  LocalAudioTrack* localAudioTrack = [[LocalAudioTrack alloc] initWithTrack:audioTrack];
 
   audioTrack.settings = @{
     @"deviceId" : audioDeviceId,
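(Editor's note: the new -captureDevices helper above enumerates cameras through AVCaptureDeviceDiscoverySession instead of AVCaptureDevice's deprecated +devicesWithMediaType:. A quick sketch of listing the selectable camera IDs; the NSLog format is illustrative only:)

for (AVCaptureDevice* device in [self captureDevices]) {
  // uniqueID is what getUserMedia matches against constraints[@"video"][@"deviceId"]
  NSLog(@"camera: %@ (%@)", device.localizedName, device.uniqueID);
}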
@@ -123,7 +218,7 @@ - (void)getUserAudio:(NSDictionary*)constraints
 
   [mediaStream addAudioTrack:audioTrack];
 
-  [self.localTracks setObject:audioTrack forKey:trackId];
+  [self.localTracks setObject:localAudioTrack forKey:trackId];
 
   [self ensureAudioSession];
@@ -309,14 +404,18 @@ - (void)getUserVideo:(NSDictionary*)constraints
   AVCaptureDevice* videoDevice;
   NSString* videoDeviceId = nil;
   NSString* facingMode = nil;
-
+  NSArray* captureDevices = [self captureDevices];
   if ([videoConstraints isKindOfClass:[NSDictionary class]]) {
     // constraints.video.deviceId
     NSString* deviceId = videoConstraints[@"deviceId"];
     if (deviceId) {
-      videoDevice = [AVCaptureDevice deviceWithUniqueID:deviceId];
-      videoDeviceId = deviceId;
+      for (AVCaptureDevice* device in captureDevices) {
+        if ([deviceId isEqualToString:device.uniqueID]) {
+          videoDevice = device;
+          videoDeviceId = deviceId;
+        }
+      }
     }
     // constraints.video.optional
@@ -328,7 +427,13 @@ - (void)getUserVideo:(NSDictionary*)constraints
         if ([item isKindOfClass:[NSDictionary class]]) {
           NSString* sourceId = ((NSDictionary*)item)[@"sourceId"];
           if (sourceId) {
-            videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId];
+            for (AVCaptureDevice* device in captureDevices) {
+              if ([sourceId isEqualToString:device.uniqueID]) {
+                videoDevice = device;
+                videoDeviceId = sourceId;
+              }
+            }
+
             if (videoDevice) {
               videoDeviceId = sourceId;
               break;
@@ -361,10 +466,11 @@ - (void)getUserVideo:(NSDictionary*)constraints
       }
     }
   }
-  if ([videoConstraints isKindOfClass:[NSNumber class]]) {
+  if ([videoConstraints isKindOfClass:[NSNumber class]]) {
     videoConstraints = @{@"mandatory": [self defaultVideoConstraints]};
   }
 
+
   NSInteger targetWidth = 0;
   NSInteger targetHeight = 0;
   NSInteger targetFps = 0;
@@ -421,21 +527,28 @@ - (void)getUserVideo:(NSDictionary*)constraints
 
   if (videoDevice) {
     RTCVideoSource* videoSource = [self.peerConnectionFactory videoSource];
+
+#if TARGET_OS_OSX
     if (self.videoCapturer) {
       [self.videoCapturer stopCapture];
     }
-    self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource];
+#endif
+    self.customDelegate = [[CustomCapturerDelegate alloc] initWithVideoSource:videoSource];
+    self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:self.customDelegate];
+
+    VideoProcessingAdapter* videoProcessingAdapter = [[VideoProcessingAdapter alloc] initWithRTCVideoSource:videoSource];
     AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice
                                                             targetWidth:targetWidth
                                                            targetHeight:targetHeight];
-
+    CMVideoDimensions selectedDimension = CMVideoFormatDescriptionGetDimensions(selectedFormat.formatDescription);
+    NSInteger selectedWidth = (NSInteger)selectedDimension.width;
+    NSInteger selectedHeight = (NSInteger)selectedDimension.height;
     NSInteger selectedFps = [self selectFpsForFormat:selectedFormat targetFps:targetFps];
     self._lastTargetFps = selectedFps;
     self._lastTargetWidth = targetWidth;
     self._lastTargetHeight = targetHeight;
-    NSLog(@"target format %ldx%ld, targetFps: %ld, seledted fps %ld", targetWidth, targetHeight, targetFps, selectedFps);
+    NSLog(@"target format %ldx%ld, targetFps: %ld, selected format: %ldx%ld, selected fps %ld", targetWidth, targetHeight, targetFps, selectedWidth, selectedHeight, selectedFps);
 
     if ([videoDevice lockForConfiguration:NULL]) {
       @try {
@@
-459,7 +575,8 @@ - (void)getUserVideo:(NSDictionary*)constraints NSString* trackUUID = [[NSUUID UUID] UUIDString]; RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID]; - + LocalVideoTrack *localVideoTrack = [[LocalVideoTrack alloc] initWithTrack:videoTrack videoProcessing:videoProcessingAdapter]; + __weak RTCCameraVideoCapturer* capturer = self.videoCapturer; self.videoCapturerStopHandlers[videoTrack.trackId] = ^(CompletionHandler handler) { NSLog(@"Stop video capturer, trackID %@", videoTrack.trackId); @@ -489,7 +606,7 @@ - (void)getUserVideo:(NSDictionary*)constraints [mediaStream addVideoTrack:videoTrack]; - [self.localTracks setObject:videoTrack forKey:trackUUID]; + [self.localTracks setObject:localVideoTrack forKey:trackUUID]; successCallback(mediaStream); } else { @@ -539,8 +656,7 @@ - (void)requestAccessForMediaType:(NSString*)mediaType // because audio capture is done using AVAudioSession which does not use // AVCaptureDevice there. Anyway, Simulator will not (visually) request access // for audio. - if (mediaType == AVMediaTypeVideo && - [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) { + if (mediaType == AVMediaTypeVideo && [self captureDevices].count == 0) { // Since successCallback and errorCallback are asynchronously invoked // elsewhere, make sure that the invocation here is consistent. dispatch_async(dispatch_get_main_queue(), ^{ @@ -618,7 +734,7 @@ - (void)createLocalMediaStream:(FlutterResult)result { - (void)getSources:(FlutterResult)result { NSMutableArray* sources = [NSMutableArray array]; - NSArray* videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + NSArray* videoDevices = [self captureDevices]; for (AVCaptureDevice* device in videoDevices) { [sources addObject:@{ @"facing" : device.positionString, @@ -628,22 +744,22 @@ - (void)getSources:(FlutterResult)result { }]; } #if TARGET_OS_IPHONE - RTCAudioSession* session = [RTCAudioSession sharedInstance]; for (AVAudioSessionPortDescription* port in session.session.availableInputs) { // NSLog(@"input portName: %@, type %@", port.portName,port.portType); [sources addObject:@{ + @"facing" : @"", @"deviceId" : port.UID, @"label" : port.portName, @"groupId" : port.portType, @"kind" : @"audioinput", }]; } - for (AVAudioSessionPortDescription* port in session.currentRoute.outputs) { // NSLog(@"output portName: %@, type %@", port.portName,port.portType); if (session.currentRoute.outputs.count == 1 && ![port.UID isEqualToString:@"Speaker"]) { [sources addObject:@{ + @"facing" : @"", @"deviceId" : @"Speaker", @"label" : @"Speaker", @"groupId" : @"Speaker", @@ -651,6 +767,7 @@ - (void)getSources:(FlutterResult)result { }]; } [sources addObject:@{ + @"facing" : @"", @"deviceId" : port.UID, @"label" : port.portName, @"groupId" : port.portType, @@ -664,6 +781,7 @@ - (void)getSources:(FlutterResult)result { NSArray* inputDevices = [audioDeviceModule inputDevices]; for (RTCIODevice* device in inputDevices) { [sources addObject:@{ + @"facing" : @"", @"deviceId" : device.deviceId, @"label" : device.name, @"kind" : @"audioinput", @@ -673,6 +791,7 @@ - (void)getSources:(FlutterResult)result { NSArray* outputDevices = [audioDeviceModule outputDevices]; for (RTCIODevice* device in outputDevices) { [sources addObject:@{ + @"facing" : @"", @"deviceId" : device.deviceId, @"label" : device.name, @"kind" : @"audiooutput", @@ -905,7 +1024,7 @@ - (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position { return device; } } - 
if(captureDevices.count > 0) { + if(captureDevices.count > 0) { return captureDevices[0]; } return nil; @@ -921,6 +1040,13 @@ - (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device for (AVCaptureDeviceFormat* format in formats) { CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); + #if TARGET_OS_IPHONE + if (@available(iOS 13.0, *)) { + if(format.isMultiCamSupported != AVCaptureMultiCamSession.multiCamSupported) { + continue; + } + } +#endif //NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d", format.videoSupportedFrameRateRanges, dimension.width, dimension.height); long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height); if (diff < currentDiff) { diff --git a/common/darwin/Classes/FlutterRTCPeerConnection.m b/common/darwin/Classes/FlutterRTCPeerConnection.m index d37eafde10..f5bbc9ad39 100644 --- a/common/darwin/Classes/FlutterRTCPeerConnection.m +++ b/common/darwin/Classes/FlutterRTCPeerConnection.m @@ -807,7 +807,6 @@ - (void)peerConnectionGetRtpSenderCapabilities:(nonnull NSDictionary*)argsMap } return nil; } - - (void)transceiverSetCodecPreferences:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result { NSString* peerConnectionId = argsMap[@"peerConnectionId"]; @@ -827,7 +826,7 @@ - (void)transceiverSetCodecPreferences:(nonnull NSDictionary*)argsMap details:nil]); return; } - id codecs = argsMap[@"codecs"]; +id codecs = argsMap[@"codecs"]; NSMutableArray* codecCaps = [NSMutableArray array]; for (id c in codecs) { NSArray* kindAndName = [c[@"mimeType"] componentsSeparatedByString:@"/"]; diff --git a/common/darwin/Classes/FlutterRTCVideoRenderer.m b/common/darwin/Classes/FlutterRTCVideoRenderer.m index 8d9ff54de5..fea4f766f4 100644 --- a/common/darwin/Classes/FlutterRTCVideoRenderer.m +++ b/common/darwin/Classes/FlutterRTCVideoRenderer.m @@ -9,6 +9,7 @@ #import #import "FlutterWebRTCPlugin.h" +#import @implementation FlutterRTCVideoRenderer { CGSize _frameSize; @@ -17,6 +18,8 @@ @implementation FlutterRTCVideoRenderer { RTCVideoRotation _rotation; FlutterEventChannel* _eventChannel; bool _isFirstFrameRendered; + bool _frameAvailable; + os_unfair_lock _lock; } @synthesize textureId = _textureId; @@ -27,7 +30,9 @@ - (instancetype)initWithTextureRegistry:(id)registry messenger:(NSObject*)messenger { self = [super init]; if (self) { + _lock = OS_UNFAIR_LOCK_INIT; _isFirstFrameRendered = false; + _frameAvailable = false; _frameSize = CGSizeZero; _renderSize = CGSizeZero; _rotation = -1; @@ -45,22 +50,27 @@ - (instancetype)initWithTextureRegistry:(id)registry return self; } -- (void)dealloc { - if (_pixelBufferRef) { - CVBufferRelease(_pixelBufferRef); - } -} - - (CVPixelBufferRef)copyPixelBuffer { - if (_pixelBufferRef != nil) { - CVBufferRetain(_pixelBufferRef); - return _pixelBufferRef; + CVPixelBufferRef buffer = nil; + os_unfair_lock_lock(&_lock); + if (_pixelBufferRef != nil && _frameAvailable) { + buffer = CVBufferRetain(_pixelBufferRef); + _frameAvailable = false; } - return nil; + os_unfair_lock_unlock(&_lock); + return buffer; } - (void)dispose { + os_unfair_lock_lock(&_lock); [_registry unregisterTexture:_textureId]; + _textureId = -1; + if (_pixelBufferRef) { + CVBufferRelease(_pixelBufferRef); + _pixelBufferRef = nil; + } + _frameAvailable = false; + os_unfair_lock_unlock(&_lock); } - (void)setVideoTrack:(RTCVideoTrack*)videoTrack { @@ -180,14 +190,23 @@ - 
(void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer #pragma mark - RTCVideoRenderer methods - (void)renderFrame:(RTCVideoFrame*)frame { - [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; + + os_unfair_lock_lock(&_lock); + if(!_frameAvailable && _pixelBufferRef) { + [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; + if(_textureId != -1) { + [_registry textureFrameAvailable:_textureId]; + } + _frameAvailable = true; + } + os_unfair_lock_unlock(&_lock); __weak FlutterRTCVideoRenderer* weakSelf = self; if (_renderSize.width != frame.width || _renderSize.height != frame.height) { dispatch_async(dispatch_get_main_queue(), ^{ FlutterRTCVideoRenderer* strongSelf = weakSelf; if (strongSelf.eventSink) { - postEvent( strongSelf.eventSink, @{ + strongSelf.eventSink(@{ @"event" : @"didTextureChangeVideoSize", @"id" : @(strongSelf.textureId), @"width" : @(frame.width), @@ -202,7 +221,7 @@ - (void)renderFrame:(RTCVideoFrame*)frame { dispatch_async(dispatch_get_main_queue(), ^{ FlutterRTCVideoRenderer* strongSelf = weakSelf; if (strongSelf.eventSink) { - postEvent( strongSelf.eventSink,@{ + strongSelf.eventSink(@{ @"event" : @"didTextureChangeRotation", @"id" : @(strongSelf.textureId), @"rotation" : @(frame.rotation), @@ -216,10 +235,9 @@ - (void)renderFrame:(RTCVideoFrame*)frame { // Notify the Flutter new pixelBufferRef to be ready. dispatch_async(dispatch_get_main_queue(), ^{ FlutterRTCVideoRenderer* strongSelf = weakSelf; - [strongSelf.registry textureFrameAvailable:strongSelf.textureId]; if (!strongSelf->_isFirstFrameRendered) { if (strongSelf.eventSink) { - postEvent(strongSelf.eventSink, @{@"event" : @"didFirstFrameRendered"}); + strongSelf.eventSink(@{@"event" : @"didFirstFrameRendered"}); strongSelf->_isFirstFrameRendered = true; } } @@ -232,17 +250,18 @@ - (void)renderFrame:(RTCVideoFrame*)frame { * @param size The size of the video frame to render. 
*/
 - (void)setSize:(CGSize)size {
-  if (_pixelBufferRef == nil ||
-      (size.width != _frameSize.width || size.height != _frameSize.height)) {
+  os_unfair_lock_lock(&_lock);
+  if (size.width != _frameSize.width || size.height != _frameSize.height) {
     if (_pixelBufferRef) {
       CVBufferRelease(_pixelBufferRef);
     }
     NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
     CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32BGRA,
                         (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef);
-
+    _frameAvailable = false;
     _frameSize = size;
   }
+  os_unfair_lock_unlock(&_lock);
 }
 
 #pragma mark - FlutterStreamHandler methods
diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h
index c64c34801b..de87b53636 100644
--- a/common/darwin/Classes/FlutterWebRTCPlugin.h
+++ b/common/darwin/Classes/FlutterWebRTCPlugin.h
@@ -6,10 +6,11 @@
 #import
 #import
-
+#import "LocalTrack.h"
+@class CustomCapturerDelegate;
 @class FlutterRTCVideoRenderer;
 @class FlutterRTCFrameCapturer;
-
+@class AudioManager;
 void postEvent(FlutterEventSink _Nonnull sink, id _Nullable event);
 
 typedef void (^CompletionHandler)(void);
@@ -29,7 +30,7 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler);
 @property(nonatomic, strong) RTCPeerConnectionFactory* _Nullable peerConnectionFactory;
 @property(nonatomic, strong) NSMutableDictionary* _Nullable peerConnections;
 @property(nonatomic, strong) NSMutableDictionary* _Nullable localStreams;
-@property(nonatomic, strong) NSMutableDictionary* _Nullable localTracks;
+@property(nonatomic, strong) NSMutableDictionary<NSString*, id<LocalTrack>>* _Nullable localTracks;
 @property(nonatomic, strong) NSMutableDictionary* _Nullable renders;
 @property(nonatomic, strong) NSMutableDictionary* _Nullable videoCapturerStopHandlers;
@@ -38,27 +39,35 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler);
 @property(nonatomic, strong) NSMutableDictionary* _Nullable keyProviders;
 
 #if TARGET_OS_IPHONE
-@property(nonatomic, retain) UIViewController* _Nullable viewController; /*for broadcast or ReplayKit */
+@property(nonatomic, retain) UIViewController* viewController; /*for broadcast or ReplayKit */
 #endif
 
 @property(nonatomic, strong) FlutterEventSink _Nullable eventSink;
 @property(nonatomic, strong) NSObject<FlutterBinaryMessenger>* _Nonnull messenger;
 @property(nonatomic, strong) RTCCameraVideoCapturer* _Nullable videoCapturer;
+@property(nonatomic, strong) CustomCapturerDelegate* _Nullable customDelegate;
 @property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer;
 @property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput;
-
+@property(nonatomic, strong) NSString* _Nonnull focusMode;
+@property(nonatomic, strong) NSString* _Nonnull exposureMode;
 @property(nonatomic) BOOL _usingFrontCamera;
 @property(nonatomic) NSInteger _lastTargetWidth;
 @property(nonatomic) NSInteger _lastTargetHeight;
 @property(nonatomic) NSInteger _lastTargetFps;
 
-- (RTCMediaStream* _Nullable)streamForId:(NSString* _Nonnull)streamId peerConnectionId:(NSString* _Nonnull)peerConnectionId;
-- (RTCRtpTransceiver* _Nullable)getRtpTransceiverById:(RTCPeerConnection* _Nonnull)peerConnection Id:(NSString* _Nonnull)Id;
-- (NSDictionary* _Nullable)mediaStreamToMap:(RTCMediaStream* _Nonnull)stream ownerTag:(NSString* _Nonnull)ownerTag;
+@property(nonatomic, strong) AudioManager* _Nullable audioManager;
+
+- (RTCMediaStream* _Nullable)streamForId:(NSString* _Nonnull)streamId peerConnectionId:(NSString*
_Nullable)peerConnectionId; +- (RTCMediaStreamTrack* _Nullable)trackForId:(NSString* _Nonnull)trackId peerConnectionId:(NSString* _Nullable)peerConnectionId; +- (RTCRtpTransceiver* _Nullable)getRtpTransceiverById:(RTCPeerConnection* _Nonnull)peerConnection Id:(NSString* _Nullable)Id; +- (NSDictionary* _Nullable)mediaStreamToMap:(RTCMediaStream* _Nonnull)stream ownerTag:(NSString* _Nullable)ownerTag; + - (NSDictionary* _Nullable)mediaTrackToMap:(RTCMediaStreamTrack* _Nonnull)track; - (NSDictionary* _Nullable)receiverToMap:(RTCRtpReceiver* _Nonnull)receiver; - (NSDictionary* _Nullable)transceiverToMap:(RTCRtpTransceiver* _Nonnull)transceiver; +- (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId; + - (BOOL)hasLocalAudioTrack; - (void)ensureAudioSession; - (void)deactiveRtcAudioSession; @@ -66,4 +75,6 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); - (RTCRtpReceiver* _Nullable)getRtpReceiverById:(RTCPeerConnection* _Nonnull)peerConnection Id:(NSString* _Nonnull)Id; - (RTCRtpSender* _Nullable)getRtpSenderById:(RTCPeerConnection* _Nonnull)peerConnection Id:(NSString* _Nonnull)Id; ++ (FlutterWebRTCPlugin * _Nullable)sharedSingleton; + @end diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m index c3bdb2a589..7b43678306 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.m +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -1,5 +1,6 @@ #import "FlutterWebRTCPlugin.h" #import "AudioUtils.h" +#import "CameraUtils.h" #import "FlutterRTCDataChannel.h" #import "FlutterRTCDesktopCapturer.h" #import "FlutterRTCMediaStream.h" @@ -10,10 +11,13 @@ #import "FlutterRTCVideoPlatformViewFactory.h" #import "FlutterRTCVideoPlatformViewController.h" #endif +#import "AudioManager.h" #import #import #import - +#import "LocalTrack.h" +#import "LocalAudioTrack.h" +#import "LocalVideoTrack.h" #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wprotocol" @@ -93,17 +97,23 @@ @implementation FlutterWebRTCPlugin { id _messenger; id _textures; BOOL _speakerOn; - BOOL _speakerOnButPreferBluetooth; AVAudioSessionPort _preferredInput; -#if TARGET_OS_IPHONE - FLutterRTCVideoPlatformViewFactory *_platformViewFactory; -#endif + AudioManager* _audioManager; } +static FlutterWebRTCPlugin *sharedSingleton; + ++ (FlutterWebRTCPlugin *)sharedSingleton +{ + @synchronized(self) + { + return sharedSingleton; + } +} @synthesize messenger = _messenger; @synthesize eventSink = _eventSink; @synthesize preferredInput = _preferredInput; - +@synthesize audioManager = _audioManager; + (void)registerWithRegistrar:(NSObject*)registrar { FlutterMethodChannel* channel = [FlutterMethodChannel methodChannelWithName:@"FlutterWebRTC.Method" @@ -131,6 +141,7 @@ - (instancetype)initWithChannel:(FlutterMethodChannel*)channel withTextures:(NSObject*)textures { self = [super init]; + sharedSingleton = self; FlutterEventChannel* eventChannel = [FlutterEventChannel eventChannelWithName:@"FlutterWebRTC.Event" binaryMessenger:messenger]; @@ -142,13 +153,11 @@ - (instancetype)initWithChannel:(FlutterMethodChannel*)channel _textures = textures; _messenger = messenger; _speakerOn = NO; - _speakerOnButPreferBluetooth = NO; _eventChannel = eventChannel; + _audioManager = AudioManager.sharedInstance; #if TARGET_OS_IPHONE _preferredInput = AVAudioSessionPortHeadphones; self.viewController = viewController; - _platformViewFactory = [[FLutterRTCVideoPlatformViewFactory alloc] initWithMessenger:messenger]; - [registrar 
registerViewFactory:_platformViewFactory withId:FLutterRTCVideoPlatformViewFactoryID]; #endif } @@ -163,6 +172,8 @@ - (instancetype)initWithChannel:(FlutterMethodChannel*)channel self.keyProviders = [NSMutableDictionary new]; self.videoCapturerStopHandlers = [NSMutableDictionary new]; #if TARGET_OS_IPHONE + self.focusMode = @"locked"; + self.exposureMode = @"locked"; AVAudioSession* session = [AVAudioSession sharedInstance]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didSessionRouteChange:) @@ -210,7 +221,6 @@ - (void)didSessionRouteChange:(NSNotification*)notification { NSDictionary* interuptionDict = notification.userInfo; NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue]; - RTCAudioSession* session = [RTCAudioSession sharedInstance]; if (self.eventSink && (routeChangeReason == AVAudioSessionRouteChangeReasonNewDeviceAvailable || routeChangeReason == AVAudioSessionRouteChangeReasonOldDeviceUnavailable || @@ -221,7 +231,9 @@ - (void)didSessionRouteChange:(NSNotification*)notification { #endif } -- (void)initialize:(NSArray*)networkIgnoreMask { + +- (void)initialize:(NSArray*)networkIgnoreMask +bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { // RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose); if (!_peerConnectionFactory) { VideoDecoderFactory* decoderFactory = [[VideoDecoderFactory alloc] init]; @@ -229,9 +241,11 @@ - (void)initialize:(NSArray*)networkIgnoreMask { VideoEncoderFactorySimulcast* simulcastFactory = [[VideoEncoderFactorySimulcast alloc] initWithPrimary:encoderFactory fallback:encoderFactory]; - - _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:simulcastFactory - decoderFactory:decoderFactory]; + _peerConnectionFactory = + [[RTCPeerConnectionFactory alloc] initWithBypassVoiceProcessing:bypassVoiceProcessing + encoderFactory:simulcastFactory + decoderFactory:decoderFactory + audioProcessingModule:_audioManager.audioProcessingModule]; RTCPeerConnectionFactoryOptions *options = [[RTCPeerConnectionFactoryOptions alloc] init]; for (NSString* adapter in networkIgnoreMask) @@ -263,11 +277,16 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { if ([@"initialize" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; NSDictionary* options = argsMap[@"options"]; + BOOL enableBypassVoiceProcessing = NO; + if(options[@"bypassVoiceProcessing"] != nil){ + enableBypassVoiceProcessing = ((NSNumber*)options[@"bypassVoiceProcessing"]).boolValue; + } NSArray* networkIgnoreMask = [NSArray new]; + if (options[@"networkIgnoreMask"] != nil) { networkIgnoreMask = ((NSArray*)options[@"networkIgnoreMask"]); } - [self initialize:networkIgnoreMask]; + [self initialize:networkIgnoreMask bypassVoiceProcessing:enableBypassVoiceProcessing]; result(@""); } else if ([@"createPeerConnection" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; @@ -554,11 +573,16 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { for (RTCVideoTrack* track in stream.videoTracks) { [_localTracks removeObjectForKey:track.trackId]; RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:videoTrack.trackId]; + if(renderer != nil) { + renderer.videoTrack = nil; + } CapturerStopHandler stopHandler = self.videoCapturerStopHandlers[videoTrack.trackId]; if (stopHandler) { shouldCallResult = NO; stopHandler(^{ NSLog(@"video capturer stopped, trackID = 
%@", videoTrack.trackId); + self.videoCapturer = nil; result(nil); }); [self.videoCapturerStopHandlers removeObjectForKey:videoTrack.trackId]; @@ -592,13 +616,13 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { RTCMediaStream* stream = self.localStreams[streamId]; if (stream) { - RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + id track = self.localTracks[trackId]; if (track != nil) { - if ([track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack* audioTrack = (RTCAudioTrack*)track; + if ([track isKindOfClass:[LocalAudioTrack class]]) { + RTCAudioTrack* audioTrack = ((LocalAudioTrack*)track).audioTrack; [stream addAudioTrack:audioTrack]; - } else if ([track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + } else if ([track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; [stream addVideoTrack:videoTrack]; } } else { @@ -666,6 +690,10 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { [_localTracks removeObjectForKey:trackId]; if (audioTrack) { [self ensureAudioSession]; + } + FlutterRTCVideoRenderer *renderer = [self findRendererByTrackId:trackId]; + if(renderer != nil) { + renderer.videoTrack = nil; } result(nil); } else if ([@"restartIce" isEqualToString:call.method]) { @@ -714,9 +742,11 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { NSDictionary* argsMap = call.arguments; NSNumber* textureId = argsMap[@"textureId"]; FlutterRTCVideoRenderer* render = self.renders[textureId]; - render.videoTrack = nil; - [render dispose]; - [self.renders removeObjectForKey:textureId]; + if(render != nil) { + render.videoTrack = nil; + [render dispose]; + [self.renders removeObjectForKey:textureId]; + } result(nil); } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; @@ -753,65 +783,12 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { } [self rendererSetSrcObject:render stream:videoTrack]; result(nil); - } -#if TARGET_OS_IPHONE - else if ([@"videoPlatformViewRendererSetSrcObject" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSNumber* viewId = argsMap[@"viewId"]; - FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId]; - NSString* streamId = argsMap[@"streamId"]; - NSString* ownerTag = argsMap[@"ownerTag"]; - NSString* trackId = argsMap[@"trackId"]; - if (!render) { - result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" - message:nil - details:nil]); - return; - } - RTCMediaStream* stream = nil; - RTCVideoTrack* videoTrack = nil; - if ([ownerTag isEqualToString:@"local"]) { - stream = _localStreams[streamId]; - } - if (!stream) { - stream = [self streamForId:streamId peerConnectionId:ownerTag]; - } - if (stream) { - NSArray* videoTracks = stream ? stream.videoTracks : nil; - videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil; - for (RTCVideoTrack* track in videoTracks) { - if ([track.trackId isEqualToString:trackId]) { - videoTrack = track; - } - } - if (!videoTrack) { - NSLog(@"Not found video track for RTCMediaStream: %@", streamId); - } - } - render.videoTrack = videoTrack; - result(nil); - } else if([@"videoPlatformViewRendererSetObjectFit" isEqualToString:call.method]){ - NSDictionary* argsMap = call.arguments; - NSNumber* viewId = argsMap[@"viewId"]; - NSNumber* fit = argsMap[@"objectFit"]; - FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId]; - [render setObjectFit:fit]; - result(nil); - } else if ([@"videoPlatformViewRendererDispose" isEqualToString:call.method]) { - NSDictionary* argsMap = call.arguments; - NSNumber* viewId = argsMap[@"viewId"]; - FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId]; - render.videoTrack = nil; - [_platformViewFactory.renders removeObjectForKey:viewId]; - result(nil); - } -#endif - else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { + } else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack* track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; [self mediaStreamTrackHasTorch:videoTrack result:result]; } else { if (track == nil) { @@ -827,9 +804,9 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { NSDictionary* argsMap = call.arguments; NSString* trackId = argsMap[@"trackId"]; BOOL torch = [argsMap[@"torch"] boolValue]; - RTCMediaStreamTrack* track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result]; } else { if (track == nil) { @@ -845,9 +822,9 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { NSDictionary* argsMap = call.arguments; NSString* trackId = argsMap[@"trackId"]; double zoomLevel = [argsMap[@"zoomLevel"] doubleValue]; - RTCMediaStreamTrack* track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; [self mediaStreamTrackSetZoom:videoTrack zoomLevel:zoomLevel result:result]; } else { if (track == nil) { @@ -859,12 +836,84 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { details:nil]); } } + } else if ([@"mediaStreamTrackSetFocusMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* focusMode = argsMap[@"focusMode"]; + id track = self.localTracks[trackId]; + if (track != nil && focusMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = 
(RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusMode:videoTrack focusMode:focusMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetFocusPoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* focusPoint = argsMap[@"focusPoint"]; + id track = self.localTracks[trackId]; + if (track != nil && focusPoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusPoint:videoTrack focusPoint:focusPoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposureMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* exposureMode = argsMap[@"exposureMode"]; + id track = self.localTracks[trackId]; + if (track != nil && exposureMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposureMode:videoTrack exposureMode:exposureMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposurePoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* exposurePoint = argsMap[@"exposurePoint"]; + id track = self.localTracks[trackId]; + if (track != nil && exposurePoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposurePoint:videoTrack exposurePoint:exposurePoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; NSString* trackId = argsMap[@"trackId"]; - RTCMediaStreamTrack* track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { - RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; [self mediaStreamTrackSwitchCamera:videoTrack result:result]; } else { if (track == nil) { @@ -893,9 +942,9 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { NSDictionary* argsMap = call.arguments; NSString* trackId = argsMap[@"trackId"]; NSNumber* mute = 
argsMap[@"mute"]; - RTCMediaStreamTrack* track = self.localTracks[trackId]; - if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { - RTCAudioTrack* audioTrack = (RTCAudioTrack*)track; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalAudioTrack class]]) { + RTCAudioTrack* audioTrack = ((LocalAudioTrack*)track).audioTrack; audioTrack.isEnabled = !mute.boolValue; } result(nil); @@ -905,9 +954,7 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { NSDictionary* argsMap = call.arguments; NSNumber* enable = argsMap[@"enable"]; _speakerOn = enable.boolValue; - _speakerOnButPreferBluetooth = NO; [AudioUtils setSpeakerphoneOn:_speakerOn]; - postEvent(self.eventSink, @{@"event" : @"onDeviceChange"}); result(nil); } else if ([@"ensureAudioSession" isEqualToString:call.method]) { @@ -915,8 +962,6 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { result(nil); } else if ([@"enableSpeakerphoneButPreferBluetooth" isEqualToString:call.method]) { - _speakerOn = YES; - _speakerOnButPreferBluetooth = YES; [AudioUtils setSpeakerphoneOnButPreferBluetooth]; result(nil); } @@ -1411,8 +1456,8 @@ - (void)dealloc { - (BOOL)hasLocalAudioTrack { for (id key in _localTracks.allKeys) { - RTCMediaStreamTrack* track = [_localTracks objectForKey:key]; - if ([track.kind isEqualToString:@"audio"]) { + id track = [_localTracks objectForKey:key]; + if (track != nil && [track isKindOfClass:[LocalAudioTrack class]]) { return YES; } } @@ -1422,6 +1467,7 @@ - (BOOL)hasLocalAudioTrack { - (void)ensureAudioSession { #if TARGET_OS_IPHONE [AudioUtils ensureAudioSessionWithRecording:[self hasLocalAudioTrack]]; + [AudioUtils setSpeakerphoneOn:_speakerOn]; #endif } @@ -1441,7 +1487,7 @@ - (void)mediaStreamGetTracks:(NSString*)streamId result:(FlutterResult)result { for (RTCMediaStreamTrack* track in stream.audioTracks) { NSString* trackId = track.trackId; - [self.localTracks setObject:track forKey:trackId]; + [self.localTracks setObject:[[LocalAudioTrack alloc] initWithTrack:(RTCAudioTrack *)track] forKey:trackId]; [audioTracks addObject:@{ @"enabled" : @(track.isEnabled), @"id" : trackId, @@ -1454,7 +1500,8 @@ - (void)mediaStreamGetTracks:(NSString*)streamId result:(FlutterResult)result { for (RTCMediaStreamTrack* track in stream.videoTracks) { NSString* trackId = track.trackId; - [_localTracks setObject:track forKey:trackId]; + [_localTracks setObject:[[LocalVideoTrack alloc] initWithTrack:(RTCVideoTrack *)track] + forKey:trackId]; [videoTracks addObject:@{ @"enabled" : @(track.isEnabled), @"id" : trackId, @@ -1490,25 +1537,72 @@ - (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString*)p return stream; } +- (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId { + RTCMediaStreamTrack *mediaStreamTrack = nil; + + for (NSString *currentId in _peerConnections.allKeys) { + RTCPeerConnection *peerConnection = _peerConnections[currentId]; + RTCConfiguration *config = peerConnection.configuration; + RTCSdpSemantics sdpSemantics = config.sdpSemantics; + BOOL isUnifiedPlan = (sdpSemantics == RTCSdpSemanticsUnifiedPlan); + + if (isUnifiedPlan) { + for (RTCRtpReceiver *receiver in peerConnection.receivers) { + RTCMediaStreamTrack *track = receiver.track; + if (track && [track.trackId isEqualToString:trackId]) { + mediaStreamTrack = track; + break; + } + } + } else { + for (id streamObj in peerConnection.remoteStreams) { + if (![streamObj isKindOfClass:[RTCMediaStream class]]) { + 
continue; + } + RTCMediaStream *stream = (RTCMediaStream *)streamObj; + for (RTCVideoTrack *videoTrack in stream.videoTracks) { + if ([videoTrack.trackId isEqualToString:trackId]) { + mediaStreamTrack = videoTrack; + break; + } + } + if (mediaStreamTrack) break; + } + if (!mediaStreamTrack && [peerConnection respondsToSelector:@selector(remoteTracks)]) { + NSDictionary *remoteTracks = [peerConnection performSelector:@selector(remoteTracks)]; + mediaStreamTrack = remoteTracks[trackId]; + } + } + if (mediaStreamTrack) { + break; + } + } + return mediaStreamTrack; +} + + + + - (RTCMediaStreamTrack*)trackForId:(NSString*)trackId peerConnectionId:(NSString*)peerConnectionId { - RTCMediaStreamTrack* track = _localTracks[trackId]; + id track = _localTracks[trackId]; + RTCMediaStreamTrack *mediaStreamTrack = nil; if (!track) { for (NSString* currentId in _peerConnections.allKeys) { if (peerConnectionId && [currentId isEqualToString:peerConnectionId] == false) { continue; } RTCPeerConnection* peerConnection = _peerConnections[currentId]; - track = peerConnection.remoteTracks[trackId]; - if (!track) { + mediaStreamTrack = peerConnection.remoteTracks[trackId]; + if (!mediaStreamTrack) { for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { if (transceiver.receiver.track != nil && [transceiver.receiver.track.trackId isEqual:trackId]) { - track = transceiver.receiver.track; + mediaStreamTrack = transceiver.receiver.track; break; } } } - if (track) { + if (mediaStreamTrack) { break; } } @@ -2160,4 +2254,13 @@ - (NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver { }; return params; } + +- (FlutterRTCVideoRenderer *)findRendererByTrackId:(NSString *)trackId { + for (FlutterRTCVideoRenderer *renderer in self.renders.allValues) { + if (renderer.videoTrack != nil && [renderer.videoTrack.trackId isEqualToString:trackId]) { + return renderer; + } + } + return nil; +} @end diff --git a/common/darwin/Classes/LocalAudioTrack.h b/common/darwin/Classes/LocalAudioTrack.h new file mode 100644 index 0000000000..7cd1861a06 --- /dev/null +++ b/common/darwin/Classes/LocalAudioTrack.h @@ -0,0 +1,19 @@ +#import +#import "AudioProcessingAdapter.h" +#import "LocalTrack.h" + +@interface LocalAudioTrack : NSObject + +- (_Nonnull instancetype)initWithTrack:(RTCAudioTrack* _Nonnull)track; + +@property(nonatomic, strong) RTCAudioTrack* _Nonnull audioTrack; + +- (void)addRenderer:(_Nonnull id)renderer; + +- (void)removeRenderer:(_Nonnull id)renderer; + +- (void)addProcessing:(_Nonnull id)processor; + +- (void)removeProcessing:(_Nonnull id)processor; + +@end diff --git a/common/darwin/Classes/LocalAudioTrack.m b/common/darwin/Classes/LocalAudioTrack.m new file mode 100644 index 0000000000..a080d4f090 --- /dev/null +++ b/common/darwin/Classes/LocalAudioTrack.m @@ -0,0 +1,38 @@ +#import "LocalAudioTrack.h" +#import "AudioManager.h" + +@implementation LocalAudioTrack { + RTCAudioTrack* _track; +} + +@synthesize audioTrack = _track; + +- (instancetype)initWithTrack:(RTCAudioTrack*)track { + self = [super init]; + if (self) { + _track = track; + } + return self; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +- (void)addRenderer:(id)renderer { + [AudioManager.sharedInstance addLocalAudioRenderer:renderer]; +} + +- (void)removeRenderer:(id)renderer { + [AudioManager.sharedInstance removeLocalAudioRenderer:renderer]; +} + +- (void)addProcessing:(_Nonnull id)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter addProcessing:processor]; +} + +- (void)removeProcessing:(_Nonnull 
id)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter removeProcessing:processor]; +} + +@end diff --git a/common/darwin/Classes/LocalTrack.h b/common/darwin/Classes/LocalTrack.h new file mode 100644 index 0000000000..e224df4c89 --- /dev/null +++ b/common/darwin/Classes/LocalTrack.h @@ -0,0 +1,7 @@ +#import + +@protocol LocalTrack + +- (RTCMediaStreamTrack*)track; + +@end \ No newline at end of file diff --git a/common/darwin/Classes/LocalVideoTrack.h b/common/darwin/Classes/LocalVideoTrack.h new file mode 100644 index 0000000000..3d4654e336 --- /dev/null +++ b/common/darwin/Classes/LocalVideoTrack.h @@ -0,0 +1,24 @@ +#import +#import "LocalTrack.h" +#import "VideoProcessingAdapter.h" + +@interface LocalVideoTrack : NSObject + +- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track; + +- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track + videoProcessing:(VideoProcessingAdapter* _Nullable)processing; + +@property(nonatomic, strong) RTCVideoTrack* _Nonnull videoTrack; + +@property(nonatomic, strong) VideoProcessingAdapter* _Nonnull processing; + +- (void)addRenderer:(_Nonnull id)renderer; + +- (void)removeRenderer:(_Nonnull id)renderer; + +- (void)addProcessing:(_Nonnull id)processor; + +- (void)removeProcessing:(_Nonnull id)processor; + +@end \ No newline at end of file diff --git a/common/darwin/Classes/LocalVideoTrack.m b/common/darwin/Classes/LocalVideoTrack.m new file mode 100644 index 0000000000..d08c432f02 --- /dev/null +++ b/common/darwin/Classes/LocalVideoTrack.m @@ -0,0 +1,47 @@ +#import "LocalVideoTrack.h" + +@implementation LocalVideoTrack { + RTCVideoTrack* _track; + VideoProcessingAdapter* _processing; +} + +@synthesize videoTrack = _track; +@synthesize processing = _processing; + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + videoProcessing:(VideoProcessingAdapter*)processing { + self = [super init]; + if (self) { + _track = track; + _processing = processing; + } + return self; +} + +- (instancetype)initWithTrack:(RTCVideoTrack*)track { + return [self initWithTrack:track videoProcessing:nil]; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +/** Register a renderer that will render all frames received on this track. */ +- (void)addRenderer:(id)renderer { + [_track addRenderer:renderer]; +} + +/** Deregister a renderer. 
*/
+- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
+  [_track removeRenderer:renderer];
+}
+
+- (void)addProcessing:(id<ExternalVideoProcessingDelegate>)processor {
+  [_processing addProcessing:processor];
+}
+
+- (void)removeProcessing:(id<ExternalVideoProcessingDelegate>)processor {
+  [_processing removeProcessing:processor];
+}
+
+@end
\ No newline at end of file
diff --git a/common/darwin/Classes/VideoProcessingAdapter.h b/common/darwin/Classes/VideoProcessingAdapter.h
new file mode 100644
index 0000000000..c953316eec
--- /dev/null
+++ b/common/darwin/Classes/VideoProcessingAdapter.h
@@ -0,0 +1,18 @@
+#import <Foundation/Foundation.h>
+#import <WebRTC/WebRTC.h>
+
+@protocol ExternalVideoProcessingDelegate
+- (RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)onFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)frame;
+@end
+
+@interface VideoProcessingAdapter : NSObject <RTCVideoCapturerDelegate>
+
+- (_Nonnull instancetype)initWithRTCVideoSource:(RTCVideoSource* _Nonnull)source;
+
+- (void)addProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor;
+
+- (void)removeProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor;
+
+- (RTCVideoSource* _Nonnull)source;
+
+@end
diff --git a/common/darwin/Classes/VideoProcessingAdapter.m b/common/darwin/Classes/VideoProcessingAdapter.m
new file mode 100644
index 0000000000..f3e7966522
--- /dev/null
+++ b/common/darwin/Classes/VideoProcessingAdapter.m
@@ -0,0 +1,55 @@
+#import "VideoProcessingAdapter.h"
+#import <os/lock.h>
+
+@implementation VideoProcessingAdapter {
+  RTCVideoSource* _videoSource;
+  CGSize _frameSize;
+  NSArray<id<ExternalVideoProcessingDelegate>>* _processors;
+  os_unfair_lock _lock;
+}
+
+- (instancetype)initWithRTCVideoSource:(RTCVideoSource*)source {
+  self = [super init];
+  if (self) {
+    _lock = OS_UNFAIR_LOCK_INIT;
+    _videoSource = source;
+    _processors = [NSArray new];
+  }
+  return self;
+}
+
+- (RTCVideoSource* _Nonnull)source {
+  return _videoSource;
+}
+
+- (void)addProcessing:(id<ExternalVideoProcessingDelegate>)processor {
+  os_unfair_lock_lock(&_lock);
+  _processors = [_processors arrayByAddingObject:processor];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)removeProcessing:(id<ExternalVideoProcessingDelegate>)processor {
+  os_unfair_lock_lock(&_lock);
+  _processors = [_processors
+      filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject,
+                                                                        NSDictionary* bindings) {
+        return evaluatedObject != processor;
+      }]];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)setSize:(CGSize)size {
+  _frameSize = size;
+}
+
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+    didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  os_unfair_lock_lock(&_lock);
+  for (id<ExternalVideoProcessingDelegate> processor in _processors) {
+    frame = [processor onFrame:frame];
+  }
+  [_videoSource capturer:capturer didCaptureVideoFrame:frame];
+  os_unfair_lock_unlock(&_lock);
+}
+
+@end
\ No newline at end of file
diff --git a/common/darwin/Classes/VideoProcessor.h b/common/darwin/Classes/VideoProcessor.h
new file mode 100644
index 0000000000..02e1599d4f
--- /dev/null
+++ b/common/darwin/Classes/VideoProcessor.h
@@ -0,0 +1,10 @@
+#import <Foundation/Foundation.h>
+#import <WebRTC/WebRTC.h>
+
+// Base class for pluggable frame processors.
+@interface VideoProcessor : NSObject
+
+// Subclasses override this to transform each captured frame.
+- (RTCVideoFrame *)onFrameReceived:(RTCVideoFrame *)frame;
+
+@end
\ No newline at end of file
diff --git a/common/darwin/Classes/VideoProcessor.m b/common/darwin/Classes/VideoProcessor.m
new file mode 100644
index 0000000000..48c33a8998
--- /dev/null
+++ b/common/darwin/Classes/VideoProcessor.m
@@ -0,0 +1,10 @@
+#import "VideoProcessor.h"
+
+@implementation VideoProcessor
+
+// Default implementation: pass the frame through unchanged; subclasses override.
+- (RTCVideoFrame *)onFrameReceived:(RTCVideoFrame *)frame {
+  return frame;
+}
+
+@end
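(Editor's note: a minimal sketch of how a custom processor plugs into the pipeline above, together with the WebRTCService singleton introduced below. GrayscaleProcessor is hypothetical; CustomCapturerDelegate falls back to the original frame when onFrameReceived: returns nil:)

@interface GrayscaleProcessor : VideoProcessor
@end

@implementation GrayscaleProcessor
- (RTCVideoFrame *)onFrameReceived:(RTCVideoFrame *)frame {
  // Transform frame.buffer here; this sketch just passes it through.
  return frame;
}
@end

// Registration, e.g. during plugin initialization:
[[WebRTCService sharedInstance] setVideoProcessor:[GrayscaleProcessor new]];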
diff --git a/common/darwin/Classes/WebRTCService.h b/common/darwin/Classes/WebRTCService.h new file mode 100644 index 0000000000..3b4c740c8e --- /dev/null +++ b/common/darwin/Classes/WebRTCService.h @@ -0,0 +1,18 @@ + +#import <Foundation/Foundation.h> +#import "VideoProcessor.h" // Import Processor header file + +@interface WebRTCService : NSObject + +@property (nonatomic, strong) VideoProcessor *videoProcessor; + +// Singleton instance method ++ (instancetype)sharedInstance; + +// Method to set the Processor +- (void)setVideoProcessor:(VideoProcessor *)videoProcessor; + +// Method to get the current Processor +- (VideoProcessor *)getVideoProcessor; + +@end \ No newline at end of file diff --git a/common/darwin/Classes/WebRTCService.m b/common/darwin/Classes/WebRTCService.m new file mode 100644 index 0000000000..2f7e5d4f71 --- /dev/null +++ b/common/darwin/Classes/WebRTCService.m @@ -0,0 +1,36 @@ +#import "WebRTCService.h" + +@implementation WebRTCService + +// Static variable for the singleton instance +static WebRTCService *instance = nil; + +// Private initializer to prevent instantiation from outside +- (instancetype)initPrivate { + self = [super init]; + if (self) { + // Initialization logic if any + } + return self; +} + +// Singleton instance method ++ (instancetype)sharedInstance { + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + instance = [[self alloc] initPrivate]; + }); + return instance; +} + +// Method to set the Processor +- (void)setVideoProcessor:(VideoProcessor *)videoProcessor { + _videoProcessor = videoProcessor; +} + +// Method to get the current Processor +- (VideoProcessor *)getVideoProcessor { + return _videoProcessor; +} + +@end \ No newline at end of file
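
Note: usage of the singleton is a symmetric set/get; a hedged sketch of a call site (not part of the patch):

    // Install a processor once, e.g. during plugin setup...
    [[WebRTCService sharedInstance] setVideoProcessor:[VideoProcessor new]];
    // ...then retrieve it wherever captured frames are handled.
    VideoProcessor* processor = [[WebRTCService sharedInstance] getVideoProcessor];
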
"${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" + PARENT_SCOPE +) + +# Add $ORIGIN to RPATH so that lib/libflutter_webrtc_plugin.so can find lib/libwebrtc.so at runtime +set_property( + TARGET ${PLUGIN_NAME} + PROPERTY BUILD_RPATH + "\$ORIGIN" +) \ No newline at end of file diff --git a/elinux/videosdk_webrtc/flutter_web_r_t_c_plugin.h b/elinux/videosdk_webrtc/flutter_web_r_t_c_plugin.h new file mode 100644 index 0000000000..65bfce3375 --- /dev/null +++ b/elinux/videosdk_webrtc/flutter_web_r_t_c_plugin.h @@ -0,0 +1,25 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ +#define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ + +#include + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __attribute__((visibility("default"))) +#else +#define FLUTTER_PLUGIN_EXPORT +#endif + + +#if defined(__cplusplus) +extern "C" { +#endif + +FLUTTER_PLUGIN_EXPORT void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar); + +#if defined(__cplusplus) +} // extern "C" +#endif + + +# endif \ No newline at end of file diff --git a/example/android/app/build.gradle b/example/android/app/build.gradle index 459b756e4b..c3f051c346 100644 --- a/example/android/app/build.gradle +++ b/example/android/app/build.gradle @@ -27,7 +27,7 @@ apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle" android { namespace "com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example" - compileSdkVersion 33 + compileSdkVersion 35 compileOptions { sourceCompatibility JavaVersion.VERSION_1_8 @@ -39,8 +39,8 @@ android { applicationId "com.cloudwebrtc.flutterflutterexample.flutter_webrtc_example" // You can update the following values to match your application needs. // For more information, see: https://docs.flutter.dev/deployment/android#reviewing-the-gradle-build-configuration. 
- minSdkVersion 21 - targetSdkVersion 31 + minSdkVersion 23 + targetSdkVersion 35 versionCode flutterVersionCode.toInteger() versionName flutterVersionName } diff --git a/example/android/build.gradle b/example/android/build.gradle index f7eb7f63ce..04cb5c7dc5 100644 --- a/example/android/build.gradle +++ b/example/android/build.gradle @@ -1,12 +1,14 @@ buildscript { - ext.kotlin_version = '1.7.10' + ext.kotlin_version = '1.9.24' repositories { google() - mavenCentral() + jcenter() } dependencies { - classpath 'com.android.tools.build:gradle:7.3.0' + // START: FlutterFire Configuration + // END: FlutterFire Configuration + classpath 'com.android.tools.build:gradle:8.5.2' classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" } } @@ -14,15 +16,42 @@ buildscript { allprojects { repositories { google() - mavenCentral() + jcenter() } + + subprojects { + afterEvaluate { project -> + if (project.hasProperty('android')) { + project.android { + if (namespace == null) { + namespace project.group + } + + compileOptions { + sourceCompatibility JavaVersion.VERSION_17 + targetCompatibility JavaVersion.VERSION_17 + } + tasks.withType(org.jetbrains.kotlin.gradle.tasks.KotlinCompile).configureEach { + kotlinOptions { + jvmTarget = "17" + } + } + java { + toolchain { + languageVersion = JavaLanguageVersion.of(17) + } + } + + } + } + } + } + } rootProject.buildDir = '../build' subprojects { project.buildDir = "${rootProject.buildDir}/${project.name}" -} -subprojects { project.evaluationDependsOn(':app') } diff --git a/example/android/gradle/wrapper/gradle-wrapper.properties b/example/android/gradle/wrapper/gradle-wrapper.properties index 3c472b99c6..81a4301fcc 100644 --- a/example/android/gradle/wrapper/gradle-wrapper.properties +++ b/example/android/gradle/wrapper/gradle-wrapper.properties @@ -2,4 +2,4 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.5-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip diff --git a/example/elinux/.gitignore b/example/elinux/.gitignore new file mode 100644 index 0000000000..74dc3e954e --- /dev/null +++ b/example/elinux/.gitignore @@ -0,0 +1 @@ +flutter/ephemeral/ \ No newline at end of file diff --git a/example/elinux/CMakeLists.txt b/example/elinux/CMakeLists.txt new file mode 100644 index 0000000000..e8552cddf3 --- /dev/null +++ b/example/elinux/CMakeLists.txt @@ -0,0 +1,103 @@ +cmake_minimum_required(VERSION 3.15) +# stop cmake from taking make from CMAKE_SYSROOT +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +project(runner LANGUAGES CXX) + +set(BINARY_NAME "example") + +cmake_policy(SET CMP0063 NEW) + +set(CMAKE_INSTALL_RPATH "$ORIGIN/lib") + +# Basically we use this include when we got the following error: +# fatal error: 'bits/c++config.h' file not found +include_directories(SYSTEM ${FLUTTER_SYSTEM_INCLUDE_DIRECTORIES}) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) + +# Configure build options. +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + set(CMAKE_BUILD_TYPE "Debug" CACHE + STRING "Flutter build mode" FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Profile" "Release") +endif() + +# Configure build option to target backend. 
+if (NOT FLUTTER_TARGET_BACKEND_TYPE) + set(FLUTTER_TARGET_BACKEND_TYPE "wayland" CACHE + STRING "Flutter target backend type" FORCE) + set_property(CACHE FLUTTER_TARGET_BACKEND_TYPE PROPERTY STRINGS + "wayland" "gbm" "eglstream" "x11") +endif() + +# Compilation settings that should be applied to most targets. +function(APPLY_STANDARD_SETTINGS TARGET) + target_compile_features(${TARGET} PUBLIC cxx_std_17) + target_compile_options(${TARGET} PRIVATE -Wall -Werror) + target_compile_options(${TARGET} PRIVATE "$<$<NOT:$<CONFIG:Debug>>:-O3>") + target_compile_definitions(${TARGET} PRIVATE "$<$<NOT:$<CONFIG:Debug>>:NDEBUG>") +endfunction() + +set(FLUTTER_MANAGED_DIR "${CMAKE_CURRENT_SOURCE_DIR}/flutter") + +# Flutter library and tool build rules. +add_subdirectory(${FLUTTER_MANAGED_DIR}) + +# Application build +add_subdirectory("runner") + +# Generated plugin build rules, which manage building the plugins and adding +# them to the application. +include(flutter/generated_plugins.cmake) + +# === Installation === +# By default, "installing" just makes a relocatable bundle in the build +# directory. +set(BUILD_BUNDLE_DIR "${PROJECT_BINARY_DIR}/bundle") +if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + set(CMAKE_INSTALL_PREFIX "${BUILD_BUNDLE_DIR}" CACHE PATH "..." FORCE) +endif() + +# Start with a clean build bundle directory every time. +install(CODE " + file(REMOVE_RECURSE \"${BUILD_BUNDLE_DIR}/\") + " COMPONENT Runtime) + +set(INSTALL_BUNDLE_DATA_DIR "${CMAKE_INSTALL_PREFIX}/data") +set(INSTALL_BUNDLE_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib") + +install(TARGETS ${BINARY_NAME} RUNTIME DESTINATION "${CMAKE_INSTALL_PREFIX}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_ICU_DATA_FILE}" DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" + COMPONENT Runtime) + +install(FILES "${FLUTTER_LIBRARY}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +install(FILES "${FLUTTER_EMBEDDER_LIBRARY}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) + +if(PLUGIN_BUNDLED_LIBRARIES) + install(FILES "${PLUGIN_BUNDLED_LIBRARIES}" + DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() + +# Fully re-copy the assets directory on each build to avoid having stale files +# from a previous install. +set(FLUTTER_ASSET_DIR_NAME "flutter_assets") +install(CODE " + file(REMOVE_RECURSE \"${INSTALL_BUNDLE_DATA_DIR}/${FLUTTER_ASSET_DIR_NAME}\") + " COMPONENT Runtime) +install(DIRECTORY "${PROJECT_BUILD_DIR}/${FLUTTER_ASSET_DIR_NAME}" + DESTINATION "${INSTALL_BUNDLE_DATA_DIR}" COMPONENT Runtime) + +# Install the AOT library on non-Debug builds only. +if(NOT CMAKE_BUILD_TYPE MATCHES "Debug") + install(FILES "${AOT_LIBRARY}" DESTINATION "${INSTALL_BUNDLE_LIB_DIR}" + COMPONENT Runtime) +endif() \ No newline at end of file diff --git a/example/elinux/flutter/CMakeLists.txt b/example/elinux/flutter/CMakeLists.txt new file mode 100644 index 0000000000..6dd6974695 --- /dev/null +++ b/example/elinux/flutter/CMakeLists.txt @@ -0,0 +1,108 @@ +cmake_minimum_required(VERSION 3.15) + +set(EPHEMERAL_DIR "${CMAKE_CURRENT_SOURCE_DIR}/ephemeral") + +# Configuration provided via flutter tool. +include(${EPHEMERAL_DIR}/generated_config.cmake) + +set(WRAPPER_ROOT "${EPHEMERAL_DIR}/cpp_client_wrapper") + +# Serves the same purpose as list(TRANSFORM ... PREPEND ...), +# which isn't available in 3.10. 
+function(list_prepend LIST_NAME PREFIX) + set(NEW_LIST "") + foreach(element ${${LIST_NAME}}) + list(APPEND NEW_LIST "${PREFIX}${element}") + endforeach(element) + set(${LIST_NAME} "${NEW_LIST}" PARENT_SCOPE) +endfunction() + +# === Flutter Library === +# System-level dependencies. +set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/libflutter_engine.so") +if(FLUTTER_TARGET_BACKEND_TYPE MATCHES "gbm") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_gbm.so") +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "eglstream") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_eglstream.so") +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "x11") + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_x11.so") +else() + set(FLUTTER_EMBEDDER_LIBRARY "${EPHEMERAL_DIR}/libflutter_elinux_wayland.so") +endif() + +# Published to parent scope for install step. +set(FLUTTER_LIBRARY ${FLUTTER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_EMBEDDER_LIBRARY ${FLUTTER_EMBEDDER_LIBRARY} PARENT_SCOPE) +set(FLUTTER_ICU_DATA_FILE "${EPHEMERAL_DIR}/icudtl.dat" PARENT_SCOPE) +set(PROJECT_BUILD_DIR "${PROJECT_DIR}/build/elinux/" PARENT_SCOPE) +set(AOT_LIBRARY "${EPHEMERAL_DIR}/libapp.so" PARENT_SCOPE) + +list(APPEND FLUTTER_LIBRARY_HEADERS + "flutter_export.h" + "flutter_plugin_registrar.h" + "flutter_messenger.h" + "flutter_texture_registrar.h" + "flutter_elinux.h" + "flutter_platform_views.h" +) +list_prepend(FLUTTER_LIBRARY_HEADERS "${EPHEMERAL_DIR}/") +add_library(flutter INTERFACE) +target_include_directories(flutter INTERFACE + "${EPHEMERAL_DIR}" +) +target_link_libraries(flutter INTERFACE "${FLUTTER_LIBRARY}") +target_link_libraries(flutter INTERFACE "${FLUTTER_EMBEDDER_LIBRARY}") +add_dependencies(flutter flutter_assemble) + +# === Wrapper === +list(APPEND CPP_WRAPPER_SOURCES_CORE + "core_implementations.cc" + "standard_codec.cc" +) +list_prepend(CPP_WRAPPER_SOURCES_CORE "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_PLUGIN + "plugin_registrar.cc" +) +list_prepend(CPP_WRAPPER_SOURCES_PLUGIN "${WRAPPER_ROOT}/") +list(APPEND CPP_WRAPPER_SOURCES_APP + "flutter_engine.cc" + "flutter_view_controller.cc" +) +list_prepend(CPP_WRAPPER_SOURCES_APP "${WRAPPER_ROOT}/") + +# Wrapper sources needed for a plugin. +add_library(flutter_wrapper_plugin STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} +) +apply_standard_settings(flutter_wrapper_plugin) +set_target_properties(flutter_wrapper_plugin PROPERTIES + POSITION_INDEPENDENT_CODE ON) +set_target_properties(flutter_wrapper_plugin PROPERTIES + CXX_VISIBILITY_PRESET hidden) +target_link_libraries(flutter_wrapper_plugin PUBLIC flutter) +target_include_directories(flutter_wrapper_plugin PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_plugin flutter_assemble) + +# Wrapper sources needed for the runner. 
+add_library(flutter_wrapper_app STATIC + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_APP} +) +apply_standard_settings(flutter_wrapper_app) +target_link_libraries(flutter_wrapper_app PUBLIC flutter) +target_include_directories(flutter_wrapper_app PUBLIC + "${WRAPPER_ROOT}/include" +) +add_dependencies(flutter_wrapper_app flutter_assemble) + +add_custom_target(flutter_assemble DEPENDS + "${FLUTTER_LIBRARY}" + "${FLUTTER_EMBEDDER_LIBRARY}" + ${FLUTTER_LIBRARY_HEADERS} + ${CPP_WRAPPER_SOURCES_CORE} + ${CPP_WRAPPER_SOURCES_PLUGIN} + ${CPP_WRAPPER_SOURCES_APP} +) \ No newline at end of file diff --git a/example/elinux/flutter/generated_plugin_registrant.cc b/example/elinux/flutter/generated_plugin_registrant.cc new file mode 100644 index 0000000000..f234f60733 --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.cc @@ -0,0 +1,14 @@ +// +// Generated file. Do not edit. +// + +// clang-format off + +#include "generated_plugin_registrant.h" + +#include <videosdk_webrtc/flutter_web_r_t_c_plugin.h> + +void RegisterPlugins(flutter::PluginRegistry* registry) { + FlutterWebRTCPluginRegisterWithRegistrar( + registry->GetRegistrarForPlugin("FlutterWebRTCPlugin")); +} \ No newline at end of file diff --git a/example/elinux/flutter/generated_plugin_registrant.dart b/example/elinux/flutter/generated_plugin_registrant.dart new file mode 100644 index 0000000000..aedede8ce8 --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.dart @@ -0,0 +1,8 @@ +// +// Generated file. Do not edit. +// + +// ignore_for_file: lines_longer_than_80_chars + +// ignore: public_member_api_docs +void registerPlugins() {} diff --git a/example/elinux/flutter/generated_plugin_registrant.h b/example/elinux/flutter/generated_plugin_registrant.h new file mode 100644 index 0000000000..e8aca0e82d --- /dev/null +++ b/example/elinux/flutter/generated_plugin_registrant.h @@ -0,0 +1,13 @@ +// +// Generated file. Do not edit. +// + +#ifndef GENERATED_PLUGIN_REGISTRANT_ +#define GENERATED_PLUGIN_REGISTRANT_ + +#include <flutter/plugin_registry.h> + +// Registers Flutter plugins. +void RegisterPlugins(flutter::PluginRegistry* registry); + +#endif // GENERATED_PLUGIN_REGISTRANT_ \ No newline at end of file diff --git a/example/elinux/flutter/generated_plugins.cmake b/example/elinux/flutter/generated_plugins.cmake new file mode 100644 index 0000000000..8a5ca7474e --- /dev/null +++ b/example/elinux/flutter/generated_plugins.cmake @@ -0,0 +1,16 @@ +# +# Generated file, do not edit. 
+# + +list(APPEND FLUTTER_PLUGIN_LIST + flutter_webrtc +) + +set(PLUGIN_BUNDLED_LIBRARIES) + +foreach(plugin ${FLUTTER_PLUGIN_LIST}) + add_subdirectory(flutter/ephemeral/.plugin_symlinks/${plugin}/elinux plugins/${plugin}) + target_link_libraries(${BINARY_NAME} PRIVATE ${plugin}_plugin) + list(APPEND PLUGIN_BUNDLED_LIBRARIES $<TARGET_FILE:${plugin}_plugin>) + list(APPEND PLUGIN_BUNDLED_LIBRARIES ${${plugin}_bundled_libraries}) +endforeach(plugin) \ No newline at end of file diff --git a/example/elinux/runner/CMakeLists.txt b/example/elinux/runner/CMakeLists.txt new file mode 100644 index 0000000000..5b75e3e35c --- /dev/null +++ b/example/elinux/runner/CMakeLists.txt @@ -0,0 +1,23 @@ +cmake_minimum_required(VERSION 3.15) +project(runner LANGUAGES CXX) + +if(FLUTTER_TARGET_BACKEND_TYPE MATCHES "gbm") + add_definitions(-DFLUTTER_TARGET_BACKEND_GBM) +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "eglstream") + add_definitions(-DFLUTTER_TARGET_BACKEND_EGLSTREAM) +elseif(FLUTTER_TARGET_BACKEND_TYPE MATCHES "x11") + add_definitions(-DFLUTTER_TARGET_BACKEND_X11) +else() + add_definitions(-DFLUTTER_TARGET_BACKEND_WAYLAND) +endif() + +add_executable(${BINARY_NAME} + "flutter_window.cc" + "main.cc" + "${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc" +) +apply_standard_settings(${BINARY_NAME}) +target_link_libraries(${BINARY_NAME} PRIVATE flutter) +target_link_libraries(${BINARY_NAME} PRIVATE flutter flutter_wrapper_app) +target_include_directories(${BINARY_NAME} PRIVATE "${CMAKE_SOURCE_DIR}") +add_dependencies(${BINARY_NAME} flutter_assemble) \ No newline at end of file diff --git a/example/elinux/runner/command_options.h b/example/elinux/runner/command_options.h new file mode 100644 index 0000000000..ea17634117 --- /dev/null +++ b/example/elinux/runner/command_options.h @@ -0,0 +1,398 @@ +#ifndef COMMAND_OPTIONS_ +#define COMMAND_OPTIONS_ + +#include <algorithm> +#include <iostream> +#include <memory> +#include <sstream> +#include <string> +#include <unordered_map> +#include <vector> + +namespace commandline { + +namespace { +constexpr char kOptionStyleNormal[] = "--"; +constexpr char kOptionStyleShort[] = "-"; +constexpr char kOptionValueForHelpMessage[] = "="; +} // namespace + +class Exception : public std::exception { + public: + Exception(const std::string& msg) : msg_(msg) {} + ~Exception() throw() {} + + const char* what() const throw() { return msg_.c_str(); } + + private: + std::string msg_; +}; + +class CommandOptions { + public: + CommandOptions() = default; + ~CommandOptions() = default; + + void AddWithoutValue(const std::string& name, + const std::string& short_name, + const std::string& description, + bool required) { + Add<std::string, ReaderString>(name, short_name, description, "", + ReaderString(), required, false); + } + + void AddInt(const std::string& name, + const std::string& short_name, + const std::string& description, + const int& default_value, + bool required) { + Add<int, ReaderInt>(name, short_name, description, default_value, + ReaderInt(), required, true); + } + + void AddDouble(const std::string& name, + const std::string& short_name, + const std::string& description, + const double& default_value, + bool required) { + Add<double, ReaderDouble>(name, short_name, description, default_value, + ReaderDouble(), required, true); + } + + void AddString(const std::string& name, + const std::string& short_name, + const std::string& description, + const std::string& default_value, + bool required) { + Add<std::string, ReaderString>(name, short_name, description, default_value, + ReaderString(), required, true); + } + + template <typename T, typename F> + void Add(const std::string& name, + const std::string& short_name, + const std::string& description, + const T default_value, + F reader = F(), + bool required = true, + bool required_value = true) {
 + if (options_.find(name) != options_.end()) { + std::cerr << "Already registered option: " << name << std::endl; + return; + } + + if (lut_short_options_.find(short_name) != lut_short_options_.end()) { + std::cerr << short_name << " is already registered" << std::endl; + return; + } + lut_short_options_[short_name] = name; + + options_[name] = std::make_unique<OptionValueReader<T, F>>( + name, short_name, description, default_value, reader, required, + required_value); + + // register to show help message. + registration_order_options_.push_back(options_[name].get()); + } + + bool Exist(const std::string& name) { + auto itr = options_.find(name); + return itr != options_.end() && itr->second->HasValue(); + } + + template <typename T> + const T& GetValue(const std::string& name) { + auto itr = options_.find(name); + if (itr == options_.end()) { + throw Exception("Not found: " + name); + } + + auto* option_value = dynamic_cast<OptionValue<T>*>(itr->second.get()); + if (!option_value) { + throw Exception("Type mismatch: " + name); + } + return option_value->GetValue(); + } + + bool Parse(int argc, const char* const* argv) { + if (argc < 1) { + errors_.push_back("No options"); + return false; + } + + command_name_ = argv[0]; + for (auto i = 1; i < argc; i++) { + const std::string arg(argv[i]); + + // normal options: e.g. --bundle=/data/sample/bundle --fullscreen + if (arg.length() > 2 && + arg.substr(0, 2).compare(kOptionStyleNormal) == 0) { + const size_t option_value_len = arg.find("=") != std::string::npos + ? (arg.length() - arg.find("=")) + : 0; + const bool has_value = option_value_len != 0; + std::string option_name = + arg.substr(2, arg.length() - 2 - option_value_len); + + if (options_.find(option_name) == options_.end()) { + errors_.push_back("Not found option: " + option_name); + continue; + } + + if (!has_value && options_[option_name]->IsRequiredValue()) { + errors_.push_back(option_name + " requires an option value"); + continue; + } + + if (has_value && !options_[option_name]->IsRequiredValue()) { + errors_.push_back(option_name + " doesn't require an option value"); + continue; + } + + if (has_value) { + SetOptionValue(option_name, arg.substr(arg.find("=") + 1)); + } else { + SetOption(option_name); + } + } + // short options: e.g. -f /foo/file.txt -h 640 -abc + else if (arg.length() > 1 && + arg.substr(0, 1).compare(kOptionStyleShort) == 0) { + for (size_t j = 1; j < arg.length(); j++) { + const std::string option_name{argv[i][j]}; + + if (lut_short_options_.find(option_name) == + lut_short_options_.end()) { + errors_.push_back("Not found short option: " + option_name); + break; + } + + if (j == arg.length() - 1 && + options_[lut_short_options_[option_name]]->IsRequiredValue()) { + if (i == argc - 1) { + errors_.push_back("Invalid format option: " + option_name); + break; + } + SetOptionValue(lut_short_options_[option_name], argv[++i]); + } else { + SetOption(lut_short_options_[option_name]); + } + } + } else { + errors_.push_back("Invalid format option: " + arg); + } + } + + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (registration_order_options_[i]->IsRequired() && + !registration_order_options_[i]->HasValue()) { + errors_.push_back( + std::string(registration_order_options_[i]->GetName()) + + " option is mandatory."); + } + } + + return errors_.size() == 0; + } + + std::string GetError() { return errors_.size() > 0 ? errors_[0] : ""; }
 + + std::vector<std::string>& GetErrors() { return errors_; } + + std::string ShowHelp() { + std::ostringstream ostream; + + ostream << "Usage: " << command_name_ << " "; + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (registration_order_options_[i]->IsRequired()) { + ostream << registration_order_options_[i]->GetHelpShortMessage() << " "; + } + } + ostream << std::endl; + + ostream << "Global options:" << std::endl; + size_t max_name_len = 0; + for (size_t i = 0; i < registration_order_options_.size(); i++) { + max_name_len = std::max( + max_name_len, registration_order_options_[i]->GetName().length()); + } + + for (size_t i = 0; i < registration_order_options_.size(); i++) { + if (!registration_order_options_[i]->GetShortName().empty()) { + ostream << kOptionStyleShort + << registration_order_options_[i]->GetShortName() << ", "; + } else { + ostream << std::string(4, ' '); + } + + size_t index_adjust = 0; + constexpr int kSpacerNum = 10; + auto need_value = registration_order_options_[i]->IsRequiredValue(); + ostream << kOptionStyleNormal + << registration_order_options_[i]->GetName(); + if (need_value) { + ostream << kOptionValueForHelpMessage; + index_adjust += std::string(kOptionValueForHelpMessage).length(); + } + ostream << std::string( + max_name_len + kSpacerNum - index_adjust - + registration_order_options_[i]->GetName().length(), + ' '); + ostream << registration_order_options_[i]->GetDescription() << std::endl; + } + + return ostream.str(); + } + + private: + struct ReaderInt { + int operator()(const std::string& value) { return std::stoi(value); } + }; + + struct ReaderString { + std::string operator()(const std::string& value) { return value; } + }; + + struct ReaderDouble { + double operator()(const std::string& value) { return std::stod(value); } + }; + + class Option { + public: + Option(const std::string& name, + const std::string& short_name, + const std::string& description, + bool required, + bool required_value) + : name_(name), + short_name_(short_name), + description_(description), + is_required_(required), + is_required_value_(required_value), + value_set_(false){}; + virtual ~Option() = default; + + const std::string& GetName() const { return name_; }; + + const std::string& GetShortName() const { return short_name_; }; + + const std::string& GetDescription() const { return description_; }; + + const std::string GetHelpShortMessage() const { + std::string message = kOptionStyleNormal + name_; + if (is_required_value_) { + message += kOptionValueForHelpMessage; + } + return message; + } + + bool IsRequired() const { return is_required_; }; + + bool IsRequiredValue() const { return is_required_value_; }; + + void Set() { value_set_ = true; }; + + virtual bool SetValue(const std::string& value) = 0; + + virtual bool HasValue() const = 0; + + protected: + std::string name_; + std::string short_name_; + std::string description_; + bool is_required_; + bool is_required_value_; + bool value_set_; + }; + + template <typename T> + class OptionValue : public Option { + public: + OptionValue(const std::string& name, + const std::string& short_name, + const std::string& description, + const T& default_value, + bool required, + bool required_value) + : Option(name, short_name, description, required, required_value), + default_value_(default_value), + value_(default_value){}; + virtual ~OptionValue() = default; + + bool SetValue(const std::string& value) { + value_ = Read(value); + value_set_ = true; + return true; + } + + bool HasValue() const { return value_set_; }
 + + const T& GetValue() const { return value_; } + + protected: + virtual T Read(const std::string& s) = 0; + + T default_value_; + T value_; + }; + + template <typename T, typename F> + class OptionValueReader : public OptionValue<T> { + public: + OptionValueReader(const std::string& name, + const std::string& short_name, + const std::string& description, + const T default_value, + F reader, + bool required, + bool required_value) + : OptionValue<T>(name, + short_name, + description, + default_value, + required, + required_value), + reader_(reader) {} + ~OptionValueReader() = default; + + private: + T Read(const std::string& value) { return reader_(value); } + + F reader_; + }; + + bool SetOption(const std::string& name) { + auto itr = options_.find(name); + if (itr == options_.end()) { + errors_.push_back("Unknown option: " + name); + return false; + } + + itr->second->Set(); + return true; + } + + bool SetOptionValue(const std::string& name, const std::string& value) { + auto itr = options_.find(name); + if (itr == options_.end()) { + errors_.push_back("Unknown option: " + name); + return false; + } + + if (!itr->second->SetValue(value)) { + errors_.push_back("Invalid option value: " + name + " = " + value); + return false; + } + return true; + } + + std::string command_name_; + std::unordered_map<std::string, std::unique_ptr<Option>> options_; + std::unordered_map<std::string, std::string> lut_short_options_; + std::vector<Option*> registration_order_options_; + std::vector<std::string> errors_; +}; + +} // namespace commandline + +#endif // COMMAND_OPTIONS_ \ No newline at end of file
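
Note: to make the parser above concrete, a small hedged sketch of a caller driving it; the option names mirror the ones registered by flutter_embedder_options.h below:

    #include <iostream>
    #include "command_options.h"

    int main(int argc, char** argv) {
      commandline::CommandOptions options;
      options.AddString("bundle", "b", "Path to Flutter project bundle", "./bundle", true);
      options.AddInt("width", "w", "Window width", 1280, false);
      if (!options.Parse(argc, argv)) {
        std::cerr << options.GetError() << std::endl;
        std::cout << options.ShowHelp();
        return 1;
      }
      // Typed access must match the registration above.
      const auto bundle = options.GetValue<std::string>("bundle");
      const int width = options.GetValue<int>("width");
      std::cout << bundle << " @ " << width << std::endl;
      return 0;
    }
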
diff --git a/example/elinux/runner/flutter_embedder_options.h b/example/elinux/runner/flutter_embedder_options.h new file mode 100644 index 0000000000..1d616a4984 --- /dev/null +++ b/example/elinux/runner/flutter_embedder_options.h @@ -0,0 +1,199 @@ +#ifndef FLUTTER_EMBEDDER_OPTIONS_ +#define FLUTTER_EMBEDDER_OPTIONS_ + +#include <flutter/flutter_view_controller.h> + +#include <string> + +#include "command_options.h" + +class FlutterEmbedderOptions { + public: + FlutterEmbedderOptions() { + options_.AddString("bundle", "b", "Path to Flutter project bundle", + "./bundle", true); + options_.AddWithoutValue("no-cursor", "n", "No mouse cursor/pointer", + false); + options_.AddInt("rotation", "r", + "Window rotation(degree) [0(default)|90|180|270]", 0, + false); + options_.AddDouble("text-scaling-factor", "x", "Text scaling factor", 1.0, + false); + options_.AddWithoutValue("enable-high-contrast", "i", + "Request that UI be rendered with darker colors.", + false); + options_.AddDouble("force-scale-factor", "s", + "Force a scale factor instead using default value", 1.0, + false); + options_.AddWithoutValue( + "async-vblank", "v", + "Don't sync to compositor redraw/vblank (eglSwapInterval 0)", false); + +#if defined(FLUTTER_TARGET_BACKEND_GBM) || \ + defined(FLUTTER_TARGET_BACKEND_EGLSTREAM) + // no more options. +#elif defined(FLUTTER_TARGET_BACKEND_X11) + options_.AddString("title", "t", "Window title", "Flutter", false); + options_.AddWithoutValue("fullscreen", "f", "Always full-screen display", + false); + options_.AddInt("width", "w", "Window width", 1280, false); + options_.AddInt("height", "h", "Window height", 720, false); +#else // FLUTTER_TARGET_BACKEND_WAYLAND + options_.AddString("title", "t", "Window title", "Flutter", false); + options_.AddString("app-id", "a", "XDG App ID", "dev.flutter.elinux", + false); + options_.AddWithoutValue("onscreen-keyboard", "k", + "Enable on-screen keyboard", false); + options_.AddWithoutValue("window-decoration", "d", + "Enable window decorations", false); + options_.AddWithoutValue("fullscreen", "f", "Always full-screen display", + false); + options_.AddInt("width", "w", "Window width", 1280, false); + options_.AddInt("height", "h", "Window height", 720, false); +#endif + } + ~FlutterEmbedderOptions() = default; + + bool Parse(int argc, char** argv) { + if (!options_.Parse(argc, argv)) { + std::cerr << options_.GetError() << std::endl; + std::cout << options_.ShowHelp(); + return false; + } + + bundle_path_ = options_.GetValue<std::string>("bundle"); + use_mouse_cursor_ = !options_.Exist("no-cursor"); + if (options_.Exist("rotation")) { + switch (options_.GetValue<int>("rotation")) { + case 90: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_90; + break; + case 180: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_180; + break; + case 270: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_270; + break; + default: + window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_0; + break; + } + } + + text_scale_factor_ = options_.GetValue<double>("text-scaling-factor"); + enable_high_contrast_ = options_.Exist("enable-high-contrast"); + + if (options_.Exist("force-scale-factor")) { + is_force_scale_factor_ = true; + scale_factor_ = options_.GetValue<double>("force-scale-factor"); + } else { + is_force_scale_factor_ = false; + scale_factor_ = 1.0; + } + + enable_vsync_ = !options_.Exist("async-vblank"); + +#if defined(FLUTTER_TARGET_BACKEND_GBM) || \ + defined(FLUTTER_TARGET_BACKEND_EGLSTREAM) + use_onscreen_keyboard_ = false; + use_window_decoration_ = false; + window_view_mode_ = flutter::FlutterViewController::ViewMode::kFullscreen; +#elif defined(FLUTTER_TARGET_BACKEND_X11) + use_onscreen_keyboard_ = false; + use_window_decoration_ = false; + window_title_ = options_.GetValue<std::string>("title"); + window_view_mode_ = + options_.Exist("fullscreen") + ? flutter::FlutterViewController::ViewMode::kFullscreen + : flutter::FlutterViewController::ViewMode::kNormal; + window_width_ = options_.GetValue<int>("width"); + window_height_ = options_.GetValue<int>("height"); +#else // FLUTTER_TARGET_BACKEND_WAYLAND + window_title_ = options_.GetValue<std::string>("title"); + window_app_id_ = options_.GetValue<std::string>("app-id"); + use_onscreen_keyboard_ = options_.Exist("onscreen-keyboard"); + use_window_decoration_ = options_.Exist("window-decoration"); + window_view_mode_ = + options_.Exist("fullscreen") + ? flutter::FlutterViewController::ViewMode::kFullscreen + : flutter::FlutterViewController::ViewMode::kNormal;
 + window_width_ = options_.GetValue<int>("width"); + window_height_ = options_.GetValue<int>("height"); +#endif + + return true; + } + + std::string BundlePath() const { + return bundle_path_; + } + std::string WindowTitle() const { + return window_title_; + } + std::string WindowAppID() const { + return window_app_id_; + } + bool IsUseMouseCursor() const { + return use_mouse_cursor_; + } + bool IsUseOnscreenKeyboard() const { + return use_onscreen_keyboard_; + } + bool IsUseWindowDecoration() const { + return use_window_decoration_; + } + flutter::FlutterViewController::ViewMode WindowViewMode() const { + return window_view_mode_; + } + int WindowWidth() const { + return window_width_; + } + int WindowHeight() const { + return window_height_; + } + flutter::FlutterViewController::ViewRotation WindowRotation() const { + return window_view_rotation_; + } + double TextScaleFactor() const { + return text_scale_factor_; + } + bool EnableHighContrast() const { + return enable_high_contrast_; + } + bool IsForceScaleFactor() const { + return is_force_scale_factor_; + } + double ScaleFactor() const { + return scale_factor_; + } + bool EnableVsync() const { + return enable_vsync_; + } + + private: + commandline::CommandOptions options_; + + std::string bundle_path_; + std::string window_title_; + std::string window_app_id_; + bool use_mouse_cursor_ = true; + bool use_onscreen_keyboard_ = false; + bool use_window_decoration_ = false; + flutter::FlutterViewController::ViewMode window_view_mode_ = + flutter::FlutterViewController::ViewMode::kNormal; + int window_width_ = 1280; + int window_height_ = 720; + flutter::FlutterViewController::ViewRotation window_view_rotation_ = + flutter::FlutterViewController::ViewRotation::kRotation_0; + bool is_force_scale_factor_; + double scale_factor_; + double text_scale_factor_; + bool enable_high_contrast_; + bool enable_vsync_; +}; + +#endif // FLUTTER_EMBEDDER_OPTIONS_ \ No newline at end of file
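
Note: since the embedder options above map one-to-one onto command-line flags, a typical launch of the built runner might look like the following (binary name and paths are illustrative):

    ./example --bundle=./bundle --width=1920 --height=1080 --fullscreen --rotation=90
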
diff --git a/example/elinux/runner/flutter_window.cc b/example/elinux/runner/flutter_window.cc new file mode 100644 index 0000000000..d3217615d3 --- /dev/null +++ b/example/elinux/runner/flutter_window.cc @@ -0,0 +1,75 @@ +#include "flutter_window.h" + +#include <algorithm> +#include <chrono> +#include <cmath> +#include <thread> + +#include "flutter/generated_plugin_registrant.h" + +FlutterWindow::FlutterWindow( + const flutter::FlutterViewController::ViewProperties view_properties, + const flutter::DartProject project) + : view_properties_(view_properties), project_(project) {} + +bool FlutterWindow::OnCreate() { + flutter_view_controller_ = std::make_unique<flutter::FlutterViewController>( + view_properties_, project_); + + // Ensure that basic setup of the controller was successful. + if (!flutter_view_controller_->engine() || + !flutter_view_controller_->view()) { + return false; + } + + // Register Flutter plugins. + RegisterPlugins(flutter_view_controller_->engine()); + + return true; +} + +void FlutterWindow::OnDestroy() { + if (flutter_view_controller_) { + flutter_view_controller_ = nullptr; + } +} + +void FlutterWindow::Run() { + // Main loop. + auto next_flutter_event_time = + std::chrono::steady_clock::time_point::clock::now(); + while (flutter_view_controller_->view()->DispatchEvent()) { + // Wait until the next event. + { + auto wait_duration = + std::max(std::chrono::nanoseconds(0), + next_flutter_event_time - + std::chrono::steady_clock::time_point::clock::now()); + std::this_thread::sleep_for( + std::chrono::duration_cast<std::chrono::milliseconds>(wait_duration)); + } + + // Processes any pending events in the Flutter engine, and returns the + // number of nanoseconds until the next scheduled event (or max, if none). + auto wait_duration = flutter_view_controller_->engine()->ProcessMessages(); + { + auto next_event_time = std::chrono::steady_clock::time_point::max(); + if (wait_duration != std::chrono::nanoseconds::max()) { + next_event_time = + std::min(next_event_time, + std::chrono::steady_clock::time_point::clock::now() + + wait_duration); + } else { + // Wait for the next frame if no events. + auto frame_rate = flutter_view_controller_->view()->GetFrameRate(); + next_event_time = std::min( + next_event_time, + std::chrono::steady_clock::time_point::clock::now() + + std::chrono::milliseconds( + static_cast<int64_t>(std::trunc(1000000.0 / frame_rate)))); + } + next_flutter_event_time = + std::max(next_flutter_event_time, next_event_time); + } + } +} \ No newline at end of file diff --git a/example/elinux/runner/flutter_window.h b/example/elinux/runner/flutter_window.h new file mode 100644 index 0000000000..d15085b7bc --- /dev/null +++ b/example/elinux/runner/flutter_window.h @@ -0,0 +1,30 @@ +#ifndef FLUTTER_WINDOW_ +#define FLUTTER_WINDOW_ + +#include <flutter/dart_project.h> +#include <flutter/flutter_view_controller.h> + +#include <memory> + +class FlutterWindow { + public: + explicit FlutterWindow( + const flutter::FlutterViewController::ViewProperties view_properties, + const flutter::DartProject project); + ~FlutterWindow() = default; + + // Prevent copying. + FlutterWindow(FlutterWindow const&) = delete; + FlutterWindow& operator=(FlutterWindow const&) = delete; + + bool OnCreate(); + void OnDestroy(); + void Run(); + + private: + flutter::FlutterViewController::ViewProperties view_properties_; + flutter::DartProject project_; + std::unique_ptr<flutter::FlutterViewController> flutter_view_controller_; +}; + +#endif // FLUTTER_WINDOW_ \ No newline at end of file diff --git a/example/elinux/runner/main.cc b/example/elinux/runner/main.cc new file mode 100644 index 0000000000..36637ab942 --- /dev/null +++ b/example/elinux/runner/main.cc @@ -0,0 +1,49 @@ +#include <flutter/dart_project.h> +#include <flutter/flutter_view_controller.h> + +#include <iostream> +#include <string> +#include <vector> + +#include "flutter_embedder_options.h" +#include "flutter_window.h" + +int main(int argc, char** argv) { + FlutterEmbedderOptions options; + if (!options.Parse(argc, argv)) { + return 0; + } + + // Creates the Flutter project. 
+ const auto bundle_path = options.BundlePath(); + const std::wstring fl_path(bundle_path.begin(), bundle_path.end()); + flutter::DartProject project(fl_path); + auto command_line_arguments = std::vector<std::string>(); + project.set_dart_entrypoint_arguments(std::move(command_line_arguments)); + + flutter::FlutterViewController::ViewProperties view_properties = {}; + view_properties.width = options.WindowWidth(); + view_properties.height = options.WindowHeight(); + view_properties.view_mode = options.WindowViewMode(); + view_properties.view_rotation = options.WindowRotation(); + view_properties.title = options.WindowTitle(); + view_properties.app_id = options.WindowAppID(); + view_properties.use_mouse_cursor = options.IsUseMouseCursor(); + view_properties.use_onscreen_keyboard = options.IsUseOnscreenKeyboard(); + view_properties.use_window_decoration = options.IsUseWindowDecoration(); + view_properties.text_scale_factor = options.TextScaleFactor(); + view_properties.enable_high_contrast = options.EnableHighContrast(); + view_properties.force_scale_factor = options.IsForceScaleFactor(); + view_properties.scale_factor = options.ScaleFactor(); + view_properties.enable_vsync = options.EnableVsync(); + + // The Flutter instance hosted by this window. + FlutterWindow window(view_properties, project); + if (!window.OnCreate()) { + return 0; + } + window.Run(); + window.OnDestroy(); + + return 0; +} diff --git a/example/ios/Flutter/AppFrameworkInfo.plist b/example/ios/Flutter/AppFrameworkInfo.plist index 1dc6cf7652..7c56964006 100644 --- a/example/ios/Flutter/AppFrameworkInfo.plist +++ b/example/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ <key>CFBundleVersion</key> <string>1.0</string> <key>MinimumOSVersion</key> - <string>13.0</string> + <string>12.0</string> </dict> </plist> diff --git a/example/ios/Podfile b/example/ios/Podfile index ed16470330..a81dcda7c9 100644 --- a/example/ios/Podfile +++ b/example/ios/Podfile @@ -1,5 +1,4 @@ -# Uncomment this line to define a global platform for your project -platform :ios, '13.0' +platform :ios, '14.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' @@ -10,6 +9,9 @@ project 'Runner', { 'Release' => :release, } + + + def flutter_root generated_xcode_build_settings_path = File.expand_path(File.join('..', 'Flutter', 'Generated.xcconfig'), __FILE__) unless File.exist?(generated_xcode_build_settings_path) @@ -28,6 +30,9 @@ require File.expand_path(File.join('packages', 'flutter_tools', 'bin', 'podhelper'), flutter_root) flutter_ios_podfile_setup target 'Runner' do + use_frameworks! :linkage => :static + use_modular_headers! + flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__)) target 'RunnerTests' do inherit! 
:search_paths @@ -37,5 +42,11 @@ end post_install do |installer| installer.pods_project.targets.each do |target| flutter_additional_ios_build_settings(target) + target.build_configurations.each do |config| + config.build_settings['GCC_PREPROCESSOR_DEFINITIONS'] ||= [ + 'PERMISSION_CAMERA=1', + 'PERMISSION_MICROPHONE=1', + ] + end end end diff --git a/example/ios/Runner.xcodeproj/project.pbxproj b/example/ios/Runner.xcodeproj/project.pbxproj index d974dc02cf..9ceb85c015 100644 --- a/example/ios/Runner.xcodeproj/project.pbxproj +++ b/example/ios/Runner.xcodeproj/project.pbxproj @@ -7,6 +7,7 @@ objects = { /* Begin PBXBuildFile section */ + 110D18F826D56564227003AA /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 84FB9711B5394A66BE315EF5 /* Pods_Runner.framework */; }; 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; 331C80F4294D02FB00263BE5 /* RunnerTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 331C80F3294D02FB00263BE5 /* RunnerTests.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; @@ -15,6 +16,7 @@ 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; + E3A156816086F4B962F04CD8 /* Pods_RunnerTests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 44E0D5D38FC9B8DCBD21875A /* Pods_RunnerTests.framework */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -41,14 +43,18 @@ /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ + 12E5780FBB4964270E45B79F /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = "<group>"; }; 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; }; 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; }; 331C80F1294D02FB00263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 331C80F3294D02FB00263BE5 /* RunnerTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RunnerTests.m; sourceTree = "<group>"; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; }; + 44E0D5D38FC9B8DCBD21875A /* Pods_RunnerTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_RunnerTests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 7AFA3C8E1D35360C0083082E 
/* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; }; 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; }; 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; }; + 809910BDB882FE252DA2E090 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; }; + 84FB9711B5394A66BE315EF5 /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; }; 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; }; 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -57,6 +63,10 @@ 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; }; 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; }; 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; }; + BCAAA38C90E4322E54BF7FA2 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = "<group>"; }; + C8E0F8087135E2B79E40845B /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = "<group>"; }; + E5DFFAD7830661480FE54667 /* Pods-RunnerTests.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.profile.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.profile.xcconfig"; sourceTree = "<group>"; }; + E6A3695E61DACE9FF6FD0C5C /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = "<group>"; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -64,6 +74,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + E3A156816086F4B962F04CD8 /* Pods_RunnerTests.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -71,12 +82,36 @@ isa = PBXFrameworksBuildPhase; 
buildActionMask = 2147483647; files = ( + 110D18F826D56564227003AA /* Pods_Runner.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 0B849DA9A47570547C45EFFB /* Pods */ = { + isa = PBXGroup; + children = ( + 809910BDB882FE252DA2E090 /* Pods-Runner.debug.xcconfig */, + BCAAA38C90E4322E54BF7FA2 /* Pods-Runner.release.xcconfig */, + E6A3695E61DACE9FF6FD0C5C /* Pods-Runner.profile.xcconfig */, + 12E5780FBB4964270E45B79F /* Pods-RunnerTests.debug.xcconfig */, + C8E0F8087135E2B79E40845B /* Pods-RunnerTests.release.xcconfig */, + E5DFFAD7830661480FE54667 /* Pods-RunnerTests.profile.xcconfig */, + ); + name = Pods; + path = Pods; + sourceTree = "<group>"; + }; + 1F05490B61F1820C1421B876 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 84FB9711B5394A66BE315EF5 /* Pods_Runner.framework */, + 44E0D5D38FC9B8DCBD21875A /* Pods_RunnerTests.framework */, + ); + name = Frameworks; + sourceTree = "<group>"; + }; 331C80F2294D02FB00263BE5 /* RunnerTests */ = { isa = PBXGroup; children = ( @@ -103,6 +138,8 @@ 97C146F01CF9000F007C117D /* Runner */, 331C80F2294D02FB00263BE5 /* RunnerTests */, 97C146EF1CF9000F007C117D /* Products */, + 0B849DA9A47570547C45EFFB /* Pods */, + 1F05490B61F1820C1421B876 /* Frameworks */, ); sourceTree = "<group>"; }; @@ -146,6 +183,7 @@ isa = PBXNativeTarget; buildConfigurationList = 331C80F7294D02FB00263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */; buildPhases = ( + 941B7E92BA14DB73D5BD4AEC /* [CP] Check Pods Manifest.lock */, 331C80ED294D02FB00263BE5 /* Sources */, 331C80EE294D02FB00263BE5 /* Frameworks */, 331C80EF294D02FB00263BE5 /* Resources */, @@ -164,12 +202,15 @@ isa = PBXNativeTarget; buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( + FFE2D50325E41746F2BE6949 /* [CP] Check Pods Manifest.lock */, 9740EEB61CF901F6004384FC /* Run Script */, 97C146EA1CF9000F007C117D /* Sources */, 97C146EB1CF9000F007C117D /* Frameworks */, 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 47D6C8E8D69268382AC8E104 /* [CP] Embed Pods Frameworks */, + 901AB9F7ED5F6839924C3AF3 /* [CP] Copy Pods Resources */, ); buildRules = ( ); @@ -186,7 +227,7 @@ 97C146E61CF9000F007C117D /* Project object */ = { isa = PBXProject; attributes = { - LastUpgradeCheck = 1300; + LastUpgradeCheck = 1510; ORGANIZATIONNAME = ""; TargetAttributes = { 331C80F0294D02FB00263BE5 = { @@ -255,6 +296,62 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; + 47D6C8E8D69268382AC8E104 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + 
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Copy Pods Resources"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n"; + showEnvVarsInLog = 0; + }; + 941B7E92BA14DB73D5BD4AEC /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; @@ -270,6 +367,28 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; + FFE2D50325E41746F2BE6949 /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ @@ -362,7 +481,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 11.0; + IPHONEOS_DEPLOYMENT_TARGET = 12.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -377,13 +496,14 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 8GZ776NSU2; ENABLE_BITCODE = NO; INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", ); - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample"; PRODUCT_NAME = "$(TARGET_NAME)"; VERSIONING_SYSTEM = "apple-generic"; }; @@ -391,12 +511,13 @@ }; 331C80F8294D02FB00263BE5 /* Debug */ = { isa = XCBuildConfiguration; + baseConfigurationReference = 12E5780FBB4964270E45B79F /* Pods-RunnerTests.debug.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; PRODUCT_NAME = "$(TARGET_NAME)"; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; }; @@ -404,12 +525,13 @@ }; 331C80F9294D02FB00263BE5 /* Release */ = { isa = XCBuildConfiguration; + baseConfigurationReference = C8E0F8087135E2B79E40845B /* Pods-RunnerTests.release.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; PRODUCT_NAME = "$(TARGET_NAME)"; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; }; @@ -417,12 +539,13 @@ }; 331C80FA294D02FB00263BE5 /* Profile */ = { isa = XCBuildConfiguration; + baseConfigurationReference = E5DFFAD7830661480FE54667 /* Pods-RunnerTests.profile.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; PRODUCT_NAME = "$(TARGET_NAME)"; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Runner"; }; @@ -475,7 +598,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 11.0; + IPHONEOS_DEPLOYMENT_TARGET = 12.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -524,7 +647,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; 
GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 11.0; + IPHONEOS_DEPLOYMENT_TARGET = 12.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -539,13 +662,14 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 8GZ776NSU2; ENABLE_BITCODE = NO; INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", ); - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample"; PRODUCT_NAME = "$(TARGET_NAME)"; VERSIONING_SYSTEM = "apple-generic"; }; @@ -557,13 +681,14 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 8GZ776NSU2; ENABLE_BITCODE = NO; INFOPLIST_FILE = Runner/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", ); - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample"; PRODUCT_NAME = "$(TARGET_NAME)"; VERSIONING_SYSTEM = "apple-generic"; }; diff --git a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index f7213505ac..3d0fb007b1 100644 --- a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -1,6 +1,6 @@ + + diff --git a/example/lib/main.dart b/example/lib/main.dart index 9b24aef841..d60894c894 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -4,7 +4,7 @@ import 'package:flutter/foundation.dart' show debugDefaultTargetPlatformOverride; import 'package:flutter/material.dart'; import 'package:flutter_background/flutter_background.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:videosdk_webrtc/flutter_webrtc.dart'; import 'package:flutter_webrtc_example/src/capture_frame_sample.dart'; import 'src/device_enumeration_sample.dart'; @@ -29,7 +29,7 @@ Future startForegroundService() async { final androidConfig = FlutterBackgroundAndroidConfig( notificationTitle: 'Title of the notification', notificationText: 'Text of the notification', - notificationImportance: AndroidNotificationImportance.Default, + notificationImportance: AndroidNotificationImportance.normal, notificationIcon: AndroidResource( name: 'background_icon', defType: 'drawable'), // Default is ic_launcher from folder mipmap diff --git a/example/lib/src/capture_frame_sample.dart b/example/lib/src/capture_frame_sample.dart index 56a3f77464..dd6c495c9d 100644 --- a/example/lib/src/capture_frame_sample.dart +++ b/example/lib/src/capture_frame_sample.dart @@ -1,7 +1,7 @@ import 'dart:typed_data'; import 'package:flutter/material.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:videosdk_webrtc/flutter_webrtc.dart'; class CaptureFrameSample extends StatefulWidget { @override diff --git a/example/lib/src/device_enumeration_sample.dart b/example/lib/src/device_enumeration_sample.dart index 4630001572..005889d482 100644 --- a/example/lib/src/device_enumeration_sample.dart +++ b/example/lib/src/device_enumeration_sample.dart @@ -3,7 +3,7 @@ import 'package:collection/collection.dart'; import 'package:flutter/foundation.dart'; import 
'package:flutter/material.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:videosdk_webrtc/flutter_webrtc.dart'; import 'package:permission_handler/permission_handler.dart'; class VideoSize { diff --git a/example/lib/src/get_display_media_sample.dart b/example/lib/src/get_display_media_sample.dart index 5fe193d08b..c82c9e53b2 100644 --- a/example/lib/src/get_display_media_sample.dart +++ b/example/lib/src/get_display_media_sample.dart @@ -3,7 +3,7 @@ import 'dart:core'; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter_background/flutter_background.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:videosdk_webrtc/flutter_webrtc.dart'; import 'package:flutter_webrtc_example/src/widgets/screen_select_dialog.dart'; /* @@ -58,11 +58,11 @@ class _GetDisplayMediaSampleState extends State { try { var hasPermissions = await FlutterBackground.hasPermissions; if (!isRetry) { - const androidConfig = FlutterBackgroundAndroidConfig( + var androidConfig = FlutterBackgroundAndroidConfig( notificationTitle: 'Screen Sharing', notificationText: 'LiveKit Example is sharing the screen.', - notificationImportance: AndroidNotificationImportance.Default, - notificationIcon: AndroidResource( + notificationImportance: AndroidNotificationImportance.normal, + notificationIcon: const AndroidResource( name: 'livekit_ic_launcher', defType: 'mipmap'), ); hasPermissions = await FlutterBackground.initialize( diff --git a/example/lib/src/get_user_media_sample.dart b/example/lib/src/get_user_media_sample.dart index d9c427a0de..2edcc8d8ce 100644 --- a/example/lib/src/get_user_media_sample.dart +++ b/example/lib/src/get_user_media_sample.dart @@ -1,9 +1,9 @@ import 'dart:core'; import 'dart:io'; - +import 'dart:math'; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; -import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:videosdk_webrtc/flutter_webrtc.dart'; import 'package:path_provider/path_provider.dart'; /* @@ -21,6 +21,7 @@ class _GetUserMediaSampleState extends State { final _localRenderer = RTCVideoRenderer(); bool _inCalling = false; bool _isTorchOn = false; + bool _isFrontCamera = true; MediaRecorder? _mediaRecorder; bool get _isRec => _mediaRecorder != null; @@ -127,6 +128,15 @@ class _GetUserMediaSampleState extends State { }); } + void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) { + final point = Point( + details.localPosition.dx / constraints.maxWidth, + details.localPosition.dy / constraints.maxHeight, + ); + Helper.setFocusPoint(_localStream!.getVideoTracks().first, point); + Helper.setExposurePoint(_localStream!.getVideoTracks().first, point); + } + void _toggleTorch() async { if (_localStream == null) throw Exception('Stream is not initialized'); @@ -152,17 +162,19 @@ class _GetUserMediaSampleState extends State { final videoTrack = _localStream! .getVideoTracks() .firstWhere((track) => track.kind == 'video'); - await WebRTC.invokeMethod('mediaStreamTrackSetZoom', - {'trackId': videoTrack.id, 'zoomLevel': zoomLevel}); + await Helper.setZoom(videoTrack, zoomLevel); } - void _toggleCamera() async { + void _switchCamera() async { if (_localStream == null) throw Exception('Stream is not initialized'); final videoTrack = _localStream! 
        .getVideoTracks()
        .firstWhere((track) => track.kind == 'video');
    await Helper.switchCamera(videoTrack);
+    setState(() {
+      _isFrontCamera = !_isFrontCamera;
+    });
   }
 
   void _captureFrame() async {
@@ -199,7 +211,7 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
           ),
           IconButton(
             icon: Icon(Icons.switch_video),
-            onPressed: _toggleCamera,
+            onPressed: _switchCamera,
           ),
           IconButton(
             icon: Icon(Icons.camera),
@@ -236,15 +248,20 @@ class _GetUserMediaSampleState extends State<GetUserMediaSample> {
               width: MediaQuery.of(context).size.width,
               height: MediaQuery.of(context).size.height,
               decoration: BoxDecoration(color: Colors.black54),
-              child: GestureDetector(
-                onScaleStart: (details) {},
-                onScaleUpdate: (details) {
-                  if (details.scale != 1.0) {
-                    setZoom(details.scale);
-                  }
-                },
-                child: RTCVideoView(_localRenderer, mirror: true),
-              ),
+              child: LayoutBuilder(
+                  builder: (BuildContext context, BoxConstraints constraints) {
+                return GestureDetector(
+                  onScaleStart: (details) {},
+                  onScaleUpdate: (details) {
+                    if (details.scale != 1.0) {
+                      setZoom(details.scale);
+                    }
+                  },
+                  onTapDown: (TapDownDetails details) =>
+                      onViewFinderTap(details, constraints),
+                  child: RTCVideoView(_localRenderer, mirror: false),
+                );
+              }),
             ));
       },
     ),
diff --git a/example/lib/src/get_user_media_sample_web.dart b/example/lib/src/get_user_media_sample_web.dart
index b8b5b5dfe6..cd6df083d6 100644
--- a/example/lib/src/get_user_media_sample_web.dart
+++ b/example/lib/src/get_user_media_sample_web.dart
@@ -4,7 +4,7 @@ import 'dart:html' as html;
 
 import 'package:flutter/foundation.dart';
 import 'package:flutter/material.dart';
-import 'package:flutter_webrtc/flutter_webrtc.dart';
+import 'package:videosdk_webrtc/flutter_webrtc.dart';
 
 /*
  * getUserMedia sample
diff --git a/example/lib/src/loopback_data_channel_sample.dart b/example/lib/src/loopback_data_channel_sample.dart
index d65be13a59..a2412fc9c3 100644
--- a/example/lib/src/loopback_data_channel_sample.dart
+++ b/example/lib/src/loopback_data_channel_sample.dart
@@ -2,7 +2,7 @@ import 'dart:async';
 import 'dart:core';
 
 import 'package:flutter/material.dart';
-import 'package:flutter_webrtc/flutter_webrtc.dart';
+import 'package:videosdk_webrtc/flutter_webrtc.dart';
 
 class DataChannelLoopBackSample extends StatefulWidget {
   static String tag = 'data_channel_sample';
diff --git a/example/lib/src/loopback_sample_unified_tracks.dart b/example/lib/src/loopback_sample_unified_tracks.dart
index 030c33c1da..ea60a93ada 100644
--- a/example/lib/src/loopback_sample_unified_tracks.dart
+++ b/example/lib/src/loopback_sample_unified_tracks.dart
@@ -3,7 +3,7 @@ import 'dart:core';
 
 import 'package:flutter/material.dart';
 import 'package:flutter/services.dart';
-import 'package:flutter_webrtc/flutter_webrtc.dart';
+import 'package:videosdk_webrtc/flutter_webrtc.dart';
 
 class LoopBackSampleUnifiedTracks extends StatefulWidget {
   static String tag = 'loopback_sample_unified_tracks';
@@ -581,11 +581,7 @@ class _MyAppState extends State<LoopBackSampleUnifiedTracks> {
       if (fromConnection) {
         await _connectionRemoveTrack(track);
       }
-      try {
-        await _localStream!.removeTrack(track);
-      } catch (e) {
-        print(e.toString());
-      }
+      await _localStream!.removeTrack(track);
       await track.stop();
     }
   }
@@ -597,11 +593,7 @@ class _MyAppState extends State<LoopBackSampleUnifiedTracks> {
       if (fromConnection) {
         await _connectionRemoveTrack(track);
       }
-      try {
-        await _localStream!.removeTrack(track);
-      } catch (e) {
-        print(e.toString());
-      }
+      await _localStream!.removeTrack(track);
       await track.stop();
     }
   }
@@ -852,7 +844,7 @@ class _MyAppState extends State<LoopBackSampleUnifiedTracks> {
           ),
           Align(
             alignment: Alignment.bottomCenter,
-            
child: ButtonBar(
+            child: OverflowBar(
               children: [
                 FloatingActionButton(
                   heroTag: null,
diff --git a/example/lib/src/loopback_sample_with_get_stats.dart b/example/lib/src/loopback_sample_with_get_stats.dart
index 2e457a69d0..4c4b4afe3d 100644
--- a/example/lib/src/loopback_sample_with_get_stats.dart
+++ b/example/lib/src/loopback_sample_with_get_stats.dart
@@ -1,7 +1,7 @@
 import 'dart:core';
 
 import 'package:flutter/material.dart';
-import 'package:flutter_webrtc/flutter_webrtc.dart';
+import 'package:videosdk_webrtc/flutter_webrtc.dart';
 
 class LoopBackSampleWithGetStats extends StatefulWidget {
   static String tag = 'loopback_sample_with_get_stats';
diff --git a/example/lib/src/utils.dart b/example/lib/src/utils.dart
index 5cb6027f92..e3b26383bb 100644
--- a/example/lib/src/utils.dart
+++ b/example/lib/src/utils.dart
@@ -1,4 +1,4 @@
-import 'package:flutter_webrtc/flutter_webrtc.dart';
+import 'package:videosdk_webrtc/flutter_webrtc.dart';
 import 'package:sdp_transform/sdp_transform.dart' as sdp_transform;
 
 void setPreferredCodec(RTCSessionDescription description,
diff --git a/example/lib/src/widgets/screen_select_dialog.dart b/example/lib/src/widgets/screen_select_dialog.dart
index af929ba017..57ccc647dc 100644
--- a/example/lib/src/widgets/screen_select_dialog.dart
+++ b/example/lib/src/widgets/screen_select_dialog.dart
@@ -2,7 +2,7 @@ import 'dart:async';
 import 'dart:typed_data';
 
 import 'package:flutter/material.dart';
-import 'package:flutter_webrtc/flutter_webrtc.dart';
+import 'package:videosdk_webrtc/flutter_webrtc.dart';
 
 class ThumbnailWidget extends StatefulWidget {
   const ThumbnailWidget(
@@ -279,7 +279,7 @@ class ScreenSelectDialog extends Dialog {
                     ),
                     Container(
                       width: double.infinity,
-                      child: ButtonBar(
+                      child: OverflowBar(
                         children: [
                           MaterialButton(
                             child: Text(
diff --git a/example/linux/flutter/generated_plugin_registrant.cc b/example/linux/flutter/generated_plugin_registrant.cc
index 3f48831149..424066ad45 100644
--- a/example/linux/flutter/generated_plugin_registrant.cc
+++ b/example/linux/flutter/generated_plugin_registrant.cc
@@ -6,10 +6,10 @@
 
 #include "generated_plugin_registrant.h"
 
-#include <flutter_webrtc/flutter_web_r_t_c_plugin.h>
+#include <videosdk_webrtc/flutter_web_r_t_c_plugin.h>
 
 void fl_register_plugins(FlPluginRegistry* registry) {
-  g_autoptr(FlPluginRegistrar) flutter_webrtc_registrar =
+  g_autoptr(FlPluginRegistrar) videosdk_webrtc_registrar =
       fl_plugin_registry_get_registrar_for_plugin(registry, "FlutterWebRTCPlugin");
-  flutter_web_r_t_c_plugin_register_with_registrar(flutter_webrtc_registrar);
+  flutter_web_r_t_c_plugin_register_with_registrar(videosdk_webrtc_registrar);
 }
diff --git a/example/linux/flutter/generated_plugins.cmake b/example/linux/flutter/generated_plugins.cmake
index 57172770e6..3b36ecf251 100644
--- a/example/linux/flutter/generated_plugins.cmake
+++ b/example/linux/flutter/generated_plugins.cmake
@@ -3,7 +3,7 @@
 #
 
 list(APPEND FLUTTER_PLUGIN_LIST
-  flutter_webrtc
+  videosdk_webrtc
 )
 
 list(APPEND FLUTTER_FFI_PLUGIN_LIST
diff --git a/example/macos/Flutter/GeneratedPluginRegistrant.swift b/example/macos/Flutter/GeneratedPluginRegistrant.swift
index 194710fc5a..058e3079ba 100644
--- a/example/macos/Flutter/GeneratedPluginRegistrant.swift
+++ b/example/macos/Flutter/GeneratedPluginRegistrant.swift
@@ -5,10 +5,10 @@
 import FlutterMacOS
 import Foundation
 
-import flutter_webrtc
 import path_provider_foundation
+import videosdk_webrtc
 
 func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) {
-  FlutterWebRTCPlugin.register(with: registry.registrar(forPlugin: "FlutterWebRTCPlugin"))
   PathProviderPlugin.register(with: 
registry.registrar(forPlugin: "PathProviderPlugin")) + FlutterWebRTCPlugin.register(with: registry.registrar(forPlugin: "FlutterWebRTCPlugin")) } diff --git a/example/macos/Podfile b/example/macos/Podfile index c795730db8..b52666a103 100644 --- a/example/macos/Podfile +++ b/example/macos/Podfile @@ -1,4 +1,4 @@ -platform :osx, '10.14' +platform :osx, '10.15' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/example/macos/Podfile.lock b/example/macos/Podfile.lock new file mode 100644 index 0000000000..024709ac25 --- /dev/null +++ b/example/macos/Podfile.lock @@ -0,0 +1,36 @@ +PODS: + - FlutterMacOS (1.0.0) + - path_provider_foundation (0.0.1): + - Flutter + - FlutterMacOS + - videosdk_webrtc (0.0.6): + - FlutterMacOS + - WebRTC-SDK (= 125.6422.06) + - WebRTC-SDK (125.6422.06) + +DEPENDENCIES: + - FlutterMacOS (from `Flutter/ephemeral`) + - path_provider_foundation (from `Flutter/ephemeral/.symlinks/plugins/path_provider_foundation/darwin`) + - videosdk_webrtc (from `Flutter/ephemeral/.symlinks/plugins/videosdk_webrtc/macos`) + +SPEC REPOS: + trunk: + - WebRTC-SDK + +EXTERNAL SOURCES: + FlutterMacOS: + :path: Flutter/ephemeral + path_provider_foundation: + :path: Flutter/ephemeral/.symlinks/plugins/path_provider_foundation/darwin + videosdk_webrtc: + :path: Flutter/ephemeral/.symlinks/plugins/videosdk_webrtc/macos + +SPEC CHECKSUMS: + FlutterMacOS: 8f6f14fa908a6fb3fba0cd85dbd81ec4b251fb24 + path_provider_foundation: 080d55be775b7414fd5a5ef3ac137b97b097e564 + videosdk_webrtc: 9b6365afff44bd6cb560df0d48895bd50b77a901 + WebRTC-SDK: 79942c006ea64f6fb48d7da8a4786dfc820bc1db + +PODFILE CHECKSUM: 9ebaf0ce3d369aaa26a9ea0e159195ed94724cf3 + +COCOAPODS: 1.16.2 diff --git a/example/macos/Runner.xcodeproj/project.pbxproj b/example/macos/Runner.xcodeproj/project.pbxproj index 06a654dde9..9a3ab600b5 100644 --- a/example/macos/Runner.xcodeproj/project.pbxproj +++ b/example/macos/Runner.xcodeproj/project.pbxproj @@ -21,12 +21,14 @@ /* End PBXAggregateTarget section */ /* Begin PBXBuildFile section */ + 0892DC0C7CE27563863840D0 /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 16546265D594088E3AB49590 /* Pods_Runner.framework */; }; 331C80D8294CF71000263BE5 /* RunnerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 331C80D7294CF71000263BE5 /* RunnerTests.swift */; }; 335BBD1B22A9A15E00E9071D /* GeneratedPluginRegistrant.swift in Sources */ = {isa = PBXBuildFile; fileRef = 335BBD1A22A9A15E00E9071D /* GeneratedPluginRegistrant.swift */; }; 33CC10F12044A3C60003C045 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 33CC10F02044A3C60003C045 /* AppDelegate.swift */; }; 33CC10F32044A3C60003C045 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 33CC10F22044A3C60003C045 /* Assets.xcassets */; }; 33CC10F62044A3C60003C045 /* MainMenu.xib in Resources */ = {isa = PBXBuildFile; fileRef = 33CC10F42044A3C60003C045 /* MainMenu.xib */; }; 33CC11132044BFA00003C045 /* MainFlutterWindow.swift in Sources */ = {isa = PBXBuildFile; fileRef = 33CC11122044BFA00003C045 /* MainFlutterWindow.swift */; }; + 9EB1ADF65BDBF1622935A3EC /* Pods_RunnerTests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 9B6358C3F495A7CD20565795 /* Pods_RunnerTests.framework */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -60,11 +62,12 @@ /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ + 16546265D594088E3AB49590 /* 
Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 331C80D5294CF71000263BE5 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 331C80D7294CF71000263BE5 /* RunnerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RunnerTests.swift; sourceTree = ""; }; 333000ED22D3DE5D00554162 /* Warnings.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Warnings.xcconfig; sourceTree = ""; }; 335BBD1A22A9A15E00E9071D /* GeneratedPluginRegistrant.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GeneratedPluginRegistrant.swift; sourceTree = ""; }; - 33CC10ED2044A3C60003C045 /* flutter_webrtc_example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "flutter_webrtc_example.app"; sourceTree = BUILT_PRODUCTS_DIR; }; + 33CC10ED2044A3C60003C045 /* flutter_webrtc_example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = flutter_webrtc_example.app; sourceTree = BUILT_PRODUCTS_DIR; }; 33CC10F02044A3C60003C045 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 33CC10F22044A3C60003C045 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Assets.xcassets; path = Runner/Assets.xcassets; sourceTree = ""; }; 33CC10F52044A3C60003C045 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/MainMenu.xib; sourceTree = ""; }; @@ -76,8 +79,15 @@ 33E51913231747F40026EE4D /* DebugProfile.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = DebugProfile.entitlements; sourceTree = ""; }; 33E51914231749380026EE4D /* Release.entitlements */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.entitlements; path = Release.entitlements; sourceTree = ""; }; 33E5194F232828860026EE4D /* AppInfo.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = AppInfo.xcconfig; sourceTree = ""; }; + 37911EF4B6FE07924BEE298B /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; + 6B921EA29CF093418346E678 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Release.xcconfig; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = Debug.xcconfig; sourceTree = ""; }; + 9B6358C3F495A7CD20565795 /* Pods_RunnerTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_RunnerTests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + A4AE20BACE4633AC49E4D523 /* 
Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; + D56A4E44E645EDFE4F0E2222 /* Pods-RunnerTests.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.profile.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.profile.xcconfig"; sourceTree = ""; }; + DBF31B82F332AE474821DD91 /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = ""; }; + F3B85F37AAA20521C1324694 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -85,6 +95,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 9EB1ADF65BDBF1622935A3EC /* Pods_RunnerTests.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -92,6 +103,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 0892DC0C7CE27563863840D0 /* Pods_Runner.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -124,7 +136,8 @@ 33CEB47122A05771004F2AC0 /* Flutter */, 331C80D6294CF71000263BE5 /* RunnerTests */, 33CC10EE2044A3C60003C045 /* Products */, - D73912EC22F37F3D000D13A0 /* Frameworks */, + 6B8A1C6AB35D6D23ED4DED52 /* Pods */, + 9A93334EA792AC6659AC4465 /* Frameworks */, ); sourceTree = ""; }; @@ -172,9 +185,25 @@ path = Runner; sourceTree = ""; }; - D73912EC22F37F3D000D13A0 /* Frameworks */ = { + 6B8A1C6AB35D6D23ED4DED52 /* Pods */ = { isa = PBXGroup; children = ( + F3B85F37AAA20521C1324694 /* Pods-Runner.debug.xcconfig */, + A4AE20BACE4633AC49E4D523 /* Pods-Runner.release.xcconfig */, + DBF31B82F332AE474821DD91 /* Pods-Runner.profile.xcconfig */, + 37911EF4B6FE07924BEE298B /* Pods-RunnerTests.debug.xcconfig */, + 6B921EA29CF093418346E678 /* Pods-RunnerTests.release.xcconfig */, + D56A4E44E645EDFE4F0E2222 /* Pods-RunnerTests.profile.xcconfig */, + ); + name = Pods; + path = Pods; + sourceTree = ""; + }; + 9A93334EA792AC6659AC4465 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 16546265D594088E3AB49590 /* Pods_Runner.framework */, + 9B6358C3F495A7CD20565795 /* Pods_RunnerTests.framework */, ); name = Frameworks; sourceTree = ""; @@ -186,6 +215,7 @@ isa = PBXNativeTarget; buildConfigurationList = 331C80DE294CF71000263BE5 /* Build configuration list for PBXNativeTarget "RunnerTests" */; buildPhases = ( + 7515CC69BE1116A0D8848E7A /* [CP] Check Pods Manifest.lock */, 331C80D1294CF70F00263BE5 /* Sources */, 331C80D2294CF70F00263BE5 /* Frameworks */, 331C80D3294CF70F00263BE5 /* Resources */, @@ -204,11 +234,13 @@ isa = PBXNativeTarget; buildConfigurationList = 33CC10FB2044A3C60003C045 /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( + 621B883B9EDEC491D99FF92D /* [CP] Check Pods Manifest.lock */, 33CC10E92044A3C60003C045 /* Sources */, 33CC10EA2044A3C60003C045 /* Frameworks */, 33CC10EB2044A3C60003C045 /* Resources */, 33CC110E2044A8840003C045 /* Bundle Framework */, 3399D490228B24CF009A79C7 /* ShellScript */, 
+ EABB73181C9043AE5EB174C8 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -227,7 +259,7 @@ isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0920; - LastUpgradeCheck = 1300; + LastUpgradeCheck = 1510; ORGANIZATIONNAME = ""; TargetAttributes = { 331C80D4294CF70F00263BE5 = { @@ -328,6 +360,67 @@ shellPath = /bin/sh; shellScript = "\"$FLUTTER_ROOT\"/packages/flutter_tools/bin/macos_assemble.sh && touch Flutter/ephemeral/tripwire"; }; + 621B883B9EDEC491D99FF92D /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; + 7515CC69BE1116A0D8848E7A /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; + EABB73181C9043AE5EB174C8 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ @@ -379,12 +472,13 @@ /* Begin XCBuildConfiguration section */ 331C80DB294CF71000263BE5 /* Debug */ = { isa = XCBuildConfiguration; + baseConfigurationReference = 37911EF4B6FE07924BEE298B /* Pods-RunnerTests.debug.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_VERSION = 5.0; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; @@ -393,12 +487,13 @@ }; 331C80DC294CF71000263BE5 /* Release */ = { isa = XCBuildConfiguration; + baseConfigurationReference = 6B921EA29CF093418346E678 /* Pods-RunnerTests.release.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_VERSION = 5.0; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; @@ -407,12 +502,13 @@ }; 331C80DD294CF71000263BE5 /* Profile */ = { isa = XCBuildConfiguration; + baseConfigurationReference = D56A4E44E645EDFE4F0E2222 /* Pods-RunnerTests.profile.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CURRENT_PROJECT_VERSION = 1; GENERATE_INFOPLIST_FILE = YES; MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests; + PRODUCT_BUNDLE_IDENTIFIER = "com.cloudwebrtc.flutter-flutter-example.flutterWebrtcExample.RunnerTests"; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_VERSION = 5.0; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/flutter_webrtc_example.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/flutter_webrtc_example"; @@ -479,6 +575,7 @@ "$(inherited)", "@executable_path/../Frameworks", ); + MACOSX_DEPLOYMENT_TARGET = 15.0; PROVISIONING_PROFILE_SPECIFIER = ""; SWIFT_VERSION = 5.0; }; @@ -605,6 +702,7 @@ "$(inherited)", "@executable_path/../Frameworks", ); + MACOSX_DEPLOYMENT_TARGET = 15.0; PROVISIONING_PROFILE_SPECIFIER = ""; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; @@ -625,6 +723,7 @@ 
"$(inherited)", "@executable_path/../Frameworks", ); + MACOSX_DEPLOYMENT_TARGET = 15.0; PROVISIONING_PROFILE_SPECIFIER = ""; SWIFT_VERSION = 5.0; }; diff --git a/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index a4df20256a..36272f4ba6 100644 --- a/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -1,6 +1,6 @@ + + diff --git a/example/macos/Runner/AppDelegate.swift b/example/macos/Runner/AppDelegate.swift index d53ef64377..b3c1761412 100644 --- a/example/macos/Runner/AppDelegate.swift +++ b/example/macos/Runner/AppDelegate.swift @@ -1,9 +1,13 @@ import Cocoa import FlutterMacOS -@NSApplicationMain +@main class AppDelegate: FlutterAppDelegate { override func applicationShouldTerminateAfterLastWindowClosed(_ sender: NSApplication) -> Bool { return true } + + override func applicationSupportsSecureRestorableState(_ app: NSApplication) -> Bool { + return true + } } diff --git a/example/macos/Runner/Info.plist b/example/macos/Runner/Info.plist index f2c091fe03..33b8087148 100644 --- a/example/macos/Runner/Info.plist +++ b/example/macos/Runner/Info.plist @@ -20,8 +20,6 @@ $(PRODUCT_NAME) Camera Usage! NSMicrophoneUsageDescription $(PRODUCT_NAME) Microphone Usage! - NSCameraUseContinuityCameraDeviceType - $(PRODUCT_NAME) Continuity Camera Usage! CFBundleShortVersionString $(FLUTTER_BUILD_NAME) CFBundleVersion diff --git a/example/pubspec.yaml b/example/pubspec.yaml index b774024978..db0f748b85 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -3,20 +3,20 @@ description: Demonstrates how to use the webrtc plugin. version: 1.0.0 publish_to: none environment: - sdk: '>=2.12.0 <3.0.0' + sdk: '>=3.3.0 <4.0.0' dependencies: # The following adds the Cupertino Icons font to your application. # Use with the CupertinoIcons class for iOS style icons. - cupertino_icons: ^1.0.2 + cupertino_icons: 1.0.8 flutter: sdk: flutter flutter_background: ^1.0.0 - flutter_webrtc: + videosdk_webrtc: path: ../ # Required for MediaRecorder example - path_provider: ^2.0.2 - permission_handler: ^10.2.0 + path_provider: ^2.1.4 + permission_handler: ^11.3.1 sdp_transform: ^0.3.2 diff --git a/example/windows/flutter/CMakeLists.txt b/example/windows/flutter/CMakeLists.txt index 903f4899d6..930d2071a3 100644 --- a/example/windows/flutter/CMakeLists.txt +++ b/example/windows/flutter/CMakeLists.txt @@ -10,11 +10,6 @@ include(${EPHEMERAL_DIR}/generated_config.cmake) # https://github.com/flutter/flutter/issues/57146. set(WRAPPER_ROOT "${EPHEMERAL_DIR}/cpp_client_wrapper") -# Set fallback configurations for older versions of the flutter tool. 
-if (NOT DEFINED FLUTTER_TARGET_PLATFORM)
-  set(FLUTTER_TARGET_PLATFORM "windows-x64")
-endif()
-
 # === Flutter Library ===
 set(FLUTTER_LIBRARY "${EPHEMERAL_DIR}/flutter_windows.dll")
@@ -97,7 +92,7 @@ add_custom_command(
   COMMAND ${CMAKE_COMMAND} -E env
     ${FLUTTER_TOOL_ENVIRONMENT}
     "${FLUTTER_ROOT}/packages/flutter_tools/bin/tool_backend.bat"
-      ${FLUTTER_TARGET_PLATFORM} $<CONFIG>
+      windows-x64 $<CONFIG>
   VERBATIM
 )
 add_custom_target(flutter_assemble DEPENDS
diff --git a/example/windows/flutter/generated_plugin_registrant.cc b/example/windows/flutter/generated_plugin_registrant.cc
index d5acadb305..f5fd090041 100644
--- a/example/windows/flutter/generated_plugin_registrant.cc
+++ b/example/windows/flutter/generated_plugin_registrant.cc
@@ -6,12 +6,12 @@
 
 #include "generated_plugin_registrant.h"
 
-#include <flutter_webrtc/flutter_web_r_t_c_plugin.h>
 #include <permission_handler_windows/permission_handler_windows_plugin.h>
+#include <videosdk_webrtc/flutter_web_r_t_c_plugin.h>
 
 void RegisterPlugins(flutter::PluginRegistry* registry) {
-  FlutterWebRTCPluginRegisterWithRegistrar(
-      registry->GetRegistrarForPlugin("FlutterWebRTCPlugin"));
   PermissionHandlerWindowsPluginRegisterWithRegistrar(
       registry->GetRegistrarForPlugin("PermissionHandlerWindowsPlugin"));
+  FlutterWebRTCPluginRegisterWithRegistrar(
+      registry->GetRegistrarForPlugin("FlutterWebRTCPlugin"));
 }
diff --git a/example/windows/flutter/generated_plugins.cmake b/example/windows/flutter/generated_plugins.cmake
index cb004cdc57..70345344f0 100644
--- a/example/windows/flutter/generated_plugins.cmake
+++ b/example/windows/flutter/generated_plugins.cmake
@@ -3,8 +3,8 @@
 #
 
 list(APPEND FLUTTER_PLUGIN_LIST
-  flutter_webrtc
   permission_handler_windows
+  videosdk_webrtc
 )
 
 list(APPEND FLUTTER_FFI_PLUGIN_LIST
diff --git a/example/windows/runner/flutter_window.cpp b/example/windows/runner/flutter_window.cpp
index 955ee3038f..b25e363efa 100644
--- a/example/windows/runner/flutter_window.cpp
+++ b/example/windows/runner/flutter_window.cpp
@@ -31,11 +31,6 @@ bool FlutterWindow::OnCreate() {
     this->Show();
   });
 
-  // Flutter can complete the first frame before the "show window" callback is
-  // registered. The following call ensures a frame is pending to ensure the
-  // window is shown. It is a no-op if the first frame hasn't completed yet.
-  flutter_controller_->ForceRedraw();
-
   return true;
 }
diff --git a/ios/Classes/AudioManager.h b/ios/Classes/AudioManager.h
new file mode 100644
index 0000000000..211f3f1e4c
--- /dev/null
+++ b/ios/Classes/AudioManager.h
@@ -0,0 +1,20 @@
+#import <Foundation/Foundation.h>
+#import <WebRTC/WebRTC.h>
+#import "AudioProcessingAdapter.h"
+
+@interface AudioManager : NSObject
+
+@property(nonatomic, strong) RTCDefaultAudioProcessingModule* _Nonnull audioProcessingModule;
+
+@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull capturePostProcessingAdapter;
+
+@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull renderPreProcessingAdapter;
+
++ (_Nonnull instancetype)sharedInstance;
+
+- (void)addLocalAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer;
+
+- (void)removeLocalAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer;
+
+@end
+
diff --git a/ios/Classes/AudioManager.m b/ios/Classes/AudioManager.m
new file mode 100644
index 0000000000..efc3a8741e
--- /dev/null
+++ b/ios/Classes/AudioManager.m
@@ -0,0 +1,50 @@
+#import "AudioManager.h"
+#import "AudioProcessingAdapter.h"
+
+@implementation AudioManager {
+  RTCDefaultAudioProcessingModule* _audioProcessingModule;
+  AudioProcessingAdapter* _capturePostProcessingAdapter;
+  AudioProcessingAdapter* _renderPreProcessingAdapter;
+}
+
+@synthesize capturePostProcessingAdapter = _capturePostProcessingAdapter;
+@synthesize renderPreProcessingAdapter = _renderPreProcessingAdapter;
+@synthesize audioProcessingModule = _audioProcessingModule;
+
++ (instancetype)sharedInstance {
+  static dispatch_once_t onceToken;
+  static AudioManager* sharedInstance = nil;
+  dispatch_once(&onceToken, ^{
+    sharedInstance = [[self alloc] init];
+  });
+  return sharedInstance;
+}
+
+- (instancetype)init {
+  if (self = [super init]) {
+    _audioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init];
+    _capturePostProcessingAdapter = [[AudioProcessingAdapter alloc] init];
+    _renderPreProcessingAdapter = [[AudioProcessingAdapter alloc] init];
+    _audioProcessingModule.capturePostProcessingDelegate = _capturePostProcessingAdapter;
+    _audioProcessingModule.renderPreProcessingDelegate = _renderPreProcessingAdapter;
+  }
+  return self;
+}
+
+- (void)addLocalAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
+  [_capturePostProcessingAdapter addAudioRenderer:renderer];
+}
+
+- (void)removeLocalAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
+  [_capturePostProcessingAdapter removeAudioRenderer:renderer];
+}
+
+- (void)addRemoteAudioSink:(nonnull id<RTCAudioRenderer>)sink {
+  [_renderPreProcessingAdapter addAudioRenderer:sink];
+}
+
+- (void)removeRemoteAudioSink:(nonnull id<RTCAudioRenderer>)sink {
+  [_renderPreProcessingAdapter removeAudioRenderer:sink];
+}
+
+@end
\ No newline at end of file
diff --git a/ios/Classes/AudioProcessingAdapter.h b/ios/Classes/AudioProcessingAdapter.h
new file mode 100644
index 0000000000..91498b45d8
--- /dev/null
+++ b/ios/Classes/AudioProcessingAdapter.h
@@ -0,0 +1,26 @@
+#import <AVFoundation/AVFoundation.h>
+#import <WebRTC/WebRTC.h>
+
+@protocol ExternalAudioProcessingDelegate <NSObject>
+
+- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels;
+
+- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) * _Nonnull)audioBuffer;
+
+- (void)audioProcessingRelease;
+
+@end
+
+@interface AudioProcessingAdapter : NSObject <RTCAudioCustomProcessingDelegate>
+
+- (nonnull instancetype)init;
+
+- (void)addProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor;
+
+- (void)removeProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor;
+
+- (void)addAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer;
+
+- (void)removeAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer;
+
+@end
\ No newline at end of file
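The ExternalAudioProcessingDelegate protocol above is the extension point for custom DSP on the capture path. As a minimal sketch (not part of this patch; the class name GainProcessor and the fixed gain are illustrative), a processor could look like this, using only the RTCAudioBuffer accessors the adapter itself relies on:

#import <WebRTC/WebRTC.h>
#import "AudioProcessingAdapter.h"

// Hypothetical example processor: attenuates captured audio by half.
@interface GainProcessor : NSObject <ExternalAudioProcessingDelegate>
@end

@implementation GainProcessor

- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels {
  // Allocate per-stream state here if needed; this sketch is stateless.
}

- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) * _Nonnull)audioBuffer {
  // Scale every sample of every channel in place.
  for (int ch = 0; ch < audioBuffer.channels; ch++) {
    float* samples = [audioBuffer rawBufferForChannel:ch];
    for (int i = 0; i < audioBuffer.frames; i++) {
      samples[i] *= 0.5f;
    }
  }
}

- (void)audioProcessingRelease {
  // Free per-stream state here.
}

@end

Such a processor would be attached with addProcessing: on the capturePostProcessingAdapter exposed by AudioManager.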
diff --git a/ios/Classes/AudioProcessingAdapter.m b/ios/Classes/AudioProcessingAdapter.m
new file mode 100644
index 0000000000..73fa3dda1f
--- /dev/null
+++ b/ios/Classes/AudioProcessingAdapter.m
@@ -0,0 +1,105 @@
+#import "AudioProcessingAdapter.h"
+#import <AVFoundation/AVFoundation.h>
+#import <os/lock.h>
+
+@implementation AudioProcessingAdapter {
+  NSMutableArray<id<RTCAudioRenderer>>* _renderers;
+  NSMutableArray<id<ExternalAudioProcessingDelegate>>* _processors;
+  os_unfair_lock _lock;
+}
+
+- (instancetype)init {
+  self = [super init];
+  if (self) {
+    _lock = OS_UNFAIR_LOCK_INIT;
+    _renderers = [[NSMutableArray<id<RTCAudioRenderer>> alloc] init];
+    _processors = [[NSMutableArray<id<ExternalAudioProcessingDelegate>> alloc] init];
+  }
+  return self;
+}
+
+- (void)addProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor {
+  os_unfair_lock_lock(&_lock);
+  [_processors addObject:processor];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)removeProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor {
+  os_unfair_lock_lock(&_lock);
+  _processors = [[_processors
+      filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject,
+                                                                        NSDictionary* bindings) {
+        return evaluatedObject != processor;
+      }]] mutableCopy];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)addAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
+  os_unfair_lock_lock(&_lock);
+  [_renderers addObject:renderer];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)removeAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
+  os_unfair_lock_lock(&_lock);
+  _renderers = [[_renderers
+      filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject,
+                                                                        NSDictionary* bindings) {
+        return evaluatedObject != renderer;
+      }]] mutableCopy];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels {
+  os_unfair_lock_lock(&_lock);
+  for (id<ExternalAudioProcessingDelegate> processor in _processors) {
+    [processor audioProcessingInitializeWithSampleRate:sampleRateHz channels:channels];
+  }
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (AVAudioPCMBuffer*)toPCMBuffer:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer {
+  AVAudioFormat* format =
+      [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16
+                                       sampleRate:audioBuffer.frames * 100.0  // 10 ms buffers: frames * 100 = Hz
+                                         channels:(AVAudioChannelCount)audioBuffer.channels
+                                      interleaved:NO];
+  AVAudioPCMBuffer* pcmBuffer =
+      [[AVAudioPCMBuffer alloc] initWithPCMFormat:format
+                                    frameCapacity:(AVAudioFrameCount)audioBuffer.frames];
+  if (!pcmBuffer) {
+    NSLog(@"Failed to create AVAudioPCMBuffer");
+    return nil;
+  }
+  pcmBuffer.frameLength = (AVAudioFrameCount)audioBuffer.frames;
+  for (int i = 0; i < audioBuffer.channels; i++) {
+    float* sourceBuffer = [audioBuffer rawBufferForChannel:i];
+    int16_t* targetBuffer = (int16_t*)pcmBuffer.int16ChannelData[i];
+    // Samples are floats already scaled to the int16 range; copy with truncation.
+    for (int frame = 0; frame < audioBuffer.frames; frame++) {
+      targetBuffer[frame] = sourceBuffer[frame];
+    }
+  }
+  return pcmBuffer;
+}
+
+- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer {
+  os_unfair_lock_lock(&_lock);
+  for (id<ExternalAudioProcessingDelegate> processor in _processors) {
+    [processor audioProcessingProcess:audioBuffer];
+  }
+
+  for (id<RTCAudioRenderer> renderer in _renderers) {
+    [renderer renderPCMBuffer:[self toPCMBuffer:audioBuffer]];
+  }
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)audioProcessingRelease {
+  os_unfair_lock_lock(&_lock);
+  for (id<ExternalAudioProcessingDelegate> processor in _processors) {
+    [processor audioProcessingRelease];
+  }
+  os_unfair_lock_unlock(&_lock);
+}
+
+@end
\ No newline at end of file
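On the renderer path, anything conforming to RTCAudioRenderer (the WebRTC-SDK protocol whose renderPCMBuffer: the adapter calls above) can observe local audio. A hedged sketch, with the class name and the logging behavior purely illustrative:

#import <AVFoundation/AVFoundation.h>
#import "AudioManager.h"

// Hypothetical example renderer: logs the peak level of each captured buffer.
@interface PeakLevelLogger : NSObject <RTCAudioRenderer>
@end

@implementation PeakLevelLogger

- (void)renderPCMBuffer:(AVAudioPCMBuffer *)pcmBuffer {
  if (pcmBuffer.int16ChannelData == NULL || pcmBuffer.frameLength == 0) {
    return;
  }
  int16_t *samples = pcmBuffer.int16ChannelData[0];
  int peak = 0;
  for (AVAudioFrameCount i = 0; i < pcmBuffer.frameLength; i++) {
    int v = samples[i];
    if (v < 0) v = -v;
    if (v > peak) peak = v;
  }
  NSLog(@"capture peak: %d", peak);
}

@end

// Registration through the singleton added in this patch:
// [[AudioManager sharedInstance] addLocalAudioRenderer:[PeakLevelLogger new]];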
diff --git a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m
index 24d882803d..7485a3492a 100644
--- a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m
+++ b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m
@@ -224,7 +224,7 @@ - (void)didCaptureVideoFrame:(CVPixelBufferRef)pixelBuffer
       break;
   }
 
-  RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
+  RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:[rtcPixelBuffer toI420]
                                                             rotation:rotation
                                                          timeStampNs:frameTimeStampNs];
diff --git a/ios/Classes/CameraUtils.h b/ios/Classes/CameraUtils.h
new file mode 100644
index 0000000000..efe9ae87aa
--- /dev/null
+++ b/ios/Classes/CameraUtils.h
@@ -0,0 +1,43 @@
+#import <WebRTC/WebRTC.h>
+#import "FlutterWebRTCPlugin.h"
+
+@interface FlutterWebRTCPlugin (CameraUtils)
+
+- (void)mediaStreamTrackHasTorch:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetTorch:(nonnull RTCMediaStreamTrack*)track
+                           torch:(BOOL)torch
+                          result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetZoom:(nonnull RTCMediaStreamTrack*)track
+                      zoomLevel:(double)zoomLevel
+                         result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track
+                           focusMode:(nonnull NSString*)focusMode
+                              result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track
+                           focusPoint:(nonnull NSDictionary*)focusPoint
+                               result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track
+                           exposureMode:(nonnull NSString*)exposureMode
+                                 result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track
+                            exposurePoint:(nonnull NSDictionary*)exposurePoint
+                                   result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSwitchCamera:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result;
+
+- (NSInteger)selectFpsForFormat:(nonnull AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps;
+
+- (nullable AVCaptureDeviceFormat*)selectFormatForDevice:(nonnull AVCaptureDevice*)device
+                                             targetWidth:(NSInteger)targetWidth
+                                            targetHeight:(NSInteger)targetHeight;
+
+- (nullable AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position;
+
+@end
\ No newline at end of file
diff --git a/ios/Classes/CameraUtils.m b/ios/Classes/CameraUtils.m
new file mode 100644
index 0000000000..fea72a6b8e
--- /dev/null
+++ b/ios/Classes/CameraUtils.m
@@ -0,0 +1,350 @@
+#import "CameraUtils.h"
+
+@implementation FlutterWebRTCPlugin (CameraUtils)
+
+- (AVCaptureDevice*)currentDevice {
+  if (!self.videoCapturer) {
+    return nil;
+  }
+  if (self.videoCapturer.captureSession.inputs.count == 0) {
+    return nil;
+  }
+  AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0];
+  return deviceInput.device;
+}
+
+- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't check torch");
+    result(@NO);
+    return;
+  }
+  result(@([device isTorchModeSupported:AVCaptureTorchModeOn]));
+#else
+  NSLog(@"Not supported on macOS. Can't check torch");
+  result(@NO);
+#endif
+}
+
+- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track
+                           torch:(BOOL)torch
+                          result:(FlutterResult)result {
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set torch");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device is nil" details:nil]);
+    return;
+  }
+
+  if (![device isTorchModeSupported:AVCaptureTorchModeOn]) {
+    NSLog(@"Current capture device does not support torch. Can't set torch");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device does not support torch" details:nil]);
+    return;
+  }
+
+  NSError* error;
+  if ([device lockForConfiguration:&error] == NO) {
+    NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription);
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:error.localizedDescription details:nil]);
+    return;
+  }
+
+  device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
+  [device unlockForConfiguration];
+
+  result(nil);
+}
Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device is nil" details:nil]); + return; + } + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device does not support torch" details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:error.localizedDescription details:nil]); + return; + } + + device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"device is nil" details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:error.localizedDescription details:nil]); + return; + } + + CGFloat desiredZoomFactor = (CGFloat)zoomLevel; + device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + [device unlockForConfiguration]; + + result(nil); +#else + NSLog(@"Not supported on macOS. Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)applyFocusMode:(NSString*)focusMode onDevice:(AVCaptureDevice *)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if([@"locked" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } else if([@"auto" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; + } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track + focusMode:(nonnull NSString*)focusMode + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"device is nil" details:nil]); + return; + } + self.focusMode = focusMode; + [self applyFocusMode:focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track + focusPoint:(nonnull NSDictionary*)focusPoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"device is nil" details:nil]); + return; + } + BOOL reset = ((NSNumber *)focusPoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber *)focusPoint[@"x"]).doubleValue; + y = ((NSNumber *)focusPoint[@"y"]).doubleValue; + } + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Focus point of interest is not supported" details:nil]); + return; + } + + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Focus point of interest is not supported" details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + + [device setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation + x:x + y:y]]; + [device unlockForConfiguration]; + + [self applyFocusMode:self.focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void) applyExposureMode:(NSString*)exposureMode onDevice:(AVCaptureDevice *)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if([@"locked" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } else if([@"auto" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) { + [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure]; + } else if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track + exposureMode:(nonnull NSString*)exposureMode + result:(nonnull FlutterResult)result{ +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"device is nil" details:nil]); + return; + } + self.exposureMode = exposureMode; + [self applyExposureMode:exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +#if TARGET_OS_IPHONE +- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation + x:(double)x + y:(double)y { + double oldX = x, oldY = y; + switch (orientation) { + case UIDeviceOrientationPortrait: // 90 ccw + y = 1 - oldX; + x = oldY; + break; + case UIDeviceOrientationPortraitUpsideDown: // 90 cw + x = 1 - oldY; + y = oldX; + break; + case UIDeviceOrientationLandscapeRight: // 180 + x = 1 - x; + y = 1 - y; + break; + case UIDeviceOrientationLandscapeLeft: + default: + // No rotation required + break; + } + return CGPointMake(x, y); +} +#endif + +- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track + exposurePoint:(nonnull NSDictionary*)exposurePoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice *device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"device is nil" details:nil]); + return; + } + + BOOL reset = ((NSNumber *)exposurePoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber *)exposurePoint[@"x"]).doubleValue; + y = ((NSNumber *)exposurePoint[@"y"]).doubleValue; + } + if (!device.isExposurePointOfInterestSupported) { + NSLog(@"Exposure point of interest is not supported. Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Exposure point of interest is not supported" details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + [device setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation + x:x + y:y]]; + [device unlockForConfiguration]; + + [self applyExposureMode:self.exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Not supported on macOS" details:nil]); +#endif +} + +- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result { + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't switch camera"); + return; + } +#if TARGET_OS_IPHONE + [self.videoCapturer stopCapture]; +#endif + self._usingFrontCamera = !self._usingFrontCamera; + AVCaptureDevicePosition position = + self._usingFrontCamera ? 
+
+- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result {
+  if (!self.videoCapturer) {
+    NSLog(@"Video capturer is null. Can't switch camera");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSwitchCameraFailed"
+                               message:@"Video capturer is null"
+                               details:nil]);
+    return;
+  }
+#if TARGET_OS_IPHONE
+  [self.videoCapturer stopCapture];
+#endif
+  self._usingFrontCamera = !self._usingFrontCamera;
+  AVCaptureDevicePosition position =
+      self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+  AVCaptureDevice* videoDevice = [self findDeviceForPosition:position];
+  AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice
+                                                          targetWidth:self._lastTargetWidth
+                                                         targetHeight:self._lastTargetHeight];
+  [self.videoCapturer startCaptureWithDevice:videoDevice
+                                      format:selectedFormat
+                                         fps:[self selectFpsForFormat:selectedFormat
+                                                            targetFps:self._lastTargetFps]
+                           completionHandler:^(NSError* error) {
+                             if (error != nil) {
+                               result([FlutterError errorWithCode:@"Error while switching camera"
+                                                          message:@"Error while switching camera"
+                                                          details:error]);
+                             } else {
+                               result([NSNumber numberWithBool:self._usingFrontCamera]);
+                             }
+                           }];
+}
+
+- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position {
+  if (position == AVCaptureDevicePositionUnspecified) {
+    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+  }
+  NSArray<AVCaptureDevice*>* captureDevices = [RTCCameraVideoCapturer captureDevices];
+  for (AVCaptureDevice* device in captureDevices) {
+    if (device.position == position) {
+      return device;
+    }
+  }
+  return captureDevices[0];
+}
+
+- (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device
+                                    targetWidth:(NSInteger)targetWidth
+                                   targetHeight:(NSInteger)targetHeight {
+  NSArray<AVCaptureDeviceFormat*>* formats =
+      [RTCCameraVideoCapturer supportedFormatsForDevice:device];
+  AVCaptureDeviceFormat* selectedFormat = nil;
+  long currentDiff = INT_MAX;
+  for (AVCaptureDeviceFormat* format in formats) {
+    CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+    // Pick the format whose dimensions are closest to the requested size;
+    // break ties in favor of the capturer's preferred pixel format.
+    long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height);
+    if (diff < currentDiff) {
+      selectedFormat = format;
+      currentDiff = diff;
+    } else if (diff == currentDiff &&
+               pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) {
+      selectedFormat = format;
+    }
+  }
+  return selectedFormat;
+}
+
+- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps {
+  Float64 maxSupportedFramerate = 0;
+  for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) {
+    maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
+  }
+  return fmin(maxSupportedFramerate, targetFps);
+}
+
+@end
\ No newline at end of file
diff --git a/ios/Classes/CustomCapturerDelegate.h b/ios/Classes/CustomCapturerDelegate.h
new file mode 100644
index 0000000000..cc2adedaba
--- /dev/null
+++ b/ios/Classes/CustomCapturerDelegate.h
@@ -0,0 +1,10 @@
+#import <Foundation/Foundation.h>
+#import <WebRTC/WebRTC.h>
+
+@interface CustomCapturerDelegate : NSObject <RTCVideoCapturerDelegate>
+
+@property (nonatomic, strong) RTCVideoSource *videoSource;
+
+- (instancetype)initWithVideoSource:(RTCVideoSource *)videoSource;
+
+@end
\ No newline at end of file
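CustomCapturerDelegate.h above only declares the interface; its implementation is not shown in this hunk. The usual pattern for such a delegate, sketched here under the assumption that it conforms to RTCVideoCapturerDelegate as the header suggests, is to forward every captured frame to the wrapped RTCVideoSource, which performs adaptation (scaling/cropping) before encoding:

#import "CustomCapturerDelegate.h"

// Sketch of a plausible implementation, not the patch's actual .m file.
@implementation CustomCapturerDelegate

- (instancetype)initWithVideoSource:(RTCVideoSource *)videoSource {
  if (self = [super init]) {
    _videoSource = videoSource;
  }
  return self;
}

// RTCVideoCapturerDelegate: hand every frame to the video source.
- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
  [self.videoSource capturer:capturer didCaptureVideoFrame:frame];
}

@end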
diff --git a/ios/Classes/FlutterRTCVideoPlatformView.h b/ios/Classes/FlutterRTCVideoPlatformView.h index 2930062136..fa34728243 100644 --- a/ios/Classes/FlutterRTCVideoPlatformView.h +++ b/ios/Classes/FlutterRTCVideoPlatformView.h @@ -6,12 +6,12 @@ #import <WebRTC/WebRTC.h> -@interface FlutterRTCVideoPlatformView : UIView <RTC_OBJC_TYPE(RTCVideoViewDelegate)> +@interface FlutterRTCVideoPlatformView : UIView -@property(nonatomic, readonly) __kindof UIView *videoRenderer; +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame; - (instancetype)initWithFrame:(CGRect)frame; --(void)setObjectFit:(NSNumber *)index; +- (void)setSize:(CGSize)size; -@end +@end \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoPlatformView.m b/ios/Classes/FlutterRTCVideoPlatformView.m index 6f207420c0..3279cfec71 100644 --- a/ios/Classes/FlutterRTCVideoPlatformView.m +++ b/ios/Classes/FlutterRTCVideoPlatformView.m @@ -1,45 +1,131 @@ #import "FlutterRTCVideoPlatformView.h" @implementation FlutterRTCVideoPlatformView { - CGSize _videoSize; - RTCMTLVideoView *_videoView; + CGSize _videoSize; + AVSampleBufferDisplayLayer* _videoLayer; + CGSize _remoteVideoSize; + CATransform3D _bufferTransform; + RTCVideoRotation _lastVideoRotation; } -@synthesize videoRenderer = _videoRenderer; - - (instancetype)initWithFrame:(CGRect)frame { - if (self = [super initWithFrame:frame]) { - _videoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero]; - _videoView.videoContentMode = UIViewContentModeScaleAspectFit; - _videoView.delegate = self; - _videoRenderer = _videoView; - self.opaque = NO; - [self addSubview:_videoRenderer]; - } - return self; + if (self = [super initWithFrame:frame]) { + _videoLayer = [[AVSampleBufferDisplayLayer alloc] init]; + _videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; + _videoLayer.frame = CGRectZero; + _bufferTransform = CATransform3DIdentity; + _lastVideoRotation = RTCVideoRotation_0; + [self.layer addSublayer:_videoLayer]; + self.opaque = NO; + } + return self; } - (void)layoutSubviews { - CGRect bounds = self.bounds; - _videoRenderer.frame = bounds; + [super layoutSubviews]; + _videoLayer.frame = self.bounds; + [_videoLayer removeAllAnimations]; } --(void)setObjectFit:(NSNumber *)index { - if ([index intValue] == 0) { - _videoView.videoContentMode = UIViewContentModeScaleAspectFit; - } else if([index intValue] == 1) { - // for Cover mode - _videoView.contentMode = UIViewContentModeScaleAspectFit; - _videoView.videoContentMode = UIViewContentModeScaleAspectFill; +- (void)setSize:(CGSize)size { + _remoteVideoSize = size; +} + +- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + + CVPixelBufferRef pixelBuffer = nil; + if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBuffer = ((RTCCVPixelBuffer*)frame.buffer).pixelBuffer; + CFRetain(pixelBuffer); + } else if ([frame.buffer isKindOfClass:[RTCI420Buffer class]]) { + pixelBuffer = [self toCVPixelBuffer:frame]; + } + if (!pixelBuffer) { + // Nil frame or unsupported buffer type; CFRelease(NULL) would crash below. + return; + } + + if (_lastVideoRotation != frame.rotation) { + _bufferTransform = [self fromFrameRotation:frame.rotation]; + _videoLayer.transform = _bufferTransform; + [_videoLayer layoutIfNeeded]; + _lastVideoRotation = frame.rotation; + } + + CMSampleBufferRef sampleBuffer = [self sampleBufferFromPixelBuffer:pixelBuffer]; + if (sampleBuffer) { + if([_videoLayer requiresFlushToResumeDecoding]) { + [_videoLayer flushAndRemoveImage]; + } + [_videoLayer enqueueSampleBuffer:sampleBuffer]; + CFRelease(sampleBuffer); + } + + CFRelease(pixelBuffer); +} + +- (CVPixelBufferRef)toCVPixelBuffer:(RTCVideoFrame*)frame { + CVPixelBufferRef outputPixelBuffer; + NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, frame.width, frame.height, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + (__bridge CFDictionaryRef)(pixelAttributes), &outputPixelBuffer); + id<RTCI420Buffer> i420Buffer = (id<RTCI420Buffer>)frame.buffer; + + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = 
CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); + return outputPixelBuffer; +} + +- (CMSampleBufferRef)sampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer { + CMSampleBufferRef sampleBuffer = NULL; + OSStatus err = noErr; + CMVideoFormatDescriptionRef formatDesc = NULL; + err = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc); + if (err != noErr) { + return nil; + } + CMSampleTimingInfo sampleTimingInfo = kCMTimingInfoInvalid; + err = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDesc, + &sampleTimingInfo, &sampleBuffer); + // The sample buffer retains the format description, so release our reference; + // merely nil-ing the local would leak it. + CFRelease(formatDesc); + if (err != noErr || sampleBuffer == NULL) { + return nil; + } + CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); + CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0); + CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); + return sampleBuffer; } -#pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate) -- (void)videoView:(id)videoView didChangeVideoSize:(CGSize)size { - if (videoView == _videoRenderer) { - _videoSize = size; +- (CATransform3D)fromFrameRotation:(RTCVideoRotation)rotation { + switch (rotation) { + case RTCVideoRotation_0: + return CATransform3DIdentity; + case RTCVideoRotation_90: + return CATransform3DMakeRotation(M_PI / 2.0, 0, 0, 1); + case RTCVideoRotation_180: + return CATransform3DMakeRotation(M_PI, 0, 0, 1); + case RTCVideoRotation_270: + // 270 degrees equals -90 degrees about the z axis. + return CATransform3DMakeRotation(-M_PI / 2.0, 0, 0, 1); } - [self setNeedsLayout]; + return CATransform3DIdentity; } -@end +@end \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoPlatformViewController.h b/ios/Classes/FlutterRTCVideoPlatformViewController.h index fb12578077..0f2a16c814 100644 --- a/ios/Classes/FlutterRTCVideoPlatformViewController.h +++ b/ios/Classes/FlutterRTCVideoPlatformViewController.h @@ -6,20 +6,18 @@ #import <WebRTC/WebRTC.h> -@interface FlutterRTCVideoPlatformViewController : NSObject <FlutterStreamHandler> +@interface FlutterRTCVideoPlatformViewController + : NSObject <FlutterStreamHandler> @property(nonatomic, strong) NSObject<FlutterBinaryMessenger>* _Nonnull messenger; @property(nonatomic, strong) FlutterEventSink _Nonnull eventSink; @property(nonatomic) int64_t viewId; @property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack; -- (instancetype _Nullable )initWithMessenger:(NSObject<FlutterBinaryMessenger>* _Nonnull)messenger - viewIdentifier:(int64_t)viewId - frame:(CGRect)frame - objectFit:(NSNumber * _Nonnull)fit; +- (instancetype _Nullable)initWithMessenger:(NSObject<FlutterBinaryMessenger>* _Nonnull)messenger + viewIdentifier:(int64_t)viewId + frame:(CGRect)frame; --(void)setObjectFit:(NSNumber * _Nonnull)index; +- (UIView* _Nonnull)view; --(UIView* _Nonnull)view; - -@end +@end \ No newline at end of file
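The controller implemented next reports geometry back to Dart over a per-view EventChannel named `FlutterWebRTC/PlatformViewId<viewId>`. For illustration, a Dart listener for the three events it posts; in practice `RTCVideoPlatformViewController` consumes this stream internally, so this is a sketch only:

```dart
import 'package:flutter/services.dart';

/// Illustration only: RTCVideoPlatformViewController attaches an equivalent
/// listener itself. viewId is the identifier from onPlatformViewCreated.
void debugPlatformViewEvents(int viewId) {
  EventChannel('FlutterWebRTC/PlatformViewId$viewId')
      .receiveBroadcastStream()
      .listen((dynamic event) {
    final map = event as Map;
    switch (map['event']) {
      case 'didPlatformViewChangeVideoSize':
        print('video size: ${map['width']}x${map['height']}');
        break;
      case 'didPlatformViewChangeRotation':
        print('rotation: ${map['rotation']} degrees');
        break;
      case 'didFirstFrameRendered':
        print('first frame rendered');
        break;
    }
  });
}
```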
diff --git a/ios/Classes/FlutterRTCVideoPlatformViewController.m b/ios/Classes/FlutterRTCVideoPlatformViewController.m index 54c4eeaff8..519e6fc747 100644 --- a/ios/Classes/FlutterRTCVideoPlatformViewController.m +++ b/ios/Classes/FlutterRTCVideoPlatformViewController.m @@ -3,12 +3,11 @@ #import "FlutterWebRTCPlugin.h" @implementation FlutterRTCVideoPlatformViewController { - FlutterRTCVideoPlatformView* _videoView; - FlutterEventChannel* _eventChannel; - bool _isFirstFrameRendered; - CGSize _frameSize; - CGSize _renderSize; - RTCVideoRotation _rotation; + FlutterRTCVideoPlatformView* _videoView; + FlutterEventChannel* _eventChannel; + bool _isFirstFrameRendered; + CGSize _renderSize; + RTCVideoRotation _rotation; } @synthesize messenger = _messenger; @@ -17,30 +16,27 @@ @implementation FlutterRTCVideoPlatformViewController { - (instancetype)initWithMessenger:(NSObject<FlutterBinaryMessenger>*)messenger viewIdentifier:(int64_t)viewId - frame:(CGRect)frame - objectFit:(NSNumber * _Nonnull)fit { - self = [super init]; - if (self) { - _isFirstFrameRendered = false; - _frameSize = CGSizeZero; - _renderSize = CGSizeZero; - _rotation = -1; - _messenger = messenger; - _videoView = [[FlutterRTCVideoPlatformView alloc] initWithFrame:frame]; - [_videoView setObjectFit:fit]; - _viewId = viewId; - /*Create Event Channel.*/ - _eventChannel = [FlutterEventChannel - eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/PlatformViewId%lld", viewId] - binaryMessenger:messenger]; - [_eventChannel setStreamHandler:self]; - } - - return self; + frame:(CGRect)frame { + self = [super init]; + if (self) { + _isFirstFrameRendered = false; + _renderSize = CGSizeZero; + _rotation = -1; + _messenger = messenger; + _videoView = [[FlutterRTCVideoPlatformView alloc] initWithFrame:frame]; + _viewId = viewId; + /*Create Event Channel.*/ + _eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/PlatformViewId%lld", viewId] + binaryMessenger:messenger]; + [_eventChannel setStreamHandler:self]; + } + + return self; } - (UIView*)view { - return _videoView; + return _videoView; } - (void)setVideoTrack:(RTCVideoTrack*)videoTrack { @@ -50,48 +46,49 @@ - (void)setVideoTrack:(RTCVideoTrack*)videoTrack { } _videoTrack = videoTrack; _isFirstFrameRendered = false; - if(!oldValue) { + if (oldValue) { + // Detach from the previous track before resetting the view. + [oldValue removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)self]; + _videoView.frame = CGRectZero; } - if(videoTrack) { + if (videoTrack) { [videoTrack addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)self]; } } #pragma mark - RTCVideoRenderer methods - (void)renderFrame:(RTCVideoFrame*)frame { - - if (_renderSize.width != frame.width || _renderSize.height != frame.height || !_isFirstFrameRendered) { - if (self.eventSink) { - postEvent( self.eventSink, @{ - @"event" : @"didPlatformViewChangeVideoSize", - @"id" : @(self.viewId), - @"width" : @(frame.width), - @"height" : @(frame.height), - }); - } + if (_renderSize.width != frame.width || _renderSize.height != frame.height || + !_isFirstFrameRendered) { + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"didPlatformViewChangeVideoSize", + @"id" : @(self.viewId), + @"width" : @(frame.width), + @"height" : @(frame.height), + }); + } + _renderSize = CGSizeMake(frame.width, frame.height); } if (frame.rotation != _rotation || !_isFirstFrameRendered) { - if (self.eventSink) { - postEvent( self.eventSink,@{ - @"event" : @"didPlatformViewChangeRotation", - @"id" : @(self.viewId), - @"rotation" : @(frame.rotation), - }); - } + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"didPlatformViewChangeRotation", + @"id" : @(self.viewId), + @"rotation" : @(frame.rotation), + }); + } + _rotation = frame.rotation; } - - [_videoView.videoRenderer renderFrame:frame]; if (!_isFirstFrameRendered) { if (self.eventSink) 
{ postEvent(self.eventSink, @{@"event" : @"didFirstFrameRendered"}); } self->_isFirstFrameRendered = true; } + + [_videoView renderFrame:frame]; } /** @@ -100,10 +97,7 @@ - (void)renderFrame:(RTCVideoFrame*)frame { * @param size The size of the video frame to render. */ - (void)setSize:(CGSize)size { - if (size.width != _frameSize.width || size.height != _frameSize.height) { - _frameSize = size; - } - [_videoView.videoRenderer setSize:size]; + [_videoView setSize:size]; } #pragma mark - FlutterStreamHandler methods @@ -119,4 +113,4 @@ - (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments return nil; } -@end +@end \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoPlatformViewFactory.h b/ios/Classes/FlutterRTCVideoPlatformViewFactory.h index 936cbf4fd6..5206194aa7 100644 --- a/ios/Classes/FlutterRTCVideoPlatformViewFactory.h +++ b/ios/Classes/FlutterRTCVideoPlatformViewFactory.h @@ -4,16 +4,16 @@ #import <Flutter/Flutter.h> #endif - #define FLutterRTCVideoPlatformViewFactoryID @"rtc_video_platform_view" @class FlutterRTCVideoPlatformViewController; -@interface FLutterRTCVideoPlatformViewFactory : NSObject <FlutterPlatformViewFactory> +@interface FLutterRTCVideoPlatformViewFactory : NSObject <FlutterPlatformViewFactory> @property(nonatomic, strong) NSObject<FlutterBinaryMessenger>* _Nonnull messenger; -@property(nonatomic, strong) NSMutableDictionary<NSNumber*, FlutterRTCVideoPlatformViewController*>* _Nullable renders; +@property(nonatomic, strong) + NSMutableDictionary<NSNumber*, FlutterRTCVideoPlatformViewController*>* _Nullable renders; - (_Nonnull instancetype)initWithMessenger:(NSObject<FlutterBinaryMessenger>* _Nonnull)messenger; -@end +@end \ No newline at end of file diff --git a/ios/Classes/FlutterRTCVideoPlatformViewFactory.m b/ios/Classes/FlutterRTCVideoPlatformViewFactory.m index 5af6fb653c..c0e1647232 100644 --- a/ios/Classes/FlutterRTCVideoPlatformViewFactory.m +++ b/ios/Classes/FlutterRTCVideoPlatformViewFactory.m @@ -2,20 +2,18 @@ #import "FlutterRTCVideoPlatformViewController.h" @implementation FLutterRTCVideoPlatformViewFactory { - } @synthesize messenger = _messenger; - (instancetype)initWithMessenger:(NSObject<FlutterBinaryMessenger>*)messenger { + self = [super init]; + if (self) { + _messenger = messenger; + self.renders = [NSMutableDictionary new]; + } - self = [super init]; - if (self) { - _messenger = messenger; - self.renders = [NSMutableDictionary new]; - } - - return self; + return self; } - (NSObject<FlutterMessageCodec>*)createArgsCodec { @@ -25,10 +23,12 @@ - (instancetype)initWithMessenger:(NSObject<FlutterBinaryMessenger>*)messenger { - (NSObject<FlutterPlatformView>*)createWithFrame:(CGRect)frame viewIdentifier:(int64_t)viewId arguments:(id _Nullable)args { - NSNumber *fit = args[@"objectFit"]; - FlutterRTCVideoPlatformViewController * render = [[FlutterRTCVideoPlatformViewController alloc] initWithMessenger:_messenger viewIdentifier:viewId frame:frame objectFit:fit]; - self.renders[@(viewId)] = render; - return render; + FlutterRTCVideoPlatformViewController* render = + [[FlutterRTCVideoPlatformViewController alloc] initWithMessenger:_messenger + viewIdentifier:viewId + frame:frame]; + self.renders[@(viewId)] = render; + return render; } -@end +@end \ No newline at end of file diff --git a/ios/Classes/LocalAudioTrack.h b/ios/Classes/LocalAudioTrack.h new file mode 100644 index 0000000000..7cd1861a06 --- /dev/null +++ b/ios/Classes/LocalAudioTrack.h @@ -0,0 +1,19 @@ +#import <WebRTC/WebRTC.h> +#import "AudioProcessingAdapter.h" +#import "LocalTrack.h" + +@interface LocalAudioTrack : NSObject <LocalTrack> + +- (_Nonnull instancetype)initWithTrack:(RTCAudioTrack* _Nonnull)track; + +@property(nonatomic, strong) RTCAudioTrack* _Nonnull audioTrack; + +- (void)addRenderer:(_Nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer; + +- (void)removeRenderer:(_Nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer; + +- 
(void)addProcessing:(_Nonnull id<ExternalAudioProcessingDelegate>)processor; + +- (void)removeProcessing:(_Nonnull id<ExternalAudioProcessingDelegate>)processor; + +@end diff --git a/ios/Classes/LocalAudioTrack.m b/ios/Classes/LocalAudioTrack.m new file mode 100644 index 0000000000..a080d4f090 --- /dev/null +++ b/ios/Classes/LocalAudioTrack.m @@ -0,0 +1,38 @@ +#import "LocalAudioTrack.h" +#import "AudioManager.h" + +@implementation LocalAudioTrack { + RTCAudioTrack* _track; +} + +@synthesize audioTrack = _track; + +- (instancetype)initWithTrack:(RTCAudioTrack*)track { + self = [super init]; + if (self) { + _track = track; + } + return self; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer { + [AudioManager.sharedInstance addLocalAudioRenderer:renderer]; +} + +- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer { + [AudioManager.sharedInstance removeLocalAudioRenderer:renderer]; +} + +- (void)addProcessing:(_Nonnull id<ExternalAudioProcessingDelegate>)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter addProcessing:processor]; +} + +- (void)removeProcessing:(_Nonnull id<ExternalAudioProcessingDelegate>)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter removeProcessing:processor]; +} + +@end diff --git a/ios/Classes/LocalTrack.h b/ios/Classes/LocalTrack.h new file mode 100644 index 0000000000..e224df4c89 --- /dev/null +++ b/ios/Classes/LocalTrack.h @@ -0,0 +1,7 @@ +#import <WebRTC/WebRTC.h> + +@protocol LocalTrack + +- (RTCMediaStreamTrack*)track; + +@end \ No newline at end of file diff --git a/ios/Classes/LocalVideoTrack.h b/ios/Classes/LocalVideoTrack.h new file mode 100644 index 0000000000..3d4654e336 --- /dev/null +++ b/ios/Classes/LocalVideoTrack.h @@ -0,0 +1,24 @@ +#import <WebRTC/WebRTC.h> +#import "LocalTrack.h" +#import "VideoProcessingAdapter.h" + +@interface LocalVideoTrack : NSObject <LocalTrack> + +- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track; + +- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track + videoProcessing:(VideoProcessingAdapter* _Nullable)processing; + +@property(nonatomic, strong) RTCVideoTrack* _Nonnull videoTrack; + +@property(nonatomic, strong) VideoProcessingAdapter* _Nonnull processing; + +- (void)addRenderer:(_Nonnull id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer; + +- (void)removeRenderer:(_Nonnull id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer; + +- (void)addProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor; + +- (void)removeProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor; + +@end \ No newline at end of file diff --git a/ios/Classes/LocalVideoTrack.m b/ios/Classes/LocalVideoTrack.m new file mode 100644 index 0000000000..d08c432f02 --- /dev/null +++ b/ios/Classes/LocalVideoTrack.m @@ -0,0 +1,47 @@ +#import "LocalVideoTrack.h" + +@implementation LocalVideoTrack { + RTCVideoTrack* _track; + VideoProcessingAdapter* _processing; +} + +@synthesize videoTrack = _track; +@synthesize processing = _processing; + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + videoProcessing:(VideoProcessingAdapter*)processing { + self = [super init]; + if (self) { + _track = track; + _processing = processing; + } + return self; +} + +- (instancetype)initWithTrack:(RTCVideoTrack*)track { + return [self initWithTrack:track videoProcessing:nil]; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +/** Register a renderer that will render all frames received on this track. */ +- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer { + [_track addRenderer:renderer]; +} + +/** Deregister a renderer. 
*/ +- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer { + [_track removeRenderer:renderer]; +} + +- (void)addProcessing:(id<ExternalVideoProcessingDelegate>)processor { + [_processing addProcessing:processor]; +} + +- (void)removeProcessing:(id<ExternalVideoProcessingDelegate>)processor { + [_processing removeProcessing:processor]; +} + +@end \ No newline at end of file diff --git a/ios/Classes/VideoProcessingAdapter.h b/ios/Classes/VideoProcessingAdapter.h new file mode 100644 index 0000000000..c953316eec --- /dev/null +++ b/ios/Classes/VideoProcessingAdapter.h @@ -0,0 +1,18 @@ +#import <Foundation/Foundation.h> +#import <WebRTC/WebRTC.h> + +@protocol ExternalVideoProcessingDelegate +- (RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)onFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)frame; +@end + +@interface VideoProcessingAdapter : NSObject <RTC_OBJC_TYPE(RTCVideoCapturerDelegate)> + +- (_Nonnull instancetype)initWithRTCVideoSource:(RTCVideoSource* _Nonnull)source; + +- (void)addProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor; + +- (void)removeProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor; + +- (RTCVideoSource* _Nonnull) source; + +@end diff --git a/ios/Classes/VideoProcessingAdapter.m b/ios/Classes/VideoProcessingAdapter.m new file mode 100644 index 0000000000..f3e7966522 --- /dev/null +++ b/ios/Classes/VideoProcessingAdapter.m @@ -0,0 +1,55 @@ +#import "VideoProcessingAdapter.h" +#import <os/lock.h> + +@implementation VideoProcessingAdapter { + RTCVideoSource* _videoSource; + CGSize _frameSize; + NSArray<id<ExternalVideoProcessingDelegate>>* _processors; + os_unfair_lock _lock; +} + +- (instancetype)initWithRTCVideoSource:(RTCVideoSource*)source { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _videoSource = source; + _processors = [NSArray new]; + } + return self; +} + +- (RTCVideoSource* _Nonnull) source { + return _videoSource; +} + +- (void)addProcessing:(id<ExternalVideoProcessingDelegate>)processor { + os_unfair_lock_lock(&_lock); + _processors = [_processors arrayByAddingObject:processor]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeProcessing:(id<ExternalVideoProcessingDelegate>)processor { + os_unfair_lock_lock(&_lock); + _processors = [_processors + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != processor; + }]]; + os_unfair_lock_unlock(&_lock); +} + +- (void)setSize:(CGSize)size { + _frameSize = size; +} + +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + os_unfair_lock_lock(&_lock); + // Each processor may return a replacement frame; chain them in order + // before handing the result to the video source. + for (id<ExternalVideoProcessingDelegate> processor in _processors) { + frame = [processor onFrame:frame]; + } + [_videoSource capturer:capturer didCaptureVideoFrame:frame]; + os_unfair_lock_unlock(&_lock); +} + +@end \ No newline at end of file diff --git a/ios/Classes/VideoProcessor.h b/ios/Classes/VideoProcessor.h new file mode 100644 index 0000000000..02e1599d4f --- /dev/null +++ b/ios/Classes/VideoProcessor.h @@ -0,0 +1,10 @@ +#import <Foundation/Foundation.h> +#import <WebRTC/WebRTC.h> + +// Define Processor class +@interface VideoProcessor : NSObject + +// Declare any properties and methods needed +- (RTCVideoFrame *)onFrameReceived:(RTCVideoFrame *)frame; + +@end \ No newline at end of file diff --git a/ios/Classes/VideoProcessor.m b/ios/Classes/VideoProcessor.m new file mode 100644 index 0000000000..48c33a8998 --- /dev/null +++ b/ios/Classes/VideoProcessor.m @@ -0,0 +1,7 @@ +#import "VideoProcessor.h" + +@implementation VideoProcessor + +// Empty implementation + +@end diff --git a/ios/Classes/WebRTCService.h b/ios/Classes/WebRTCService.h new file mode 100644 index 0000000000..3b4c740c8e --- /dev/null +++ b/ios/Classes/WebRTCService.h @@ -0,0 +1,18 @@ + +#import <Foundation/Foundation.h> +#import "VideoProcessor.h" // Import Processor header file + +@interface WebRTCService : NSObject + 
+@property (nonatomic, strong) VideoProcessor *videoProcessor; + +// Singleton instance method ++ (instancetype)sharedInstance; + +// Method to set the Processor +- (void)setVideoProcessor:(VideoProcessor *)videoProcessor; + +// Method to get the current Processor +- (VideoProcessor *)getVideoProcessor; + +@end \ No newline at end of file diff --git a/ios/Classes/WebRTCService.m b/ios/Classes/WebRTCService.m new file mode 100644 index 0000000000..2f7e5d4f71 --- /dev/null +++ b/ios/Classes/WebRTCService.m @@ -0,0 +1,36 @@ +#import "WebRTCService.h" + +@implementation WebRTCService + +// Static variable for the singleton instance +static WebRTCService *instance = nil; + +// Private initializer to prevent instantiation from outside +- (instancetype)initPrivate { + self = [super init]; + if (self) { + // Initialization logic if any + } + return self; +} + +// Singleton instance method ++ (instancetype)sharedInstance { + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + instance = [[self alloc] initPrivate]; + }); + return instance; +} + +// Method to set the Processor +- (void)setVideoProcessor:(VideoProcessor *)videoProcessor { + _videoProcessor = videoProcessor; +} + +// Method to get the current Processor +- (VideoProcessor *)getVideoProcessor { + return _videoProcessor; +} + +@end \ No newline at end of file diff --git a/ios/flutter_webrtc.podspec b/ios/videosdk_webrtc.podspec similarity index 66% rename from ios/flutter_webrtc.podspec rename to ios/videosdk_webrtc.podspec index 7ddc73bb35..e33592aa05 100644 --- a/ios/flutter_webrtc.podspec +++ b/ios/videosdk_webrtc.podspec @@ -2,20 +2,20 @@ # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html # Pod::Spec.new do |s| - s.name = 'flutter_webrtc' - s.version = '0.11.0' + s.name = 'videosdk_webrtc' + s.version = '0.0.6' s.summary = 'Flutter WebRTC plugin for iOS.' s.description = <<-DESC A new flutter plugin project. DESC - s.homepage = 'https://github.com/cloudwebrtc/flutter-webrtc' + s.homepage = 'https://www.videosdk.live' s.license = { :file => '../LICENSE' } - s.author = { 'CloudWebRTC' => 'duanweiwei1982@gmail.com' } + s.author = { 'VideoSDK' => 'sdk@videosdk.live' } s.source = { :path => '.' 
} s.source_files = 'Classes/**/*' s.public_header_files = 'Classes/**/*.h' s.dependency 'Flutter' - s.dependency 'WebRTC-SDK', '125.6422.02' + s.dependency 'WebRTC-SDK', '125.6422.06' s.ios.deployment_target = '13.0' s.static_framework = true end diff --git a/lib/flutter_webrtc.dart b/lib/flutter_webrtc.dart index 12921d598e..b6e29c7450 100644 --- a/lib/flutter_webrtc.dart +++ b/lib/flutter_webrtc.dart @@ -8,14 +8,15 @@ export 'src/desktop_capturer.dart'; export 'src/media_devices.dart'; export 'src/media_recorder.dart'; export 'src/native/factory_impl.dart' - if (dart.library.html) 'src/web/factory_impl.dart'; + if (dart.library.js_interop) 'src/web/factory_impl.dart'; export 'src/native/rtc_video_renderer_impl.dart' - if (dart.library.html) 'src/web/rtc_video_renderer_impl.dart'; + if (dart.library.js_interop) 'src/web/rtc_video_renderer_impl.dart'; export 'src/native/rtc_video_view_impl.dart' - if (dart.library.html) 'src/web/rtc_video_view_impl.dart'; -export 'src/native/utils.dart' if (dart.library.html) 'src/web/utils.dart'; + if (dart.library.js_interop) 'src/web/rtc_video_view_impl.dart'; +export 'src/native/utils.dart' + if (dart.library.js_interop) 'src/web/utils.dart'; export 'src/native/adapter_type.dart'; +export 'src/native/camera_utils.dart'; +export 'src/native/audio_management.dart'; export 'src/native/android/audio_configuration.dart'; export 'src/native/ios/audio_configuration.dart'; -export 'src/native/rtc_video_platform_view_controller.dart'; -export 'src/native/rtc_video_platform_view.dart'; diff --git a/lib/src/helper.dart b/lib/src/helper.dart index e52e3bcfe7..6f1e9666bf 100644 --- a/lib/src/helper.dart +++ b/lib/src/helper.dart @@ -1,7 +1,8 @@ +import 'dart:math'; + import 'package:flutter/foundation.dart'; import '../flutter_webrtc.dart'; -import 'native/audio_management.dart'; class Helper { static Future> enumerateDevices(String type) async { @@ -67,17 +68,24 @@ class Helper { return Future.value(true); } - static Future setZoom( - MediaStreamTrack videoTrack, double zoomLevel) async { - if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { - await WebRTC.invokeMethod( - 'mediaStreamTrackSetZoom', - {'trackId': videoTrack.id, 'zoomLevel': zoomLevel}, - ); - } else { - throw Exception('setZoom only support for mobile devices!'); - } - } + static Future setZoom(MediaStreamTrack videoTrack, double zoomLevel) => + CameraUtils.setZoom(videoTrack, zoomLevel); + + static Future setFocusMode( + MediaStreamTrack videoTrack, CameraFocusMode focusMode) => + CameraUtils.setFocusMode(videoTrack, focusMode); + + static Future setFocusPoint( + MediaStreamTrack videoTrack, Point? point) => + CameraUtils.setFocusPoint(videoTrack, point); + + static Future setExposureMode( + MediaStreamTrack videoTrack, CameraExposureMode exposureMode) => + CameraUtils.setExposureMode(videoTrack, exposureMode); + + static Future setExposurePoint( + MediaStreamTrack videoTrack, Point? point) => + CameraUtils.setExposurePoint(videoTrack, point); /// Used to select a specific audio output device. 
/// diff --git a/lib/src/native/adapter_type.dart b/lib/src/native/adapter_type.dart index 385b1dd0ed..fa5f85a833 100644 --- a/lib/src/native/adapter_type.dart +++ b/lib/src/native/adapter_type.dart @@ -1,5 +1,3 @@ -import 'package:flutter/foundation.dart'; - enum AdapterType { adapterTypeUnknown, adapterTypeEthernet, @@ -9,7 +7,3 @@ enum AdapterType { adapterTypeLoopback, adapterTypeAny } - -extension AdapterTypeExt on AdapterType { - String get value => describeEnum(this); -} diff --git a/lib/src/native/android/audio_configuration.dart b/lib/src/native/android/audio_configuration.dart index ff312b07e2..150962b6a0 100644 --- a/lib/src/native/android/audio_configuration.dart +++ b/lib/src/native/android/audio_configuration.dart @@ -1,5 +1,3 @@ -import 'package:flutter/foundation.dart'; - import '../utils.dart'; enum AndroidAudioMode { @@ -10,13 +8,9 @@ ringtone, } -extension AndroidAudioModeExt on AndroidAudioMode { - String get value => describeEnum(this); -} - extension AndroidAudioModeEnumEx on String { - AndroidAudioMode toAndroidAudioMode() => AndroidAudioMode.values - .firstWhere((d) => describeEnum(d) == toLowerCase()); + AndroidAudioMode toAndroidAudioMode() => + AndroidAudioMode.values.firstWhere((d) => d.name.toLowerCase() == toLowerCase()); } enum AndroidAudioFocusMode { @@ -26,14 +20,9 @@ gainTransientMayDuck } -extension AndroidAudioFocusModeExt on AndroidAudioFocusMode { - String get value => describeEnum(this); -} - extension AndroidAudioFocusModeEnumEx on String { AndroidAudioFocusMode toAndroidAudioFocusMode() => - AndroidAudioFocusMode.values - .firstWhere((d) => describeEnum(d) == toLowerCase()); + AndroidAudioFocusMode.values.firstWhere((d) => d.name.toLowerCase() == toLowerCase()); } enum AndroidAudioStreamType { @@ -47,14 +36,9 @@ voiceCall } -extension AndroidAudioStreamTypeExt on AndroidAudioStreamType { - String get value => describeEnum(this); -} - extension AndroidAudioStreamTypeEnumEx on String { AndroidAudioStreamType toAndroidAudioStreamType() => - AndroidAudioStreamType.values - .firstWhere((d) => describeEnum(d) == toLowerCase()); + AndroidAudioStreamType.values.firstWhere((d) => d.name.toLowerCase() == toLowerCase()); } enum AndroidAudioAttributesUsageType { @@ -73,15 +57,10 @@ voiceCommunicationSignalling } -extension AndroidAudioAttributesUsageTypeExt - on AndroidAudioAttributesUsageType { - String get value => describeEnum(this); -} - extension AndroidAudioAttributesUsageTypeEnumEx on String { AndroidAudioAttributesUsageType toAndroidAudioAttributesUsageType() => AndroidAudioAttributesUsageType.values - .firstWhere((d) => describeEnum(d) == toLowerCase()); + .firstWhere((d) => d.name.toLowerCase() == toLowerCase()); } enum AndroidAudioAttributesContentType { @@ -92,15 +71,10 @@ unknown } -extension AndroidAudioAttributesContentTypeExt - on AndroidAudioAttributesContentType { - String get value => describeEnum(this); -} - extension AndroidAudioAttributesContentTypeEnumEx on String { AndroidAudioAttributesContentType toAndroidAudioAttributesContentType() => AndroidAudioAttributesContentType.values - .firstWhere((d) => describeEnum(d) == toLowerCase()); + .firstWhere((d) => d.name.toLowerCase() == toLowerCase()); } class AndroidAudioConfiguration { @@ -133,17 +107,17 @@ Map<String, dynamic> toMap() => { if (manageAudioFocus != null) 'manageAudioFocus': manageAudioFocus!, if (androidAudioMode != null) - 'androidAudioMode': androidAudioMode!.value, + 'androidAudioMode': androidAudioMode!.name, if (androidAudioFocusMode != null) - 'androidAudioFocusMode': androidAudioFocusMode!.value, + 'androidAudioFocusMode': androidAudioFocusMode!.name, if (androidAudioStreamType != null) - 'androidAudioStreamType': androidAudioStreamType!.value, + 'androidAudioStreamType': androidAudioStreamType!.name, if (androidAudioAttributesUsageType != null) 'androidAudioAttributesUsageType': - androidAudioAttributesUsageType!.value, + androidAudioAttributesUsageType!.name, if (androidAudioAttributesContentType != null) 'androidAudioAttributesContentType': - androidAudioAttributesContentType!.value, + androidAudioAttributesContentType!.name, if (forceHandleAudioRouting != null) 'forceHandleAudioRouting': forceHandleAudioRouting!, };
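These extensions swap Flutter's removed `describeEnum` helper for Dart's built-in `Enum.name` getter (Dart 2.15+), which returns the bare identifier without the type prefix. A quick illustration using the enums defined above:

```dart
// Enum.name yields the bare identifier, matching describeEnum's output:
assert(AndroidAudioMode.ringtone.name == 'ringtone');

// The String extensions above convert wire values back into enums:
final mode = 'ringtone'.toAndroidAudioMode();
assert(mode == AndroidAudioMode.ringtone);
```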
diff --git a/lib/src/native/camera_utils.dart b/lib/src/native/camera_utils.dart new file mode 100644 index 0000000000..3557a64e46 --- /dev/null +++ b/lib/src/native/camera_utils.dart @@ -0,0 +1,93 @@ +import 'dart:math'; + +import 'package:webrtc_interface/webrtc_interface.dart'; + +import 'utils.dart'; + +enum CameraFocusMode { auto, locked } + +enum CameraExposureMode { auto, locked } + +class CameraUtils { + static Future<void> setZoom( + MediaStreamTrack videoTrack, double zoomLevel) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetZoom', + {'trackId': videoTrack.id, 'zoomLevel': zoomLevel}, + ); + } else { + throw Exception('setZoom is only supported on mobile devices'); + } + } + + /// Set the focus mode for the camera; focusMode can be + /// 'auto' or 'locked'. + static Future<void> setFocusMode( + MediaStreamTrack videoTrack, CameraFocusMode focusMode) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetFocusMode', + { + 'trackId': videoTrack.id, + 'focusMode': focusMode.name, + }, + ); + } else { + throw Exception('setFocusMode is only supported on mobile devices'); + } + } + + static Future<void> setFocusPoint( + MediaStreamTrack videoTrack, Point<double>? point) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetFocusPoint', + { + 'trackId': videoTrack.id, + 'focusPoint': { + 'reset': point == null, + 'x': point?.x, + 'y': point?.y, + }, + }, + ); + } else { + throw Exception('setFocusPoint is only supported on mobile devices'); + } + } + + static Future<void> setExposureMode( + MediaStreamTrack videoTrack, CameraExposureMode exposureMode) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetExposureMode', + { + 'trackId': videoTrack.id, + 'exposureMode': exposureMode.name, + }, + ); + } else { + throw Exception('setExposureMode is only supported on mobile devices'); + } + } + + static Future<void> setExposurePoint( + MediaStreamTrack videoTrack, Point<double>? point) async { + if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) { + await WebRTC.invokeMethod( + 'mediaStreamTrackSetExposurePoint', + { + 'trackId': videoTrack.id, + 'exposurePoint': { + 'reset': point == null, + 'x': point?.x, + 'y': point?.y, + }, + }, + ); + } else { + throw Exception('setExposurePoint is only supported on mobile devices'); + } + } +}
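A usage sketch for the focus half of this API, mirroring the exposure example earlier; per the iOS handler, 'locked' maps to a single autofocus pass that then holds. The package import path is again assumed:

```dart
import 'dart:math';

import 'package:flutter_webrtc/flutter_webrtc.dart';

/// Tap-to-focus: aim focus at a normalized point (0.0-1.0 in preview
/// coordinates), then hold it there until explicitly reset.
Future<void> focusOn(MediaStreamTrack videoTrack, Point<double> point) async {
  await CameraUtils.setFocusPoint(videoTrack, point);
  await CameraUtils.setFocusMode(videoTrack, CameraFocusMode.locked);
}
```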
diff --git a/lib/src/native/factory_impl.dart b/lib/src/native/factory_impl.dart index 676e8c67da..1e0932a9b6 100644 --- a/lib/src/native/factory_impl.dart +++ b/lib/src/native/factory_impl.dart @@ -17,13 +17,14 @@ class RTCFactoryNative extends RTCFactory { RTCFactoryNative._internal(); static final RTCFactory instance = RTCFactoryNative._internal(); - @override Future<MediaStream> createLocalMediaStream(String label) async { final response = await WebRTC.invokeMethod('createLocalMediaStream'); + if (response == null) { throw Exception('createLocalMediaStream returned null, something is wrong'); } + return MediaStreamNative(response['streamId'], label); } @@ -47,6 +48,7 @@ class RTCFactoryNative extends RTCFactory { ); String peerConnectionId = response['peerConnectionId']; + return RTCPeerConnectionNative(peerConnectionId, configuration); } diff --git a/lib/src/native/ios/audio_configuration.dart b/lib/src/native/ios/audio_configuration.dart index f603c87a2c..b840a0a8ae 100644 --- a/lib/src/native/ios/audio_configuration.dart +++ b/lib/src/native/ios/audio_configuration.dart @@ -1,5 +1,3 @@ -import 'package:flutter/foundation.dart'; - import '../utils.dart'; enum AppleAudioMode { @@ -14,13 +12,9 @@ voicePrompt, } -extension AppleAudioModeExt on AppleAudioMode { - String get value => describeEnum(this); -} - extension AppleAudioModeEnumEx on String { AppleAudioMode toAppleAudioMode() => - AppleAudioMode.values.firstWhere((d) => describeEnum(d) == toLowerCase()); + AppleAudioMode.values.firstWhere((d) => d.name.toLowerCase() == toLowerCase()); } enum AppleAudioCategory { @@ -31,13 +25,9 @@ multiRoute, } -extension AppleAudioCategoryExt on AppleAudioCategory { - String get value => describeEnum(this); -} - extension AppleAudioCategoryEnumEx on String { - AppleAudioCategory toAppleAudioCategory() => AppleAudioCategory.values - .firstWhere((d) => describeEnum(d) == toLowerCase()); + AppleAudioCategory toAppleAudioCategory() => + AppleAudioCategory.values.firstWhere((d) => d.name.toLowerCase() == toLowerCase()); } enum AppleAudioCategoryOption { @@ -50,14 +40,10 @@ defaultToSpeaker, } -extension AppleAudioCategoryOptionExt on AppleAudioCategoryOption { - String get value => describeEnum(this); -} - extension AppleAudioCategoryOptionEnumEx on String { AppleAudioCategoryOption toAppleAudioCategoryOption() => AppleAudioCategoryOption.values - .firstWhere((d) => describeEnum(d) == toLowerCase()); + .firstWhere((d) => d.name.toLowerCase() == toLowerCase()); } class AppleAudioConfiguration { @@ -72,11 +58,11 @@ Map<String, dynamic> toMap() => { if (appleAudioCategory != null) - 'appleAudioCategory': appleAudioCategory!.value, + 'appleAudioCategory': appleAudioCategory!.name, if (appleAudioCategoryOptions != null) 'appleAudioCategoryOptions': - appleAudioCategoryOptions!.map((e) => e.value).toList(), - if (appleAudioMode != null) 'appleAudioMode': appleAudioMode!.value, + appleAudioCategoryOptions!.map((e) => e.name).toList(), + if (appleAudioMode != null) 'appleAudioMode': appleAudioMode!.name, }; } diff --git a/lib/src/native/media_stream_impl.dart b/lib/src/native/media_stream_impl.dart index 
ca4c9773e3..49f27a7e5a 100644 --- a/lib/src/native/media_stream_impl.dart +++ b/lib/src/native/media_stream_impl.dart @@ -7,8 +7,7 @@ import 'media_stream_track_impl.dart'; import 'utils.dart'; class MediaStreamNative extends MediaStream { - MediaStreamNative(String streamId, String ownerTag) - : super(streamId, ownerTag); + MediaStreamNative(super.streamId, super.ownerTag); factory MediaStreamNative.fromMap(Map<dynamic, dynamic> map) { return MediaStreamNative(map['streamId'], map['ownerTag']) diff --git a/lib/src/native/rtc_video_platform_view.dart b/lib/src/native/rtc_video_platform_view.dart index e0befc3952..0dd5cb6bcf 100644 --- a/lib/src/native/rtc_video_platform_view.dart +++ b/lib/src/native/rtc_video_platform_view.dart @@ -24,10 +24,11 @@ class RTCVideoPlatFormView extends StatefulWidget { class NativeVideoPlayerViewState extends State<RTCVideoPlatFormView> { RTCVideoPlatformViewController? _controller; - + bool _showVideoView = false; @override void dispose() { _controller?.onFirstFrameRendered = null; + _controller?.onSrcObjectChange = null; _controller?.onResize = null; _controller = null; super.dispose(); @@ -41,19 +42,29 @@ class NativeVideoPlayerViewState extends State<RTCVideoPlatFormView> { } Widget _buildVideoView(BuildContext context, BoxConstraints constraints) { - return FittedBox( - clipBehavior: Clip.hardEdge, - fit: - widget.objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain - ? BoxFit.contain - : BoxFit.cover, - child: SizedBox( - width: constraints.maxWidth, - height: constraints.maxHeight, - child: Transform( - transform: Matrix4.identity()..rotateY(widget.mirror ? -pi : 0.0), - alignment: FractionalOffset.center, - child: _buildNativeView(), + return Center( + child: FittedBox( + clipBehavior: Clip.hardEdge, + fit: widget.objectFit == + RTCVideoViewObjectFit.RTCVideoViewObjectFitContain + ? BoxFit.contain + : BoxFit.cover, + child: Center( + child: SizedBox( + width: _showVideoView + ? widget.objectFit == + RTCVideoViewObjectFit.RTCVideoViewObjectFitCover + ? constraints.maxWidth + : constraints.maxHeight * + (_controller?.value.aspectRatio ?? 1.0) + : 0.1, + height: _showVideoView ? constraints.maxHeight : 0.1, + child: Transform( + transform: Matrix4.identity()..rotateY(widget.mirror ? 
-pi : 0.0), + alignment: FractionalOffset.center, + child: _buildNativeView(), + ), + ), ), ), ); } @@ -65,17 +76,16 @@ class NativeVideoPlayerViewState extends State<RTCVideoPlatFormView> { return UiKitView( viewType: viewType, onPlatformViewCreated: onPlatformViewCreated, - creationParams: { - 'objectFit': widget.objectFit.index, - }, + creationParams: {}, creationParamsCodec: const StandardMessageCodec(), ); } return Text('RTCVideoPlatFormView is only supported on iOS.'); } - void reBuildView() { + void showVideoView(bool show) { if (mounted) { + _showVideoView = show; setState(() {}); } } @@ -83,9 +93,10 @@ Future<void> onPlatformViewCreated(int id) async { final controller = RTCVideoPlatformViewController(id); _controller = controller; + controller.onFirstFrameRendered = () => showVideoView(true); + controller.onSrcObjectChange = () => showVideoView(false); + controller.onResize = () => showVideoView(true); widget.onViewReady?.call(controller); - controller.onFirstFrameRendered = reBuildView; - controller.onResize = reBuildView; await _controller?.initialize(); } }
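A sketch of embedding the platform view from application code; the state above keeps the view at 0.1 logical pixels until the first frame or a resize arrives, then sizes it from the controller's aspect ratio. Constructor parameter names are inferred from the widget's fields, and note that this diff removes the barrel exports for `rtc_video_platform_view.dart`, so a direct `src/` import may be required:

```dart
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';

class LocalPreview extends StatelessWidget {
  const LocalPreview({super.key, required this.stream});

  final MediaStream stream;

  @override
  Widget build(BuildContext context) {
    return RTCVideoPlatFormView(
      mirror: true,
      objectFit: RTCVideoViewObjectFit.RTCVideoViewObjectFitCover,
      // Assigning srcObject hides the view until didFirstFrameRendered fires.
      onViewReady: (controller) => controller.srcObject = stream,
    );
  }
}
```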
diff --git a/lib/src/native/rtc_video_platform_view_controller.dart b/lib/src/native/rtc_video_platform_view_controller.dart index b082ddc27f..0c1ccc3c26 100644 --- a/lib/src/native/rtc_video_platform_view_controller.dart +++ b/lib/src/native/rtc_video_platform_view_controller.dart @@ -15,6 +15,7 @@ } int? _viewId; bool _disposed = false; + MediaStream? _srcObject; StreamSubscription? _eventSubscription; @@ -32,6 +33,9 @@ @override int get videoHeight => value.height.toInt(); + @override + RTCVideoValue get videoValue => value; + @override int? get textureId => _viewId; @@ -44,14 +48,17 @@ @override Function? onFirstFrameRendered; + Function? onSrcObjectChange; + @override set srcObject(MediaStream? stream) { if (_disposed) { throw 'Can\'t set srcObject: The RTCVideoPlatformController is disposed'; } if (_viewId == null) throw 'Call initialize before setting the stream'; + if (_srcObject == stream) return; _srcObject = stream; - _reset(); + onSrcObjectChange?.call(); WebRTC.invokeMethod( 'videoPlatformViewRendererSetSrcObject', { 'viewId': _viewId, @@ -72,7 +79,9 @@ throw 'Can\'t set srcObject: The RTCVideoPlatformController is disposed'; } if (_viewId == null) throw 'Call initialize before setting the stream'; + if (_srcObject == stream) return; _srcObject = stream; + onSrcObjectChange?.call(); var oldviewId = _viewId; try { await WebRTC.invokeMethod( @@ -145,11 +154,6 @@ @override bool get muted => _srcObject?.getAudioTracks()[0].muted ?? true; - void _reset() { - value = value.copyWith( - width: 0.0, height: 0.0, renderVideo: false, rotation: 0); - } - @override set muted(bool mute) { if (_disposed) { @@ -180,20 +184,4 @@ } return true; } - - Future<void> updateObjectFit(RTCVideoViewObjectFit objectFit) async { - if (_disposed) { - throw 'Can\'t set objectFit: The RTCVideoPlatformController is disposed'; - } - if (_viewId == null) throw 'Call initialize before setting the objectFit'; - try { - await WebRTC.invokeMethod( - 'videoPlatformViewRendererSetObjectFit', { - 'viewId': _viewId, - 'objectFit': objectFit.index, - }); - } on PlatformException catch (e) { - throw 'Got exception for RTCVideoPlatformController::setObjectFit: ${e.message}'; - } - } } diff --git a/lib/src/native/rtc_video_renderer_impl.dart b/lib/src/native/rtc_video_renderer_impl.dart index 9607b3902b..d3628a7521 100644 --- a/lib/src/native/rtc_video_renderer_impl.dart +++ b/lib/src/native/rtc_video_renderer_impl.dart @@ -11,6 +11,7 @@ import 'utils.dart'; class RTCVideoRenderer extends ValueNotifier<RTCVideoValue> implements VideoRenderer { RTCVideoRenderer() : super(RTCVideoValue.empty); + Completer<void>? _initializing; int? _textureId; bool _disposed = false; MediaStream? _srcObject; @@ -18,14 +19,18 @@ @override Future<void> initialize() async { - if (_textureId != null) { + if (_initializing != null) { + await _initializing!.future; + return; + } + _initializing = Completer(); final response = await WebRTC.invokeMethod('createVideoRenderer', {}); _textureId = response['textureId']; _eventSubscription = EventChannel('FlutterWebRTC/Texture$textureId') .receiveBroadcastStream() .listen(eventListener, onError: errorListener); + _initializing!.complete(null); } @override @@ -34,6 +39,9 @@ @override int get videoHeight => value.height.toInt(); + @override + RTCVideoValue get videoValue => value; + @override int? get textureId => _textureId; diff --git a/lib/src/native/rtc_video_view_impl.dart b/lib/src/native/rtc_video_view_impl.dart index c579466c14..9d236143c1 100644 --- a/lib/src/native/rtc_video_view_impl.dart +++ b/lib/src/native/rtc_video_view_impl.dart @@ -9,12 +9,12 @@ import 'rtc_video_renderer_impl.dart'; class RTCVideoView extends StatelessWidget { RTCVideoView( this._renderer, { - Key? key, + super.key, this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, this.mirror = false, this.filterQuality = FilterQuality.low, this.placeholderBuilder, - }) : super(key: key); + }); final RTCVideoRenderer _renderer; final RTCVideoViewObjectFit objectFit; diff --git a/lib/src/native/utils.dart b/lib/src/native/utils.dart index caa518f164..362e7917cd 100644 --- a/lib/src/native/utils.dart +++ b/lib/src/native/utils.dart @@ -48,6 +48,8 @@ class WebRTC { /// "forceSWCodecList": a list of strings of software codecs that should use software. /// /// "androidAudioConfiguration": an AndroidAudioConfiguration object mapped with toMap() + /// + /// "bypassVoiceProcessing": a boolean that bypasses the audio processing for the audio device. static Future<void> initialize({Map<String, dynamic>? 
options}) async { if (!initialized) { await _channel.invokeMethod('initialize', { diff --git a/lib/src/web/rtc_video_renderer_impl.dart b/lib/src/web/rtc_video_renderer_impl.dart index 36eb0297fa..2b9c7f3959 100644 --- a/lib/src/web/rtc_video_renderer_impl.dart +++ b/lib/src/web/rtc_video_renderer_impl.dart @@ -40,7 +40,7 @@ class RTCVideoRenderer extends ValueNotifier static const _elementIdForAudioManager = 'html_webrtc_audio_manager_list'; - web.AudioElement? _audioElement; + web.HTMLAudioElement? _audioElement; static int _textureCounter = 1; @@ -72,6 +72,8 @@ class RTCVideoRenderer extends ValueNotifier @override int get videoHeight => value.height.toInt(); + @override + RTCVideoValue get videoValue => value; @override int get textureId => _textureId; @@ -134,7 +136,7 @@ class RTCVideoRenderer extends ValueNotifier if (null != _audioStream) { if (null == _audioElement) { - _audioElement = web.AudioElement() + _audioElement = web.HTMLAudioElement() ..id = _elementIdForAudio ..muted = stream.ownerTag == 'local' ..autoplay = true; @@ -184,7 +186,7 @@ class RTCVideoRenderer extends ValueNotifier if (null != _audioStream) { if (null == _audioElement) { - _audioElement = web.AudioElement() + _audioElement = web.HTMLAudioElement() ..id = _elementIdForAudio ..muted = stream.ownerTag == 'local' ..autoplay = true; @@ -213,9 +215,9 @@ class RTCVideoRenderer extends ValueNotifier return div as web.HTMLDivElement; } - web.VideoElement? findHtmlView() { + web.HTMLVideoElement? findHtmlView() { final element = web.document.getElementById(_elementIdForVideo); - if (null != element) return element as web.VideoElement; + if (null != element) return element as web.HTMLVideoElement; return null; } @@ -261,7 +263,7 @@ class RTCVideoRenderer extends ValueNotifier } _subscriptions.clear(); - final element = web.VideoElement() + final element = web.HTMLVideoElement() ..autoplay = true ..muted = true ..controls = false @@ -311,7 +313,7 @@ class RTCVideoRenderer extends ValueNotifier }); } - void _applyDefaultVideoStyles(web.VideoElement element) { + void _applyDefaultVideoStyles(web.HTMLVideoElement element) { // Flip the video horizontally if is mirrored. if (mirror) { element.style.transform = 'scaleX(-1)'; diff --git a/lib/src/web/rtc_video_view_impl.dart b/lib/src/web/rtc_video_view_impl.dart index c55df09534..9ef8ff1461 100644 --- a/lib/src/web/rtc_video_view_impl.dart +++ b/lib/src/web/rtc_video_view_impl.dart @@ -10,12 +10,12 @@ import 'rtc_video_renderer_impl.dart'; class RTCVideoView extends StatefulWidget { RTCVideoView( this._renderer, { - Key? key, + super.key, this.objectFit = RTCVideoViewObjectFit.RTCVideoViewObjectFitContain, this.mirror = false, this.filterQuality = FilterQuality.low, this.placeholderBuilder, - }) : super(key: key); + }); final RTCVideoRenderer _renderer; final RTCVideoViewObjectFit objectFit; diff --git a/lib/src/web/utils.dart b/lib/src/web/utils.dart index 966425d724..9203763bf2 100644 --- a/lib/src/web/utils.dart +++ b/lib/src/web/utils.dart @@ -18,4 +18,7 @@ class WebRTC { static Future invokeMethod(String methodName, [dynamic param]) async => throw UnimplementedError(); + + static Future initialize({Map? 
options}) async => + throw UnimplementedError('initialize is not supported on web'); } diff --git a/linux/CMakeLists.txt b/linux/CMakeLists.txt index 005dac86e3..82345d3443 100644 --- a/linux/CMakeLists.txt +++ b/linux/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.10) -set(PROJECT_NAME "flutter_webrtc") +set(PROJECT_NAME "videosdk_webrtc") project(${PROJECT_NAME} LANGUAGES CXX) set(PLUGIN_NAME "${PROJECT_NAME}_plugin") @@ -20,7 +20,7 @@ add_library(${PLUGIN_NAME} SHARED "../common/cpp/src/flutter_webrtc.cc" "../common/cpp/src/flutter_webrtc_base.cc" "../common/cpp/src/flutter_common.cc" - "../common/cpp/flutter_webrtc_plugin.cc" + "flutter_webrtc_plugin.cc" "flutter/core_implementations.cc" "flutter/standard_codec.cc" "flutter/plugin_registrar.cc" @@ -40,7 +40,7 @@ set_target_properties(${PLUGIN_NAME} PROPERTIES CXX_VISIBILITY_PRESET hidden) target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) target_include_directories(${PLUGIN_NAME} INTERFACE -"${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include") +"${CMAKE_CURRENT_SOURCE_DIR}") target_link_libraries(${PLUGIN_NAME} PRIVATE flutter) target_link_libraries(${PLUGIN_NAME} PRIVATE PkgConfig::GTK) @@ -50,7 +50,7 @@ target_link_libraries(${PLUGIN_NAME} PRIVATE ) # List of absolute paths to libraries that should be bundled with the plugin -set(flutter_webrtc_bundled_libraries +set(videosdk_webrtc_bundled_libraries "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" PARENT_SCOPE ) diff --git a/linux/flutter_webrtc_plugin.cc b/linux/flutter_webrtc_plugin.cc new file mode 100644 index 0000000000..3cd15f8c1a --- /dev/null +++ b/linux/flutter_webrtc_plugin.cc @@ -0,0 +1,73 @@ +#include "flutter_webrtc/flutter_web_r_t_c_plugin.h" + +#include "flutter_common.h" +#include "flutter_webrtc.h" + +const char* kChannelName = "FlutterWebRTC.Method"; + +namespace flutter_webrtc_plugin { + +// A webrtc plugin for windows/linux. +class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { + public: + static void RegisterWithRegistrar(PluginRegistrar* registrar) { + auto channel = std::make_unique<MethodChannel>( + registrar->messenger(), kChannelName, + &flutter::StandardMethodCodec::GetInstance()); + + auto* channel_pointer = channel.get(); + + // Uses new instead of make_unique due to private constructor. + std::unique_ptr<FlutterWebRTCPluginImpl> plugin( + new FlutterWebRTCPluginImpl(registrar, std::move(channel))); + + channel_pointer->SetMethodCallHandler( + [plugin_pointer = plugin.get()](const auto& call, auto result) { + plugin_pointer->HandleMethodCall(call, std::move(result)); + }); + + registrar->AddPlugin(std::move(plugin)); + } + + virtual ~FlutterWebRTCPluginImpl() {} + + BinaryMessenger* messenger() { return messenger_; } + + TextureRegistrar* textures() { return textures_; } + + private: + // Creates a plugin that communicates on the given channel. + FlutterWebRTCPluginImpl(PluginRegistrar* registrar, + std::unique_ptr<MethodChannel> channel) + : channel_(std::move(channel)), + messenger_(registrar->messenger()), + textures_(registrar->texture_registrar()) { + webrtc_ = std::make_unique<FlutterWebRTC>(this); + } + + // Called when a method is called on |channel_|; + void HandleMethodCall(const MethodCall& method_call, + std::unique_ptr<MethodResult> result) { + // handle method call and forward to webrtc native sdk. 
+ auto method_call_proxy = MethodCallProxy::Create(method_call); + webrtc_->HandleMethodCall(*method_call_proxy.get(), + MethodResultProxy::Create(std::move(result))); + } + + private: + std::unique_ptr<MethodChannel> channel_; + std::unique_ptr<FlutterWebRTC> webrtc_; + BinaryMessenger* messenger_; + TextureRegistrar* textures_; +}; + +} // namespace flutter_webrtc_plugin + +void flutter_web_r_t_c_plugin_register_with_registrar( + FlPluginRegistrar* registrar) { + static auto* plugin_registrar = new flutter::PluginRegistrar(registrar); + flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + plugin_registrar); +} \ No newline at end of file diff --git a/macos/Classes/AudioManager.h b/macos/Classes/AudioManager.h new file mode 100644 index 0000000000..211f3f1e4c --- /dev/null +++ b/macos/Classes/AudioManager.h @@ -0,0 +1,20 @@ +#import <Foundation/Foundation.h> +#import <WebRTC/WebRTC.h> +#import "AudioProcessingAdapter.h" + +@interface AudioManager : NSObject + +@property(nonatomic, strong) RTCDefaultAudioProcessingModule* _Nonnull audioProcessingModule; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull capturePostProcessingAdapter; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull renderPreProcessingAdapter; + ++ (_Nonnull instancetype)sharedInstance; + +- (void)addLocalAudioRenderer:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer; + +- (void)removeLocalAudioRenderer:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer; + +@end + diff --git a/macos/Classes/AudioManager.m b/macos/Classes/AudioManager.m new file mode 100644 index 0000000000..efc3a8741e --- /dev/null +++ b/macos/Classes/AudioManager.m @@ -0,0 +1,50 @@ +#import "AudioManager.h" +#import "AudioProcessingAdapter.h" + +@implementation AudioManager { + RTCDefaultAudioProcessingModule* _audioProcessingModule; + AudioProcessingAdapter* _capturePostProcessingAdapter; + AudioProcessingAdapter* _renderPreProcessingAdapter; +} + +@synthesize capturePostProcessingAdapter = _capturePostProcessingAdapter; +@synthesize renderPreProcessingAdapter = _renderPreProcessingAdapter; +@synthesize audioProcessingModule = _audioProcessingModule; + ++ (instancetype)sharedInstance { + static dispatch_once_t onceToken; + static AudioManager* sharedInstance = nil; + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + return sharedInstance; +} + +- (instancetype)init { + if (self = [super init]) { + _audioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init]; + _capturePostProcessingAdapter = [[AudioProcessingAdapter alloc] init]; + _renderPreProcessingAdapter = [[AudioProcessingAdapter alloc] init]; + _audioProcessingModule.capturePostProcessingDelegate = _capturePostProcessingAdapter; + _audioProcessingModule.renderPreProcessingDelegate = _renderPreProcessingAdapter; + } + return self; +} + +- (void)addLocalAudioRenderer:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer { + [_capturePostProcessingAdapter addAudioRenderer:renderer]; +} + +- (void)removeLocalAudioRenderer:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer { + [_capturePostProcessingAdapter removeAudioRenderer:renderer]; +} + +- (void)addRemoteAudioSink:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)sink { + [_renderPreProcessingAdapter addAudioRenderer:sink]; +} + +- (void)removeRemoteAudioSink:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)sink { + [_renderPreProcessingAdapter removeAudioRenderer:sink]; +} + +@end \ No newline at end of file diff --git a/macos/Classes/AudioProcessingAdapter.h b/macos/Classes/AudioProcessingAdapter.h new file mode 100644 index 0000000000..91498b45d8 --- /dev/null +++ b/macos/Classes/AudioProcessingAdapter.h @@ -0,0 +1,26 @@ +#import <AVFoundation/AVFoundation.h> +#import <WebRTC/WebRTC.h> + +@protocol ExternalAudioProcessingDelegate + +- 
(void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels; + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) * _Nonnull)audioBuffer; + +- (void)audioProcessingRelease; + +@end + +@interface AudioProcessingAdapter : NSObject <RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)> + +- (nonnull instancetype)init; + +- (void)addProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor; + +- (void)removeProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor; + +- (void)addAudioRenderer:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer; + +- (void)removeAudioRenderer:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer; + +@end \ No newline at end of file diff --git a/macos/Classes/AudioProcessingAdapter.m b/macos/Classes/AudioProcessingAdapter.m new file mode 100644 index 0000000000..73fa3dda1f --- /dev/null +++ b/macos/Classes/AudioProcessingAdapter.m @@ -0,0 +1,105 @@ +#import "AudioProcessingAdapter.h" +#import <AVFoundation/AVFoundation.h> +#import <os/lock.h> + +@implementation AudioProcessingAdapter { + NSMutableArray<id<RTC_OBJC_TYPE(RTCAudioRenderer)>>* _renderers; + NSMutableArray<id<ExternalAudioProcessingDelegate>>* _processors; + os_unfair_lock _lock; +} + +- (instancetype)init { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _renderers = [[NSMutableArray alloc] init]; + _processors = [[NSMutableArray alloc] init]; + } + return self; +} + +- (void)addProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor { + os_unfair_lock_lock(&_lock); + [_processors addObject:processor]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor { + os_unfair_lock_lock(&_lock); + _processors = [[_processors + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != processor; + }]] mutableCopy]; + os_unfair_lock_unlock(&_lock); +} + +- (void)addAudioRenderer:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer { + os_unfair_lock_lock(&_lock); + [_renderers addObject:renderer]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeAudioRenderer:(nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer { + os_unfair_lock_lock(&_lock); + _renderers = [[_renderers + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != renderer; + }]] mutableCopy]; + os_unfair_lock_unlock(&_lock); +} + +- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels { + os_unfair_lock_lock(&_lock); + for (id<ExternalAudioProcessingDelegate> processor in _processors) { + [processor audioProcessingInitializeWithSampleRate:sampleRateHz channels:channels]; + } + os_unfair_lock_unlock(&_lock); +} + +- (AVAudioPCMBuffer*)toPCMBuffer:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + // The audio processing module delivers 10 ms buffers, so frames * 100 + // recovers the sample rate in Hz. + AVAudioFormat* format = + [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16 + sampleRate:audioBuffer.frames * 100.0 + channels:(AVAudioChannelCount)audioBuffer.channels + interleaved:NO]; + AVAudioPCMBuffer* pcmBuffer = + [[AVAudioPCMBuffer alloc] initWithPCMFormat:format + frameCapacity:(AVAudioFrameCount)audioBuffer.frames]; + if (!pcmBuffer) { + NSLog(@"Failed to create AVAudioPCMBuffer"); + return nil; + } + pcmBuffer.frameLength = (AVAudioFrameCount)audioBuffer.frames; + for (int i = 0; i < audioBuffer.channels; i++) { + // WebRTC hands out float samples already scaled to the int16 range, + // so a straight truncating copy is sufficient here. + float* sourceBuffer = [audioBuffer rawBufferForChannel:i]; + int16_t* targetBuffer = (int16_t*)pcmBuffer.int16ChannelData[i]; + for (int frame = 0; frame < audioBuffer.frames; frame++) { + targetBuffer[frame] = sourceBuffer[frame]; + } + } + return pcmBuffer; +} + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + os_unfair_lock_lock(&_lock); + for (id<ExternalAudioProcessingDelegate> processor in _processors) { + [processor audioProcessingProcess:audioBuffer]; + } + + for (id<RTC_OBJC_TYPE(RTCAudioRenderer)> renderer in _renderers) { 
diff --git a/macos/Classes/CameraUtils.h b/macos/Classes/CameraUtils.h
new file mode 100644
index 0000000000..efe9ae87aa
--- /dev/null
+++ b/macos/Classes/CameraUtils.h
@@ -0,0 +1,43 @@
+#import <AVFoundation/AVFoundation.h>
+#import "FlutterWebRTCPlugin.h"
+
+@interface FlutterWebRTCPlugin (CameraUtils)
+
+- (void)mediaStreamTrackHasTorch:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetTorch:(nonnull RTCMediaStreamTrack*)track
+                           torch:(BOOL)torch
+                          result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetZoom:(nonnull RTCMediaStreamTrack*)track
+                      zoomLevel:(double)zoomLevel
+                         result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track
+                           focusMode:(nonnull NSString*)focusMode
+                              result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track
+                           focusPoint:(nonnull NSDictionary*)focusPoint
+                               result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track
+                           exposureMode:(nonnull NSString*)exposureMode
+                                 result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track
+                           exposurePoint:(nonnull NSDictionary*)exposurePoint
+                                  result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSwitchCamera:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result;
+
+- (NSInteger)selectFpsForFormat:(nonnull AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps;
+
+- (nullable AVCaptureDeviceFormat*)selectFormatForDevice:(nonnull AVCaptureDevice*)device
+                                              targetWidth:(NSInteger)targetWidth
+                                             targetHeight:(NSInteger)targetHeight;
+
+- (nullable AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position;
+
+@end
\ No newline at end of file
diff --git a/macos/Classes/CameraUtils.m b/macos/Classes/CameraUtils.m
new file mode 100644
index 0000000000..fea72a6b8e
--- /dev/null
+++ b/macos/Classes/CameraUtils.m
@@ -0,0 +1,350 @@
+#import "CameraUtils.h"
+
+@implementation FlutterWebRTCPlugin (CameraUtils)
+
+- (AVCaptureDevice*)currentDevice {
+  if (!self.videoCapturer) {
+    return nil;
+  }
+  if (self.videoCapturer.captureSession.inputs.count == 0) {
+    return nil;
+  }
+  AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0];
+  return deviceInput.device;
+}
+
+- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't check torch");
+    result(@NO);
+    return;
+  }
+  result(@([device isTorchModeSupported:AVCaptureTorchModeOn]));
+#else
+  NSLog(@"Not supported on macOS. Can't check torch");
+  result(@NO);
+#endif
+}
+- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track
+                           torch:(BOOL)torch
+                          result:(FlutterResult)result {
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set torch");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed"
+                               message:@"device is nil"
+                               details:nil]);
+    return;
+  }
+
+  if (![device isTorchModeSupported:AVCaptureTorchModeOn]) {
+    NSLog(@"Current capture device does not support torch. Can't set torch");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed"
+                               message:@"device does not support torch"
+                               details:nil]);
+    return;
+  }
+
+  NSError* error;
+  if ([device lockForConfiguration:&error] == NO) {
+    NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription);
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed"
+                               message:error.localizedDescription
+                               details:nil]);
+    return;
+  }
+
+  device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
+  [device unlockForConfiguration];
+
+  result(nil);
+}
+
+- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track
+                      zoomLevel:(double)zoomLevel
+                         result:(FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set zoom");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed"
+                               message:@"device is nil"
+                               details:nil]);
+    return;
+  }
+
+  NSError* error;
+  if ([device lockForConfiguration:&error] == NO) {
+    NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription);
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed"
+                               message:error.localizedDescription
+                               details:nil]);
+    return;
+  }
+
+  CGFloat desiredZoomFactor = (CGFloat)zoomLevel;
+  device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor));
+  [device unlockForConfiguration];
+
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set zoom");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed"
+                             message:@"Not supported on macOS"
+                             details:nil]);
+#endif
+}
+
+- (void)applyFocusMode:(NSString*)focusMode onDevice:(AVCaptureDevice*)captureDevice {
+#if TARGET_OS_IPHONE
+  [captureDevice lockForConfiguration:nil];
+  if ([@"locked" isEqualToString:focusMode]) {
+    if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+      [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+    }
+  } else if ([@"auto" isEqualToString:focusMode]) {
+    if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
+      [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+    } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+      [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+    }
+  }
+  [captureDevice unlockForConfiguration];
+#endif
+}
+- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track
+                           focusMode:(nonnull NSString*)focusMode
+                              result:(nonnull FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set focusMode");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed"
+                               message:@"device is nil"
+                               details:nil]);
+    return;
+  }
+  self.focusMode = focusMode;
+  [self applyFocusMode:focusMode onDevice:device];
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set focusMode");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed"
+                             message:@"Not supported on macOS"
+                             details:nil]);
+#endif
+}
+
+- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track
+                           focusPoint:(nonnull NSDictionary*)focusPoint
+                               result:(nonnull FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set focusPoint");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed"
+                               message:@"device is nil"
+                               details:nil]);
+    return;
+  }
+  if (!device.isFocusPointOfInterestSupported) {
+    NSLog(@"Focus point of interest is not supported. Can't set focusPoint");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed"
+                               message:@"Focus point of interest is not supported"
+                               details:nil]);
+    return;
+  }
+
+  BOOL reset = ((NSNumber*)focusPoint[@"reset"]).boolValue;
+  double x = 0.5;
+  double y = 0.5;
+  if (!reset) {
+    x = ((NSNumber*)focusPoint[@"x"]).doubleValue;
+    y = ((NSNumber*)focusPoint[@"y"]).doubleValue;
+  }
+
+  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
+  [device lockForConfiguration:nil];
+  [device setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
+                                                                          x:x
+                                                                          y:y]];
+  [device unlockForConfiguration];
+
+  [self applyFocusMode:self.focusMode onDevice:device];
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set focusPoint");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed"
+                             message:@"Not supported on macOS"
+                             details:nil]);
+#endif
+}
+
+- (void)applyExposureMode:(NSString*)exposureMode onDevice:(AVCaptureDevice*)captureDevice {
+#if TARGET_OS_IPHONE
+  [captureDevice lockForConfiguration:nil];
+  if ([@"locked" isEqualToString:exposureMode]) {
+    if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
+      [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
+    }
+  } else if ([@"auto" isEqualToString:exposureMode]) {
+    if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
+      [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
+    } else if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
+      [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
+    }
+  }
+  [captureDevice unlockForConfiguration];
+#endif
+}
+- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track
+                           exposureMode:(nonnull NSString*)exposureMode
+                                 result:(nonnull FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set exposureMode");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed"
+                               message:@"device is nil"
+                               details:nil]);
+    return;
+  }
+  self.exposureMode = exposureMode;
+  [self applyExposureMode:exposureMode onDevice:device];
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set exposureMode");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed"
+                             message:@"Not supported on macOS"
+                             details:nil]);
+#endif
+}
+
+#if TARGET_OS_IPHONE
+- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
+                                            x:(double)x
+                                            y:(double)y {
+  double oldX = x, oldY = y;
+  switch (orientation) {
+    case UIDeviceOrientationPortrait:  // 90 ccw
+      y = 1 - oldX;
+      x = oldY;
+      break;
+    case UIDeviceOrientationPortraitUpsideDown:  // 90 cw
+      x = 1 - oldY;
+      y = oldX;
+      break;
+    case UIDeviceOrientationLandscapeRight:  // 180
+      x = 1 - x;
+      y = 1 - y;
+      break;
+    case UIDeviceOrientationLandscapeLeft:
+    default:
+      // No rotation required
+      break;
+  }
+  return CGPointMake(x, y);
+}
+#endif
+
+- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track
+                           exposurePoint:(nonnull NSDictionary*)exposurePoint
+                                  result:(nonnull FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set exposurePoint");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed"
+                               message:@"device is nil"
+                               details:nil]);
+    return;
+  }
+  if (!device.isExposurePointOfInterestSupported) {
+    NSLog(@"Exposure point of interest is not supported. Can't set exposurePoint");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed"
+                               message:@"Exposure point of interest is not supported"
+                               details:nil]);
+    return;
+  }
+
+  BOOL reset = ((NSNumber*)exposurePoint[@"reset"]).boolValue;
+  double x = 0.5;
+  double y = 0.5;
+  if (!reset) {
+    x = ((NSNumber*)exposurePoint[@"x"]).doubleValue;
+    y = ((NSNumber*)exposurePoint[@"y"]).doubleValue;
+  }
+
+  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
+  [device lockForConfiguration:nil];
+  [device setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
+                                                                             x:x
+                                                                             y:y]];
+  [device unlockForConfiguration];
+
+  [self applyExposureMode:self.exposureMode onDevice:device];
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set exposurePoint");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed"
+                             message:@"Not supported on macOS"
+                             details:nil]);
+#endif
+}
+
+- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result {
+  if (!self.videoCapturer) {
+    NSLog(@"Video capturer is null. Can't switch camera");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSwitchCameraFailed"
+                               message:@"video capturer is nil"
+                               details:nil]);
+    return;
+  }
+#if TARGET_OS_IPHONE
+  [self.videoCapturer stopCapture];
+#endif
+  self._usingFrontCamera = !self._usingFrontCamera;
+  AVCaptureDevicePosition position =
+      self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+  AVCaptureDevice* videoDevice = [self findDeviceForPosition:position];
+  AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice
+                                                           targetWidth:self._lastTargetWidth
+                                                          targetHeight:self._lastTargetHeight];
+  [self.videoCapturer startCaptureWithDevice:videoDevice
+                                      format:selectedFormat
+                                         fps:[self selectFpsForFormat:selectedFormat
+                                                            targetFps:self._lastTargetFps]
+                           completionHandler:^(NSError* error) {
+                             if (error != nil) {
+                               result([FlutterError errorWithCode:@"Error while switching camera"
+                                                          message:@"Error while switching camera"
+                                                          details:error]);
+                             } else {
+                               result([NSNumber numberWithBool:self._usingFrontCamera]);
+                             }
+                           }];
+}
+
+- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position {
+  if (position == AVCaptureDevicePositionUnspecified) {
+    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+  }
+  NSArray<AVCaptureDevice*>* captureDevices = [RTCCameraVideoCapturer captureDevices];
+  for (AVCaptureDevice* device in captureDevices) {
+    if (device.position == position) {
+      return device;
+    }
+  }
+  return captureDevices[0];
+}
+
+- (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device
+                                    targetWidth:(NSInteger)targetWidth
+                                   targetHeight:(NSInteger)targetHeight {
+  NSArray<AVCaptureDeviceFormat*>* formats =
+      [RTCCameraVideoCapturer supportedFormatsForDevice:device];
+  AVCaptureDeviceFormat* selectedFormat = nil;
+  long currentDiff = INT_MAX;
+  for (AVCaptureDeviceFormat* format in formats) {
+    CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+    // Pick the format whose resolution is closest to the target; break ties in
+    // favor of the capturer's preferred pixel format.
+    long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height);
+    if (diff < currentDiff) {
+      selectedFormat = format;
+      currentDiff = diff;
+    } else if (diff == currentDiff &&
+               pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) {
+      selectedFormat = format;
+    }
+  }
+  return selectedFormat;
+}
+
+- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps {
+  Float64 maxSupportedFramerate = 0;
+  for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) {
+    maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
+  }
+  return fmin(maxSupportedFramerate, targetFps);
+}
+
+@end
\ No newline at end of file
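Note: these category methods are invoked from the plugin's method-call handler. A dispatch for one of them might look like the sketch below (handleSetZoom: is a hypothetical wrapper, trackForId:peerConnectionId: is assumed to be the plugin's existing track lookup helper, and the argument keys are illustrative):

// Sketch: forwarding a "mediaStreamTrackSetZoom" method call to the category.
- (void)handleSetZoom:(FlutterMethodCall*)call result:(FlutterResult)result {
  NSDictionary* args = call.arguments;
  RTCMediaStreamTrack* track = [self trackForId:args[@"trackId"] peerConnectionId:nil];
  [self mediaStreamTrackSetZoom:track
                      zoomLevel:[args[@"zoomLevel"] doubleValue]
                         result:result];
}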
diff --git a/macos/Classes/CustomCapturerDelegate.h b/macos/Classes/CustomCapturerDelegate.h
new file mode 100644
index 0000000000..cc2adedaba
--- /dev/null
+++ b/macos/Classes/CustomCapturerDelegate.h
@@ -0,0 +1,10 @@
+#import <Foundation/Foundation.h>
+#import <WebRTC/WebRTC.h>
+
+@interface CustomCapturerDelegate : NSObject <RTCVideoCapturerDelegate>
+
+@property (nonatomic, strong) RTCVideoSource *videoSource;
+
+- (instancetype)initWithVideoSource:(RTCVideoSource *)videoSource;
+
+@end
\ No newline at end of file
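Note: the implementation file for this delegate is not included in the diff. A minimal version consistent with the header would simply forward captured frames to the wrapped source, along these lines (a sketch, not the shipped code):

#import "CustomCapturerDelegate.h"

@implementation CustomCapturerDelegate

- (instancetype)initWithVideoSource:(RTCVideoSource *)videoSource {
  self = [super init];
  if (self) {
    _videoSource = videoSource;
  }
  return self;
}

// RTCVideoCapturerDelegate: hand every captured frame to the wrapped source.
- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
  [_videoSource capturer:capturer didCaptureVideoFrame:frame];
}

@end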
diff --git a/macos/Classes/LocalAudioTrack.h b/macos/Classes/LocalAudioTrack.h
new file mode 100644
index 0000000000..7cd1861a06
--- /dev/null
+++ b/macos/Classes/LocalAudioTrack.h
@@ -0,0 +1,19 @@
+#import <WebRTC/WebRTC.h>
+#import "AudioProcessingAdapter.h"
+#import "LocalTrack.h"
+
+@interface LocalAudioTrack : NSObject <LocalTrack>
+
+- (_Nonnull instancetype)initWithTrack:(RTCAudioTrack* _Nonnull)track;
+
+@property(nonatomic, strong) RTCAudioTrack* _Nonnull audioTrack;
+
+- (void)addRenderer:(_Nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer;
+
+- (void)removeRenderer:(_Nonnull id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer;
+
+- (void)addProcessing:(_Nonnull id<ExternalAudioProcessingDelegate>)processor;
+
+- (void)removeProcessing:(_Nonnull id<ExternalAudioProcessingDelegate>)processor;
+
+@end
diff --git a/macos/Classes/LocalAudioTrack.m b/macos/Classes/LocalAudioTrack.m
new file mode 100644
index 0000000000..a080d4f090
--- /dev/null
+++ b/macos/Classes/LocalAudioTrack.m
@@ -0,0 +1,38 @@
+#import "LocalAudioTrack.h"
+#import "AudioManager.h"
+
+@implementation LocalAudioTrack {
+  RTCAudioTrack* _track;
+}
+
+@synthesize audioTrack = _track;
+
+- (instancetype)initWithTrack:(RTCAudioTrack*)track {
+  self = [super init];
+  if (self) {
+    _track = track;
+  }
+  return self;
+}
+
+- (RTCMediaStreamTrack*)track {
+  return _track;
+}
+
+- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer {
+  [AudioManager.sharedInstance addLocalAudioRenderer:renderer];
+}
+
+- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCAudioRenderer)>)renderer {
+  [AudioManager.sharedInstance removeLocalAudioRenderer:renderer];
+}
+
+- (void)addProcessing:(_Nonnull id<ExternalAudioProcessingDelegate>)processor {
+  [AudioManager.sharedInstance.capturePostProcessingAdapter addProcessing:processor];
+}
+
+- (void)removeProcessing:(_Nonnull id<ExternalAudioProcessingDelegate>)processor {
+  [AudioManager.sharedInstance.capturePostProcessingAdapter removeProcessing:processor];
+}
+
+@end
diff --git a/macos/Classes/LocalTrack.h b/macos/Classes/LocalTrack.h
new file mode 100644
index 0000000000..e224df4c89
--- /dev/null
+++ b/macos/Classes/LocalTrack.h
@@ -0,0 +1,7 @@
+#import <WebRTC/WebRTC.h>
+
+@protocol LocalTrack <NSObject>
+
+- (RTCMediaStreamTrack*)track;
+
+@end
\ No newline at end of file
diff --git a/macos/Classes/LocalVideoTrack.h b/macos/Classes/LocalVideoTrack.h
new file mode 100644
index 0000000000..3d4654e336
--- /dev/null
+++ b/macos/Classes/LocalVideoTrack.h
@@ -0,0 +1,24 @@
+#import <WebRTC/WebRTC.h>
+#import "LocalTrack.h"
+#import "VideoProcessingAdapter.h"
+
+@interface LocalVideoTrack : NSObject <LocalTrack>
+
+- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track;
+
+- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track
+                       videoProcessing:(VideoProcessingAdapter* _Nullable)processing;
+
+@property(nonatomic, strong) RTCVideoTrack* _Nonnull videoTrack;
+
+@property(nonatomic, strong) VideoProcessingAdapter* _Nonnull processing;
+
+- (void)addRenderer:(_Nonnull id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer;
+
+- (void)removeRenderer:(_Nonnull id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer;
+
+- (void)addProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor;
+
+- (void)removeProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor;
+
+@end
\ No newline at end of file
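Note: the track wrappers above are the public surface for the audio/video pipelines. A consumer taps local microphone audio roughly as follows (LevelMeter is a hypothetical RTCAudioRenderer implementation; GainProcessor is the sketch shown earlier; neither is part of this change):

// Sketch: attach a renderer and a processor to a local audio track.
LocalAudioTrack* local = [[LocalAudioTrack alloc] initWithTrack:audioTrack];

LevelMeter* meter = [[LevelMeter alloc] init];  // hypothetical RTCAudioRenderer
[local addRenderer:meter];       // receives AVAudioPCMBuffers via AudioProcessingAdapter

GainProcessor* gain = [[GainProcessor alloc] init];
[local addProcessing:gain];      // runs inside the capture post-processing chain

// Detach when done.
[local removeRenderer:meter];
[local removeProcessing:gain];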
diff --git a/macos/Classes/LocalVideoTrack.m b/macos/Classes/LocalVideoTrack.m
new file mode 100644
index 0000000000..d08c432f02
--- /dev/null
+++ b/macos/Classes/LocalVideoTrack.m
@@ -0,0 +1,47 @@
+#import "LocalVideoTrack.h"
+
+@implementation LocalVideoTrack {
+  RTCVideoTrack* _track;
+  VideoProcessingAdapter* _processing;
+}
+
+@synthesize videoTrack = _track;
+@synthesize processing = _processing;
+
+- (instancetype)initWithTrack:(RTCVideoTrack*)track
+              videoProcessing:(VideoProcessingAdapter*)processing {
+  self = [super init];
+  if (self) {
+    _track = track;
+    _processing = processing;
+  }
+  return self;
+}
+
+- (instancetype)initWithTrack:(RTCVideoTrack*)track {
+  return [self initWithTrack:track videoProcessing:nil];
+}
+
+- (RTCMediaStreamTrack*)track {
+  return _track;
+}
+
+/** Register a renderer that will render all frames received on this track. */
+- (void)addRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {
+  [_track addRenderer:renderer];
+}
+
+/** Deregister a renderer. */
+- (void)removeRenderer:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)renderer {
+  [_track removeRenderer:renderer];
+}
+
+- (void)addProcessing:(id<ExternalVideoProcessingDelegate>)processor {
+  [_processing addProcessing:processor];
+}
+
+- (void)removeProcessing:(id<ExternalVideoProcessingDelegate>)processor {
+  [_processing removeProcessing:processor];
+}
+
+@end
\ No newline at end of file
diff --git a/macos/Classes/VideoProcessingAdapter.h b/macos/Classes/VideoProcessingAdapter.h
new file mode 100644
index 0000000000..c953316eec
--- /dev/null
+++ b/macos/Classes/VideoProcessingAdapter.h
@@ -0,0 +1,18 @@
+#import <Foundation/Foundation.h>
+#import <WebRTC/WebRTC.h>
+
+@protocol ExternalVideoProcessingDelegate <NSObject>
+- (RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)onFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)frame;
+@end
+
+@interface VideoProcessingAdapter : NSObject <RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>
+
+- (_Nonnull instancetype)initWithRTCVideoSource:(RTCVideoSource* _Nonnull)source;
+
+- (void)addProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor;
+
+- (void)removeProcessing:(_Nonnull id<ExternalVideoProcessingDelegate>)processor;
+
+- (RTCVideoSource* _Nonnull)source;
+
+@end
diff --git a/macos/Classes/VideoProcessingAdapter.m b/macos/Classes/VideoProcessingAdapter.m
new file mode 100644
index 0000000000..f3e7966522
--- /dev/null
+++ b/macos/Classes/VideoProcessingAdapter.m
@@ -0,0 +1,55 @@
+#import "VideoProcessingAdapter.h"
+#import <os/lock.h>
+
+@implementation VideoProcessingAdapter {
+  RTCVideoSource* _videoSource;
+  CGSize _frameSize;
+  NSArray<id<ExternalVideoProcessingDelegate>>* _processors;
+  os_unfair_lock _lock;
+}
+
+- (instancetype)initWithRTCVideoSource:(RTCVideoSource*)source {
+  self = [super init];
+  if (self) {
+    _lock = OS_UNFAIR_LOCK_INIT;
+    _videoSource = source;
+    _processors = [NSArray<id<ExternalVideoProcessingDelegate>> new];
+  }
+  return self;
+}
+
+- (RTCVideoSource* _Nonnull)source {
+  return _videoSource;
+}
+
+- (void)addProcessing:(id<ExternalVideoProcessingDelegate>)processor {
+  os_unfair_lock_lock(&_lock);
+  _processors = [_processors arrayByAddingObject:processor];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)removeProcessing:(id<ExternalVideoProcessingDelegate>)processor {
+  os_unfair_lock_lock(&_lock);
+  _processors = [_processors
+      filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject,
+                                                                        NSDictionary* bindings) {
+        return evaluatedObject != processor;
+      }]];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)setSize:(CGSize)size {
+  _frameSize = size;
+}
+
+- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer
+    didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  os_unfair_lock_lock(&_lock);
+  // Each processor may transform or replace the frame before it reaches the source.
+  for (id<ExternalVideoProcessingDelegate> processor in _processors) {
+    frame = [processor onFrame:frame];
+  }
+  [_videoSource capturer:capturer didCaptureVideoFrame:frame];
+  os_unfair_lock_unlock(&_lock);
+}
+
+@end
\ No newline at end of file
diff --git a/macos/Classes/VideoProcessor.h b/macos/Classes/VideoProcessor.h
new file mode 100644
index 0000000000..02e1599d4f
--- /dev/null
+++ b/macos/Classes/VideoProcessor.h
@@ -0,0 +1,10 @@
+#import <Foundation/Foundation.h>
+#import <WebRTC/WebRTC.h>
+
+// Base class for user-supplied video processors.
+@interface VideoProcessor : NSObject
+
+// Subclasses override this to transform each incoming frame.
+- (RTCVideoFrame *)onFrameReceived:(RTCVideoFrame *)frame;
+
+@end
\ No newline at end of file
diff --git a/macos/Classes/VideoProcessor.m b/macos/Classes/VideoProcessor.m
new file mode 100644
index 0000000000..48c33a8998
--- /dev/null
+++ b/macos/Classes/VideoProcessor.m
@@ -0,0 +1,7 @@
+#import "VideoProcessor.h"
+
+@implementation VideoProcessor
+
+// Intentionally empty: subclasses provide onFrameReceived:.
+
+@end
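Note: VideoProcessingAdapter sits between the capturer and the RTCVideoSource, while VideoProcessor is the subclassable hook; the glue between them is not shown in this diff. A bridge could look like the sketch below (VideoProcessorBridge is a hypothetical name, and the wiring is an assumption):

// Sketch: adapt a VideoProcessor subclass to the ExternalVideoProcessingDelegate chain.
#import "VideoProcessingAdapter.h"
#import "VideoProcessor.h"

@interface VideoProcessorBridge : NSObject <ExternalVideoProcessingDelegate>
@property(nonatomic, strong) VideoProcessor* processor;
@end

@implementation VideoProcessorBridge

- (RTCVideoFrame *)onFrame:(RTCVideoFrame *)frame {
  // Delegate to the user-supplied processor; fall back to pass-through.
  return self.processor ? [self.processor onFrameReceived:frame] : frame;
}

@end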
diff --git a/macos/Classes/WebRTCService.h b/macos/Classes/WebRTCService.h
new file mode 100644
index 0000000000..3b4c740c8e
--- /dev/null
+++ b/macos/Classes/WebRTCService.h
@@ -0,0 +1,18 @@
+
+#import <Foundation/Foundation.h>
+#import "VideoProcessor.h"  // Processor base class
+
+@interface WebRTCService : NSObject
+
+@property (nonatomic, strong) VideoProcessor *videoProcessor;
+
+// Singleton instance method
++ (instancetype)sharedInstance;
+
+// Method to set the Processor
+- (void)setVideoProcessor:(VideoProcessor *)videoProcessor;
+
+// Method to get the current Processor
+- (VideoProcessor *)getVideoProcessor;
+
+@end
\ No newline at end of file
diff --git a/macos/Classes/WebRTCService.m b/macos/Classes/WebRTCService.m
new file mode 100644
index 0000000000..2f7e5d4f71
--- /dev/null
+++ b/macos/Classes/WebRTCService.m
@@ -0,0 +1,36 @@
+#import "WebRTCService.h"
+
+@implementation WebRTCService
+
+// Static variable for the singleton instance
+static WebRTCService *instance = nil;
+
+// Private initializer to prevent instantiation from outside
+- (instancetype)initPrivate {
+  self = [super init];
+  if (self) {
+    // Initialization logic, if any
+  }
+  return self;
+}
+
+// Singleton instance method
++ (instancetype)sharedInstance {
+  static dispatch_once_t onceToken;
+  dispatch_once(&onceToken, ^{
+    instance = [[self alloc] initPrivate];
+  });
+  return instance;
+}
+
+// Method to set the Processor
+- (void)setVideoProcessor:(VideoProcessor *)videoProcessor {
+  _videoProcessor = videoProcessor;
+}
+
+// Method to get the current Processor
+- (VideoProcessor *)getVideoProcessor {
+  return _videoProcessor;
+}
+
+@end
\ No newline at end of file
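Note: callers interact with the singleton above like this (a sketch; where registration happens in the plugin lifecycle is not shown in this diff):

// Install a processor once, early in the app/plugin lifecycle.
VideoProcessor* processor = [[VideoProcessor alloc] init];
[[WebRTCService sharedInstance] setVideoProcessor:processor];

// Later, e.g. when building the capture pipeline:
VideoProcessor* active = [[WebRTCService sharedInstance] getVideoProcessor];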
diff --git a/macos/flutter_webrtc.podspec b/macos/videosdk_webrtc.podspec
similarity index 63%
rename from macos/flutter_webrtc.podspec
rename to macos/videosdk_webrtc.podspec
index 56453e360a..34bd520c99 100644
--- a/macos/flutter_webrtc.podspec
+++ b/macos/videosdk_webrtc.podspec
@@ -2,19 +2,19 @@
 # To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
 #
 Pod::Spec.new do |s|
-  s.name             = 'flutter_webrtc'
-  s.version          = '0.11.0'
+  s.name             = 'videosdk_webrtc'
+  s.version          = '0.0.6'
   s.summary          = 'Flutter WebRTC plugin for macOS.'
   s.description      = <<-DESC
 A new flutter plugin project.
                        DESC
-  s.homepage         = 'https://github.com/cloudwebrtc/flutter-webrtc'
+  s.homepage         = 'https://www.videosdk.live'
   s.license          = { :file => '../LICENSE' }
-  s.author           = { 'CloudWebRTC' => 'duanweiwei1982@gmail.com' }
+  s.author           = { 'VideoSDK' => 'sdk@videosdk.live' }
   s.source           = { :path => '.' }
   s.source_files     = ['Classes/**/*']
   s.dependency 'FlutterMacOS'
-  s.dependency 'WebRTC-SDK', '125.6422.02'
+  s.dependency 'WebRTC-SDK', '125.6422.06'
   s.osx.deployment_target = '10.14'
 end
diff --git a/pubspec.yaml b/pubspec.yaml
index e0c5a77227..0a64cd994b 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,25 +1,25 @@
-name: flutter_webrtc
+name: videosdk_webrtc
 description: Flutter WebRTC plugin for iOS/Android/Destkop/Web, based on GoogleWebRTC.
-version: 0.11.1
-homepage: https://github.com/cloudwebrtc/flutter-webrtc
+version: 0.0.6
+homepage: https://www.videosdk.live
 environment:
-  sdk: '>=3.3.0 <4.0.0'
-  flutter: '>=1.22.0'
+  sdk: ">=3.3.0 <4.0.0"
+  flutter: ">=1.22.0"
 
 dependencies:
-  collection: ^1.17.0
-  dart_webrtc: ^1.4.6
+  collection: ^1.18.0
+  dart_webrtc: ^1.5.1
   flutter:
     sdk: flutter
-  path_provider: ^2.0.2
-  web: ^0.5.1
-  webrtc_interface: ^1.2.0
+  path_provider: ^2.1.4
+  web: ^1.1.0
+  webrtc_interface: ^1.2.1
 
 dev_dependencies:
   flutter_test:
     sdk: flutter
   import_sorter: ^4.6.0
-  lints: ^2.0.0
+  lints: ^5.0.0
   pedantic: ^1.11.1
   test: any
@@ -27,7 +27,7 @@ flutter:
   plugin:
     platforms:
       android:
-        package: com.cloudwebrtc.webrtc
+        package: live.videosdk.webrtc
        pluginClass: FlutterWebRTCPlugin
       ios:
        pluginClass: FlutterWebRTCPlugin
@@ -37,3 +37,5 @@ flutter:
        pluginClass: FlutterWebRTCPlugin
       linux:
        pluginClass: FlutterWebRTCPlugin
+      elinux:
+        pluginClass: FlutterWebRTCPlugin
diff --git a/test/unit/rtc_peerconnection_test.dart b/test/unit/rtc_peerconnection_test.dart
index d7f86d9302..6c01ce2b1a 100644
--- a/test/unit/rtc_peerconnection_test.dart
+++ b/test/unit/rtc_peerconnection_test.dart
@@ -2,8 +2,8 @@
 import 'package:flutter/services.dart';
 import 'package:flutter_test/flutter_test.dart';
 
-import 'package:flutter_webrtc/src/native/rtc_data_channel_impl.dart';
-import 'package:flutter_webrtc/src/native/rtc_peerconnection_impl.dart';
+import 'package:videosdk_webrtc/src/native/rtc_data_channel_impl.dart';
+import 'package:videosdk_webrtc/src/native/rtc_peerconnection_impl.dart';
 
 void main() {
   TestWidgetsFlutterBinding.ensureInitialized();
diff --git a/test/unit/web/rtc_videw_view_test.dart b/test/unit/web/rtc_videw_view_test.dart
index 1ef6f29030..741d9f94e7 100644
--- a/test/unit/web/rtc_videw_view_test.dart
+++ b/test/unit/web/rtc_videw_view_test.dart
@@ -1,8 +1,9 @@
 @TestOn('browser')
+library;
 
 import 'package:flutter_test/flutter_test.dart';
 
-import 'package:flutter_webrtc/flutter_webrtc.dart';
+import 'package:videosdk_webrtc/flutter_webrtc.dart';
 
 void main() {
   // TODO(wer-mathurin): should revisit after this bug is resolved, https://github.com/flutter/flutter/issues/66045.
diff --git a/third_party/libwebrtc/include/rtc_mediaconstraints.h b/third_party/libwebrtc/include/rtc_mediaconstraints.h
index 2596e6153f..93c729fdfe 100644
--- a/third_party/libwebrtc/include/rtc_mediaconstraints.h
+++ b/third_party/libwebrtc/include/rtc_mediaconstraints.h
@@ -51,8 +51,8 @@ class RTCMediaConstraints : public RefCountInterface {
   LIB_WEBRTC_API static const char*
       kEnableVideoSuspendBelowMinBitrate;  // googSuspendBelowMinBitrate
   // Constraint to enable combined audio+video bandwidth estimation.
-  //LIB_WEBRTC_API static const char*
-  //    kCombinedAudioVideoBwe;  // googCombinedAudioVideoBwe
+  LIB_WEBRTC_API static const char*
+      kCombinedAudioVideoBwe;  // googCombinedAudioVideoBwe
   LIB_WEBRTC_API static const char*
       kScreencastMinBitrate;  // googScreencastMinBitrate
   LIB_WEBRTC_API static const char*
diff --git a/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so b/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so
index b4f54d1959..e4528b3aeb 100755
Binary files a/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so and b/third_party/libwebrtc/lib/linux-arm64/libwebrtc.so differ
diff --git a/third_party/libwebrtc/lib/linux-x64/libwebrtc.so b/third_party/libwebrtc/lib/linux-x64/libwebrtc.so
index 6cbb29eb87..42bfac7875 100755
Binary files a/third_party/libwebrtc/lib/linux-x64/libwebrtc.so and b/third_party/libwebrtc/lib/linux-x64/libwebrtc.so differ
diff --git a/third_party/libwebrtc/lib/win64/libwebrtc.dll b/third_party/libwebrtc/lib/win64/libwebrtc.dll
index e3fc28e6dc..e85919373d 100644
Binary files a/third_party/libwebrtc/lib/win64/libwebrtc.dll and b/third_party/libwebrtc/lib/win64/libwebrtc.dll differ
diff --git a/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib b/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib
index c982e9577b..8f287c87b9 100644
Binary files a/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib and b/third_party/libwebrtc/lib/win64/libwebrtc.dll.lib differ
diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt
index 0ce07a968c..477cc93929 100644
--- a/windows/CMakeLists.txt
+++ b/windows/CMakeLists.txt
@@ -1,16 +1,16 @@
 cmake_minimum_required(VERSION 3.15)
-set(PROJECT_NAME "flutter_webrtc")
+set(PROJECT_NAME "videosdk_webrtc")
 project(${PROJECT_NAME} LANGUAGES CXX)
 
 # This value is used when generating builds using this plugin, so it must
 # not be changed
-set(PLUGIN_NAME "flutter_webrtc_plugin")
+set(PLUGIN_NAME "videosdk_webrtc_plugin")
 
 add_definitions(-DLIB_WEBRTC_API_DLL)
 add_definitions(-DRTC_DESKTOP_DEVICE)
 
 add_library(${PLUGIN_NAME} SHARED
-  "../common/cpp/flutter_webrtc_plugin.cc"
+  "../common/cpp/src/flutter_common.cc"
   "../common/cpp/src/flutter_data_channel.cc"
   "../common/cpp/src/flutter_frame_cryptor.cc"
@@ -22,9 +22,11 @@ add_library(${PLUGIN_NAME} SHARED
   "../common/cpp/src/flutter_webrtc.cc"
   "../common/cpp/src/flutter_webrtc_base.cc"
   "../third_party/uuidxx/uuidxx.cc"
+  "flutter_webrtc_plugin.cc"
 )
 
 include_directories(
+  "${CMAKE_CURRENT_SOURCE_DIR}"
   "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include"
   "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/uuidxx"
   "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/svpng"
@@ -36,7 +38,7 @@ set_target_properties(${PLUGIN_NAME} PROPERTIES CXX_VISIBILITY_PRESET hidden)
 target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL)
 
 target_include_directories(${PLUGIN_NAME} INTERFACE
-  "${CMAKE_CURRENT_SOURCE_DIR}/../common/cpp/include"
+  "${CMAKE_CURRENT_SOURCE_DIR}"
 )
 target_link_libraries(${PLUGIN_NAME} PRIVATE
   flutter
@@ -45,7 +47,7 @@ target_link_libraries(${PLUGIN_NAME} PRIVATE
 )
 
 # List of absolute paths to libraries that should be bundled with the plugin
-set(flutter_webrtc_bundled_libraries
+set(videosdk_webrtc_bundled_libraries
   "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/win64/libwebrtc.dll"
   PARENT_SCOPE
 )
diff --git a/windows/flutter_webrtc_plugin.cc b/windows/flutter_webrtc_plugin.cc
new file mode 100644
index 0000000000..189a23dc98
--- /dev/null
+++ b/windows/flutter_webrtc_plugin.cc
@@ -0,0 +1,72 @@
+#include "videosdk_webrtc/flutter_web_r_t_c_plugin.h"
+
+#include "flutter_common.h"
+#include "flutter_webrtc.h" + +const char* kChannelName = "FlutterWebRTC.Method"; + +namespace videosdk_webrtc_plugin { + +// A webrtc plugin for windows/linux. +class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin { + public: + static void RegisterWithRegistrar(PluginRegistrar* registrar) { + auto channel = std::make_unique( + registrar->messenger(), kChannelName, + &flutter::StandardMethodCodec::GetInstance()); + + auto* channel_pointer = channel.get(); + + // Uses new instead of make_unique due to private constructor. + std::unique_ptr plugin( + new FlutterWebRTCPluginImpl(registrar, std::move(channel))); + + channel_pointer->SetMethodCallHandler( + [plugin_pointer = plugin.get()](const auto& call, auto result) { + plugin_pointer->HandleMethodCall(call, std::move(result)); + }); + + registrar->AddPlugin(std::move(plugin)); + } + + virtual ~FlutterWebRTCPluginImpl() {} + + BinaryMessenger* messenger() { return messenger_; } + + TextureRegistrar* textures() { return textures_; } + + private: + // Creates a plugin that communicates on the given channel. + FlutterWebRTCPluginImpl(PluginRegistrar* registrar, + std::unique_ptr channel) + : channel_(std::move(channel)), + messenger_(registrar->messenger()), + textures_(registrar->texture_registrar()) { + webrtc_ = std::make_unique(this); + } + + // Called when a method is called on |channel_|; + void HandleMethodCall(const MethodCall& method_call, + std::unique_ptr result) { + // handle method call and forward to webrtc native sdk. + auto method_call_proxy = MethodCallProxy::Create(method_call); + webrtc_->HandleMethodCall(*method_call_proxy.get(), + MethodResultProxy::Create(std::move(result))); + } + + private: + std::unique_ptr channel_; + std::unique_ptr webrtc_; + BinaryMessenger* messenger_; + TextureRegistrar* textures_; +}; + +} // namespace flutter_webrtc_plugin + + +void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar) { + static auto* plugin_registrar = new flutter::PluginRegistrar(registrar); + videosdk_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar( + plugin_registrar); +} diff --git a/windows/videosdk_webrtc/flutter_web_r_t_c_plugin.h b/windows/videosdk_webrtc/flutter_web_r_t_c_plugin.h new file mode 100644 index 0000000000..79b948eef0 --- /dev/null +++ b/windows/videosdk_webrtc/flutter_web_r_t_c_plugin.h @@ -0,0 +1,23 @@ +#ifndef PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ +#define PLUGINS_FLUTTER_WEBRTC_PLUGIN_CPP_H_ + +#include + +#ifdef FLUTTER_PLUGIN_IMPL +#define FLUTTER_PLUGIN_EXPORT __declspec(dllexport) +#else +#define FLUTTER_PLUGIN_EXPORT __declspec(dllimport) +#endif + +#if defined(__cplusplus) +extern "C" { +#endif + +FLUTTER_PLUGIN_EXPORT void FlutterWebRTCPluginRegisterWithRegistrar( + FlutterDesktopPluginRegistrarRef registrar); + +#if defined(__cplusplus) +} // extern "C" +#endif + +#endif \ No newline at end of file