diff --git a/package.json b/package.json
index 22020c3..fcfcefb 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
     "name": "@whereby/jslib-media",
     "description": "Media library for Whereby",
-    "version": "1.0.3",
+    "version": "1.0.4",
     "private": false,
     "license": "MIT",
     "homepage": "https://github.com/whereby/jslib-media",
diff --git a/src/webrtc/P2pRtcManager.js b/src/webrtc/P2pRtcManager.js
index bf50186..6bd99e1 100644
--- a/src/webrtc/P2pRtcManager.js
+++ b/src/webrtc/P2pRtcManager.js
@@ -295,7 +295,7 @@ export default class P2pRtcManager extends BaseRtcManager {
 
     stopOrResumeVideo(localStream, enable) {
         // actually turn off the camera. Chrome-only (Firefox has different plans)
-        if (browserName !== "chrome") {
+        if (!["chrome", "safari"].includes(browserName)) {
             return;
         }
         if (enable === false) {
diff --git a/src/webrtc/VegaRtcManager.js b/src/webrtc/VegaRtcManager.js
index 427748f..9bda2a1 100644
--- a/src/webrtc/VegaRtcManager.js
+++ b/src/webrtc/VegaRtcManager.js
@@ -1104,7 +1104,7 @@ export default class VegaRtcManager {
 
         this._pauseResumeWebcam();
 
-        if (browserName === "chrome") {
+        if (["chrome", "safari"].includes(browserName)) {
             // actually turn off the camera. Chrome-only (Firefox etc. has different plans)
 
             if (!enable) {
diff --git a/tests/webrtc/P2pRtcManager.spec.js b/tests/webrtc/P2pRtcManager.spec.js
index 9e5e5fa..ea6a50a 100644
--- a/tests/webrtc/P2pRtcManager.spec.js
+++ b/tests/webrtc/P2pRtcManager.spec.js
@@ -5,23 +5,50 @@ import * as CONNECTION_STATUS from "../../src/model/connectionStatusConstants";
 import P2pRtcManager from "../../src/webrtc/P2pRtcManager";
 import { RELAY_MESSAGES, PROTOCOL_RESPONSES } from "../../src/model/protocol";
 
+const originalNavigator = global.navigator;
+
 describe("P2pRtcManager", () => {
+    let navigator;
     let serverSocketStub;
     let serverSocket;
     let emitter;
    let webrtcProvider;
     let clientId;
+    let mediaConstraints;
 
     beforeEach(() => {
         window.RTCPeerConnection = helpers.createRTCPeerConnectionStub();
+        mediaConstraints = {
+            audio: true,
+            video: true,
+        };
         serverSocketStub = helpers.createServerSocketStub();
         serverSocket = serverSocketStub.socket;
         webrtcProvider = {
             webRtcDetectedBrowser: "chrome",
             webRtcDetectedBrowserVersion: "60",
+            getMediaConstraints: () => mediaConstraints,
         };
         emitter = helpers.createEmitterStub();
         clientId = helpers.randomString("client-");
+
+        navigator = {
+            mediaDevices: {
+                getUserMedia: () => {
+                    throw "must be stubbed";
+                },
+            },
+        };
+
+        Object.defineProperty(global, "navigator", {
+            value: navigator,
+        });
+    });
+
+    afterEach(() => {
+        Object.defineProperty(global, "navigator", {
+            value: originalNavigator,
+        });
     });
 
     function createRtcManager({
@@ -409,4 +436,138 @@ describe("P2pRtcManager", () => {
             });
         });
     });
+
+    describe("stopOrResumeVideo", () => {
+        let clock;
+        let localStream;
+        let rtcManager;
+
+        beforeEach(() => {
+            clock = sinon.useFakeTimers();
+            localStream = helpers.createMockedMediaStream();
+            rtcManager = createRtcManager();
+        });
+
+        afterEach(() => {
+            clock.restore();
+        });
+
+        describe("when disabling", () => {
+            it("should stop the video track after 5 seconds", () => {
+                const videoTrack = localStream.getVideoTracks()[0];
+                videoTrack.enabled = false;
+
+                rtcManager.stopOrResumeVideo(localStream, false);
+
+                expect(videoTrack.stop).not.to.have.been.called();
+                clock.tick(5000);
+                expect(videoTrack.stop).to.have.been.called();
+            });
+
+            it("should NOT stop track if it is still enabled", () => {
+                const videoTrack = localStream.getVideoTracks()[0];
+                videoTrack.enabled = true;
+
+                rtcManager.stopOrResumeVideo(localStream, false);
+
+                expect(videoTrack.stop).not.to.have.been.called();
+                clock.tick(5000);
+                expect(videoTrack.stop).not.to.have.been.called();
+            });
+
+            it("should remove the track from local stream", () => {
+                const videoTrack = localStream.getVideoTracks()[0];
+                videoTrack.enabled = false;
+
+                rtcManager.stopOrResumeVideo(localStream, false);
+                clock.tick(5000);
+
+                expect(localStream.removeTrack).to.have.been.calledWithExactly(videoTrack);
+            });
+
+            it("should emit event", () => {
+                const videoTrack = localStream.getVideoTracks()[0];
+                videoTrack.enabled = false;
+
+                rtcManager.stopOrResumeVideo(localStream, false);
+                clock.tick(5000);
+
+                expect(emitter.emit).to.have.been.calledWithExactly(
+                    CONNECTION_STATUS.EVENTS.LOCAL_STREAM_TRACK_REMOVED,
+                    {
+                        stream: localStream,
+                        track: videoTrack,
+                    }
+                );
+            });
+        });
+
+        describe("when enabling", () => {
+            let gumStream;
+            let gumStub;
+
+            beforeEach(() => {
+                gumStream = helpers.createMockedMediaStream();
+                gumStub = sinon.stub(navigator.mediaDevices, "getUserMedia").resolves(gumStream);
+                localStream.removeTrack(localStream.getVideoTracks()[0]);
+            });
+
+            afterEach(() => {
+                gumStub.restore();
+            });
+
+            it("should obtain new video track with existing constraints", () => {
+                mediaConstraints = { video: { some: "constraint" } };
+
+                rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(navigator.mediaDevices.getUserMedia).to.have.been.calledWithExactly({
+                    video: mediaConstraints.video,
+                });
+            });
+
+            it("should add video track to local stream", async () => {
+                const expectedTrack = gumStream.getVideoTracks()[0];
+
+                await rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(localStream.addTrack).to.have.been.calledWithExactly(expectedTrack);
+            });
+
+            it("should emit event", async () => {
+                const expectedTrack = gumStream.getVideoTracks()[0];
+
+                await rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(emitter.emit).to.have.been.calledWithExactly(CONNECTION_STATUS.EVENTS.LOCAL_STREAM_TRACK_ADDED, {
+                    streamId: localStream.id,
+                    tracks: [expectedTrack],
+                    screenShare: false,
+                });
+            });
+
+            it("should add track to peer connection(s)", async () => {
+                const expectedTrack = gumStream.getVideoTracks()[0];
+                sinon.spy(rtcManager, "_addTrackToPeerConnections");
+
+                await rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(rtcManager._addTrackToPeerConnections).to.have.been.calledWithExactly(expectedTrack);
+            });
+
+            it("should replace track in peer connection(s) when stopped track exists", async () => {
+                const expectedTrack = gumStream.getVideoTracks()[0];
+                const stoppedTrack = helpers.createMockedMediaStreamTrack({ kind: "video" });
+                rtcManager._stoppedVideoTrack = stoppedTrack;
+                sinon.spy(rtcManager, "_replaceTrackToPeerConnections");
+
+                await rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(rtcManager._replaceTrackToPeerConnections).to.have.been.calledWithExactly(
+                    stoppedTrack,
+                    expectedTrack
+                );
+            });
+        });
+    });
 });
diff --git a/tests/webrtc/VegaRtcManager.spec.js b/tests/webrtc/VegaRtcManager.spec.js
new file mode 100644
index 0000000..ec86de3
--- /dev/null
+++ b/tests/webrtc/VegaRtcManager.spec.js
@@ -0,0 +1,184 @@
+import * as helpers from "./webRtcHelpers";
+import * as mediasoupClient from "mediasoup-client";
+
+import * as CONNECTION_STATUS from "../../src/model/connectionStatusConstants";
+import VegaRtcManager from "../../src/webrtc/VegaRtcManager";
+
+const originalNavigator = global.navigator;
+const originalMediasoupDevice = mediasoupClient.Device;
+
+describe("VegaRtcManager", () => {
+    let navigator;
+    let serverSocketStub;
+    let serverSocket;
+    let emitter;
+    let webrtcProvider;
+    let mediaConstraints;
+
+    let rtcManager;
+
+    beforeEach(() => {
+        serverSocketStub = helpers.createServerSocketStub();
+        serverSocket = serverSocketStub.socket;
+        webrtcProvider = {
+            webRtcDetectedBrowser: "chrome",
+            webRtcDetectedBrowserVersion: "60",
+            getMediaConstraints: () => mediaConstraints,
+        };
+
+        emitter = helpers.createEmitterStub();
+
+        navigator = {
+            mediaDevices: {
+                getUserMedia: () => {
+                    throw "must be stubbed";
+                },
+            },
+        };
+
+        Object.defineProperty(global, "navigator", {
+            value: navigator,
+        });
+
+        Object.defineProperty(mediasoupClient, "Device", {
+            value: sinon.stub(),
+        });
+
+        rtcManager = new VegaRtcManager({
+            selfId: helpers.randomString("client-"),
+            room: { iceServers: [] },
+            emitter,
+            serverSocket,
+            webrtcProvider,
+            features: {},
+            eventClaim: helpers.randomString("/claim-"),
+        });
+    });
+
+    afterEach(() => {
+        Object.defineProperty(global, "navigator", {
+            value: originalNavigator,
+        });
+        Object.defineProperty(mediasoupClient, "Device", {
+            value: originalMediasoupDevice,
+        });
+    });
+
+    describe("stopOrResumeVideo", () => {
+        let clock;
+        let localStream;
+
+        beforeEach(() => {
+            clock = sinon.useFakeTimers();
+            localStream = helpers.createMockedMediaStream();
+        });
+
+        afterEach(() => {
+            clock.restore();
+        });
+
+        describe("when disabling", () => {
+            it("should stop the video track after 5 seconds", () => {
+                const videoTrack = localStream.getVideoTracks()[0];
+                videoTrack.enabled = false;
+
+                rtcManager.stopOrResumeVideo(localStream, false);
+
+                expect(videoTrack.stop).not.to.have.been.called();
+                clock.tick(5000);
+                expect(videoTrack.stop).to.have.been.called();
+            });
+
+            it("should NOT stop track if it is still enabled", () => {
+                const videoTrack = localStream.getVideoTracks()[0];
+                videoTrack.enabled = true;
+
+                rtcManager.stopOrResumeVideo(localStream, false);
+
+                expect(videoTrack.stop).not.to.have.been.called();
+                clock.tick(5000);
+                expect(videoTrack.stop).not.to.have.been.called();
+            });
+
+            it("should remove the track from local stream", () => {
+                const videoTrack = localStream.getVideoTracks()[0];
+                videoTrack.enabled = false;
+
+                rtcManager.stopOrResumeVideo(localStream, false);
+                clock.tick(5000);
+
+                expect(localStream.removeTrack).to.have.been.calledWithExactly(videoTrack);
+            });
+
+            it("should emit event", () => {
+                const videoTrack = localStream.getVideoTracks()[0];
+                videoTrack.enabled = false;
+
+                rtcManager.stopOrResumeVideo(localStream, false);
+                clock.tick(5000);
+
+                expect(emitter.emit).to.have.been.calledWithExactly(
+                    CONNECTION_STATUS.EVENTS.LOCAL_STREAM_TRACK_REMOVED,
+                    {
+                        stream: localStream,
+                        track: videoTrack,
+                    }
+                );
+            });
+        });
+
+        describe("when enabling", () => {
+            let gumStream;
+            let gumStub;
+
+            beforeEach(() => {
+                gumStream = helpers.createMockedMediaStream();
+                gumStub = sinon.stub(navigator.mediaDevices, "getUserMedia").resolves(gumStream);
+                localStream.removeTrack(localStream.getVideoTracks()[0]);
+            });
+
+            afterEach(() => {
+                gumStub.restore();
+            });
+
+            it("should obtain new video track with existing constraints", () => {
+                mediaConstraints = { video: { some: "constraint" } };
+
+                rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(navigator.mediaDevices.getUserMedia).to.have.been.calledWithExactly({
+                    video: mediaConstraints.video,
+                });
+            });
+
+            it("should add video track to local stream", async () => {
+                const expectedTrack = gumStream.getVideoTracks()[0];
+
+                await rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(localStream.addTrack).to.have.been.calledWithExactly(expectedTrack);
+            });
+
+            it("should emit event", async () => {
+                const expectedTrack = gumStream.getVideoTracks()[0];
+
+                await rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(emitter.emit).to.have.been.calledWithExactly(CONNECTION_STATUS.EVENTS.LOCAL_STREAM_TRACK_ADDED, {
+                    streamId: localStream.id,
+                    tracks: [expectedTrack],
+                    screenShare: false,
+                });
+            });
+
+            it("should sendWebcam(track)", async () => {
+                const expectedTrack = gumStream.getVideoTracks()[0];
+                sinon.spy(rtcManager, "_sendWebcam");
+
+                await rtcManager.stopOrResumeVideo(localStream, true);
+
+                expect(rtcManager._sendWebcam).to.have.been.calledWithExactly(expectedTrack);
+            });
+        });
+    });
+});
diff --git a/tests/webrtc/webRtcHelpers.js b/tests/webrtc/webRtcHelpers.js
index 1be434b..34c9995 100644
--- a/tests/webrtc/webRtcHelpers.js
+++ b/tests/webrtc/webRtcHelpers.js
@@ -131,10 +131,10 @@ export function createMockedMediaStreamTrack({ kind }) {
         getSettings: () => {
             raiseNotImplementedException();
         },
-        stop: () => {
+        stop: sinon.spy(() => {
             result.enabled = false;
             result.readyState = "ended";
-        },
+        }),
     };
     return result;
 }
@@ -146,17 +146,19 @@ export function createMockedMediaStream() {
     const mockedVideoTrack = createMockedMediaStreamTrack({
         kind: "video",
     });
-    const raiseNotImplementedException = () => {
-        throw new Error("Not Implemented function in mock");
-    };
+
+    let tracks = [mockedAudioTrack, mockedVideoTrack];
+
     const result = {
         active: true,
         ended: false,
         id: randomString(),
-        addTrack: () => raiseNotImplementedException(),
-        removeTrack: () => raiseNotImplementedException(),
-        getAudioTracks: () => [mockedAudioTrack],
-        getVideoTracks: () => [mockedVideoTrack],
+        addTrack: sinon.spy((track) => tracks.push(track)),
+        removeTrack: sinon.spy((track) => {
+            tracks = tracks.filter((t) => t !== track);
+        }),
+        getAudioTracks: () => tracks.filter((t) => t.kind === "audio"),
+        getVideoTracks: () => tracks.filter((t) => t.kind === "video"),
         getTracks: () => [].concat(result.getAudioTracks(), result.getVideoTracks()),
         close: () => {
             result.active = false;