diff --git a/assets/js/hooks/audio_player.js b/assets/js/hooks/audio_player.js
index ba8a145c..fe5b0bb5 100644
--- a/assets/js/hooks/audio_player.js
+++ b/assets/js/hooks/audio_player.js
@@ -12,6 +12,13 @@
  * general player-agnostic fashion. "Playback" and actual playback (i.e. audio or video playback) are decoupled, allowing
  * us to reconcile buffering states and other edge cases, mediated by the Media Bridge.
  * */
+import {
+  seekTimeBridge,
+  playPauseBridge,
+  heartbeatBridge,
+  playbackMetaBridge,
+} from "./mediaEventBridges";
+
 let rand = (min, max) => Math.floor(Math.random() * (max - min) + min);

 let isVisible = (el) =>
   !!(el.offsetWidth || el.offsetHeight || el.getClientRects().length > 0);
@@ -22,11 +29,6 @@ let execJS = (selector, attr) => {
     .forEach((el) => liveSocket.execJS(el, el.getAttribute(attr)));
 };

-import {
-  seekTimeBridge,
-  playPauseBridge,
-  heartbeatBridge,
-} from "./media_bridge.js";
 import { formatDisplayTime, nowMs } from "../utils/time_utils.js";

 AudioPlayer = {
@@ -36,11 +38,15 @@ AudioPlayer = {
     this.player = this.el.querySelector("audio");
     console.log("MOUNT PING");

-    document.addEventListener("pointerdown", () => this.enableAudio());
-    this.player.addEventListener("canplay", (e) => this.handlePlayableState(e));
-    this.player.addEventListener("loadedmetadata", (e) =>
-      this.handleMetadataLoad(e),
+    // TO BE DEPRECATED
+    // document.addEventListener("pointerdown", () => this.enableAudio());
+
+    this.player.addEventListener("canplaythrough", (e) =>
+      this.handlePlayableState(e),
     );
+    // this.player.addEventListener("loadedmetadata", (e) =>
+    //   this.handleMetadataLoad(e),
+    // );

     /// Audio playback events:
     this.handleEvent("stop", () => this.stop());
@@ -53,9 +59,24 @@ AudioPlayer = {
         this.handleMediaPlayPause(payload),
       ),
       heartbeat: heartbeatBridge.sub((payload) => this.echoHeartbeat(payload)),
+      playbackMeta: playbackMetaBridge.sub((playback) =>
+        this.handlePlaybackMetaUpdate(playback),
+      ),
     };
   },
-
+  /**
+   * Loads the audio onto the audio player and inits the MediaSession as soon as playback information is received.
+   * This allows the metadata and audio load to happen independently of user
+   * actions that affect playback (e.g. play/pause) -- buffering gets initiated a lot earlier
+   * as a result.
+   * */
+  handlePlaybackMetaUpdate(playback) {
+    console.log("TRACE: handle playback meta update:", playback);
+    const { meta: playbackMeta } = playback;
+    const { file_path: filePath } = playbackMeta;
+    this.loadAudio(filePath);
+    this.initMediaSession(playback);
+  },
   /// Handlers for events received via the events bridge:
   handleMediaPlayPause(payload) {
     console.log(
@@ -112,12 +133,13 @@
   handlePlayableState(e) {
     console.log("TRACE HandlePlayableState", e);
     const playback = JSON.parse(this?.player?.dataset?.playback);
-    this.initMediaSession(playback);
+    // this.initMediaSession(playback);
   },
+  // DEPRECATED: the state setting already happens at the point of loading, so we no longer need to listen for a metadata load event.
   handleMetadataLoad(e) {
     console.log("TRACE HandleMetadataLoad", e);
     const playback = JSON.parse(this?.player?.dataset?.playback);
-    this.initMediaSession(playback);
+    // this.initMediaSession(playback);
   },
   handlePlayPause() {
     console.log("{play_pause event triggered} player:", this.player);
     if (this.player.paused) {
       this.play();
     }
   },
-  /**
+
+  /*
   * This "init" behaviour has been mimicked from live_beats.
   * It is likely there to enable the audio player buffering.
-  * */
+  */
+  // DEPRECATED: the intent of this function is no longer clear. It can be removed in an upcoming cleanup commit.
+  // The actual loading of audio into the audio player is already handled by loadAudio().
   enableAudio() {
     console.log("TRACE: enable audio");
     if (this.player.src) {
@@ -159,18 +184,33 @@
     console.log("TRACE @playMedia", {
       player: this.player,
     });
-    let currentSrc = this.player.src.split("?")[0];
-
+    let currentSrc = this.getCurrentSrc();
     const isLoadedAndPaused =
       currentSrc === filePath && !isPlaying && this.player.paused;
     if (isLoadedAndPaused) {
       this.play({ sync: true });
     } else if (currentSrc !== filePath) {
-      currentSrc = filePath;
-      this.player.src = currentSrc;
+      this.loadAudio(filePath);
       this.play({ sync: true });
     }
   },
+  loadAudio(src) {
+    const isSrcAlreadyLoaded = src === this.getCurrentSrc();
+    if (isSrcAlreadyLoaded) {
+      return;
+    }
+    this.player.src = src;
+  },
+  getCurrentSrc() {
+    if (!this?.player?.src) {
+      return null;
+    }
+    // Since the HTML5 player's src value is a URL string, it may carry query parameters (e.g. a content type).
+    // Therefore, we strip away the query string:
+    const src = this.player.src.split("?")[0];
+
+    return src;
+  },
   play(opts = {}) {
     console.log("TRACE Triggered playback, check params", {
       player: this.player,
diff --git a/assets/js/hooks/media/bridged.js b/assets/js/hooks/mediaEventBridges/bridged.js
similarity index 100%
rename from assets/js/hooks/media/bridged.js
rename to assets/js/hooks/mediaEventBridges/bridged.js
diff --git a/assets/js/hooks/mediaEventBridges/index.js b/assets/js/hooks/mediaEventBridges/index.js
new file mode 100644
index 00000000..97b886f9
--- /dev/null
+++ b/assets/js/hooks/mediaEventBridges/index.js
@@ -0,0 +1,17 @@
+/**
+ * This file defines the custom event bridges and keeps
+ * their exports in one place.
+ * */
+
+import { bridged } from "./bridged.js";
+
+export const seekTimeBridge = bridged("seekTime");
+export const playPauseBridge = bridged("playPause");
+export const heartbeatBridge = bridged("heartbeat");
+/**
+ * The playbackMetaBridge is the channel through which playback-metadata-related
+ * messages are passed.
+ * An example would be when a voice first gets loaded/registered and we can
+ * update the MediaSession API even if the user has not started actual playback.
+ * */
+export const playbackMetaBridge = bridged("playback");
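
Note: `bridged.js` is only renamed by this diff, so its implementation is not shown here. For readers unfamiliar with the pattern, below is a minimal sketch of what a bridge factory with this surface could look like. The `sub`/`pub`/`dispatch` names come from the call sites in this diff; the body is an assumption for illustration, not the actual implementation:

```js
// Hypothetical sketch of bridged() -- NOT the actual bridged.js. It only
// illustrates the pub/sub + dispatch surface the hooks in this diff rely on.
export const bridged = (eventName) => {
  const subscribers = new Set();
  return {
    // sub() registers a handler and returns a deregisterer, matching the
    // `eventBridgeDeregisterers` bookkeeping seen in the hooks.
    sub(handler) {
      subscribers.add(handler);
      return () => subscribers.delete(handler);
    },
    // pub() fans a payload out to every subscriber, as used by
    // registerPlaybackInfo() in media_bridge.js below.
    pub(payload) {
      subscribers.forEach((handler) => handler(payload));
    },
    // dispatch() also fires a DOM CustomEvent at a target element
    // (e.g. "#media-player-container") for listeners outside the bridge;
    // the hook argument is unused in this sketch.
    dispatch(hook, payload, selector) {
      this.pub(payload);
      document
        .querySelector(selector)
        ?.dispatchEvent(new CustomEvent(eventName, { detail: payload }));
    },
  };
};
```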
diff --git a/assets/js/hooks/media_bridge.js b/assets/js/hooks/media_bridge.js
index 05d6b3e9..3d971565 100644
--- a/assets/js/hooks/media_bridge.js
+++ b/assets/js/hooks/media_bridge.js
@@ -6,13 +6,13 @@
  *
  * Event-handling is done using custom bridged events as a proxy.
  * */
-import { bridged } from "./media/bridged.js";
 import { formatDisplayTime } from "../utils/time_utils.js";
-
-// TODO: consider switching to a map of bridges to support other key events
-export const seekTimeBridge = bridged("seekTime");
-export const playPauseBridge = bridged("playPause");
-export const heartbeatBridge = bridged("heartbeat");
+import {
+  seekTimeBridge,
+  playPauseBridge,
+  heartbeatBridge,
+  playbackMetaBridge,
+} from "./mediaEventBridges";

 MediaBridge = {
   mounted() {
@@ -27,7 +27,9 @@
     this.handleEvent("media_bridge:registerEventsTimeline", (params) =>
       this.registerEventsTimeline(params),
     );
-
+    this.handleEvent("media_bridge:registerPlayback", (params) =>
+      this.registerPlaybackInfo(params),
+    );
     this.handleEvent("initSession", (sess) => this.initSession(sess));
     // pub: external action
     // this callback pubs to others
@@ -207,9 +209,19 @@
   },
   registerEventsTimeline(params) {
     console.log("Register Events Timeline", params);
-    this.eventsTimeline = params.voice_events;
+    const { voice_events } = params;
+    this.eventsTimeline = voice_events;
+  },
+  /**
+   * Registers the playback information about a playable medium (e.g. a voice).
+   * The intent of this is to separate tasks that interface with things like the MediaSession API
+   * from tasks that interface with the concrete players (e.g. play/pause events on the audio player).
+   * */
+  registerPlaybackInfo(params) {
+    const { playback } = params;
+    console.log("TRACE: registerPlaybackInfo", params);
+    playbackMetaBridge.pub(playback);
   },
-
   /**
    * Receives event pushed from the server, then pubs through the
    * */
diff --git a/assets/js/hooks/progress_bar.js b/assets/js/hooks/progress_bar.js
index 14722eb1..9e536286 100644
--- a/assets/js/hooks/progress_bar.js
+++ b/assets/js/hooks/progress_bar.js
@@ -2,69 +2,71 @@
  * Progress Bar hooks intended to sync playback-related actions
  * */

-import {seekTimeBridge, heartbeatBridge} from "./media_bridge.js"
+import { seekTimeBridge, heartbeatBridge } from "./mediaEventBridges";

 ProgressBar = {
   mounted() {
     this.el.addEventListener("click", (e) => {
       e.preventDefault();
-      this.handleProgressBarClick(e)
+      this.handleProgressBarClick(e);
     });

-    const heartbeatDeregisterer = heartbeatBridge.sub(payload => this.handleHeartbeat(payload))
-    const seekTimeDeregisterer = seekTimeBridge.sub(payload => this.handleExternalSeekTime(payload))
+    const heartbeatDeregisterer = heartbeatBridge.sub((payload) =>
+      this.handleHeartbeat(payload),
+    );
+    const seekTimeDeregisterer = seekTimeBridge.sub((payload) =>
+      this.handleExternalSeekTime(payload),
+    );

     this.eventBridgeDeregisterers = {
       seekTime: seekTimeDeregisterer,
       heartbeat: heartbeatDeregisterer,
-    }
+    };
   },
   handleExternalSeekTime(payload) {
-    console.log("[progress_bar::seekTimeBridgeSub::seekTimeHandler] this:", {payload});
-    const {
-      seekToMs: timeMs,
-      originator,
-    } = payload;
+    console.log("[progress_bar::seekTimeBridgeSub::seekTimeHandler] this:", {
+      payload,
+    });
+    const { seekToMs: timeMs, originator } = payload;
     const shouldIgnoreSignal = originator === "ProgressBar";
     if (shouldIgnoreSignal) {
-      console.info("Ignoring signal for seekTime", payload)
+      console.info("Ignoring signal for seekTime", payload);
       return;
     }

-    const maxTime = this.el.dataset?.max || this.maxTime
-    if(!maxTime) {
-      console.warn("Max time not available in element's state or dataset, ignoring progress bar update.")
-      return
+    const maxTime = this.el.dataset?.max || this.maxTime;
+    if (!maxTime) {
+      console.warn(
+        "Max time not available in element's state or dataset, ignoring progress bar update.",
+      );
+      return;
     }
-    const playbackPercentage = (timeMs / maxTime)
-    const progressStyleWidth = `${(playbackPercentage * 100)}%`
+    const playbackPercentage = timeMs / maxTime;
+    const progressStyleWidth = `${playbackPercentage * 100}%`;
     console.log("[DEBUG]", {
       maxTime,
       playbackPercentage,
-    })
-    this.setProgressBarWidth(progressStyleWidth)
+    });
+    this.setProgressBarWidth(progressStyleWidth);
   },
   handleHeartbeat(payload) {
-    console.log("[ProgressBar::handleHeartbeat]", payload)
+    console.log("[ProgressBar::handleHeartbeat]", payload);
     const shouldIgnoreSignal = payload.originator === "MediaBridge";
-    if(shouldIgnoreSignal) {
+    if (shouldIgnoreSignal) {
       return;
     }

-    const {
-      currentTimeMs,
-      durationMs,
-    } = payload.currentPlaybackInfo || {};
+    const { currentTimeMs, durationMs } = payload.currentPlaybackInfo || {};

-    const playbackPercentage = (currentTimeMs / durationMs)
-    const progressStyleWidth = `${(playbackPercentage * 100)}%`
+    const playbackPercentage = currentTimeMs / durationMs;
+    const progressStyleWidth = `${playbackPercentage * 100}%`;
     console.log("handleHeartbeat, set progress bar width", {
       progressStyleWidth,
-      payload
-    })
-    this.setProgressBarWidth(progressStyleWidth)
+      payload,
+    });
+    this.setProgressBarWidth(progressStyleWidth);
   },
   /*
   // The progress bar is measured in milliseconds,
@@ -89,24 +91,24 @@
   // })
   */
   handleProgressBarClick(e) {
-    const { max: maxTime } = this.el.dataset
+    const { max: maxTime } = this.el.dataset;
     if (!maxTime) {
-      console.log("unable to seek position, payload is incorrect")
-      return
+      console.log("unable to seek position, payload is incorrect");
+      return;
     }
-    const containerNode = document.getElementById("player-progress-container")
-    const maxOffset = containerNode.offsetWidth
+    const containerNode = document.getElementById("player-progress-container");
+    const maxOffset = containerNode.offsetWidth;
     this.maxTime = maxTime;
     this.maxOffset = maxOffset;
     const currXOffset = e.offsetX;
-    const maxPlaybackMs = Number(maxTime)
-    const playbackPercentage = (currXOffset / maxOffset)
-    const positionMs = maxPlaybackMs * playbackPercentage
-    const progressStyleWidth = `${(playbackPercentage * 100)}%`
-    this.setProgressBarWidth(progressStyleWidth)
+    const maxPlaybackMs = Number(maxTime);
+    const playbackPercentage = currXOffset / maxOffset;
+    const positionMs = maxPlaybackMs * playbackPercentage;
+    const progressStyleWidth = `${playbackPercentage * 100}%`;
+    this.setProgressBarWidth(progressStyleWidth);

     // Optimistic update
     this.el.value = positionMs;
@@ -120,23 +122,25 @@
       playbackPercentage,
       maxPlaybackMs,
       positionMs,
-    })
+    });

     // pubs & dispatches this position
     const seekTimePayload = {
       seekToMs: positionMs,
       originator: "ProgressBar",
-    }
-    seekTimeBridge.dispatch(this, seekTimePayload, "#media-player-container")
+    };
+    seekTimeBridge.dispatch(this, seekTimePayload, "#media-player-container");
     return;
   },
-  setProgressBarWidth(progressStyleWidth, selector="#player-progress") {
-    console.log("setting progress bar width:", progressStyleWidth)
-    const progressBarNode = document.querySelector(selector)
-    console.assert(!!progressBarNode, "progress bar node must always be present in the dom.")
+  setProgressBarWidth(progressStyleWidth, selector = "#player-progress") {
+    console.log("setting progress bar width:", progressStyleWidth);
+    const progressBarNode = document.querySelector(selector);
+    console.assert(
+      !!progressBarNode,
+      "progress bar node must always be present in the dom.",
+    );
     progressBarNode.style.width = progressStyleWidth;
-  }
+  },
 };
-
 export default ProgressBar;
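
The click-to-seek math in `handleProgressBarClick` is easiest to sanity-check with concrete numbers. A worked example with hypothetical values (the formulas are the ones from the hook above):

```js
// Hypothetical numbers: a 300px-wide #player-progress-container,
// data-max="60000" (a 60s track), and a click at offsetX = 150px.
const maxPlaybackMs = Number("60000"); // from this.el.dataset.max
const playbackPercentage = 150 / 300; // currXOffset / maxOffset = 0.5
const positionMs = maxPlaybackMs * playbackPercentage; // 30000 ms
const progressStyleWidth = `${playbackPercentage * 100}%`; // "50%"
// The hook then publishes { seekToMs: 30000, originator: "ProgressBar" }
// via seekTimeBridge; the originator field is what lets ProgressBar ignore
// its own signal when it echoes back (see handleExternalSeekTime).
```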
-    middleware: [offset(6), flip(), shift({padding: 16})],
-  }).then(({x, y}) => {
+    middleware: [offset(6), flip(), shift({ padding: 16 })],
+  }).then(({ x, y }) => {
     console.log(">>> computed new position!", {
       x,
-      y
-    })
+      y,
+    });
     Object.assign(tooltip.style, {
       left: `${x}px`,
       top: `${y}px`,
     });
   });
-}
-
+};
 export default ShareQuoteButton;
diff --git a/assets/js/hooks/youtube_player.js b/assets/js/hooks/youtube_player.js
index cd9e4d43..09861fb6 100644
--- a/assets/js/hooks/youtube_player.js
+++ b/assets/js/hooks/youtube_player.js
@@ -2,61 +2,59 @@
  * Follower
  * Validates if required parameters exist.
  * */
-import {seekTimeBridge, playPauseBridge} from "./media_bridge.js"
-import { isMobileDevice } from "../utils/uncategorised_utils.js"
+import { seekTimeBridge, playPauseBridge } from "./mediaEventBridges";
+
+import { isMobileDevice } from "../utils/uncategorised_utils.js";

 const isYouTubeFnCallable = (dataset) => {
-  const {functionName, eventName} = dataset;
-  const areFnAndEventNamesProvided = functionName && eventName
-  if(!areFnAndEventNamesProvided) {
+  const { functionName, eventName } = dataset;
+  const areFnAndEventNamesProvided = functionName && eventName;
+  if (!areFnAndEventNamesProvided) {
     console.warn("Need to provide both a valid function name and event name");
-    return false
+    return false;
   }
-  const supportedEvents = ["click", "mouseover"]
+  const supportedEvents = ["click", "mouseover"];
   if (!supportedEvents.includes(eventName)) {
-    console.warn(`${eventName} is not a supported event. Supported events include ${supportedEvents}.`);
-    return false
+    console.warn(
+      `${eventName} is not a supported event. Supported events include ${supportedEvents}.`,
+    );
+    return false;
   }
-  const supportedFunctionNames = Object.keys(youtubePlayerCallbacks)
+  const supportedFunctionNames = Object.keys(youtubePlayerCallbacks);
   if (!supportedFunctionNames.includes(functionName)) {
-    console.warn(`${functionName} is not a supported youtube function. Supported functions include ${supportedFunctionNames}.`);
+    console.warn(
+      `${functionName} is not a supported YouTube function. Supported functions include ${supportedFunctionNames}.`,
    );
     return false;
   }
-
-  return true
-}
+  return true;
+};

 // NOTE: the player interface can be found @ https://developers.google.com/youtube/iframe_api_reference#Functions
 const youtubePlayerCallbacks = {
-  seekTo: function(options) {
-    const {targetTimeStamp, player} = options;
-    const target = Number(targetTimeStamp)
-    console.log("seeking to: ", target)
-    return player.seekTo(target)
+  seekTo: function (options) {
+    const { targetTimeStamp, player } = options;
+    const target = Number(targetTimeStamp);
+    console.log("seeking to: ", target);
+    return player.seekTo(target);
   },
-  loadVideoById: function(options) {
-    const {
-      targetTimeStamp: startSeconds,
-      videoId,
-      player,
-    } = options;
-    console.log(`Loading video with id ${videoId} at t=${startSeconds}s`)
-    player.loadVideoById({videoId, startSeconds})
+  loadVideoById: function (options) {
+    const { targetTimeStamp: startSeconds, videoId, player } = options;
+    console.log(`Loading video with id ${videoId} at t=${startSeconds}s`);
+    player.loadVideoById({ videoId, startSeconds });
   },
-  getAllStats: function(options) { // this is a custom function
-    const {
-      hook,
-      player,
-    } = options;
+  getAllStats: function (options) {
+    // this is a custom function
+    const { hook, player } = options;
     const stats = {
       duration: player.getDuration(),
       videoUrl: player.getVideoUrl(),
       currentTime: player.getCurrentTime(),
-    }
-    hook.pushEventTo("#statsHover", "reportVideoStatus", stats)
-  }
-}
+    };
+    hook.pushEventTo("#statsHover", "reportVideoStatus", stats);
+  },
+};

 /**
  * Contains client-side logic for the YouTube iframe embedded player.
@@ -65,67 +63,66 @@ const youtubePlayerCallbacks = {
  */
 export const RenderYouTubePlayer = {
   mounted() {
-    const {
-      videoId,
-      playerConfig: serialisedPlayerConfig,
-    } = this.el.dataset;
-    console.log("Check dataset", this.el.dataset)
+    const { videoId, playerConfig: serialisedPlayerConfig } = this.el.dataset;
+    console.log("Check dataset", this.el.dataset);

-    const playerConfig = JSON.parse(serialisedPlayerConfig)
-    const updatedConfig = overrideConfigForMobile(playerConfig)
-    injectIframeDownloadScript()
-    injectYoutubeInitialiserScript(videoId, updatedConfig)
+    const playerConfig = JSON.parse(serialisedPlayerConfig);
+    const updatedConfig = overrideConfigForMobile(playerConfig);
+    injectIframeDownloadScript();
+    injectYoutubeInitialiserScript(videoId, updatedConfig);

     // TODO: capture youtube player events (play state changes and pub to the same event bridges, so as to control overall playback)
     this.eventBridgeDeregisterers = {
       seekTime: seekTimeBridge.sub((payload) => this.handleSeekTime(payload)),
-      playPause: playPauseBridge.sub(payload => this.handlePlayPause(payload)),
-    }
-    this.handleEvent("stop", () => this.stop())
+      playPause: playPauseBridge.sub((payload) =>
+        this.handlePlayPause(payload),
+      ),
+    };
+    this.handleEvent("stop", () => this.stop());
   },
   handlePlayPause(payload) {
-    console.log("[playPauseBridge::audio_player::playpause] payload:", payload)
-    const {
-      cmd,
-      playback,
-    } = payload
+    console.log("[playPauseBridge::youtube_player::playpause] payload:", payload);
+    const { cmd, playback } = payload;
     if (cmd === "play") {
-      this.playMedia(playback)
+      this.playMedia(playback);
     }
     if (cmd === "pause") {
-      this.pauseMedia()
+      this.pauseMedia();
    }
  },
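
For reference, the payloads crossing these bridges are the same ones the audio player and progress bar consume. Their shapes, as destructured at the call sites in this diff, look roughly like this (all concrete values are hypothetical):

```js
// Shapes inferred from the destructuring in this diff; values are made up.
const playPausePayload = {
  cmd: "play", // or "pause"
  playback: {
    "playing?": false, // serialised Elixir-side key, hence the quoted name
    elapsed: 0,
    meta: {
      title: "Chapter 1", // hypothetical
      duration: 1234, // hypothetical
      file_path: "https://cdn.example.com/voices/1.mp3", // hypothetical
      artists: ["Some Artist"], // hypothetical
    },
  },
};

const seekTimePayload = {
  seekToMs: 30000, // position to seek to, in milliseconds
  originator: "ProgressBar", // lets subscribers ignore their own echoes
};
```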
  handleSeekTime(payload) {
-    console.log("[youtube_player::seekTimeBridgeSub::seekTimeHandler] check params:", {payload} );
-    let {seekToMs: timeMs} = payload;
-    this.seekToMs(timeMs)
+    console.log(
+      "[youtube_player::seekTimeBridgeSub::seekTimeHandler] check params:",
+      { payload },
+    );
+    let { seekToMs: timeMs } = payload;
+    this.seekToMs(timeMs);
   },
   playMedia(playback) {
-    console.log("youtube player playMedia triggerred", playback)
-    const {meta: playbackMeta, "playing?": isPlaying, elapsed} = playback;
+    console.log("youtube player playMedia triggered", playback);
+    const { meta: playbackMeta, "playing?": isPlaying, elapsed } = playback;
     const { title, duration, file_path: filePath, artists } = playbackMeta;
     // TODO: consider if the elapsed ms should be used here for better sync(?)
-    window.youtubePlayer.playVideo()
+    window.youtubePlayer.playVideo();
   },
   pauseMedia() {
-    console.log("youtube player pauseMedia triggerred")
-    window.youtubePlayer.pauseVideo()
+    console.log("youtube player pauseMedia triggered");
+    window.youtubePlayer.pauseVideo();
   },
   stop() {
-    console.log("youtube player stop triggerred")
+    console.log("youtube player stop triggered");
   },
   seekToMs(timeMs) {
     const timeS = timeMs / 1000;
     console.log("youtube player seekTo triggered", {
       timeS,
-      player: window.youtubePlayer
-    })
+      player: window.youtubePlayer,
+    });
     window.youtubePlayer.seekTo(timeS);
-  }
+  },
 };

 /**
  * Injects the script needed to download the IFrame API,
@@ -133,11 +130,11 @@ export const RenderYouTubePlayer = {
  * so that it gets fired before any other script.
  * */
 const injectIframeDownloadScript = () => {
-    const tag = document.createElement("script");
-    tag.src = "https://www.youtube.com/iframe_api";
-    const firstScriptTag = document.getElementsByTagName("script")?.[0];
-    firstScriptTag && firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
-}
+  const tag = document.createElement("script");
+  tag.src = "https://www.youtube.com/iframe_api";
+  const firstScriptTag = document.getElementsByTagName("script")?.[0];
+  firstScriptTag && firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
+};

 /**
  * Injects a script that contains initialisation logic for the YouTube Player object.
@@ -151,13 +148,13 @@ const injectYoutubeInitialiserScript = (videoId, playerConfig) => {
       videoId: videoId,
       events: {
         onReady: onPlayerReady,
-      }
-    }
-    window.youtubePlayer = new YT.Player("player", assimilatedConfig)
-  }
+      },
+    };
+    window.youtubePlayer = new YT.Player("player", assimilatedConfig);
+  };

   window.callbackOnPlayerReady = (event) => {
     event.target.playVideo();
-  }
+  };

   const stringifiedScript = `
       function onYouTubeIframeAPIReady() {
@@ -165,40 +162,43 @@
       }
       function onPlayerReady(event) {
         window.callbackOnPlayerReady(event)
-      }`
+      }`;

   const functionCode = document.createTextNode(stringifiedScript);
-  iframeInitialiserScript.appendChild(functionCode)
-}
+  iframeInitialiserScript.appendChild(functionCode);
+};

 export const TriggerYouTubeFunction = {
   mounted() {
     if (!isYouTubeFnCallable(this.el.dataset)) {
-      console.warn("YouTube function can not be triggerred.")
-      return
+      console.warn("YouTube function cannot be triggered.");
+      return;
     }
-    const {functionName, eventName} = this.el.dataset
-    const callback = youtubePlayerCallbacks[functionName]
-    const getOptions = () => ({hook: this,...this.el.dataset, player: window.youtubePlayer})
-    this.el.addEventListener(eventName, () => callback(getOptions()))
-  }
-}
+    const { functionName, eventName } = this.el.dataset;
+    const callback = youtubePlayerCallbacks[functionName];
+    const getOptions = () => ({
+      hook: this,
+      ...this.el.dataset,
+      player: window.youtubePlayer,
+    });
+    this.el.addEventListener(eventName, () => callback(getOptions()));
+  },
+};

 /// FIXME: this is a temp fix that overrides the dimensions if it's a mobile device.
 // there has to be a better, more generic way of handling this.
 // Alternatively, if we can reverse engineer a custom PIP mode (with resize and all that), then
 // we won't need to fix this.
 const overrideConfigForMobile = (playerConfig) => {
-  let overridedConfig = {...playerConfig}
-  if(isMobileDevice()) {
-    overridedConfig["height"] = "150",
-    overridedConfig["width"] = "200",
-    console.log("[iframe] updating the player config:", {
-      before: playerConfig,
-      after: overridedConfig,
-
-    })
+  let overridedConfig = { ...playerConfig };
+  if (isMobileDevice()) {
+    overridedConfig["height"] = "150";
+    overridedConfig["width"] = "200";
+    console.log("[iframe] updating the player config:", {
+      before: playerConfig,
+      after: overridedConfig,
+    });
   }
   return overridedConfig;
-}
+};
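
For context, `TriggerYouTubeFunction` is driven entirely by data attributes. A hypothetical usage, with the markup purely illustrative and the attribute names inferred from the `this.el.dataset` destructuring above:

```js
// Hypothetical markup driving TriggerYouTubeFunction (phx-hook attaches the
// LiveView hook; data-* keys map onto camelCased this.el.dataset fields):
//
//   <button phx-hook="TriggerYouTubeFunction"
//           data-function-name="seekTo"
//           data-event-name="click"
//           data-target-time-stamp="42">
//     Jump to 0:42
//   </button>
//
// On click, the hook resolves youtubePlayerCallbacks.seekTo and invokes it
// with { hook, ...dataset, player: window.youtubePlayer }, so the callback
// reads targetTimeStamp off the merged options object.
```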
diff --git a/lib/vyasa_web/live/media_live/media_bridge.ex b/lib/vyasa_web/live/media_live/media_bridge.ex
index 8c3fd516..328c96b3 100644
--- a/lib/vyasa_web/live/media_live/media_bridge.ex
+++ b/lib/vyasa_web/live/media_live/media_bridge.ex
@@ -247,16 +247,15 @@ defmodule VyasaWeb.MediaLive.MediaBridge do
     }
   end

-  defp receive_voice_ack(
+  # Assigns the necessary state if the voice is valid and its events can be loaded.
+  defp apply_voice_action(
         %Socket{} = socket,
         %Voice{
           video: video
         } = voice,
         ack_val
       ) do
-    %Voice{
-      events: voice_events
-    } = loaded_voice = voice |> Medium.load_events()
+    loaded_voice = voice |> Medium.load_events()

     generated_artwork = %{
       src:
@@ -265,17 +264,31 @@ defmodule VyasaWeb.MediaLive.MediaBridge do
       sizes: "480x360"
     }

-    {
-      :noreply,
-      socket
-      |> assign(voice: loaded_voice)
-      |> assign(ack_val: ack_val)
-      |> assign(video: video)
-      |> assign(playback: Playback.create_playback(loaded_voice, generated_artwork))
-      |> push_event("media_bridge:registerEventsTimeline", %{
-        voice_events: voice_events |> create_events_payload()
-      })
-    }
+    playback = Playback.create_playback(loaded_voice, generated_artwork)
+
+    socket
+    |> assign(voice: loaded_voice)
+    |> assign(ack_val: ack_val)
+    |> assign(video: video)
+    |> assign(playback: playback)
+  end
+
+  defp dispatch_voice_registering_events(
+         %Socket{
+           assigns: %{
+             voice:
+               %Voice{
+                 events: voice_events
+               } = _voice,
+             playback: playback
+           }
+         } = socket
+       ) do
+    socket
+    |> push_event("media_bridge:registerEventsTimeline", %{
+      voice_events: voice_events |> create_events_payload()
+    })
+    |> push_event("media_bridge:registerPlayback", %{playback: playback})
   end

 # TODO: consolidate other hook events that need to be sent to the media bridge hook
@@ -328,11 +341,17 @@
       }
     } = socket
   ) do
-    is_duplicate_ack = ack_val === prev_ack_val
+    is_new_voice = ack_val !== prev_ack_val

     cond do
-      not is_duplicate_ack -> socket |> receive_voice_ack(voice, ack_val)
-      true -> {:noreply, socket}
+      is_new_voice ->
+        {:noreply,
+         socket
+         |> apply_voice_action(voice, ack_val)
+         |> dispatch_voice_registering_events()}
+
+      true ->
+        {:noreply, socket}
     end
   end
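
Taken together, the diff wires a new metadata path: the LiveView pushes `media_bridge:registerPlayback`, the MediaBridge hook republishes the payload over `playbackMetaBridge`, and the AudioPlayer hook loads the audio and initialises the MediaSession before the user ever presses play. `initMediaSession` itself is referenced but not shown in this diff; a plausible sketch against the standard Media Session API, with the field mapping being an assumption based on the playback shape seen above:

```js
// Hypothetical sketch of initMediaSession -- the real implementation is not
// part of this diff. It maps the playback meta onto the Media Session API so
// OS-level media controls show the right metadata.
const initMediaSession = (playback) => {
  if (!("mediaSession" in navigator)) {
    return; // API unsupported; audio playback itself is unaffected.
  }
  const { meta } = playback;
  navigator.mediaSession.metadata = new MediaMetadata({
    title: meta.title, // field names assumed from the
    artist: (meta.artists || []).join(", "), // playMedia destructuring above
    artwork: meta.artwork ? [meta.artwork] : [], // e.g. the generated_artwork
    // map from the Elixir side, which carries {src, sizes} per this diff.
  });
};
```

Registering this metadata early, independently of play/pause, is what lets the browser surface track info (and buffer the audio via `loadAudio`) as soon as a voice is acknowledged rather than on first user interaction.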