Skip to content

Commit

Permalink
Add playbackMetaBridge, loadAudio @ earliest time
Browse files Browse the repository at this point in the history
Change list:

1. reorganise bridges into hooks/mediaEventBridges; within it there's a
new playbackMetaBridge. The intent is to communicate playback info over
its own channel, keeping playback-info messages separate from action messages.

2. now, when the server does a push_event for the audio events
registration, we also send a separate event that is using this new
bridge ("media_bridge:registerPlayback"), which dispatches the necessary
client side events to register the playback, load the audio and the
mediaSession as early as possible. Previously, this happened just in
time, when the user pressed the play/pause button. This seems to have
removed the initial delay we used to have between the moment the
user clicks "play" and actual playback starting.

3. naturally, we also prevent redundant re-loads of audio now by
guarding the source-setting for the html5 audio player.

Broad TODOs:
1. consider pushing to the new playbackMetaBridge in the same intervals
as the existing heartbeat.

2. this commit only added message-passing from the MediaBridgeHook to the
AudioPlayerHook, not in the other direction.
  • Loading branch information
rtshkmr committed Aug 9, 2024
1 parent da67314 commit d627c53
Show file tree
Hide file tree
Showing 8 changed files with 314 additions and 224 deletions.
76 changes: 58 additions & 18 deletions assets/js/hooks/audio_player.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,13 @@
* general player-agnostic fashion. "Playback" and actual playback (i.e. audio or video playback) is decoupled, allowing
* us the ability to reconcile buffering states and other edge cases, mediated by the Media Bridge.
* */
import {
seekTimeBridge,
playPauseBridge,
heartbeatBridge,
playbackMetaBridge,
} from "./mediaEventBridges";

// Returns a random integer in the half-open range [min, max).
const rand = (min, max) => Math.floor(Math.random() * (max - min) + min);
// An element counts as visible when it occupies layout space
// (non-zero offsetWidth/offsetHeight) or produces at least one client rect.
const isVisible = (el) =>
  !!(el.offsetWidth || el.offsetHeight || el.getClientRects().length > 0);
Expand All @@ -22,11 +29,6 @@ let execJS = (selector, attr) => {
.forEach((el) => liveSocket.execJS(el, el.getAttribute(attr)));
};

import {
seekTimeBridge,
playPauseBridge,
heartbeatBridge,
} from "./media_bridge.js";
import { formatDisplayTime, nowMs } from "../utils/time_utils.js";

AudioPlayer = {
Expand All @@ -36,11 +38,15 @@ AudioPlayer = {
this.player = this.el.querySelector("audio");
console.log("MOUNT PING");

document.addEventListener("pointerdown", () => this.enableAudio());
this.player.addEventListener("canplay", (e) => this.handlePlayableState(e));
this.player.addEventListener("loadedmetadata", (e) =>
this.handleMetadataLoad(e),
// TO BE DEPRECATED
// document.addEventListener("pointerdown", () => this.enableAudio());

this.player.addEventListener("canplaythrough", (e) =>
this.handlePlayableState(e),
);
// this.player.addEventListener("loadedmetadata", (e) =>
// this.handleMetadataLoad(e),
// );
/// Audio playback events:
this.handleEvent("stop", () => this.stop());

Expand All @@ -53,9 +59,24 @@ AudioPlayer = {
this.handleMediaPlayPause(payload),
),
heartbeat: heartbeatBridge.sub((payload) => this.echoHeartbeat(payload)),
playbackMeta: playbackMetaBridge.sub((playback) =>
this.handlePlaybackMetaUpdate(playback),
),
};
},

/**
* Loads the audio onto the audio player and inits the MediaSession as soon as playback information is received.
* This allows the metadata and audio load to happen independently of users'
* actions that effect playback (e.g. play/pause) -- bufferring gets init a lot earlier
* as a result.
* */
handlePlaybackMetaUpdate(playback) {
console.log("TRACE: handle playback meta update:", playback);
const { meta: playbackMeta } = playback;
const { file_path: filePath } = playbackMeta;
this.loadAudio(filePath);
this.initMediaSession(playback);
},
/// Handlers for events received via the events bridge:
handleMediaPlayPause(payload) {
console.log(
Expand Down Expand Up @@ -112,23 +133,27 @@ AudioPlayer = {
handlePlayableState(e) {
console.log("TRACE HandlePlayableState", e);
const playback = JSON.parse(this?.player?.dataset?.playback);
this.initMediaSession(playback);
// this.initMediaSession(playback);
},
// DEPRECATED: the state setting already happens at the point of loading; we don't need to listen to any metadata load event now.
handleMetadataLoad(e) {
console.log("TRACE HandleMetadataLoad", e);
const playback = JSON.parse(this?.player?.dataset?.playback);
this.initMediaSession(playback);
// this.initMediaSession(playback);
},
handlePlayPause() {
console.log("{play_pause event triggerred} player:", this.player);
if (this.player.paused) {
this.play();
}
},
/**

/*
* This "init" behaviour has been mimicked from live_beats.
* It is likely there to enable the audio player bufferring.
* */
*/
// DEPRECATED: the intent of this function is no longer clear. It can be removed in a cleanup commit coming soon.
// the actual loading of audio to the audio player is already handled by loadAudio()
enableAudio() {
console.log("TRACE: enable audio");
if (this.player.src) {
Expand Down Expand Up @@ -159,18 +184,33 @@ AudioPlayer = {
console.log("TRACE @playMedia", {
player: this.player,
});
let currentSrc = this.player.src.split("?")[0];

let currentSrc = this.getCurrentSrc();
const isLoadedAndPaused =
currentSrc === filePath && !isPlaying && this.player.paused;
if (isLoadedAndPaused) {
this.play({ sync: true });
} else if (currentSrc !== filePath) {
currentSrc = filePath;
this.player.src = currentSrc;
this.loadAudio(filePath);
this.play({ sync: true });
}
},
loadAudio(src) {
const isSrcAlreadyLoaded = src === this.getCurrentSrc();
if (isSrcAlreadyLoaded) {
return;
}
this.player.src = src;
},
getCurrentSrc() {
if (!this?.player?.src) {
return null;
}
// since the html5 player's src value is typically a url string, it will have url encodings e.g. ContentType.
// Therefore, we strip away these urlencodes:
const src = this.player.src.split("?")[0];

return src;
},
play(opts = {}) {
console.log("TRACE Triggered playback, check params", {
player: this.player,
Expand Down
File renamed without changes.
17 changes: 17 additions & 0 deletions assets/js/hooks/mediaEventBridges/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
/**
 * This file contains definitions for custom event bridges and keeps
 * the exporting of these clean.
 * */

import { bridged } from "./bridged.js";

// Channel for seek/scrub messages (e.g. the progress bar dispatches a
// `seekToMs` payload here for the players to act on).
export const seekTimeBridge = bridged("seekTime");
// Channel for play/pause intents, decoupled from the concrete player.
export const playPauseBridge = bridged("playPause");
// Periodic channel used to echo current playback info (current time,
// duration) between the media bridge and the UI hooks.
export const heartbeatBridge = bridged("heartbeat");
/**
 * The playbackMetaBridge is the channel through which playback-metadata related
 * messages are passed.
 * An example would be when a voice first gets loaded/registered and we can
 * update the mediasessions api even if the user has not started the actual playback.
 * */
export const playbackMetaBridge = bridged("playback");
30 changes: 21 additions & 9 deletions assets/js/hooks/media_bridge.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@
*
* Event-handling is done using custom bridged events as a proxy.
* */
import { bridged } from "./media/bridged.js";
import { formatDisplayTime } from "../utils/time_utils.js";

// TODO: consider switching to a map of bridges to support other key events
export const seekTimeBridge = bridged("seekTime");
export const playPauseBridge = bridged("playPause");
export const heartbeatBridge = bridged("heartbeat");
import {
seekTimeBridge,
playPauseBridge,
heartbeatBridge,
playbackMetaBridge,
} from "./mediaEventBridges";

MediaBridge = {
mounted() {
Expand All @@ -27,7 +27,9 @@ MediaBridge = {
this.handleEvent("media_bridge:registerEventsTimeline", (params) =>
this.registerEventsTimeline(params),
);

this.handleEvent("media_bridge:registerPlayback", (params) =>
this.registerPlaybackInfo(params),
);
this.handleEvent("initSession", (sess) => this.initSession(sess));
// pub: external action
// this callback pubs to others
Expand Down Expand Up @@ -207,9 +209,19 @@ MediaBridge = {
},
registerEventsTimeline(params) {
console.log("Register Events Timeline", params);
this.eventsTimeline = params.voice_events;
const { voice_events } = params;
this.eventsTimeline = voice_events;
},
/**
* First registers the playback information about a playable medium (e.g. voice).
* The intent of this is to separate out tasks for interfacing with things like MediaSessions api
* from interfacing with the concrete players (e.g. play pause event on the audio player).
* */
registerPlaybackInfo(params) {
const { playback } = params;
console.log("TRACE: registerPlaybackInfo", params);
playbackMetaBridge.pub(playback);
},

/**
* Receives event pushed from the server, then pubs through the
* */
Expand Down
102 changes: 53 additions & 49 deletions assets/js/hooks/progress_bar.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,69 +2,71 @@
* Progress Bar hooks intended to sync playback related actions
* */

import {seekTimeBridge, heartbeatBridge} from "./media_bridge.js"
import { seekTimeBridge, heartbeatBridge } from "./mediaEventBridges";

ProgressBar = {
mounted() {
this.el.addEventListener("click", (e) => {
e.preventDefault();
this.handleProgressBarClick(e)
this.handleProgressBarClick(e);
});

const heartbeatDeregisterer = heartbeatBridge.sub(payload => this.handleHeartbeat(payload))
const seekTimeDeregisterer = seekTimeBridge.sub(payload => this.handleExternalSeekTime(payload))
const heartbeatDeregisterer = heartbeatBridge.sub((payload) =>
this.handleHeartbeat(payload),
);
const seekTimeDeregisterer = seekTimeBridge.sub((payload) =>
this.handleExternalSeekTime(payload),
);

this.eventBridgeDeregisterers = {
seekTime: seekTimeDeregisterer,
heartbeat: heartbeatDeregisterer,
}
};
},
handleExternalSeekTime(payload) {
console.log("[progress_bar::seekTimeBridgeSub::seekTimeHandler] this:", {payload});
const {
seekToMs: timeMs,
originator,
} = payload;
console.log("[progress_bar::seekTimeBridgeSub::seekTimeHandler] this:", {
payload,
});
const { seekToMs: timeMs, originator } = payload;

const shouldIgnoreSignal = originator === "ProgressBar";
if (shouldIgnoreSignal) {
console.info("Ignoring signal for seekTime", payload)
console.info("Ignoring signal for seekTime", payload);

return;
}

const maxTime = this.el.dataset?.max || this.maxTime
if(!maxTime) {
console.warn("Max time not available in element's state or dataset, ignoring progress bar update.")
return
const maxTime = this.el.dataset?.max || this.maxTime;
if (!maxTime) {
console.warn(
"Max time not available in element's state or dataset, ignoring progress bar update.",
);
return;
}

const playbackPercentage = (timeMs / maxTime)
const progressStyleWidth = `${(playbackPercentage * 100)}%`
const playbackPercentage = timeMs / maxTime;
const progressStyleWidth = `${playbackPercentage * 100}%`;
console.log("[DEBUG]", {
maxTime,
playbackPercentage,
})
this.setProgressBarWidth(progressStyleWidth)
});
this.setProgressBarWidth(progressStyleWidth);
},
handleHeartbeat(payload) {
console.log("[ProgressBar::handleHeartbeat]", payload)
console.log("[ProgressBar::handleHeartbeat]", payload);
const shouldIgnoreSignal = payload.originator === "MediaBridge";
if(shouldIgnoreSignal) {
if (shouldIgnoreSignal) {
return;
}
const {
currentTimeMs,
durationMs,
} = payload.currentPlaybackInfo || {};
const { currentTimeMs, durationMs } = payload.currentPlaybackInfo || {};

const playbackPercentage = (currentTimeMs / durationMs)
const progressStyleWidth = `${(playbackPercentage * 100)}%`
const playbackPercentage = currentTimeMs / durationMs;
const progressStyleWidth = `${playbackPercentage * 100}%`;
console.log("handleHeartbeat, set progress bar width", {
progressStyleWidth,
payload
})
this.setProgressBarWidth(progressStyleWidth)
payload,
});
this.setProgressBarWidth(progressStyleWidth);
},
/*
// The progress bar is measured in milliseconds,
Expand All @@ -89,24 +91,24 @@ ProgressBar = {
// })
*/
handleProgressBarClick(e) {
const { max: maxTime } = this.el.dataset
const { max: maxTime } = this.el.dataset;

if (!maxTime) {
console.log("unable to seek position, payload is incorrect")
return
console.log("unable to seek position, payload is incorrect");
return;
}

const containerNode = document.getElementById("player-progress-container")
const maxOffset = containerNode.offsetWidth
const containerNode = document.getElementById("player-progress-container");
const maxOffset = containerNode.offsetWidth;
this.maxTime = maxTime;
this.maxOffset = maxOffset;

const currXOffset = e.offsetX;
const maxPlaybackMs = Number(maxTime)
const playbackPercentage = (currXOffset / maxOffset)
const positionMs = maxPlaybackMs * playbackPercentage
const progressStyleWidth = `${(playbackPercentage * 100)}%`
this.setProgressBarWidth(progressStyleWidth)
const maxPlaybackMs = Number(maxTime);
const playbackPercentage = currXOffset / maxOffset;
const positionMs = maxPlaybackMs * playbackPercentage;
const progressStyleWidth = `${playbackPercentage * 100}%`;
this.setProgressBarWidth(progressStyleWidth);

// Optimistic update
this.el.value = positionMs;
Expand All @@ -120,23 +122,25 @@ ProgressBar = {
playbackPercentage,
maxPlaybackMs,
positionMs,
})
});

// pubs & dispatches this position
const seekTimePayload = {
seekToMs: positionMs,
originator: "ProgressBar",
}
seekTimeBridge.dispatch(this, seekTimePayload, "#media-player-container")
};
seekTimeBridge.dispatch(this, seekTimePayload, "#media-player-container");
return;
},
setProgressBarWidth(progressStyleWidth, selector="#player-progress") {
console.log("setting progress bar width:", progressStyleWidth)
const progressBarNode = document.querySelector(selector)
console.assert(!!progressBarNode, "progress bar node must always be present in the dom.")
setProgressBarWidth(progressStyleWidth, selector = "#player-progress") {
console.log("setting progress bar width:", progressStyleWidth);
const progressBarNode = document.querySelector(selector);
console.assert(
!!progressBarNode,
"progress bar node must always be present in the dom.",
);
progressBarNode.style.width = progressStyleWidth;
}
},
};


export default ProgressBar;
Loading

0 comments on commit d627c53

Please sign in to comment.