diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f730692..11f8114 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -845,6 +845,10 @@ packages: resolution: {integrity: sha512-AxrBwgV6jluoyFCTich5u1SzCS5pie3X71A9W0mijK34/KOh7ch3ruPoP8ikwrcZz3CgmCMQybp4YLTj8giX7g==} engines: {node: '>=18.2.0'} + automation-events@7.0.9: + resolution: {integrity: sha512-BvN5ynKILdG5UoONshTQu+9W1LXXtBR//OHvAjOe1XfQ1Y4muFyApjcG71alVIyVwsJLBjbh1jqbTrU22FuZEA==} + engines: {node: '>=18.2.0'} + babel-plugin-prismjs@2.1.0: resolution: {integrity: sha512-ehzSKYfeAz4U78zi/sfwsjDPlq0LvDKxNefcZTJ/iKBu+plsHsLqZhUeGf1+82LAcA35UZGbU6ksEx2Utphc/g==} peerDependencies: @@ -1898,8 +1902,8 @@ packages: standardized-audio-context-mock@9.7.8: resolution: {integrity: sha512-Mm7AQO7VavfAz76ZZt1mDRJ6oFg3tUf/qb7yBk31s1bU27nHHL+83zFNCuwj4KCI5tuUNgqGsignZhr8RCDezg==} - standardized-audio-context@25.3.76: - resolution: {integrity: sha512-JEoloo9km718O9qYpIUu3AFwZkPosbCAZL6itJnshJQCDomse1AC5QLtuMMDWfkhdHmOUpyFQFpW2EUfZHsSXA==} + standardized-audio-context@25.3.77: + resolution: {integrity: sha512-Ki9zNz6pKcC5Pi+QPjPyVsD9GwJIJWgryji0XL9cAJXMGyn+dPOf6Qik1AHei0+UNVcc4BOCa0hWLBzlwqsW/A==} std-env@3.7.0: resolution: {integrity: sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==} @@ -2005,6 +2009,9 @@ packages: tslib@2.6.3: resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==} + tslib@2.7.0: + resolution: {integrity: sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==} + type-check@0.4.0: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} @@ -3084,6 +3091,11 @@ snapshots: '@babel/runtime': 7.25.6 tslib: 2.6.3 + automation-events@7.0.9: + dependencies: + '@babel/runtime': 7.25.6 + tslib: 2.7.0 + babel-plugin-prismjs@2.1.0(prismjs@1.29.0): dependencies: prismjs: 
1.29.0 @@ -4177,15 +4189,15 @@ snapshots: '@babel/runtime': 7.25.6 automation-events: 7.0.8 sinon: 16.1.3 - standardized-audio-context: 25.3.76 + standardized-audio-context: 25.3.77 tslib: 2.6.3 vehicles: 10.0.6 - standardized-audio-context@25.3.76: + standardized-audio-context@25.3.77: dependencies: '@babel/runtime': 7.25.6 - automation-events: 7.0.8 - tslib: 2.6.3 + automation-events: 7.0.9 + tslib: 2.7.0 std-env@3.7.0: {} @@ -4264,6 +4276,8 @@ snapshots: tslib@2.6.3: {} + tslib@2.7.0: {} + type-check@0.4.0: dependencies: prelude-ls: 1.2.1 diff --git a/src/app/pages/index.ts b/src/app/pages/index.ts index b422609..7d95aff 100644 --- a/src/app/pages/index.ts +++ b/src/app/pages/index.ts @@ -4,7 +4,7 @@ import { setupPlayButton } from './play-button' const codeExample = ` import { createSound } from 'ez-web-audio' -function playSound() { +async function playSound() { // 1. Load a sound from a URL const note = await createSound('Eb5.mp3') // 2. Play the sound diff --git a/src/app/pages/play-button.ts b/src/app/pages/play-button.ts index e068e3e..52ffb25 100644 --- a/src/app/pages/play-button.ts +++ b/src/app/pages/play-button.ts @@ -15,10 +15,10 @@ export function setupPlayButton(element: HTMLButtonElement): void { // remove the setup listener element.removeEventListener('click', setup) + note.play() + // add a listener to play the note again when the button is clicked for the rest of the document's life - element.addEventListener('click', async () => { - note.play() - }) + element.addEventListener('click', () => note.play()) } element.addEventListener('click', setup) diff --git a/src/app/pages/timing/drum-machine.ts b/src/app/pages/timing/drum-machine.ts index 9a9b633..b316ff1 100644 --- a/src/app/pages/timing/drum-machine.ts +++ b/src/app/pages/timing/drum-machine.ts @@ -1,10 +1,8 @@ -import { codeBlock } from '../../utils' import nav from './nav' const Content = { setup() { - // const keys = document.querySelector('#keys') - // setupPiano(keys!) 
+ }, html: ` @@ -13,75 +11,12 @@ ${nav}

Timing

- - Note: It is not necessary to understand this concept, as Ember Audio has methods that allow you to ignore it. - I encourage you to understand it anyway. It's easy to grasp, and if you're building a rhythm/timing heavy - app as this knowledge will be very useful to you. - -

- Timing with the Web Audio API can seem tricky at first. It's unlike any other timing system native to the - browser. It's not very complex, and easy to wrap your brain around once you "get" it. + Below is an example of a drum machine that loads up three samples for each lane and allows you to program a drum beat. + The sample is automatically alternated so you never hear the same sample back-to-back.

-

- It's based on the concept of a currentTime that starts at 0 and counts it's way up in seconds (as a high-precision - Double). This currentTime starts the moment that an AudioContext has been created. -

-

If, for instance, you wanted a sound to play exactly 1 second after a user clicks a button, it could look like this:

- - ${codeBlock(` -// This is pseudo-code. The goal at this point is to get the concept across, -// not to potentially confuse you with framework-specific stuff. - -// The moment that audioContext is created, audioContext.currentTime starts counting seconds -const audioContext = new AudioContext(); - -const sound = // Create or load a sound and hook up audio inputs and outputs. -// Not important right now... -// We'll say that the result is an audio "node" that is ready to play - -function handleClick() { - // Get the current time from audioContext. - const now = audioContext.currentTime; - - // Start the sound we created up there^, adding 1 second to "now" - // The Web Audio API deals in seconds, not milliseconds - sound.start(now + 1); -} - `)} - -

Now what if we wanted to schedule the sound 5 times, each exactly 1 second apart?

- - ${codeBlock(` -// Again, I want to mention that this code will not work as-is. It's ignoring -// some other quirks of the Web Audio API. We're only focused on understanding -// timing at the moment. -const audioContext = new AudioContext(); - -const sound = // Create or load a sound and hook up audio inputs and outputs. - -function handleClick() { - const now = audioContext.currentTime; - for (let i = 0; i < 5; i++) { - sound.start(now + i); - } -} - `)} - -

- As you can see, as far as an AudioContext is concerned, the moment that it is created is "the beginning of time" - and scheduling events is achieved by specifying an exact moment in time. sound.start(100) would play the sound - exactly 100 seconds after the AudioContext was created, regardless of what time sound.start(100) was called. - If sound.start(100) is called after 100 seconds has already passed since "the beginning of time," the sound - will play immediately. -

- - Again, this is an important concept to understand, but in many cases (even more complex cases, such as - building a rhythmically-based instrument) this is already handled f - or you. Check out Beats, or the very last example on this page. -
`, } diff --git a/src/beat-track.test.ts b/src/beat-track.test.ts new file mode 100644 index 0000000..4c00cc4 --- /dev/null +++ b/src/beat-track.test.ts @@ -0,0 +1,48 @@ +import { expect, it } from 'vitest' +import { AudioContext as Mock } from 'standardized-audio-context-mock' +import type { Playable } from './interfaces/playable' +import type { Connectable } from './interfaces/connectable' +import { BeatTrack } from '@/beat-track' + +function createBeatTrack() { + const context = new Mock() as unknown as AudioContext + const sounds: (Playable & Connectable)[] = [] + return new BeatTrack(context, sounds) +} + +it('exists', () => { + expect(BeatTrack).toBeTruthy() +}) + +it('can be created', () => { + const track = createBeatTrack() + expect(track).toBeTruthy() +}) + +it(`remembers beats' 'active' state when numBeats changes`, () => { + const beatTrack = createBeatTrack() + let [beat1, beat2, beat3] = beatTrack.beats + + beat1.active = true + beat3.active = true + + beatTrack.numBeats = 6 + + beat1 = beatTrack.beats[0] + beat2 = beatTrack.beats[1] + beat3 = beatTrack.beats[2] + + expect(beat1.active).toBe(true) + expect(beat2.active).toBe(false) + expect(beat3.active).toBe(true) + + beatTrack.numBeats = 4 + + beat1 = beatTrack.beats[0] + beat2 = beatTrack.beats[1] + beat3 = beatTrack.beats[2] + + expect(beat1.active).toBe(true) + expect(beat2.active).toBe(false) + expect(beat3.active).toBe(true) +}) diff --git a/src/beat-track.ts b/src/beat-track.ts new file mode 100644 index 0000000..a5ef26a --- /dev/null +++ b/src/beat-track.ts @@ -0,0 +1,133 @@ +import { Beat } from './beat' +import type { Connectable } from './interfaces/connectable' +import type { Playable } from './interfaces/playable' +import { Sampler } from './sampler' + +const beatBank = new WeakMap() + +/** + * An instance of this class has an array of "sounds" (comprised of one or multiple + * audio sources, if multiple are provided, they are played in a round-robin fashion) + * and provides methods to 
play that sound repeatedly, mixed with "rests," in a + * rhythmic way. An instance of this class behaves very similarly to a "lane" on a drum machine. + * + * @class BeatTrack + * @extends Sampler + * + * @todo need a way to stop a BeatTrack once it's started. Maybe by creating + * the times in advance and not calling play until it's the next beat in the + * queue? + */ +export class BeatTrack extends Sampler { + constructor(private audioContext: AudioContext, sounds: (Playable & Connectable)[], opts?: { numBeats?: number, duration?: number }) { + super(sounds) + if (opts?.numBeats) { + this.numBeats = opts.numBeats + } + if (opts?.duration) { + this.duration = opts.duration + } + } + + /** + * @property numBeats + * + * Determines the number of beats in a BeatTrack instance. + */ + public numBeats = 4 + + /** + * @property duration + * + * If specified, determines the length of time, in milliseconds, before isPlaying + * and currentTimeIsPlaying are automatically switched back to false after + * having been switched to true for each beat. 100ms is used by default. + * + * @default 100 + */ + public duration = 100 + + /** + * @property beats + * + * Computed property. An array of Beat instances. The number of Beat instances + * in the array matches the `numBeats` property (note: the array does not currently shrink if `numBeats` decreases). If `numBeats` + * or duration changes, this property will be recomputed, but any beats that + * previously existed are reused so that they will maintain their `active` + * state. 
+ */ + public get beats(): Beat[] { + let beats = [] + let numBeats = this.numBeats + let existingBeats + + if (beatBank.has(this)) { + existingBeats = beatBank.get(this) + numBeats = numBeats - existingBeats.length + } + + for (let i = 0; i < numBeats; i++) { + const beat = new Beat(this.audioContext, { + duration: this.duration, + playIn: this.playIn.bind(this), + play: this.play.bind(this), + }) + + beats.push(beat) + } + + if (existingBeats) { + beats = existingBeats.concat(beats) + } + + beatBank.set(this, beats) + + return beats + } + + /** + * @method playBeats + * + * Calls play on all Beat instances in the beats array. + * + * @param {number} bpm The tempo at which the beats should be played. + * @param noteType {number} The (rhythmic) length of each beat. Fractions + * are suggested here so that it's easy to reason about. For example, for + * eighth notes, pass in `1/8`. + */ + public playBeats(bpm: number, noteType: number): void { + this.callPlayMethodOnBeats('playIn', bpm, noteType) + } + + /** + * @method playActiveBeats + * + * Calls play on `active` Beat instances in the beats array. Any beat that + * is not marked active is effectively a "rest". + * + * @param {number} bpm The tempo at which the beats and rests should be played. + * @param noteType {number} The (rhythmic) length of each beat/rest. Fractions + * are suggested here so that it's easy to reason about. For example, for + * eighth notes, pass in `1/8`. + */ + public playActiveBeats(bpm: number, noteType: number): void { + this.callPlayMethodOnBeats('ifActivePlayIn', bpm, noteType) + } + + /** + * @method callPlayMethodOnBeats + * + * The underlying method behind playBeats and playActiveBeats. + * + * @param {string} method The method that should be called on each beat. + * @param {number} bpm The tempo that should be used to calculate the length + * of a beat/rest. 
+ * @param noteType {number} The (rhythmic) length of each beat/rest that should + * be used to calculate the length of a beat/rest in seconds. + */ + private callPlayMethodOnBeats(method: 'ifActivePlayIn' | 'playIn', bpm: number, noteType: number = 1 / 4): void { + // http://bradthemad.org/guitar/tempo_explanation.php + const duration = (240 * noteType) / bpm + this.beats.forEach((beat, idx) => beat[method](idx * duration)) + } +} diff --git a/src/beat.test.ts b/src/beat.test.ts new file mode 100644 index 0000000..2118977 --- /dev/null +++ b/src/beat.test.ts @@ -0,0 +1,78 @@ +import { expect, it } from 'vitest' +import { AudioContext as Mock } from 'standardized-audio-context-mock' +import { mockSetTimeout, settle } from './test/helpers' +import type { BeatOptions } from '@/beat' +import { Beat } from '@/beat' + +class MockParentClass { + playCalled = false + playInCalled = false + playInValue = 0 + play() { + this.playCalled = true + } + + playIn(time: number) { + this.playInCalled = true + this.playInValue = time + } +} + +function createBeat(p: BeatOptions) { + const context = new Mock() as unknown as AudioContext + return new Beat(context, p) +} + +it('exists', () => { + expect(Beat).toBeTruthy() +}) + +it('can be created', () => { + const parent = new MockParentClass() + const beat = createBeat(parent) + expect(beat).toBeTruthy() +}) + +it('can play and calls parent play', () => { + const parent = new MockParentClass() + const beat = createBeat({ + play: parent.play.bind(parent), + playIn: parent.playIn.bind(parent), + }) + + expect(parent.playCalled).toBe(false) + beat.play() + expect(parent.playCalled).toBe(true) +}) + +it('can playIn and calls parent playIn, passing time value', () => { + const parent = new MockParentClass() + const beat = createBeat({ + play: parent.play.bind(parent), + playIn: parent.playIn.bind(parent), + }) + + expect(parent.playCalled).toBe(false) + beat.playIn(10) + expect(parent.playInCalled).toBe(true) + 
expect(parent.playInValue).toBe(10) +}) + +it('sets `isPlaying` to `true` when played and sets up a timer that sets `isPlaying` back to false after `duration` has elapsed.', async () => { + const parent = new MockParentClass() + const beat = createBeat({ + play: parent.play.bind(parent), + playIn: parent.playIn.bind(parent), + duration: 1, + // mock setTimeout so we can control when it fires + setTimeout: mockSetTimeout, + }) + + expect(beat.isPlaying).toBe(false) + + beat.play() + + expect(beat.isPlaying).toBe(true) + + expect(await settle(() => beat.isPlaying)).toBe(false) +}) diff --git a/src/beat.ts b/src/beat.ts new file mode 100644 index 0000000..c8f5001 --- /dev/null +++ b/src/beat.ts @@ -0,0 +1,196 @@ +import audioContextAwareTimeout from './utils/timeout' + +/** + * This class represents a single "beat" for a rhythmic instrument. An instance of this + * class can be set to `active` or not to facilitate the way that most drum + * machines work (when a beat is not `active`, the time that it occupies still + * exists, but it does not cause audio to play, effectively resulting in a + * "rest"). It provides properties that track when it is played, and when a "rest" + * is played in its place. + * + * This class does not have the ability to create audio on its own and is + * expected to be a "child" of one of the Sound classes. See its implementation in + * {{#crossLink "BeatTrack"}}BeatTrack{{/crossLink}} for an example. + * + * // Cannot play audio on its own. + * // Must pass in play and/or playIn from a parent class. 
+ * new Beat(audioContext, { + * playIn: parent.playIn.bind(parent), + * play: parent.play.bind(parent), + * }) + * + * @class Beat + * @todo add playAt + */ +export interface BeatOptions { + duration?: number + playIn: (time: number) => void + play: () => void + setTimeout?: (fn: () => void, delayMillis: number) => number +} + +export class Beat { + constructor(audioContext: AudioContext, opts: BeatOptions) { + this.parentPlayIn = opts.playIn + this.parentPlay = opts.play + this.duration = opts.duration || 100 + + if (opts.setTimeout) { + this.setTimeout = opts.setTimeout + } + else { + const { setTimeout } = audioContextAwareTimeout(audioContext) + this.setTimeout = setTimeout + } + } + + private parentPlayIn: ((time: number) => void) | undefined + private parentPlay: (() => void) | undefined + private setTimeout: (fn: () => void, delayMillis: number) => number + + /** + * @property active + * + * If `active` is `true`, all methods of play will cause this instance to play. + * If `active` is `false`, the `playIfActive()` and `ifActivePlayIn()` + * methods will treat this instance as a rest (a timed period of silence). + */ + public active = false + + /** + * @property currentTimeIsPlaying + * + * Whether a Beat instance is currently playing, considering both active and + * inactive beats (rests). When switched to `true`, is automatically returned + * to false after the time specified by the duration property. + * + * @default false + */ + public currentTimeIsPlaying = false + + /** + * @property isPlaying + * + * Whether a Beat instance is currently playing, considering only active beats. + * When switched to `true`, is automatically returned to false after the time + * specified by the duration property. 
+ * + * @default false + */ + public isPlaying = false + + /** + * @property duration + * + * If specified, Determines length of time, in milliseconds, before isPlaying + * and currentTimeIsPlaying are automatically switched back to false after + * having been switched to true. 100ms is used by default. + * + * @default 100 + */ + public duration: number + + /** + * @method playIn + * + * Calls it's parent's `playIn()` method directly to play the beat in + * `${offset}` seconds. + * + * isPlaying and currentTimeIsPlaying are both marked true after the provided + * offset has elapsed. + * + * @param {number} offset Number of seconds from "now" that the audio should + * play. + */ + public playIn(offset = 0): void { + const msOffset = offset * 1000 + + this.parentPlayIn!(offset) + + this.setTimeout(() => { + this.isPlaying = true + this.currentTimeIsPlaying = true + }, msOffset) + } + + /** + * @method ifActivePlayIn + * + * If the beat is marked `active`, calls it's parent's `playIn()` method + * directly to play the beat in `${offset}` seconds. + * + * If active, `isPlaying` is marked true after the provided offset has elapsed. + * + * `currentTimeIsPlaying` is marked true after the provided offset has elapsed, + * even if beat is not active. + * + * @param {number} offset Number of seconds from "now" that the audio should + * play. + */ + public ifActivePlayIn(offset = 0): void { + const msOffset = offset * 1000 + + if (this.active) { + this.parentPlayIn!(offset) + this.setTimeout(() => this.markPlaying(), msOffset) + } + + this.setTimeout(() => this.markCurrentTimePlaying(), msOffset) + } + + /** + * @method play + * + * Calls it's parent's `play()` method directly to play the beat immediately. + * + * `isPlaying` and `currentTimeIsPlaying` are both immediately marked true. 
+ */ + public play(): void { + this.parentPlay!() + this.markPlaying() + this.markCurrentTimePlaying() + } + + /** + * @method playIfActive + * + * If `active`, calls it's parent's `play()` method directly to play the beat + * immediately. + * + * If `active`, `isPlaying` is immediately marked true. + * + * `currentTimeIsPlaying` is immediately marked true, even if beat is not `active`. + */ + public playIfActive(): void { + if (this.active) { + this.parentPlay!() + this.markPlaying() + } + + this.markCurrentTimePlaying() + } + + /** + * @method markPlaying + * + * Sets `isPlaying` to `true` and sets up a timer that sets `isPlaying` back + * to false after `duration` has elapsed. + */ + private markPlaying(): void { + this.isPlaying = true + this.setTimeout(() => this.isPlaying = false, this.duration) + } + + /** + * @method markCurrentTimePlaying + * + * Sets `currentTimeIsPlaying` to `true` and sets up a timer that sets + * `currentTimeIsPlaying` back to false after `duration` has elapsed. 
+ */ + private markCurrentTimePlaying(): void { + this.currentTimeIsPlaying = true + this.setTimeout(() => this.currentTimeIsPlaying = false, this.duration) + } +} + +export default Beat diff --git a/src/interfaces/playable.ts b/src/interfaces/playable.ts index bf3b193..d51d3d8 100644 --- a/src/interfaces/playable.ts +++ b/src/interfaces/playable.ts @@ -3,12 +3,13 @@ import type { ControlType, RampType } from '@controllers/base-param-controller' export interface Playable { play: () => void - // playIn: (when: number) => void + playAt: (time: number) => void + playIn: (when: number) => void playFor: (duration: number) => void - // playInAndStopAfter: (playIn: number, stopAfter: number) => void + playInAndStopAfter: (playIn: number, stopAfter: number) => void stop: () => void - // stopIn: (seconds: number) => void - // stopAt: (time: number) => void + stopIn: (seconds: number) => void + stopAt: (time: number) => void // stopAfter: (duration: number) => void isPlaying: boolean duration: TimeObject diff --git a/src/oscillator.ts b/src/oscillator.ts index 67508f2..42d9007 100644 --- a/src/oscillator.ts +++ b/src/oscillator.ts @@ -75,11 +75,11 @@ export class Oscillator implements Playable, Connectable { }) } - onPlaySet(type: ControlType): { to: (value: number) => { at: (time: number) => void, endingAt: (time: number, rampType?: RampType) => void } } { + public onPlaySet(type: ControlType): { to: (value: number) => { at: (time: number) => void, endingAt: (time: number, rampType?: RampType) => void } } { return this.controller.onPlaySet(type) } - onPlayRamp(type: ControlType, rampType?: RampType): { from: (startValue: number) => { to: (endValue: number) => { in: (endTime: number) => void } } } { + public onPlayRamp(type: ControlType, rampType?: RampType): { from: (startValue: number) => { to: (endValue: number) => { in: (endTime: number) => void } } } { return this.controller.onPlayRamp(type, rampType) } @@ -103,7 +103,7 @@ export class Oscillator implements Playable, 
Connectable { this.controller.setValuesAtTimes() } - wireConnections(): void { + private wireConnections(): void { // always start with the audio source const nodes: AudioNode[] = [this.oscillator] const { connections, filters, pannerNode } = this @@ -130,12 +130,12 @@ export class Oscillator implements Playable, Connectable { pannerNode.connect(this.audioContext.destination) } - addConnection(connection: Connection): void { + public addConnection(connection: Connection): void { this.connections.push(connection) this.wireConnections() } - removeConnection(name: string): void { + public removeConnection(name: string): void { const connection = this.getConnection(name) if (connection) { const index = this.connections.indexOf(connection) @@ -146,21 +146,21 @@ export class Oscillator implements Playable, Connectable { } } - getConnection(name: string): Connection | undefined { + public getConnection(name: string): Connection | undefined { return this.connections.find(c => c.name === name) } - getNodeFrom(connectionName: string): T | undefined { + public getNodeFrom(connectionName: string): T | undefined { return this.getConnection(connectionName)?.audioNode as T | undefined } - get audioSourceNode(): OscillatorNode { + public get audioSourceNode(): OscillatorNode { return this.oscillator } // convenience method, equivalent longer form would be // osc.controller.update(type).to(value).from('ratio') - update(type: ControlType): { + public update(type: ControlType): { to: (value: number) => { from: (method: RatioType) => void } @@ -170,28 +170,28 @@ export class Oscillator implements Playable, Connectable { // convenience method, equivalent longer form would be // osc.update('pan').to(value).from('ratio') - changePanTo(value: number): void { + public changePanTo(value: number): void { this.controller.update('pan').to(value).from('ratio') } // convenience method, equivalent longer form would be // osc.update('gain').to(value).from('ratio') - changeGainTo(value: number): 
void { + public changeGainTo(value: number): void { this.controller.update('gain').to(value).from('ratio') } - play(): void { + public play(): void { this.playAt(this.audioContext.currentTime) } - playFor(duration: number): void { + public playFor(duration: number): void { const { setTimeout } = audioContextAwareTimeout(this.audioContext) this.playAt(this.audioContext.currentTime) setTimeout(() => this.stop(), duration * 1000) } // playAt is the underlying play method behind all play methods - async playAt(time: number): Promise { + public async playAt(time: number): Promise { const { audioContext } = this const { currentTime } = audioContext const { setTimeout } = audioContextAwareTimeout(audioContext) @@ -211,18 +211,18 @@ export class Oscillator implements Playable, Connectable { } } - stop(): void { + public stop(): void { this._isPlaying = false this.oscillator.stop() } private _isPlaying = false - get isPlaying(): boolean { + public get isPlaying(): boolean { return this._isPlaying } // TODO: implement duration... can I? I think duration is too dynamic? any way to infer from asdr? or if there is a sheduled stop? 
- get duration(): TimeObject { + public get duration(): TimeObject { return createTimeObject(0, 0, 0) } diff --git a/src/sampler.ts b/src/sampler.ts index d9c35ca..2603ef4 100644 --- a/src/sampler.ts +++ b/src/sampler.ts @@ -1,4 +1,5 @@ -import type { Sound } from './sound' +import type { Connectable } from './interfaces/connectable' +import type { Playable } from './interfaces/playable' /** * An instance of the Sampler class behaves just like a Sound, but allows @@ -13,8 +14,8 @@ import type { Sound } from './sound' * @todo loop */ export class Sampler { - constructor(sounds: Sound[]) { - this.sounds = new Set(sounds) + constructor(sounds: (Playable & Connectable)[]) { + this.sounds = new Set(sounds) this._soundIterator = sounds.values() } @@ -36,7 +37,7 @@ export class Sampler { * This iterable is meant to be replaced with a new copy every time it reaches * it's end, resulting in an infinite stream of Sound instances. */ - private _soundIterator: Iterator + private _soundIterator: Iterator /** * @property sounds @@ -44,7 +45,7 @@ export class Sampler { * that uses {{#crossLink "Playable"}}{{/crossLink}}. If not set on * instantiation, automatically set to `new Set()` via `_initSounds`. */ - sounds: Set + sounds: Set /** * Gets the next audio source and plays it immediately. 
@@ -92,7 +93,7 @@ export class Sampler { * @method _getNextSound * @return {Sound} */ - _getNextSound(): Sound { + _getNextSound(): Playable & Connectable { let soundIterator = this._soundIterator let nextSound @@ -116,7 +117,7 @@ export class Sampler { * @method _setGainAndPan * @return {Sound} The input sound after having it's gain and pan set */ - _setGainAndPan(sound: Sound): Sound { + _setGainAndPan(sound: Playable & Connectable): Playable & Connectable { // sound.changeGainTo(this.gain).from('ratio') sound.changePanTo(this.pan) diff --git a/src/sound.test.ts b/src/sound.test.ts index e5880f2..6d3e7f5 100644 --- a/src/sound.test.ts +++ b/src/sound.test.ts @@ -1,5 +1,6 @@ import { expect, it } from 'vitest' import { AudioContext as Mock } from 'standardized-audio-context-mock' +import { settle } from './test/helpers' import { Sound } from '@/sound' function createSound() { @@ -17,17 +18,17 @@ it('can be created', () => { expect(sound).toBeTruthy() }) -it('plays', () => { +it('plays', async () => { const sound = createSound() - expect(sound.isPlaying).toBeFalsy() + expect(sound.isPlaying).toBe(false) sound.play() - expect(sound.isPlaying).toBeTruthy() + expect(await settle(() => sound.isPlaying)).toBe(true) }) -it('stops', () => { +it('stops', async () => { const sound = createSound() sound.play() - expect(sound.isPlaying).toBeTruthy() + expect(await settle(() => sound.isPlaying)).toBe(true) sound.stop() - expect(sound.isPlaying).toBeFalsy() + expect(await settle(() => sound.isPlaying)).toBe(false) }) diff --git a/src/sound.ts b/src/sound.ts index 1c89060..2cc0d63 100644 --- a/src/sound.ts +++ b/src/sound.ts @@ -24,37 +24,45 @@ import { SoundController } from './controllers/sound-controller' export class Sound implements Playable, Connectable { private gainNode: GainNode private pannerNode: StereoPannerNode - private bufferSourceNode: AudioBufferSourceNode private controller: ParamController - public _isPlaying: boolean = false + private _isPlaying: boolean = 
false + private setTimeout: (fn: () => void, delayMillis: number) => number protected _startedPlayingAt: number = 0 + public audioSourceNode: AudioBufferSourceNode public startOffset: number = 0 public connections: Connection[] = [] - constructor(protected audioContext: AudioContext, private audioBuffer: AudioBuffer) { - const bufferSourceNode = this.audioContext.createBufferSource() + constructor(protected audioContext: AudioContext, private audioBuffer: AudioBuffer, opts?: any) { + const audioSourceNode = this.audioContext.createBufferSource() const gainNode = audioContext.createGain() const pannerNode = audioContext.createStereoPanner() this.gainNode = gainNode this.pannerNode = pannerNode - this.bufferSourceNode = bufferSourceNode + this.audioSourceNode = audioSourceNode this.audioBuffer = audioBuffer - bufferSourceNode.buffer = audioBuffer + audioSourceNode.buffer = audioBuffer - this.controller = new SoundController(bufferSourceNode, gainNode, pannerNode) + this.controller = new SoundController(audioSourceNode, gainNode, pannerNode) + + if (opts?.setTimeout) { + this.setTimeout = opts.setTimeout + } + else { + this.setTimeout = audioContextAwareTimeout(audioContext).setTimeout + } } private setup(): void { - const bufferSourceNode = this.audioContext.createBufferSource() - bufferSourceNode.buffer = this.audioBuffer - this.bufferSourceNode = bufferSourceNode + const audioSourceNode = this.audioContext.createBufferSource() + audioSourceNode.buffer = this.audioBuffer + this.audioSourceNode = audioSourceNode this.wireConnections() this.controller.setValuesAtTimes() } - public wireConnections(): void { + private wireConnections(): void { // always start with the audio source - const nodes: AudioNode[] = [this.bufferSourceNode] + const nodes: AudioNode[] = [this.audioSourceNode] const { connections, pannerNode } = this // add the nodes from nodes property @@ -74,12 +82,12 @@ export class Sound implements Playable, Connectable { 
pannerNode.connect(this.audioContext.destination) } - addConnection(connection: Connection): void { + public addConnection(connection: Connection): void { this.connections.push(connection) this.wireConnections() } - removeConnection(name: string): void { + public removeConnection(name: string): void { const connection = this.getConnection(name) if (connection) { const index = this.connections.indexOf(connection) @@ -91,20 +99,16 @@ export class Sound implements Playable, Connectable { } // Allows you to get any user created connection in the connections array - getConnection(name: string): Connection | undefined { + public getConnection(name: string): Connection | undefined { return this.connections.find(c => c.name === name) } // Allows you to get node from any user created connection in the connections array - getNodeFrom(connectionName: string): T | undefined { + public getNodeFrom(connectionName: string): T | undefined { return this.getConnection(connectionName)?.audioNode as T | undefined } - get audioSourceNode(): AudioBufferSourceNode { - return this.bufferSourceNode - } - - update(type: ControlType): { + public update(type: ControlType): { to: (value: number) => { from: (method: RatioType) => void } @@ -112,17 +116,17 @@ export class Sound implements Playable, Connectable { return this.controller.update(type) } - changePanTo(value: number): void { + public changePanTo(value: number): void { this.controller.update('pan').to(value).from('ratio') } - changeGainTo(value: number): { + public changeGainTo(value: number): { from: (method: RatioType) => void } { return this.controller.update('gain').to(value) } - onPlaySet(type: ControlType): { + public onPlaySet(type: ControlType): { to: (value: number) => { at: (time: number) => void endingAt: (time: number, rampType?: RampType) => void @@ -131,7 +135,7 @@ export class Sound implements Playable, Connectable { return this.controller.onPlaySet(type) } - onPlayRamp(type: ControlType, rampType?: RampType): { + public 
onPlayRamp(type: ControlType, rampType?: RampType): { from: (startValue: number) => { to: (endValue: number) => { in: (endTime: number) => void @@ -141,41 +145,113 @@ export class Sound implements Playable, Connectable { return this.controller.onPlayRamp(type, rampType) } - play(): void { + public play(): void { this.playAt(this.audioContext.currentTime) } - playFor(duration: number): void { - const { setTimeout } = audioContextAwareTimeout(this.audioContext) + public playIn(when: number): void { + this.playAt(this.audioContext.currentTime + when) + } + + public playFor(duration: number): void { this.playAt(this.audioContext.currentTime) - setTimeout(() => this.stop(), duration * 1000) + this.setTimeout(() => this.stop(), duration * 1000) } - async playAt(time: number): Promise { + /** + * Starts playing the audio source after `playIn` seconds have elapsed, then + * stops the audio source `stopAfter` seconds after it started playing. + * + * @public + * @method playInAndStopAfter + * + * @param {number} playIn Number of seconds from "now" that the audio source + * should play. + * + * @param {number} stopAfter Number of seconds from when the audio source + * started playing that the audio source should be stopped. + */ + public playInAndStopAfter(playIn: number, stopAfter: number): void { + this.playIn(playIn) + this.stopIn(playIn + stopAfter) + } + + /** + * The underlying method that backs all of the `play` methods. Plays the audio source at + * the specified moment in time. A "moment in time" is measured in seconds from the moment + * that the {{#crossLink "AudioContext"}}{{/crossLink}} was instantiated. + * + * @param {number} time The moment in time (in seconds, relative to the + * {{#crossLink "AudioContext"}}AudioContext's{{/crossLink}} "beginning of + * time") when the audio source should be played. 
+ * + * @method playAt + */ + public async playAt(time: number): Promise { const { audioContext } = this const { currentTime } = audioContext - const { setTimeout } = audioContextAwareTimeout(audioContext) await audioContext.resume() this.setup() - this.bufferSourceNode.start(time, this.startOffset) + this.audioSourceNode.start(time, this.startOffset) this._startedPlayingAt = time // schedule _isPlaying to false after duration - setTimeout(() => this._isPlaying = false, this.duration.pojo.seconds * 1000) + this.setTimeout(() => this._isPlaying = false, this.duration.pojo.seconds * 1000) if (time <= currentTime) { this._isPlaying = true } else { - setTimeout(() => { + this.setTimeout(() => { this._isPlaying = true }, (time - currentTime) * 1000) } } - stop(): void { - this.bufferSourceNode.stop() + /** + * Stops the audio source after specified seconds have elapsed. + * + * @public + * @method stopIn + * + * @param {number} seconds Number of seconds from "now" that the audio source + * should be stopped. + */ + public stopIn(seconds: number): void { + this.stopAt(this.audioContext.currentTime + seconds) + } + + /** + * The underlying method that backs all of the `stop` methods. Stops sound and + * set `isPlaying` to false at specified time. + * + * Functionally equivalent to the `stopAt` method. + * + * @method stopAt + * + * @param {number} stopAt The moment in time (in seconds, relative to the + * {{#crossLink "AudioContext"}}AudioContext's{{/crossLink}} "beginning of + * time") when the audio source should be stopped. 
+ */ + public stopAt(stopAt: number): void { + const node = this.audioSourceNode + const currentTime = this.audioContext.currentTime + + if (node) { + node.stop(stopAt) + } + + if (stopAt === currentTime) { + this._isPlaying = false + } + else { + this.setTimeout(() => this._isPlaying = false, (stopAt - currentTime) * 1000) + } + } + + public stop(): void { + this.audioSourceNode.stop() this._isPlaying = false } @@ -184,7 +260,7 @@ export class Sound implements Playable, Connectable { } public get duration(): TimeObject { - const buffer = this.bufferSourceNode.buffer + const buffer = this.audioSourceNode.buffer if (buffer === null) return createTimeObject(0, 0, 0) const { duration } = buffer @@ -214,7 +290,7 @@ export class Sound implements Playable, Connectable { * * @param {number} amount The new play position value. */ - seek(amount: number): { from: (type: SeekType) => void } { + public seek(amount: number): { from: (type: SeekType) => void } { const duration = this.duration.raw const moveToOffset = (offset: number): void => { diff --git a/src/test/helpers/index.ts b/src/test/helpers/index.ts new file mode 100644 index 0000000..22b4a1c --- /dev/null +++ b/src/test/helpers/index.ts @@ -0,0 +1,20 @@ +/** + * @param valueFn the function to perform in order to derive the value. Promise resolves to the return value of this function after `wait` has elapsed + * @param wait the amount of time to wait before resolving the promise. Defaults to 2ms. 
+ * + * @example + * const value = await settle(() => someFunctionThatReturnsAValue()) + * // value is now the return value of someFunctionThatReturnsAValue(), but it waited 2ms before checking + * + * @example + * const value = await settle(() => someFunctionThatReturnsAValue(), 1000) + * // value is now the return value of someFunctionThatReturnsAValue(), but it waited 1s before checking + */ +export function settle(valueFn: () => any, wait: number = 2): Promise { + return new Promise(resolve => setTimeout(() => resolve(valueFn()), wait)) +} + +export function mockSetTimeout(fn: () => void, time: number = 1): number { + setTimeout(fn, time) + return time +} diff --git a/src/utils/timeout.ts b/src/utils/timeout.ts index 62f67cc..31c76ca 100644 --- a/src/utils/timeout.ts +++ b/src/utils/timeout.ts @@ -1,3 +1,5 @@ +import type { AudioContext as AudioContextMock } from 'standardized-audio-context-mock' + interface Task { id: number due: number @@ -5,10 +7,20 @@ interface Task { } // AudioContext-aware and AudioContext-precise setTimeout and clearTimeout. -export default function audioContextAwareTimeout(audioContext: AudioContext | BaseAudioContext): { +export default function audioContextAwareTimeout(audioContext: AudioContext | BaseAudioContext | AudioContextMock): { setTimeout: (fn: () => void, delayMillis: number) => number clearTimeout: (id: number) => void } { + if (!audioContext) { + console.warn(`ez-web-audio: AudioContext was not available when an entity was created and timing tasks will therefore use javascript native \ +setTimeout instead of AudioContext-aware versions. Please ensure to await initAudio before instantiating any timing-sensitive entities. 
If your application \ +is behaving as you'd hope, you can safely ignore this message.`) + return { + setTimeout: window.setTimeout.bind(window), + clearTimeout: window.clearTimeout.bind(window), + } + } + let tasks: Task[] = [] let nextTaskId = 1 @@ -53,3 +65,10 @@ export default function audioContextAwareTimeout(audioContext: AudioContext | Ba }, } } + +// export function audioContextAwareInterval(audioContext: AudioContext | BaseAudioContext): { +// setInterval: (fn: () => void, delayMillis: number) => number +// clearInterval: (id: number) => void +// } { + +// }