Merge pull request #101 from mzgoddard/clipless-firefox

schedule stop DECAY in the future for firefox
Eric Rosenbaum 2018-06-25 15:39:17 -04:00 committed by GitHub
commit 59ff2ad927
6 changed files with 37 additions and 30 deletions
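In short: the engine's DECAY_TIME constant is renamed to DECAY_DURATION, and a new DECAY_WAIT constant pushes every fade and scheduled stop slightly into the future, because Firefox does not smoothly apply AudioParam automation that starts at the current time (see the Bugzilla link in the first hunk below). A minimal sketch of the before/after scheduling pattern, with a hypothetical gainNode standing in for the effect nodes touched in this commit:

// Before: decay toward 0 starting at currentTime; Firefox may apply this as an abrupt, clipping jump.
gainNode.gain.setTargetAtTime(0, audioEngine.audioContext.currentTime, audioEngine.DECAY_TIME);

// After: start the decay DECAY_WAIT seconds from now, using DECAY_DURATION as the time constant.
gainNode.gain.setTargetAtTime(0, audioEngine.currentTime + audioEngine.DECAY_WAIT, audioEngine.DECAY_DURATION);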

@@ -109,10 +109,21 @@ class AudioEngine {
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/AudioParam/setTargetAtTime}
* @const {number}
*/
- get DECAY_TIME () {
+ get DECAY_DURATION () {
return 0.025;
}
+ /**
+ * Some environments cannot smoothly change parameters immediately, provide
+ * a small delay before decaying.
+ *
+ * @see {@link https://bugzilla.mozilla.org/show_bug.cgi?id=1228207}
+ * @const {number}
+ */
+ get DECAY_WAIT () {
+ return 0.05;
+ }
/**
* Get the input node.
* @return {AudioNode} - audio node that is the input for this effect
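With the values above, a scheduled stop lands DECAY_WAIT + DECAY_DURATION, i.e. roughly 0.05 + 0.025 = 0.075 seconds, after the current time (this is the pattern the SoundPlayer hunk below uses). A sketch, assuming sourceNode and audioEngine are already in scope:

const {currentTime, DECAY_WAIT, DECAY_DURATION} = audioEngine;
// Wait 0.05 s before the fade begins, then allow 0.025 s for the decay before stopping.
sourceNode.stop(currentTime + DECAY_WAIT + DECAY_DURATION);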

@@ -98,7 +98,7 @@ class SoundPlayer extends EventEmitter {
* @type {boolean}
*/
get isStarting () {
- return this.isPlaying && this.startingUntil > this.audioEngine.audioContext.currentTime;
+ return this.isPlaying && this.startingUntil > this.audioEngine.currentTime;
}
/**
@@ -271,7 +271,8 @@ class SoundPlayer extends EventEmitter {
this.isPlaying = true;
- this.startingUntil = this.audioEngine.audioContext.currentTime + this.audioEngine.DECAY_TIME;
+ const {currentTime, DECAY_DURATION} = this.audioEngine;
+ this.startingUntil = currentTime + DECAY_DURATION;
this.emit('play');
}
@@ -298,7 +299,8 @@ class SoundPlayer extends EventEmitter {
taken.finished().then(() => taken.dispose());
taken.volumeEffect.set(0);
- taken.outputNode.stop(this.audioEngine.audioContext.currentTime + this.audioEngine.DECAY_TIME);
+ const {currentTime, DECAY_WAIT, DECAY_DURATION} = this.audioEngine;
+ taken.outputNode.stop(currentTime + DECAY_WAIT + DECAY_DURATION);
}
/**

@@ -144,7 +144,7 @@ class Effect {
this.outputNode.disconnect();
}
- if (this._isPatch || this._lastPatch + this.audioEngine.DECAY_TIME < this.audioEngine.currentTime) {
+ if (this._isPatch || this._lastPatch + this.audioEngine.DECAY_DURATION < this.audioEngine.currentTime) {
this.outputNode.connect(target.getInputNode());
}

@@ -66,16 +66,9 @@ class PanEffect extends Effect {
const leftVal = Math.cos(p * Math.PI / 2);
const rightVal = Math.sin(p * Math.PI / 2);
- this.leftGain.gain.setTargetAtTime(
- leftVal,
- this.audioEngine.audioContext.currentTime,
- this.audioEngine.DECAY_TIME
- );
- this.rightGain.gain.setTargetAtTime(
- rightVal,
- this.audioEngine.audioContext.currentTime,
- this.audioEngine.DECAY_TIME
- );
+ const {currentTime, DECAY_WAIT, DECAY_DURATION} = this.audioEngine;
+ this.leftGain.gain.setTargetAtTime(leftVal, currentTime + DECAY_WAIT, DECAY_DURATION);
+ this.rightGain.gain.setTargetAtTime(rightVal, currentTime + DECAY_WAIT, DECAY_DURATION);
}
/**
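For reference, the cosine/sine pair in this hunk is an equal-power pan; taking p as the pan position normalized to the 0..1 range, the endpoints and midpoint work out as follows:

// p = 0   -> left = cos(0) = 1,       right = sin(0) = 0        (fully left)
// p = 0.5 -> left = cos(π/4) ≈ 0.707, right = sin(π/4) ≈ 0.707  (centered, equal power)
// p = 1   -> left = cos(π/2) = 0,     right = sin(π/2) = 1      (fully right)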

@@ -42,9 +42,9 @@ class VolumeEffect extends Effect {
this.value = value;
const {gain} = this.outputNode;
- const {audioContext: {currentTime}, DECAY_TIME} = this.audioEngine;
- gain.setValueAtTime(gain.value, currentTime);
- gain.linearRampToValueAtTime(value / 100, currentTime + DECAY_TIME);
+ const {currentTime, DECAY_WAIT, DECAY_DURATION} = this.audioEngine;
+ gain.setValueAtTime(gain.value, currentTime + DECAY_WAIT);
+ gain.linearRampToValueAtTime(value / 100, currentTime + DECAY_WAIT + DECAY_DURATION);
}
/**
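The setValueAtTime call here anchors the gain at its current value before the ramp; linearRampToValueAtTime interpolates from the most recent scheduled event, so without that anchor the fade could start from a stale value and jump. A minimal sketch of the same idiom, with a hypothetical gainParam and target:

const {currentTime, DECAY_WAIT, DECAY_DURATION} = audioEngine;
// Pin the current value shortly in the future, then ramp linearly to the target over DECAY_DURATION.
gainParam.setValueAtTime(gainParam.value, currentTime + DECAY_WAIT);
gainParam.linearRampToValueAtTime(target, currentTime + DECAY_WAIT + DECAY_DURATION);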

@@ -20,10 +20,10 @@ tap.test('SoundPlayer', suite => {
suite.beforeEach(() => {
audioContext = new AudioContext();
audioEngine = new AudioEngine(audioContext);
- // sound will be 0.1 seconds long
- audioContext.DECODE_AUDIO_DATA_RESULT = audioContext.createBuffer(2, 4410, 44100);
+ // sound will be 0.2 seconds long
+ audioContext.DECODE_AUDIO_DATA_RESULT = audioContext.createBuffer(2, 8820, 44100);
audioContext.DECODE_AUDIO_DATA_FAILED = false;
- const data = new Uint8Array(44100);
+ const data = new Uint8Array(0);
return audioEngine.decodeSoundPlayer({data}).then(result => {
soundPlayer = result;
});
@@ -88,13 +88,13 @@ tap.test('SoundPlayer', suite => {
inputs: [outputNode.toJSON()]
}], 'output node connects to gain node to input node');
- audioContext.$processTo(audioEngine.DECAY_TIME / 2);
+ audioContext.$processTo(audioEngine.DECAY_WAIT + audioEngine.DECAY_DURATION / 2);
const engineInputs = help.engineInputs;
t.notEqual(engineInputs[0].gain.value, 1, 'gain value should not be 1');
t.notEqual(engineInputs[0].gain.value, 0, 'gain value should not be 0');
t.equal(outputNode.$state, 'PLAYING');
- audioContext.$processTo(audioEngine.DECAY_TIME);
+ audioContext.$processTo(audioEngine.DECAY_WAIT + audioEngine.DECAY_DURATION + 0.001);
t.deepEqual(help.engineInputs, [{
name: 'GainNode',
gain: {
@@ -129,14 +129,14 @@ tap.test('SoundPlayer', suite => {
t.deepEqual(log, ['finished first', 'finished second', 'finished third'], 'finished in order');
// fast forward to one ms before decay time
- audioContext.$processTo(audioEngine.DECAY_TIME - 0.001);
+ audioContext.$processTo(audioEngine.DECAY_DURATION - 0.001);
soundPlayer.play();
t.equal(originalNode, soundPlayer.outputNode, 'same output node');
- // now at DECAY_TIME, we should meet a new player as the old one is taken/stopped
- audioContext.$processTo(audioEngine.DECAY_TIME);
+ // now at DECAY_DURATION, we should meet a new player as the old one is taken/stopped
+ audioContext.$processTo(audioEngine.DECAY_DURATION);
t.equal(soundPlayer.isStarting, false, 'player.isStarting now false');
@@ -157,7 +157,7 @@ tap.test('SoundPlayer', suite => {
const firstPlayNode = soundPlayer.outputNode;
// go past debounce time and play again
- audioContext.$processTo(audioEngine.DECAY_TIME);
+ audioContext.$processTo(audioEngine.DECAY_DURATION);
return Promise.resolve()
.then(() => {
@@ -180,18 +180,19 @@ tap.test('SoundPlayer', suite => {
t.equal(soundPlayer.outputNode.$state, 'PLAYING');
t.equal(help.engineInputs[0].gain.value, 1, 'old sound connectect to gain node with volume 1');
- audioContext.$processTo(audioContext.currentTime + 0.001);
+ const {currentTime} = audioContext;
+ audioContext.$processTo(currentTime + audioEngine.DECAY_WAIT + 0.001);
t.notEqual(help.engineInputs[0].gain.value, 1,
'old sound connected to gain node which will fade');
- audioContext.$processTo(audioContext.currentTime + audioEngine.DECAY_TIME + 0.001);
+ audioContext.$processTo(currentTime + audioEngine.DECAY_WAIT + audioEngine.DECAY_DURATION + 0.001);
t.equal(soundPlayer.outputNode.$state, 'PLAYING');
t.equal(firstPlayNode.$state, 'FINISHED');
t.equal(help.engineInputs[0].gain.value, 0, 'faded old sound to 0');
t.equal(log.length, 1);
- audioContext.$processTo(audioContext.currentTime + 0.2);
+ audioContext.$processTo(currentTime + audioEngine.DECAY_WAIT + audioEngine.DECAY_DURATION + 0.3);
// wait for a micro-task loop to fire our previous events
return Promise.resolve();