Mirror of https://github.com/scratchfoundation/scratch-audio.git (synced 2025-01-03 11:35:49 -05:00)

Commit 444aba7f76: 9 changed files with 312 additions and 45 deletions
@@ -33,8 +33,9 @@
     "babel-preset-env": "^1.6.1",
     "eslint": "^3.19.0",
     "eslint-config-scratch": "^3.1.0",
+    "tap": "^12.0.1",
+    "web-audio-test-api": "^0.5.2",
     "webpack": "^4.8.0",
-    "webpack-cli": "^2.0.15",
-    "tap": "^12.0.1"
+    "webpack-cli": "^2.0.15"
   }
 }
@@ -1,4 +1,4 @@
-const StartAudioContext = require('startaudiocontext');
+const StartAudioContext = require('./StartAudioContext');
 const AudioContext = require('audio-context');
 
 const log = require('./log');
@@ -35,13 +35,13 @@ const decodeAudioData = function (audioContext, buffer) {
  * sprites.
  */
 class AudioEngine {
-    constructor () {
+    constructor (audioContext = new AudioContext()) {
         /**
          * AudioContext to play and manipulate sounds with a graph of source
          * and effect nodes.
          * @type {AudioContext}
          */
-        this.audioContext = new AudioContext();
+        this.audioContext = audioContext;
         StartAudioContext(this.audioContext);
 
         /**
@@ -65,6 +65,14 @@ class AudioEngine {
         this.loudness = null;
     }
 
+    /**
+     * Current time in the AudioEngine.
+     * @type {number}
+     */
+    get currentTime () {
+        return this.audioContext.currentTime;
+    }
+
     /**
      * Names of the audio effects.
      * @enum {string}
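Note: the injectable AudioContext and the new currentTime getter are what let the unit tests added below drive the engine with web-audio-test-api instead of a real browser context. A minimal sketch of that usage, following the pattern in the new test/AudioEngine.js (the require path is illustrative):

    // Sketch only: mirrors the setup used by the new tests (web-audio-test-api is a devDependency above).
    const {AudioContext} = require('web-audio-test-api');
    const AudioEngine = require('./src/AudioEngine');

    const audioEngine = new AudioEngine(new AudioContext());
    // currentTime proxies audioContext.currentTime; with the test API it advances via $processTo().
    console.log(audioEngine.currentTime);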
@@ -27,12 +27,18 @@ class SoundPlayer extends EventEmitter {
         this.buffer = buffer;
 
         this.outputNode = null;
+        this.volumeEffect = null;
         this.target = null;
 
         this.initialized = false;
         this.isPlaying = false;
         this.startingUntil = 0;
         this.playbackRate = 1;
+
+        // handleEvent is a EventTarget api for the DOM, however the web-audio-test-api we use
+        // uses an addEventListener that isn't compatable with object and requires us to pass
+        // this bound function instead
+        this.handleEvent = this.handleEvent.bind(this);
     }
 
     /**
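For context on the bound handleEvent above: DOM addEventListener accepts either a listener function or an object implementing handleEvent, but the web-audio-test-api nodes only accept a function, so the constructor binds the method once and the same reference is used for addEventListener and removeEventListener in the hunks below. A small illustrative sketch (the Player class here is hypothetical, not part of scratch-audio):

    class Player {
        constructor (sourceNode) {
            this.sourceNode = sourceNode;
            // Bind once so the exact same function reference can be removed later.
            this.handleEvent = this.handleEvent.bind(this);
            this.sourceNode.addEventListener('ended', this.handleEvent);
        }
        handleEvent (event) {
            if (event.type === 'ended') {
                this.sourceNode.removeEventListener('ended', this.handleEvent);
            }
        }
    }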
@@ -68,7 +74,7 @@ class SoundPlayer extends EventEmitter {
      */
     _createSource () {
         if (this.outputNode !== null) {
-            this.outputNode.removeEventListener(ON_ENDED, this);
+            this.outputNode.removeEventListener(ON_ENDED, this.handleEvent);
             this.outputNode.disconnect();
         }
 
@@ -76,7 +82,7 @@ class SoundPlayer extends EventEmitter {
         this.outputNode.playbackRate.value = this.playbackRate;
         this.outputNode.buffer = this.buffer;
 
-        this.outputNode.addEventListener(ON_ENDED, this);
+        this.outputNode.addEventListener(ON_ENDED, this.handleEvent);
 
         if (this.target !== null) {
             this.connect(this.target);
@@ -89,8 +95,6 @@ class SoundPlayer extends EventEmitter {
     initialize () {
         this.initialized = true;
 
-        this.volumeEffect = new VolumeEffect(this.audioEngine, this, null);
-
         this._createSource();
     }
 
@@ -112,7 +116,12 @@ class SoundPlayer extends EventEmitter {
             return;
         }
 
-        this.volumeEffect.connect(target);
+        if (this.volumeEffect === null) {
+            this.outputNode.disconnect();
+            this.outputNode.connect(target.getInputNode());
+        } else {
+            this.volumeEffect.connect(target);
+        }
 
         return this;
     }
@@ -127,8 +136,10 @@ class SoundPlayer extends EventEmitter {
 
         this.stopImmediately();
 
-        this.volumeEffect.dispose();
-        this.volumeEffect = null;
+        if (this.volumeEffect !== null) {
+            this.volumeEffect.dispose();
+            this.volumeEffect = null;
+        }
 
         this.outputNode.disconnect();
         this.outputNode = null;
@@ -149,7 +160,7 @@ class SoundPlayer extends EventEmitter {
      */
     take () {
         if (this.outputNode) {
-            this.outputNode.removeEventListener(ON_ENDED, this);
+            this.outputNode.removeEventListener(ON_ENDED, this.handleEvent);
         }
 
         const taken = new SoundPlayer(this.audioEngine, this);
@@ -157,25 +168,22 @@ class SoundPlayer extends EventEmitter {
         if (this.isPlaying) {
             taken.startingUntil = this.startingUntil;
             taken.isPlaying = this.isPlaying;
-            taken.initialize();
-            taken.outputNode.disconnect();
+            taken.initialized = this.initialized;
             taken.outputNode = this.outputNode;
-            taken.outputNode.addEventListener(ON_ENDED, taken);
-            taken.volumeEffect.set(this.volumeEffect.value);
+            taken.outputNode.addEventListener(ON_ENDED, taken.handleEvent);
+            taken.volumeEffect = this.volumeEffect;
+            if (taken.volumeEffect) {
+                taken.volumeEffect.audioPlayer = taken;
+            }
             if (this.target !== null) {
                 taken.connect(this.target);
             }
-        }
 
-        if (this.isPlaying) {
             this.emit('stop');
             taken.emit('play');
         }
 
         this.outputNode = null;
-        if (this.volumeEffect !== null) {
-            this.volumeEffect.dispose();
-        }
         this.volumeEffect = null;
         this.initialized = false;
         this.startingUntil = 0;
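The reworked take() above hands the live outputNode and the existing volumeEffect (if any) over to the new player instead of initializing fresh ones, so in-flight audio keeps playing through the same nodes while this player resets itself. A rough sketch of the observable handoff, assuming the SoundPlayer API shown in this diff:

    // Sketch: behaviour as described by this diff and the tests below.
    soundPlayer.play();
    const node = soundPlayer.outputNode;

    const taken = soundPlayer.take();
    // taken.outputNode === node       (the playing source node moved over)
    // soundPlayer.outputNode === null (this player was reset for reuse)
    // soundPlayer emitted 'stop' and taken emitted 'play'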
@@ -192,23 +200,21 @@ class SoundPlayer extends EventEmitter {
      */
     play () {
         if (this.isStarting) {
+            this.emit('stop');
+            this.emit('play');
             return;
         }
 
         if (this.isPlaying) {
-            // Spawn a Player with the current buffer source, and play for a
-            // short period until its volume is 0 and release it to be
-            // eventually garbage collected.
-            this.take().stop();
+            this.stop();
         }
 
-        if (!this.initialized) {
-            this.initialize();
-        } else {
+        if (this.initialized) {
             this._createSource();
+        } else {
+            this.initialize();
         }
 
-        this.volumeEffect.set(this.volumeEffect.DEFAULT_VALUE);
         this.outputNode.start();
 
         this.isPlaying = true;
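play() now re-emits 'stop' and 'play' and returns early while the player is still starting, and restarts an already-playing sound via stop() plus a fresh source node; the 'play while playing debounces' test below exercises exactly this. A sketch of the observable behaviour, assuming isStarting reflects the startingUntil window:

    // Sketch: repeated play() calls during the starting window reuse the same source node.
    soundPlayer.play();
    const originalNode = soundPlayer.outputNode;
    soundPlayer.play();
    soundPlayer.play();
    // originalNode === soundPlayer.outputNode while soundPlayer.isStarting is true;
    // once audioEngine.DECAY_TIME has passed, the next play() swaps in a new source node.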
@@ -226,13 +232,20 @@ class SoundPlayer extends EventEmitter {
             return;
         }
 
-        this.volumeEffect.set(0);
-        this.outputNode.stop(this.audioEngine.audioContext.currentTime + this.audioEngine.DECAY_TIME);
+        // always do a manual stop on a taken / volume effect fade out sound player
+        // take will emit "stop" as well as reset all of our playing statuses / remove our
+        // nodes / etc
+        const taken = this.take();
+        taken.volumeEffect = new VolumeEffect(taken.audioEngine, taken, null);
 
-        this.isPlaying = false;
-        this.startingUntil = 0;
+        taken.volumeEffect.connect(taken.target);
+        // volumeEffect will recursively connect to us if it needs to, so this happens too:
+        // taken.connect(taken.volumeEffect);
 
-        this.emit('stop');
+        taken.finished().then(() => taken.dispose());
+
+        taken.volumeEffect.set(0);
+        taken.outputNode.stop(this.audioEngine.audioContext.currentTime + this.audioEngine.DECAY_TIME);
     }
 
     /**
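In the new stop() above, the fade-out is delegated to a throwaway player: take() moves the live source node over, a fresh VolumeEffect is attached to it, the gain is ramped to 0, and the hard stop is scheduled DECAY_TIME later, while this player is immediately reset. A sketch of what a caller observes, following the 'stop decay' test below:

    // Sketch: caller-visible effects of stop(), per this diff and the 'stop decay' test.
    soundPlayer.play();
    soundPlayer.connect(audioEngine);

    soundPlayer.stop();
    // - soundPlayer.outputNode is null and 'stop' has been emitted (via take())
    // - a taken player keeps the old AudioBufferSourceNode connected through a GainNode
    // - that gain ramps to 0 over audioEngine.DECAY_TIME, the source then stops,
    //   and the taken player disposes itself once finished() resolves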
src/StartAudioContext.js  (new file, +10)
@@ -0,0 +1,10 @@
+// StartAudioContext assumes that we are in a window/document setting and messes with the unit
+// tests, this is our own version just checking to see if we have a global document to listen
+// to before we even try to "start" it. Our test api audio context is started by default.
+const StartAudioContext = require('startaudiocontext');
+
+module.exports = function (context) {
+    if (typeof document !== 'undefined') {
+        return StartAudioContext(context);
+    }
+};
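AudioEngine now requires this local wrapper (see the './StartAudioContext' change in the first hunk) rather than the startaudiocontext package directly, so the call becomes a no-op under Node, where the tap tests run without a DOM. A sketch of the two paths, assuming the usual behaviour of the startaudiocontext package (unlocking the context on the first user interaction):

    // Sketch of how the wrapper behaves in the two environments.
    const StartAudioContext = require('./StartAudioContext');

    StartAudioContext(audioContext);
    // - browser: typeof document !== 'undefined', so the call is forwarded to the
    //   startaudiocontext package, which starts the context on a user gesture
    // - Node / web-audio-test-api: no global document, so nothing happens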
@@ -82,14 +82,17 @@ class Effect {
         }
 
         // Store whether the graph should currently affected by this effect.
-        const _isPatch = this._isPatch;
+        const wasPatch = this._isPatch;
+        if (wasPatch) {
+            this._lastPatch = this.audioEngine.currentTime;
+        }
 
         // Call the internal implementation per this Effect.
         this._set(value);
 
         // Connect or disconnect from the graph if this now applies or no longer
         // applies an effect.
-        if (this._isPatch !== _isPatch && this.target !== null) {
+        if (this._isPatch !== wasPatch && this.target !== null) {
             this.connect(this.target);
         }
     }
@@ -133,7 +136,7 @@ class Effect {
             this.outputNode.disconnect();
         }
 
-        if (this._isPatch) {
+        if (this._isPatch || this._lastPatch + this.audioEngine.DECAY_TIME < this.audioEngine.currentTime) {
             this.outputNode.connect(target.getInputNode());
         }
 
@@ -32,13 +32,11 @@ class VolumeEffect extends Effect {
      */
     _set (value) {
         this.value = value;
-        // A gain of 1 is normal. Scale down scratch's volume value. Apply the
-        // change over a tiny period of time.
-        this.outputNode.gain.setTargetAtTime(
-            value / 100,
-            this.audioEngine.audioContext.currentTime,
-            this.audioEngine.DECAY_TIME
-        );
+        const {gain} = this.outputNode;
+        const {audioContext: {currentTime}, DECAY_TIME} = this.audioEngine;
+        gain.setValueAtTime(gain.value, currentTime);
+        gain.linearRampToValueAtTime(value / 100, currentTime + DECAY_TIME);
     }
 
     /**
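The new _set above pins the gain at its current value and then ramps linearly to the target over DECAY_TIME, instead of the asymptotic setTargetAtTime curve; that is what lets the tests assert a gain of exactly 0 once DECAY_TIME has elapsed. The same ramp in plain Web Audio terms (standard AudioParam methods; gainNode, targetVolume and DECAY_TIME are stand-ins here):

    // Sketch using standard AudioParam scheduling, not scratch-audio specific code.
    const now = audioContext.currentTime;
    const gain = gainNode.gain;
    gain.setValueAtTime(gain.value, now);                                // anchor the ramp at the current value
    gain.linearRampToValueAtTime(targetVolume / 100, now + DECAY_TIME);  // reach the target exactly at now + DECAY_TIME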
test/AudioEngine.js  (new file, +18)
@@ -0,0 +1,18 @@
+const tap = require('tap');
+const AudioEngine = require('../src/AudioEngine');
+
+const {AudioContext} = require('web-audio-test-api');
+
+tap.test('AudioEngine', t => {
+    const audioEngine = new AudioEngine(new AudioContext());
+
+    t.plan(1);
+    t.deepEqual(audioEngine.inputNode.toJSON(), {
+        gain: {
+            inputs: [],
+            value: 1
+        },
+        inputs: [],
+        name: 'GainNode'
+    }, 'JSON Representation of inputNode');
+});
test/SoundPlayer.js  (new file, +210)
@@ -0,0 +1,210 @@
+/* global Uint8Array Promise */
+const tap = require('tap');
+const {AudioContext} = require('web-audio-test-api');
+
+const AudioEngine = require('../src/AudioEngine');
+
+
+tap.test('SoundPlayer', suite => {
+
+    let audioContext;
+    let audioEngine;
+    let soundPlayer;
+
+    const help = {
+        get engineInputs () {
+            return audioEngine.inputNode.toJSON().inputs;
+        }
+    };
+
+    suite.beforeEach(() => {
+        audioContext = new AudioContext();
+        audioEngine = new AudioEngine(audioContext);
+        // sound will be 0.1 seconds long
+        audioContext.DECODE_AUDIO_DATA_RESULT = audioContext.createBuffer(2, 4410, 44100);
+        audioContext.DECODE_AUDIO_DATA_FAILED = false;
+        const data = new Uint8Array(44100);
+        return audioEngine.decodeSoundPlayer({data}).then(result => {
+            soundPlayer = result;
+        });
+    });
+
+    suite.afterEach(() => {
+        soundPlayer.dispose();
+        soundPlayer = null;
+        audioEngine = null;
+        audioContext.$reset();
+        audioContext = null;
+    });
+
+    suite.plan(5);
+
+    suite.test('play initializes and creates source node', t => {
+        t.plan(3);
+        t.equal(soundPlayer.initialized, false, 'not yet initialized');
+        soundPlayer.play();
+        t.equal(soundPlayer.initialized, true, 'now is initialized');
+        t.deepEqual(soundPlayer.outputNode.toJSON(), {
+            buffer: audioContext.DECODE_AUDIO_DATA_RESULT.toJSON(),
+            inputs: [],
+            loop: false,
+            loopEnd: 0,
+            loopStart: 0,
+            name: 'AudioBufferSourceNode',
+            playbackRate: {
+                inputs: [],
+                value: 1
+            }
+        });
+
+        t.end();
+    });
+
+    suite.test('connect', t => {
+        t.plan(1);
+        soundPlayer.play();
+        soundPlayer.connect(audioEngine);
+        t.deepEqual(help.engineInputs, [
+            soundPlayer.outputNode.toJSON()
+        ], 'output node connects to input node');
+        t.end();
+    });
+
+    suite.test('stop decay', t => {
+        t.plan(7);
+        soundPlayer.play();
+        soundPlayer.connect(audioEngine);
+        const outputNode = soundPlayer.outputNode;
+
+        audioContext.$processTo(0);
+        soundPlayer.stop();
+        t.equal(soundPlayer.outputNode, null, 'nullify outputNode immediately (taken sound is stopping)');
+        t.deepEqual(help.engineInputs, [{
+            name: 'GainNode',
+            gain: {
+                value: 1,
+                inputs: []
+            },
+            inputs: [outputNode.toJSON()]
+        }], 'output node connects to gain node to input node');
+
+        audioContext.$processTo(audioEngine.DECAY_TIME / 2);
+        const engineInputs = help.engineInputs;
+        t.notEqual(engineInputs[0].gain.value, 1, 'gain value should not be 1');
+        t.notEqual(engineInputs[0].gain.value, 0, 'gain value should not be 0');
+        t.equal(outputNode.$state, 'PLAYING');
+
+        audioContext.$processTo(audioEngine.DECAY_TIME);
+        t.deepEqual(help.engineInputs, [{
+            name: 'GainNode',
+            gain: {
+                value: 0,
+                inputs: []
+            },
+            inputs: [outputNode.toJSON()]
+        }], 'output node connects to gain node to input node decayed');
+
+        t.equal(outputNode.$state, 'FINISHED');
+
+        t.end();
+    });
+
+    suite.test('play while playing debounces', t => {
+        t.plan(7);
+        const log = [];
+        soundPlayer.connect(audioEngine);
+        soundPlayer.play();
+        t.equal(soundPlayer.isStarting, true, 'player.isStarting');
+        const originalNode = soundPlayer.outputNode;
+        // the second play should still "finish" this play
+        soundPlayer.finished().then(() => log.push('finished first'));
+        soundPlayer.play();
+        soundPlayer.finished().then(() => log.push('finished second'));
+        soundPlayer.play();
+        soundPlayer.finished().then(() => log.push('finished third'));
+        soundPlayer.play();
+        t.equal(originalNode, soundPlayer.outputNode, 'same output node');
+        t.equal(soundPlayer.outputNode.$state, 'PLAYING');
+        return Promise.resolve().then(() => {
+            t.deepEqual(log, ['finished first', 'finished second', 'finished third'], 'finished in order');
+
+            // fast forward to one ms before decay time
+            audioContext.$processTo(audioEngine.DECAY_TIME - 0.001);
+            soundPlayer.play();
+
+            t.equal(originalNode, soundPlayer.outputNode, 'same output node');
+
+            // now at DECAY_TIME, we should meet a new player as the old one is taken/stopped
+            audioContext.$processTo(audioEngine.DECAY_TIME);
+
+            t.equal(soundPlayer.isStarting, false, 'player.isStarting now false');
+
+            soundPlayer.play();
+            t.notEqual(originalNode, soundPlayer.outputNode, 'New output node');
+
+            t.end();
+        });
+    });
+
+    suite.test('play while playing', t => {
+        t.plan(15);
+        const log = [];
+        soundPlayer.play();
+        soundPlayer.finished().then(() => log.push('play 1 finished'));
+        soundPlayer.connect(audioEngine);
+        const firstPlayNode = soundPlayer.outputNode;
+
+        // go past debounce time and play again
+        audioContext.$processTo(audioEngine.DECAY_TIME);
+
+        return Promise.resolve()
+            .then(() => {
+                t.equal(soundPlayer.outputNode.$state, 'PLAYING');
+
+                soundPlayer.play();
+                soundPlayer.finished().then(() => log.push('play 2 finished'));
+
+                // wait for a micro-task loop to fire our previous events
+                return Promise.resolve();
+            })
+            .then(() => {
+                t.equal(log[0], 'play 1 finished');
+                t.notEqual(soundPlayer.outputNode, firstPlayNode, 'created new player node');
+
+                t.equal(help.engineInputs.length, 2, 'there should be 2 players connected');
+                t.equal(firstPlayNode.$state, 'PLAYING');
+                t.equal(soundPlayer.outputNode.$state, 'PLAYING');
+                t.equal(help.engineInputs[0].gain.value, 1, 'old sound connectect to gain node with volume 1');
+
+                audioContext.$processTo(audioContext.currentTime + 0.001);
+                t.notEqual(help.engineInputs[0].gain.value, 1,
+                    'old sound connected to gain node which will fade');
+
+                audioContext.$processTo(audioContext.currentTime + audioEngine.DECAY_TIME + 0.001);
+                t.equal(soundPlayer.outputNode.$state, 'PLAYING');
+                t.equal(firstPlayNode.$state, 'FINISHED');
+
+                t.equal(help.engineInputs[0].gain.value, 0, 'faded old sound to 0');
+
+                t.equal(log.length, 1);
+                audioContext.$processTo(audioContext.currentTime + 0.2);
+
+                // wait for a micro-task loop to fire our previous events
+                return Promise.resolve();
+            })
+            .then(() => {
+                t.equal(log[1], 'play 2 finished');
+                t.equal(help.engineInputs.length, 1, 'old sound disconneted itself after done');
+                t.equal(log.length, 2);
+
+                t.end();
+            });
+    });
+
+    suite.end();
+});
@@ -2,6 +2,12 @@ class AudioParamMock {
     setTargetAtTime (value /* , start, stop */) {
         this.value = value;
     }
+    setValueAtTime (value) {
+        this.value = value;
+    }
+    linearRampToValueAtTime (value) {
+        this.value = value;
+    }
 }
 
 module.exports = AudioParamMock;