Merge pull request #47 from LLK/feature/remove-tonejs

Remove tone.js
Eric Rosenbaum, 2017-06-23 13:24:29 -04:00 (committed via GitHub)
commit bd1820ebf7
8 changed files with 124 additions and 69 deletions
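
In short: every Tone.js wrapper is replaced by its native Web Audio equivalent. A single AudioContext is now created in AudioEngine's constructor and passed down to the players, effects, and decoder that need it; Tone.Buffer gives way to plain AudioBuffers, Tone.Gain to GainNodes, Tone.Panner to a StereoPannerNode, and the Tone.Meter-based microphone loudness metering is reimplemented with an AnalyserNode and a hand-rolled RMS computation.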

package.json

@@ -30,7 +30,6 @@
     "json": "^9.0.6",
     "minilog": "^3.0.1",
     "soundfont-player": "0.10.5",
-    "tone": "0.9.0",
     "travis-after-all": "^1.4.4",
     "webpack": "2.4.0"
   }

ADPCMSoundDecoder.js

@@ -1,5 +1,4 @@
 const ArrayBufferStream = require('./ArrayBufferStream');
-const Tone = require('tone');
 const log = require('./log');
 
 /**
@@ -10,6 +9,13 @@ const log = require('./log');
  * https://github.com/LLK/scratch-flash/blob/master/src/sound/WAVFile.as
  */
 class ADPCMSoundDecoder {
+    /**
+     * @param {AudioContext} audioContext - a webAudio context
+     * @constructor
+     */
+    constructor (audioContext) {
+        this.audioContext = audioContext;
+    }
     /**
      * Data used by the decompression algorithm
      * @type {Array}
@@ -40,7 +46,7 @@ class ADPCMSoundDecoder {
     /**
      * Decode an ADPCM sound stored in an ArrayBuffer and return a promise
      * with the decoded audio buffer.
      * @param {ArrayBuffer} audioData - containing ADPCM encoded wav audio
-     * @return {Tone.Buffer} the decoded audio buffer
+     * @return {AudioBuffer} the decoded audio buffer
      */
     decode (audioData) {
@@ -77,8 +83,7 @@
         const samples = this.imaDecompress(this.extractChunk('data', stream), this.adpcmBlockSize);
 
-        // @todo this line is the only place Tone is used here, should be possible to remove
-        const buffer = Tone.context.createBuffer(1, samples.length, this.samplesPerSecond);
+        const buffer = this.audioContext.createBuffer(1, samples.length, this.samplesPerSecond);
 
         // @todo optimize this? e.g. replace the divide by storing 1/32768 and multiply?
         for (let i = 0; i < samples.length; i++) {

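Note: the decoder now writes its decompressed samples into a plain AudioBuffer created by the injected context. A minimal sketch of that pattern outside the class (the sample values and the 22050 Hz rate below are made up for illustration):

    // Sketch: copy 16-bit integer samples into a one-channel AudioBuffer.
    const audioContext = new (window.AudioContext || window.webkitAudioContext)();
    const samples = new Int16Array([0, 16384, 32767, 16384, 0, -16384]);
    const buffer = audioContext.createBuffer(1, samples.length, 22050);
    const channel = buffer.getChannelData(0); // Float32Array view of channel 0
    for (let i = 0; i < samples.length; i++) {
        channel[i] = samples[i] / 32768; // scale 16-bit ints to the -1..1 float range
    }
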
DrumPlayer.js

@@ -1,14 +1,13 @@
 const SoundPlayer = require('./SoundPlayer');
-const Tone = require('tone');
 
 class DrumPlayer {
     /**
      * A prototype for the drum sound functionality that can load drum sounds, play, and stop them.
-     * @param {Tone.Gain} outputNode - a webAudio node that the drum sounds will send their output to
+     * @param {AudioContext} audioContext - a webAudio context
      * @constructor
      */
-    constructor (outputNode) {
-        this.outputNode = outputNode;
+    constructor (audioContext) {
+        this.audioContext = audioContext;
 
         const baseUrl = 'https://raw.githubusercontent.com/LLK/scratch-audio/develop/sound-files/drums/';
         const fileNames = [
@@ -35,9 +34,21 @@ class DrumPlayer {
         this.drumSounds = [];
         for (let i = 0; i < fileNames.length; i++) {
-            const url = `${baseUrl + fileNames[i]}_22k.wav`;
-            this.drumSounds[i] = new SoundPlayer(this.outputNode);
-            this.drumSounds[i].setBuffer(new Tone.Buffer(url));
+            this.drumSounds[i] = new SoundPlayer(this.audioContext);
+
+            // download and decode the drum sounds
+            // @todo: use scratch-storage to manage these sound files
+            const url = `${baseUrl}${fileNames[i]}_22k.wav`;
+            const request = new XMLHttpRequest();
+            request.open('GET', url, true);
+            request.responseType = 'arraybuffer';
+            request.onload = () => {
+                const audioData = request.response;
+                this.audioContext.decodeAudioData(audioData).then(buffer => {
+                    this.drumSounds[i].setBuffer(buffer);
+                });
+            };
+            request.send();
         }
     }
@@ -46,10 +57,10 @@
      * The parameter for output node allows sprites or clones to send the drum sound
      * to their individual audio effect chains.
      * @param {number} drum - the drum number to play (0-indexed)
-     * @param {Tone.Gain} outputNode - a node to send the output to
+     * @param {AudioNode} outputNode - a node to send the output to
      */
     play (drum, outputNode) {
-        this.drumSounds[drum].outputNode = outputNode;
+        this.drumSounds[drum].connect(outputNode);
         this.drumSounds[drum].start();
     }

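Note: the new loading path is plain XHR plus decodeAudioData, with no Tone.Buffer in between. A self-contained sketch of the same pattern (the URL is a placeholder, not a real sound file):

    const audioContext = new (window.AudioContext || window.webkitAudioContext)();
    const request = new XMLHttpRequest();
    request.open('GET', 'https://example.com/snare_22k.wav', true);
    request.responseType = 'arraybuffer'; // we want raw bytes, not text
    request.onload = () => {
        audioContext.decodeAudioData(request.response).then(audioBuffer => {
            console.log(`decoded ${audioBuffer.duration.toFixed(2)} seconds of audio`);
        });
    };
    request.send();
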
InstrumentPlayer.js

@@ -1,4 +1,3 @@
-const Tone = require('tone');
 const Soundfont = require('soundfont-player');
 
 class InstrumentPlayer {
@@ -10,11 +9,12 @@ class InstrumentPlayer {
      * play note or set instrument block runs, causing a delay of a few seconds.
      * Using this library we don't have a way to set the volume, sustain the note beyond the sample
      * duration, or run it through the sprite-specific audio effects.
-     * @param {Tone.Gain} outputNode - a webAudio node that the instrument will send its output to
+     * @param {AudioContext} audioContext - a webAudio context
      * @constructor
      */
-    constructor (outputNode) {
-        this.outputNode = outputNode;
+    constructor (audioContext) {
+        this.audioContext = audioContext;
+        this.outputNode = null;
 
         // Instrument names used by Musyng Kite soundfont, in order to
         // match scratch instruments
@@ -42,7 +42,7 @@
         this.loadInstrument(instrumentNum)
             .then(() => {
                 this.instruments[instrumentNum].play(
-                    note, Tone.context.currentTime, {
+                    note, this.audioContext.currentTime, {
                         duration: sec,
                         gain: gain
                     }
@@ -59,7 +59,7 @@
         if (this.instruments[instrumentNum]) {
             return Promise.resolve();
         }
-        return Soundfont.instrument(Tone.context, this.instrumentNames[instrumentNum])
+        return Soundfont.instrument(this.audioContext, this.instrumentNames[instrumentNum])
             .then(inst => {
                 inst.connect(this.outputNode);
                 this.instruments[instrumentNum] = inst;

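Note: soundfont-player only needs a standard AudioContext, which is why Tone.context could be swapped out directly. A rough usage sketch of the same calls the class makes (the instrument name and note are illustrative):

    const Soundfont = require('soundfont-player');

    const audioContext = new (window.AudioContext || window.webkitAudioContext)();
    Soundfont.instrument(audioContext, 'acoustic_grand_piano').then(inst => {
        // play MIDI note 60 (middle C) immediately, for half a second
        inst.play(60, audioContext.currentTime, {duration: 0.5, gain: 1});
    });
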
SoundPlayer.js

@@ -1,13 +1,17 @@
-const Tone = require('tone');
 const log = require('./log');
 
 /**
  * A SoundPlayer stores an audio buffer, and plays it
  */
 class SoundPlayer {
-    constructor () {
+    /**
+     * @param {AudioContext} audioContext - a webAudio context
+     * @constructor
+     */
+    constructor (audioContext) {
+        this.audioContext = audioContext;
         this.outputNode = null;
-        this.buffer = new Tone.Buffer();
+        this.buffer = null;
         this.bufferSource = null;
         this.playbackRate = 1;
         this.isPlaying = false;
@@ -15,7 +19,7 @@ class SoundPlayer {
 
     /**
      * Connect the SoundPlayer to an output node
-     * @param {Tone.Gain} node - an output node to connect to
+     * @param {GainNode} node - an output node to connect to
      */
     connect (node) {
         this.outputNode = node;
@@ -23,7 +27,7 @@
 
     /**
      * Set an audio buffer
-     * @param {Tone.Buffer} buffer Buffer to set
+     * @param {AudioBuffer} buffer - Buffer to set
      */
     setBuffer (buffer) {
         this.buffer = buffer;
@@ -55,13 +59,13 @@
     /**
      * The web audio framework requires a new audio buffer source node for each playback
      */
     start () {
-        if (!this.buffer || !this.buffer.loaded) {
+        if (!this.buffer) {
             log.warn('tried to play a sound that was not loaded yet');
             return;
         }
-        this.bufferSource = Tone.context.createBufferSource();
-        this.bufferSource.buffer = this.buffer.get();
+        this.bufferSource = this.audioContext.createBufferSource();
+        this.bufferSource.buffer = this.buffer;
         this.bufferSource.playbackRate.value = this.playbackRate;
         this.bufferSource.connect(this.outputNode);
         this.bufferSource.start();
@@ -75,12 +79,11 @@
      * @return {Promise} a Promise that resolves when the sound finishes playing
      */
     finished () {
-        const storedContext = this;
         return new Promise(resolve => {
-            storedContext.bufferSource.onended = function () {
+            this.bufferSource.onended = () => {
                 this.isPlaying = false;
                 resolve();
-            }.bind(storedContext);
+            };
         });
     }
 }

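Note: since an AudioBufferSourceNode can only be started once, start() builds a fresh source node for each playback, as the comment above says. The bare pattern as a standalone sketch (the context, buffer, and destination are assumed to exist already):

    // Sketch: one-shot playback of an already-decoded AudioBuffer.
    const playOnce = (audioContext, audioBuffer, playbackRate) => {
        const source = audioContext.createBufferSource(); // single-use node
        source.buffer = audioBuffer;
        source.playbackRate.value = playbackRate;
        source.connect(audioContext.destination);
        source.start();
        return source; // keep a reference if you might need stop()
    };
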
effects/PanEffect.js

@@ -1,17 +1,18 @@
-const Tone = require('tone');
 
 /**
  * A pan effect, which moves the sound to the left or right between the speakers
  * Effect value of -100 puts the audio entirely on the left channel,
  * 0 centers it, 100 puts it on the right.
  * Clamped -100 to 100
  */
-class PanEffect extends Tone.Effect {
-    constructor () {
-        super();
+class PanEffect {
+    /**
+     * @param {AudioContext} audioContext - a webAudio context
+     * @constructor
+     */
+    constructor (audioContext) {
+        this.audioContext = audioContext;
+        this.panner = this.audioContext.createStereoPanner();
         this.value = 0;
-        this.panner = new Tone.Panner();
-        this.effectSend.chain(this.panner, this.effectReturn);
     }
 
     /**
@@ -23,6 +24,10 @@ class PanEffect extends Tone.Effect {
         this.panner.pan.value = this.value / 100;
     }
 
+    connect (node) {
+        this.panner.connect(node);
+    }
+
     /**
      * Change the effect value
      * @param {number} val - the value to change the effect by

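Note: a StereoPannerNode's pan AudioParam ranges from -1 (full left) to 1 (full right), hence the divide-by-100 in the class above to map Scratch's -100..100 range. In isolation, the mapping looks like this sketch:

    const audioContext = new (window.AudioContext || window.webkitAudioContext)();
    const panner = audioContext.createStereoPanner();
    panner.connect(audioContext.destination);

    const setScratchPan = value => {
        const clamped = Math.max(-100, Math.min(100, value));
        panner.pan.value = clamped / 100; // -1 is full left, 0 center, 1 full right
    };
    setScratchPan(100); // send everything to the right channel
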
effects/PitchEffect.js

@@ -1,5 +1,3 @@
-const Tone = require('tone');
-
 /**
  * A pitch change effect, which changes the playback rate of the sound in order
  * to change its pitch: reducing the playback rate lowers the pitch, increasing the rate
@@ -21,7 +19,6 @@ class PitchEffect {
     constructor () {
         this.value = 0; // effect value
         this.ratio = 1; // the playback rate ratio
-        this.tone = new Tone();
     }
 
     /**
@@ -52,7 +49,9 @@
      * @returns {number} a playback ratio
      */
     getRatio (val) {
-        return this.tone.intervalToFrequencyRatio(val / 10);
+        const interval = val / 10;
+        // Convert the musical interval in semitones to a frequency ratio
+        return Math.pow(2, (interval / 12));
     }
 
     /**

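Note: Tone's intervalToFrequencyRatio is replaced by the standard equal-temperament formula: an interval of n semitones corresponds to a frequency ratio of 2^(n/12). For example, +12 semitones (one octave up) gives 2^1 = 2, doubling the playback rate, and -12 gives 0.5. A sketch:

    const intervalToFrequencyRatio = semitones => Math.pow(2, semitones / 12);

    intervalToFrequencyRatio(12);  // 2      (one octave up doubles the rate)
    intervalToFrequencyRatio(-12); // 0.5    (one octave down halves it)
    intervalToFrequencyRatio(7);   // ~1.498 (a perfect fifth)
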
index.js

@@ -1,5 +1,4 @@
 const log = require('./log');
-const Tone = require('tone');
 
 const PitchEffect = require('./effects/PitchEffect');
 const PanEffect = require('./effects/PanEffect');
@@ -25,15 +24,15 @@ class AudioPlayer {
     constructor (audioEngine) {
         this.audioEngine = audioEngine;
 
-        // effects setup
+        // Create the audio effects
         this.pitchEffect = new PitchEffect();
-        this.panEffect = new PanEffect();
+        this.panEffect = new PanEffect(this.audioEngine.audioContext);
 
-        // the effects are chained to an effects node for this player, then to the main audio engine
-        // audio is sent from each soundplayer, through the effects in order, then to the global effects
-        // note that the pitch effect works differently - it sets the playback rate for each soundplayer
-        this.effectsNode = new Tone.Gain();
-        this.effectsNode.chain(this.panEffect, this.audioEngine.input);
+        // Chain the audio effects together
+        // effectsNode -> panEffect -> audioEngine.input
+        this.effectsNode = this.audioEngine.audioContext.createGain();
+        this.effectsNode.connect(this.panEffect.panner);
+        this.panEffect.connect(this.audioEngine.input);
 
         // reset effects to their default parameters
         this.clearEffects();
@@ -59,7 +58,7 @@
         }
 
         // create a new soundplayer to play the sound
-        const player = new SoundPlayer();
+        const player = new SoundPlayer(this.audioEngine.audioContext);
         player.setBuffer(this.audioEngine.audioBuffers[md5]);
         player.connect(this.effectsNode);
         this.pitchEffect.updatePlayer(player);
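
Note: the per-player routing the constructor builds is an ordinary web-audio graph: each SoundPlayer feeds the player's effectsNode, which feeds the pan effect's StereoPannerNode, which feeds the engine's input gain. Rewired as a standalone sketch:

    const audioContext = new (window.AudioContext || window.webkitAudioContext)();
    const engineInput = audioContext.createGain();    // AudioEngine's input
    engineInput.connect(audioContext.destination);

    const effectsNode = audioContext.createGain();    // this player's effects node
    const panner = audioContext.createStereoPanner(); // PanEffect's panner
    effectsNode.connect(panner);
    panner.connect(engineInput);
    // each SoundPlayer then connects its buffer source to effectsNode
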
@@ -150,18 +149,22 @@
  */
 class AudioEngine {
     constructor () {
-        this.input = new Tone.Gain();
-        this.input.connect(Tone.Master);
+        const AudioContext = window.AudioContext || window.webkitAudioContext;
+        this.audioContext = new AudioContext();
+
+        this.input = this.audioContext.createGain();
+        this.input.connect(this.audioContext.destination);
 
         // global tempo in bpm (beats per minute)
         this.currentTempo = 60;
 
         // instrument player for play note blocks
-        this.instrumentPlayer = new InstrumentPlayer(this.input);
+        this.instrumentPlayer = new InstrumentPlayer(this.audioContext);
+        this.instrumentPlayer.outputNode = this.input;
         this.numInstruments = this.instrumentPlayer.instrumentNames.length;
 
         // drum player for play drum blocks
-        this.drumPlayer = new DrumPlayer(this.input);
+        this.drumPlayer = new DrumPlayer(this.audioContext);
         this.numDrums = this.drumPlayer.drumSounds.length;
 
         // a map of md5s to audio buffers, holding sounds for all sprites
@@ -169,7 +172,6 @@
 
         // microphone, for measuring loudness, with a level meter analyzer
         this.mic = null;
-        this.micMeter = null;
     }
 
     /**
@@ -197,14 +199,14 @@
         let loaderPromise = null;
 
         // Make a copy of the buffer because decoding detaches the original buffer
-        var bufferCopy = sound.data.buffer.slice(0);
+        const bufferCopy = sound.data.buffer.slice(0);
 
         switch (sound.format) {
         case '':
-            loaderPromise = Tone.context.decodeAudioData(bufferCopy);
+            loaderPromise = this.audioContext.decodeAudioData(bufferCopy);
             break;
         case 'adpcm':
-            loaderPromise = (new ADPCMSoundDecoder()).decode(bufferCopy);
+            loaderPromise = (new ADPCMSoundDecoder(this.audioContext)).decode(bufferCopy);
             break;
         default:
             return log.warn('unknown sound format', sound.format);
@@ -213,7 +215,7 @@
         const storedContext = this;
         return loaderPromise.then(
             decodedAudio => {
-                storedContext.audioBuffers[sound.md5] = new Tone.Buffer(decodedAudio);
+                storedContext.audioBuffers[sound.md5] = decodedAudio;
             },
             error => {
                 log.warn('audio data could not be decoded', error);
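
Note: both branches of the switch produce a promise for a decoded AudioBuffer, which then gets stored in the audioBuffers map under the sound's md5. A hypothetical call into this decode method; the method name (decodeSound) and the exact shape of the sound object are assumptions based on the fields this hunk reads (sound.data, sound.format, sound.md5), not confirmed by the diff:

    // Hypothetical usage sketch; names below are assumptions.
    const engine = new AudioEngine();
    const sound = {
        md5: 'abcdef0123456789abcdef0123456789.wav', // made-up storage key
        format: '', // '' for plain wav, 'adpcm' for compressed sounds
        data: new Uint8Array([82, 73, 70, 70]) // stand-in for real wav bytes
    };
    engine.decodeSound(sound);
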
@@ -286,20 +288,51 @@
     /**
      * Get the current loudness of sound received by the microphone.
      * Sound is measured in RMS and smoothed.
+     * Some code adapted from Tone.js: https://github.com/Tonejs/Tone.js
      * @return {number} loudness scaled 0 to 100
      */
     getLoudness () {
-        if (!this.mic) {
-            this.mic = new Tone.UserMedia();
-            this.micMeter = new Tone.Meter('level', 0.5);
-            this.mic.open();
-            this.mic.connect(this.micMeter);
+        // The microphone has not been set up, so try to connect to it
+        if (!this.mic && !this.connectingToMic) {
+            this.connectingToMic = true; // prevent multiple connection attempts
+            navigator.mediaDevices.getUserMedia({audio: true}).then(stream => {
+                this.mic = this.audioContext.createMediaStreamSource(stream);
+                this.analyser = this.audioContext.createAnalyser();
+                this.mic.connect(this.analyser);
+                this.micDataArray = new Float32Array(this.analyser.fftSize);
+            })
+            .catch(err => {
+                log.warn(err);
+            });
         }
 
-        if (this.mic && this.mic.state === 'started') {
-            return this.micMeter.value * 100;
-        }
-        return -1;
+        // If the microphone is set up and active, measure the loudness
+        if (this.mic && this.mic.mediaStream.active) {
+            this.analyser.getFloatTimeDomainData(this.micDataArray);
+            let sum = 0;
+            // compute the RMS of the sound
+            for (let i = 0; i < this.micDataArray.length; i++){
+                sum += Math.pow(this.micDataArray[i], 2);
+            }
+            let rms = Math.sqrt(sum / this.micDataArray.length);
+            // smooth the value, if it is descending
+            if (this._lastValue) {
+                rms = Math.max(rms, this._lastValue * 0.6);
+            }
+            this._lastValue = rms;
+
+            // Scale the measurement so it's more sensitive to quieter sounds
+            rms *= 1.63;
+            rms = Math.sqrt(rms);
+            // Scale it up to 0-100 and round
+            rms = Math.round(rms * 100);
+            // Prevent it from going above 100
+            rms = Math.min(rms, 100);
+            return rms;
+        }
+
+        // if there is no microphone input, return -1
+        return -1;
     }
 
     /**
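
Note: the measurement above can be sanity-checked by hand. A full-scale square wave has samples of plus or minus 1, so the mean square is 1 and rms = 1; after the sensitivity scaling, sqrt(1 * 1.63) is about 1.28, which rounds to 128 and is clamped to 100. The same math as a standalone sketch:

    // Sketch: the loudness math from getLoudness, on a plain Float32Array.
    // Returns the 0-100 loudness plus the value to smooth against next time.
    const computeLoudness = (samples, lastValue) => {
        let sum = 0;
        for (let i = 0; i < samples.length; i++) {
            sum += samples[i] * samples[i];
        }
        let rms = Math.sqrt(sum / samples.length);
        if (lastValue) {
            rms = Math.max(rms, lastValue * 0.6); // only decay by 40% per call
        }
        const smoothed = rms;
        rms = Math.sqrt(rms * 1.63); // boost sensitivity to quieter sounds
        rms = Math.min(Math.round(rms * 100), 100); // scale to 0-100 and clamp
        return {loudness: rms, lastValue: smoothed};
    };

    computeLoudness(new Float32Array([1, -1, 1, -1]), 0).loudness; // 100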