Update to use ES6 class syntax

Ray Schamp 2017-04-17 12:55:09 -04:00
parent 5007c43ddf
commit 9874cf006f
13 changed files with 1011 additions and 1026 deletions


@@ -8,161 +8,166 @@ const log = require('./log');
* formats, ADPCM is a non-standard format used by Scratch since its early days.
* This decoder is based on code from Scratch-Flash:
* https://github.com/LLK/scratch-flash/blob/master/src/sound/WAVFile.as
* @constructor
*/
const ADPCMSoundDecoder = function () {};
/**
* Decode an ADPCM sound stored in an ArrayBuffer and return a promise
* with the decoded audio buffer.
* @param {ArrayBuffer} audioData - containing ADPCM encoded wav audio
* @return {Tone.Buffer} the decoded audio buffer
*/
ADPCMSoundDecoder.prototype.decode = function (audioData) {
return new Promise((resolve, reject) => {
const stream = new ArrayBufferStream(audioData);
const riffStr = stream.readUint8String(4);
if (riffStr !== 'RIFF') {
log.warn('incorrect adpcm wav header');
reject();
}
const lengthInHeader = stream.readInt32();
if ((lengthInHeader + 8) !== audioData.byteLength) {
log.warn(`adpcm wav length in header: ${lengthInHeader} is incorrect`);
}
const wavStr = stream.readUint8String(4);
if (wavStr !== 'WAVE') {
log.warn('incorrect adpcm wav header');
reject();
}
const formatChunk = this.extractChunk('fmt ', stream);
this.encoding = formatChunk.readUint16();
this.channels = formatChunk.readUint16();
this.samplesPerSecond = formatChunk.readUint32();
this.bytesPerSecond = formatChunk.readUint32();
this.blockAlignment = formatChunk.readUint16();
this.bitsPerSample = formatChunk.readUint16();
formatChunk.position += 2; // skip extra header byte count
this.samplesPerBlock = formatChunk.readUint16();
this.adpcmBlockSize = ((this.samplesPerBlock - 1) / 2) + 4; // block size in bytes
const samples = this.imaDecompress(this.extractChunk('data', stream), this.adpcmBlockSize);
// @todo this line is the only place Tone is used here, should be possible to remove
const buffer = Tone.context.createBuffer(1, samples.length, this.samplesPerSecond);
// @todo optimize this? e.g. replace the divide by storing 1/32768 and multiply?
for (let i = 0; i < samples.length; i++) {
buffer.getChannelData(0)[i] = samples[i] / 32768;
}
resolve(buffer);
});
};
/**
* Data used by the decompression algorithm
* @type {Array}
*/
ADPCMSoundDecoder.prototype.stepTable = [
7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 19, 21, 23, 25, 28, 31, 34, 37, 41, 45,
50, 55, 60, 66, 73, 80, 88, 97, 107, 118, 130, 143, 157, 173, 190, 209, 230,
253, 279, 307, 337, 371, 408, 449, 494, 544, 598, 658, 724, 796, 876, 963,
1060, 1166, 1282, 1411, 1552, 1707, 1878, 2066, 2272, 2499, 2749, 3024, 3327,
3660, 4026, 4428, 4871, 5358, 5894, 6484, 7132, 7845, 8630, 9493, 10442, 11487,
12635, 13899, 15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767];
/**
* Data used by the decompression algorithm
* @type {Array}
*/
ADPCMSoundDecoder.prototype.indexTable = [
-1, -1, -1, -1, 2, 4, 6, 8,
-1, -1, -1, -1, 2, 4, 6, 8];
/**
* Extract a chunk of audio data from the stream, consisting of a set of audio data bytes
* @param {string} chunkType - the type of chunk to extract. 'data' or 'fmt' (format)
 * @param {ArrayBufferStream} stream - a stream containing the audio data
* @return {ArrayBufferStream} a stream containing the desired chunk
*/
ADPCMSoundDecoder.prototype.extractChunk = function (chunkType, stream) {
stream.position = 12;
while (stream.position < (stream.getLength() - 8)) {
const typeStr = stream.readUint8String(4);
const chunkSize = stream.readInt32();
if (typeStr === chunkType) {
const chunk = stream.extract(chunkSize);
return chunk;
}
stream.position += chunkSize;
class ADPCMSoundDecoder {
/**
* Data used by the decompression algorithm
* @type {Array}
*/
static get stepTable () {
return [
7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 19, 21, 23, 25, 28, 31, 34, 37, 41, 45,
50, 55, 60, 66, 73, 80, 88, 97, 107, 118, 130, 143, 157, 173, 190, 209, 230,
253, 279, 307, 337, 371, 408, 449, 494, 544, 598, 658, 724, 796, 876, 963,
1060, 1166, 1282, 1411, 1552, 1707, 1878, 2066, 2272, 2499, 2749, 3024, 3327,
3660, 4026, 4428, 4871, 5358, 5894, 6484, 7132, 7845, 8630, 9493, 10442, 11487,
12635, 13899, 15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767
];
}
};
/**
* Decompress sample data using the IMA ADPCM algorithm.
* Note: Handles only one channel, 4-bits per sample.
* @param {ArrayBufferStream} compressedData - a stream of compressed audio samples
* @param {number} blockSize - the number of bytes in the stream
* @return {Int16Array} the uncompressed audio samples
*/
ADPCMSoundDecoder.prototype.imaDecompress = function (compressedData, blockSize) {
let sample;
let step;
let code;
let delta;
let index = 0;
let lastByte = -1; // -1 indicates that there is no saved lastByte
const out = [];
/**
* Data used by the decompression algorithm
* @type {Array}
*/
static get indexTable () {
return [
-1, -1, -1, -1, 2, 4, 6, 8,
-1, -1, -1, -1, 2, 4, 6, 8
];
}
// Bail and return no samples if we have no data
if (!compressedData) return out;
/**
* Decode an ADPCM sound stored in an ArrayBuffer and return a promise
* with the decoded audio buffer.
* @param {ArrayBuffer} audioData - containing ADPCM encoded wav audio
* @return {Tone.Buffer} the decoded audio buffer
*/
decode (audioData) {
compressedData.position = 0;
const a = 0;
while (a === 0) {
if (((compressedData.position % blockSize) === 0) && (lastByte < 0)) { // read block header
if (compressedData.getBytesAvailable() === 0) break;
sample = compressedData.readInt16();
index = compressedData.readUint8();
compressedData.position++; // skip extra header byte
if (index > 88) index = 88;
out.push(sample);
} else {
// read 4-bit code and compute delta from previous sample
if (lastByte < 0) {
if (compressedData.getBytesAvailable() === 0) break;
lastByte = compressedData.readUint8();
code = lastByte & 0xF;
} else {
code = (lastByte >> 4) & 0xF;
lastByte = -1;
return new Promise((resolve, reject) => {
const stream = new ArrayBufferStream(audioData);
const riffStr = stream.readUint8String(4);
if (riffStr !== 'RIFF') {
log.warn('incorrect adpcm wav header');
reject();
}
step = this.stepTable[index];
delta = 0;
if (code & 4) delta += step;
if (code & 2) delta += step >> 1;
if (code & 1) delta += step >> 2;
delta += step >> 3;
// compute next index
index += this.indexTable[code];
if (index > 88) index = 88;
if (index < 0) index = 0;
// compute and output sample
sample += (code & 8) ? -delta : delta;
if (sample > 32767) sample = 32767;
if (sample < -32768) sample = -32768;
out.push(sample);
const lengthInHeader = stream.readInt32();
if ((lengthInHeader + 8) !== audioData.byteLength) {
log.warn(`adpcm wav length in header: ${lengthInHeader} is incorrect`);
}
const wavStr = stream.readUint8String(4);
if (wavStr !== 'WAVE') {
log.warn('incorrect adpcm wav header');
reject();
}
const formatChunk = this.extractChunk('fmt ', stream);
this.encoding = formatChunk.readUint16();
this.channels = formatChunk.readUint16();
this.samplesPerSecond = formatChunk.readUint32();
this.bytesPerSecond = formatChunk.readUint32();
this.blockAlignment = formatChunk.readUint16();
this.bitsPerSample = formatChunk.readUint16();
formatChunk.position += 2; // skip extra header byte count
this.samplesPerBlock = formatChunk.readUint16();
this.adpcmBlockSize = ((this.samplesPerBlock - 1) / 2) + 4; // block size in bytes
const samples = this.imaDecompress(this.extractChunk('data', stream), this.adpcmBlockSize);
// @todo this line is the only place Tone is used here, should be possible to remove
const buffer = Tone.context.createBuffer(1, samples.length, this.samplesPerSecond);
// @todo optimize this? e.g. replace the divide by storing 1/32768 and multiply?
for (let i = 0; i < samples.length; i++) {
buffer.getChannelData(0)[i] = samples[i] / 32768;
}
resolve(buffer);
});
}
/**
* Extract a chunk of audio data from the stream, consisting of a set of audio data bytes
* @param {string} chunkType - the type of chunk to extract. 'data' or 'fmt' (format)
 * @param {ArrayBufferStream} stream - a stream containing the audio data
* @return {ArrayBufferStream} a stream containing the desired chunk
*/
extractChunk (chunkType, stream) {
stream.position = 12;
while (stream.position < (stream.getLength() - 8)) {
const typeStr = stream.readUint8String(4);
const chunkSize = stream.readInt32();
if (typeStr === chunkType) {
const chunk = stream.extract(chunkSize);
return chunk;
}
stream.position += chunkSize;
}
}
const samples = Int16Array.from(out);
return samples;
};
/**
* Decompress sample data using the IMA ADPCM algorithm.
* Note: Handles only one channel, 4-bits per sample.
* @param {ArrayBufferStream} compressedData - a stream of compressed audio samples
* @param {number} blockSize - the number of bytes in the stream
* @return {Int16Array} the uncompressed audio samples
*/
imaDecompress (compressedData, blockSize) {
let sample;
let step;
let code;
let delta;
let index = 0;
let lastByte = -1; // -1 indicates that there is no saved lastByte
const out = [];
// Bail and return no samples if we have no data
if (!compressedData) return out;
compressedData.position = 0;
const a = 0;
while (a === 0) {
if (((compressedData.position % blockSize) === 0) && (lastByte < 0)) { // read block header
if (compressedData.getBytesAvailable() === 0) break;
sample = compressedData.readInt16();
index = compressedData.readUint8();
compressedData.position++; // skip extra header byte
if (index > 88) index = 88;
out.push(sample);
} else {
// read 4-bit code and compute delta from previous sample
if (lastByte < 0) {
if (compressedData.getBytesAvailable() === 0) break;
lastByte = compressedData.readUint8();
code = lastByte & 0xF;
} else {
code = (lastByte >> 4) & 0xF;
lastByte = -1;
}
step = this.stepTable[index];
delta = 0;
if (code & 4) delta += step;
if (code & 2) delta += step >> 1;
if (code & 1) delta += step >> 2;
delta += step >> 3;
// compute next index
index += this.indexTable[code];
if (index > 88) index = 88;
if (index < 0) index = 0;
// compute and output sample
sample += (code & 8) ? -delta : delta;
if (sample > 32767) sample = 32767;
if (sample < -32768) sample = -32768;
out.push(sample);
}
}
const samples = Int16Array.from(out);
return samples;
}
}
module.exports = ADPCMSoundDecoder;
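
For context, a minimal usage sketch of the decoder as it stands after this commit. This is not part of the diff; the require path and the helper name are illustrative assumptions.

const ADPCMSoundDecoder = require('./ADPCMSoundDecoder'); // path assumed

// Decode an ArrayBuffer of ADPCM-encoded WAV data into an audio buffer.
const decodeAdpcm = arrayBuffer =>
    new ADPCMSoundDecoder().decode(arrayBuffer)
        .then(audioBuffer => {
            // decode() resolves with a buffer created through Tone.context.createBuffer
            console.log(`decoded ${audioBuffer.length} samples`);
            return audioBuffer;
        });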


@@ -1,107 +1,109 @@
/**
* ArrayBufferStream wraps the built-in javascript ArrayBuffer, adding the ability to access
* data in it like a stream, tracking its position.
* You can request to read a value from the front of the array, and it will keep track of the position
* within the byte array, so that successive reads are consecutive.
* The available types to read include:
* Uint8, Uint8String, Int16, Uint16, Int32, Uint32
* @param {ArrayBuffer} arrayBuffer - array to use as a stream
* @constructor
*/
const ArrayBufferStream = function (arrayBuffer) {
this.arrayBuffer = arrayBuffer;
this.position = 0;
};
/**
* Return a new ArrayBufferStream that is a slice of the existing one
 * @param {number} length - the number of bytes to extract
* @return {ArrayBufferStream} the extracted stream
*/
ArrayBufferStream.prototype.extract = function (length) {
const slicedArrayBuffer = this.arrayBuffer.slice(this.position, this.position + length);
const newStream = new ArrayBufferStream(slicedArrayBuffer);
return newStream;
};
/**
* @return {number} the length of the stream in bytes
*/
ArrayBufferStream.prototype.getLength = function () {
return this.arrayBuffer.byteLength;
};
/**
* @return {number} the number of bytes available after the current position in the stream
*/
ArrayBufferStream.prototype.getBytesAvailable = function () {
return (this.arrayBuffer.byteLength - this.position);
};
/**
* Read an unsigned 8 bit integer from the stream
* @return {number} the next 8 bit integer in the stream
*/
ArrayBufferStream.prototype.readUint8 = function () {
const val = new Uint8Array(this.arrayBuffer, this.position, 1)[0];
this.position += 1;
return val;
};
/**
* Read a sequence of bytes of the given length and convert to a string.
* This is a convenience method for use with short strings.
* @param {number} length - the number of bytes to convert
* @return {string} a String made by concatenating the chars in the input
*/
ArrayBufferStream.prototype.readUint8String = function (length) {
const arr = new Uint8Array(this.arrayBuffer, this.position, length);
this.position += length;
let str = '';
for (let i = 0; i < arr.length; i++) {
str += String.fromCharCode(arr[i]);
class ArrayBufferStream {
/**
* ArrayBufferStream wraps the built-in javascript ArrayBuffer, adding the ability to access
* data in it like a stream, tracking its position.
* You can request to read a value from the front of the array, and it will keep track of the position
* within the byte array, so that successive reads are consecutive.
* The available types to read include:
* Uint8, Uint8String, Int16, Uint16, Int32, Uint32
* @param {ArrayBuffer} arrayBuffer - array to use as a stream
* @constructor
*/
constructor (arrayBuffer) {
this.arrayBuffer = arrayBuffer;
this.position = 0;
}
return str;
};
/**
* Read a 16 bit integer from the stream
* @return {number} the next 16 bit integer in the stream
*/
ArrayBufferStream.prototype.readInt16 = function () {
const val = new Int16Array(this.arrayBuffer, this.position, 1)[0];
this.position += 2; // one 16 bit int is 2 bytes
return val;
};
/**
* Return a new ArrayBufferStream that is a slice of the existing one
 * @param {number} length - the number of bytes to extract
* @return {ArrayBufferStream} the extracted stream
*/
extract (length) {
const slicedArrayBuffer = this.arrayBuffer.slice(this.position, this.position + length);
const newStream = new ArrayBufferStream(slicedArrayBuffer);
return newStream;
}
/**
* Read an unsigned 16 bit integer from the stream
* @return {number} the next unsigned 16 bit integer in the stream
*/
ArrayBufferStream.prototype.readUint16 = function () {
const val = new Uint16Array(this.arrayBuffer, this.position, 1)[0];
this.position += 2; // one 16 bit int is 2 bytes
return val;
};
/**
* @return {number} the length of the stream in bytes
*/
getLength () {
return this.arrayBuffer.byteLength;
}
/**
* Read a 32 bit integer from the stream
* @return {number} the next 32 bit integer in the stream
*/
ArrayBufferStream.prototype.readInt32 = function () {
const val = new Int32Array(this.arrayBuffer, this.position, 1)[0];
this.position += 4; // one 32 bit int is 4 bytes
return val;
};
/**
* @return {number} the number of bytes available after the current position in the stream
*/
getBytesAvailable () {
return (this.arrayBuffer.byteLength - this.position);
}
/**
* Read an unsigned 32 bit integer from the stream
* @return {number} the next unsigned 32 bit integer in the stream
*/
ArrayBufferStream.prototype.readUint32 = function () {
const val = new Uint32Array(this.arrayBuffer, this.position, 1)[0];
this.position += 4; // one 32 bit int is 4 bytes
return val;
};
/**
* Read an unsigned 8 bit integer from the stream
* @return {number} the next 8 bit integer in the stream
*/
readUint8 () {
const val = new Uint8Array(this.arrayBuffer, this.position, 1)[0];
this.position += 1;
return val;
}
/**
* Read a sequence of bytes of the given length and convert to a string.
* This is a convenience method for use with short strings.
* @param {number} length - the number of bytes to convert
* @return {string} a String made by concatenating the chars in the input
*/
readUint8String (length) {
const arr = new Uint8Array(this.arrayBuffer, this.position, length);
this.position += length;
let str = '';
for (let i = 0; i < arr.length; i++) {
str += String.fromCharCode(arr[i]);
}
return str;
}
/**
* Read a 16 bit integer from the stream
* @return {number} the next 16 bit integer in the stream
*/
readInt16 () {
const val = new Int16Array(this.arrayBuffer, this.position, 1)[0];
this.position += 2; // one 16 bit int is 2 bytes
return val;
}
/**
* Read an unsigned 16 bit integer from the stream
* @return {number} the next unsigned 16 bit integer in the stream
*/
readUint16 () {
const val = new Uint16Array(this.arrayBuffer, this.position, 1)[0];
this.position += 2; // one 16 bit int is 2 bytes
return val;
}
/**
* Read a 32 bit integer from the stream
* @return {number} the next 32 bit integer in the stream
*/
readInt32 () {
const val = new Int32Array(this.arrayBuffer, this.position, 1)[0];
this.position += 4; // one 32 bit int is 4 bytes
return val;
}
/**
* Read an unsigned 32 bit integer from the stream
* @return {number} the next unsigned 32 bit integer in the stream
*/
readUint32 () {
const val = new Uint32Array(this.arrayBuffer, this.position, 1)[0];
this.position += 4; // one 32 bit int is 4 bytes
return val;
}
}
module.exports = ArrayBufferStream;
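
A short read-back of the stream API for illustration (not part of the diff; the require path is an assumption):

const ArrayBufferStream = require('./ArrayBufferStream'); // path assumed

// Four bytes interpreted as two little-endian 16-bit integers.
const bytes = new Uint8Array([0x01, 0x00, 0x02, 0x00]);
const stream = new ArrayBufferStream(bytes.buffer);

console.log(stream.readInt16());          // 1
console.log(stream.readInt16());          // 2
console.log(stream.getBytesAvailable());  // 0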


@@ -1,64 +1,66 @@ const SoundPlayer = require('./SoundPlayer');
const SoundPlayer = require('./SoundPlayer');
const Tone = require('tone');
/**
* A prototype for the drum sound functionality that can load drum sounds, play, and stop them.
* @param {Tone.Gain} outputNode - a webAudio node that the drum sounds will send their output to
* @constructor
*/
const DrumPlayer = function (outputNode) {
this.outputNode = outputNode;
class DrumPlayer {
/**
* A prototype for the drum sound functionality that can load drum sounds, play, and stop them.
* @param {Tone.Gain} outputNode - a webAudio node that the drum sounds will send their output to
* @constructor
*/
constructor (outputNode) {
this.outputNode = outputNode;
const baseUrl = 'https://raw.githubusercontent.com/LLK/scratch-audio/develop/sound-files/drums/';
const fileNames = [
'SnareDrum(1)',
'BassDrum(1b)',
'SideStick(1)',
'Crash(2)',
'HiHatOpen(2)',
'HiHatClosed(1)',
'Tambourine(3)',
'Clap(1)',
'Claves(1)',
'WoodBlock(1)',
'Cowbell(3)',
'Triangle(1)',
'Bongo',
'Conga(1)',
'Cabasa(1)',
'GuiroLong(1)',
'Vibraslap(1)',
'Cuica(2)'
];
const baseUrl = 'https://raw.githubusercontent.com/LLK/scratch-audio/develop/sound-files/drums/';
const fileNames = [
'SnareDrum(1)',
'BassDrum(1b)',
'SideStick(1)',
'Crash(2)',
'HiHatOpen(2)',
'HiHatClosed(1)',
'Tambourine(3)',
'Clap(1)',
'Claves(1)',
'WoodBlock(1)',
'Cowbell(3)',
'Triangle(1)',
'Bongo',
'Conga(1)',
'Cabasa(1)',
'GuiroLong(1)',
'Vibraslap(1)',
'Cuica(2)'
];
this.drumSounds = [];
this.drumSounds = [];
for (let i = 0; i < fileNames.length; i++) {
const url = `${baseUrl + fileNames[i]}_22k.wav`;
this.drumSounds[i] = new SoundPlayer(this.outputNode);
this.drumSounds[i].setBuffer(new Tone.Buffer(url));
for (let i = 0; i < fileNames.length; i++) {
const url = `${baseUrl + fileNames[i]}_22k.wav`;
this.drumSounds[i] = new SoundPlayer(this.outputNode);
this.drumSounds[i].setBuffer(new Tone.Buffer(url));
}
}
};
/**
* Play a drum sound.
* The parameter for output node allows sprites or clones to send the drum sound
* to their individual audio effect chains.
* @param {number} drum - the drum number to play (0-indexed)
* @param {Tone.Gain} outputNode - a node to send the output to
*/
DrumPlayer.prototype.play = function (drum, outputNode) {
this.drumSounds[drum].outputNode = outputNode;
this.drumSounds[drum].start();
};
/**
* Stop all drum sounds.
*/
DrumPlayer.prototype.stopAll = function () {
for (let i = 0; i < this.drumSounds.length; i++) {
this.drumSounds[i].stop();
/**
* Play a drum sound.
* The parameter for output node allows sprites or clones to send the drum sound
* to their individual audio effect chains.
* @param {number} drum - the drum number to play (0-indexed)
* @param {Tone.Gain} outputNode - a node to send the output to
*/
play (drum, outputNode) {
this.drumSounds[drum].outputNode = outputNode;
this.drumSounds[drum].start();
}
};
/**
* Stop all drum sounds.
*/
stopAll () {
for (let i = 0; i < this.drumSounds.length; i++) {
this.drumSounds[i].stop();
}
}
}
module.exports = DrumPlayer;
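
An illustrative way to wire up the drum player after this change (not part of the diff; the require path and output routing are assumptions):

const Tone = require('tone');
const DrumPlayer = require('./DrumPlayer'); // path assumed

// Send the drums straight to the master output for this sketch.
const output = new Tone.Gain();
output.connect(Tone.Master);

const drums = new DrumPlayer(output);
// The wav files load asynchronously, so in practice you would wait for them before playing.
drums.play(0, output);                     // drum numbers are 0-indexed (0 = snare)
setTimeout(() => drums.stopAll(), 500);    // cut all drum sounds after half a second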


@@ -1,80 +1,82 @@ const Tone = require('tone');
const Tone = require('tone');
const Soundfont = require('soundfont-player');
/**
* A prototype for the instrument sound functionality that can play notes.
* This prototype version (which will be replaced at some point) uses an
* existing soundfont library that creates several limitations:
* The sound files are high quality but large, so they are loaded 'on demand,' at the time the
* play note or set instrument block runs, causing a delay of a few seconds.
* Using this library we don't have a way to set the volume, sustain the note beyond the sample
* duration, or run it through the sprite-specific audio effects.
* @param {Tone.Gain} outputNode - a webAudio node that the instrument will send its output to
* @constructor
*/
const InstrumentPlayer = function (outputNode) {
this.outputNode = outputNode;
class InstrumentPlayer {
/**
* A prototype for the instrument sound functionality that can play notes.
* This prototype version (which will be replaced at some point) uses an
* existing soundfont library that creates several limitations:
* The sound files are high quality but large, so they are loaded 'on demand,' at the time the
* play note or set instrument block runs, causing a delay of a few seconds.
* Using this library we don't have a way to set the volume, sustain the note beyond the sample
* duration, or run it through the sprite-specific audio effects.
* @param {Tone.Gain} outputNode - a webAudio node that the instrument will send its output to
* @constructor
*/
constructor (outputNode) {
this.outputNode = outputNode;
// Instrument names used by Musyng Kite soundfont, in order to
// match scratch instruments
this.instrumentNames = ['acoustic_grand_piano', 'electric_piano_1',
'drawbar_organ', 'acoustic_guitar_nylon', 'electric_guitar_clean',
'acoustic_bass', 'pizzicato_strings', 'cello', 'trombone', 'clarinet',
'tenor_sax', 'flute', 'pan_flute', 'bassoon', 'choir_aahs', 'vibraphone',
'music_box', 'steel_drums', 'marimba', 'lead_1_square', 'fx_4_atmosphere'];
// Instrument names used by Musyng Kite soundfont, in order to
// match scratch instruments
this.instrumentNames = ['acoustic_grand_piano', 'electric_piano_1',
'drawbar_organ', 'acoustic_guitar_nylon', 'electric_guitar_clean',
'acoustic_bass', 'pizzicato_strings', 'cello', 'trombone', 'clarinet',
'tenor_sax', 'flute', 'pan_flute', 'bassoon', 'choir_aahs', 'vibraphone',
'music_box', 'steel_drums', 'marimba', 'lead_1_square', 'fx_4_atmosphere'];
this.instruments = [];
};
/**
* Play a note for some number of seconds with a particular instrument.
* Load the instrument first, if it has not already been loaded.
* The duration is in seconds because the AudioEngine manages the tempo,
* and converts beats to seconds.
* @param {number} note - a MIDI note number
* @param {number} sec - a duration in seconds
* @param {number} instrumentNum - an instrument number (0-indexed)
* @param {number} vol - a volume level (0-100%)
*/
InstrumentPlayer.prototype.playNoteForSecWithInstAndVol = function (note, sec, instrumentNum, vol) {
const gain = vol / 100;
this.loadInstrument(instrumentNum)
.then(() => {
this.instruments[instrumentNum].play(
note, Tone.context.currentTime, {
duration: sec,
gain: gain
}
);
});
};
/**
* Load an instrument by number
* @param {number} instrumentNum - an instrument number (0-indexed)
* @return {Promise} a Promise that resolves once the instrument audio data has been loaded
*/
InstrumentPlayer.prototype.loadInstrument = function (instrumentNum) {
if (this.instruments[instrumentNum]) {
return Promise.resolve();
this.instruments = [];
}
return Soundfont.instrument(Tone.context, this.instrumentNames[instrumentNum])
.then(inst => {
inst.connect(this.outputNode);
this.instruments[instrumentNum] = inst;
/**
* Play a note for some number of seconds with a particular instrument.
* Load the instrument first, if it has not already been loaded.
* The duration is in seconds because the AudioEngine manages the tempo,
* and converts beats to seconds.
* @param {number} note - a MIDI note number
* @param {number} sec - a duration in seconds
* @param {number} instrumentNum - an instrument number (0-indexed)
* @param {number} vol - a volume level (0-100%)
*/
playNoteForSecWithInstAndVol (note, sec, instrumentNum, vol) {
const gain = vol / 100;
this.loadInstrument(instrumentNum)
.then(() => {
this.instruments[instrumentNum].play(
note, Tone.context.currentTime, {
duration: sec,
gain: gain
}
);
});
}
};
/**
* Load an instrument by number
* @param {number} instrumentNum - an instrument number (0-indexed)
* @return {Promise} a Promise that resolves once the instrument audio data has been loaded
*/
loadInstrument (instrumentNum) {
if (this.instruments[instrumentNum]) {
return Promise.resolve();
}
return Soundfont.instrument(Tone.context, this.instrumentNames[instrumentNum])
.then(inst => {
inst.connect(this.outputNode);
this.instruments[instrumentNum] = inst;
});
/**
* Stop all notes being played on all instruments
*/
InstrumentPlayer.prototype.stopAll = function () {
for (let i = 0; i < this.instruments.length; i++) {
if (this.instruments[i]) {
this.instruments[i].stop();
}
/**
* Stop all notes being played on all instruments
*/
stopAll () {
for (let i = 0; i < this.instruments.length; i++) {
if (this.instruments[i]) {
this.instruments[i].stop();
}
}
}
};
}
module.exports = InstrumentPlayer;
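
A minimal sketch of the note-playing API (not part of the diff; the require path and output routing are assumptions):

const Tone = require('tone');
const InstrumentPlayer = require('./InstrumentPlayer'); // path assumed

const output = new Tone.Gain();
output.connect(Tone.Master);

const instruments = new InstrumentPlayer(output);

// Play middle C (MIDI 60) for half a second on instrument 0 (acoustic grand piano) at full volume.
// The soundfont loads on demand, so the first note may be delayed by the download.
instruments.playNoteForSecWithInstAndVol(60, 0.5, 0, 100);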


@@ -3,84 +3,85 @@ const log = require('./log');
/**
* A SoundPlayer stores an audio buffer, and plays it
* @constructor
*/
const SoundPlayer = function () {
this.outputNode = null;
this.buffer = new Tone.Buffer();
this.bufferSource = null;
this.playbackRate = 1;
this.isPlaying = false;
};
class SoundPlayer {
constructor () {
this.outputNode = null;
this.buffer = new Tone.Buffer();
this.bufferSource = null;
this.playbackRate = 1;
this.isPlaying = false;
}
/**
* Connect the SoundPlayer to an output node
* @param {Tone.Gain} node - an output node to connect to
*/
SoundPlayer.prototype.connect = function (node) {
this.outputNode = node;
};
/**
* Connect the SoundPlayer to an output node
* @param {Tone.Gain} node - an output node to connect to
*/
connect (node) {
this.outputNode = node;
}
/**
* Set an audio buffer
* @param {Tone.Buffer} buffer Buffer to set
*/
SoundPlayer.prototype.setBuffer = function (buffer) {
this.buffer = buffer;
};
/**
* Set an audio buffer
* @param {Tone.Buffer} buffer Buffer to set
*/
setBuffer (buffer) {
this.buffer = buffer;
}
/**
* Set the playback rate for the sound
* @param {number} playbackRate - a ratio where 1 is normal playback, 0.5 is half speed, 2 is double speed, etc.
*/
SoundPlayer.prototype.setPlaybackRate = function (playbackRate) {
this.playbackRate = playbackRate;
if (this.bufferSource && this.bufferSource.playbackRate) {
/**
* Set the playback rate for the sound
* @param {number} playbackRate - a ratio where 1 is normal playback, 0.5 is half speed, 2 is double speed, etc.
*/
setPlaybackRate (playbackRate) {
this.playbackRate = playbackRate;
if (this.bufferSource && this.bufferSource.playbackRate) {
this.bufferSource.playbackRate.value = this.playbackRate;
}
}
/**
* Stop the sound
*/
stop () {
if (this.bufferSource) {
this.bufferSource.stop();
}
this.isPlaying = false;
}
/**
* Start playing the sound
* The web audio framework requires a new audio buffer source node for each playback
*/
start () {
if (!this.buffer || !this.buffer.loaded) {
log.warn('tried to play a sound that was not loaded yet');
return;
}
this.bufferSource = new Tone.BufferSource(this.buffer.get());
this.bufferSource.playbackRate.value = this.playbackRate;
}
};
this.bufferSource.connect(this.outputNode);
this.bufferSource.start();
/**
* Stop the sound
*/
SoundPlayer.prototype.stop = function () {
if (this.bufferSource) {
this.bufferSource.stop();
}
this.isPlaying = false;
};
/**
* Start playing the sound
* The web audio framework requires a new audio buffer source node for each playback
*/
SoundPlayer.prototype.start = function () {
if (!this.buffer || !this.buffer.loaded) {
log.warn('tried to play a sound that was not loaded yet');
return;
this.isPlaying = true;
}
this.bufferSource = new Tone.BufferSource(this.buffer.get());
this.bufferSource.playbackRate.value = this.playbackRate;
this.bufferSource.connect(this.outputNode);
this.bufferSource.start();
this.isPlaying = true;
};
/**
* The sound has finished playing. This is called at the correct time even if the playback rate
* has been changed
* @return {Promise} a Promise that resolves when the sound finishes playing
*/
SoundPlayer.prototype.finished = function () {
const storedContext = this;
return new Promise(resolve => {
storedContext.bufferSource.onended = function () {
this.isPlaying = false;
resolve();
}.bind(storedContext);
});
};
/**
* The sound has finished playing. This is called at the correct time even if the playback rate
* has been changed
* @return {Promise} a Promise that resolves when the sound finishes playing
*/
finished () {
const storedContext = this;
return new Promise(resolve => {
storedContext.bufferSource.onended = function () {
this.isPlaying = false;
resolve();
}.bind(storedContext);
});
}
}
module.exports = SoundPlayer;
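
A hedged usage sketch of the player lifecycle (not part of the diff; the require path and the sound URL are placeholders):

const Tone = require('tone');
const SoundPlayer = require('./SoundPlayer'); // path assumed

const output = new Tone.Gain();
output.connect(Tone.Master);

const player = new SoundPlayer();
player.connect(output);

// 'meow.wav' is a placeholder URL; Tone.Buffer calls back once the file has loaded.
const buffer = new Tone.Buffer('meow.wav', () => {
    player.setBuffer(buffer);
    player.setPlaybackRate(1.5);                         // half again as fast
    player.start();
    player.finished().then(() => console.log('done'));  // resolves when playback ends
});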


@@ -6,57 +6,51 @@ const Tone = require('tone');
* Values up to 100 set the echo feedback amount,
* increasing the time it takes the echo to fade away
* Clamped 0-100
* @constructor
*/
const EchoEffect = function () {
Tone.Effect.call(this);
this.value = 0;
this.delay = new Tone.FeedbackDelay(0.25, 0.5);
this.effectSend.chain(this.delay, this.effectReturn);
};
Tone.extend(EchoEffect, Tone.Effect);
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
EchoEffect.prototype.set = function (val) {
this.value = val;
this.value = this.clamp(this.value, 0, 100);
// mute the effect if value is 0
if (this.value === 0) {
this.wet.value = 0;
} else {
this.wet.value = 0.5;
class EchoEffect extends Tone.Effect {
constructor () {
super();
this.value = 0;
this.delay = new Tone.FeedbackDelay(0.25, 0.5);
this.effectSend.chain(this.delay, this.effectReturn);
}
const feedback = (this.value / 100) * 0.75;
this.delay.feedback.rampTo(feedback, 1 / 60);
};
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
set (val) {
this.value = this.clamp(val, 0, 100);
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
EchoEffect.prototype.changeBy = function (val) {
this.set(this.value + val);
};
// mute the effect if value is 0
if (this.value === 0) {
this.wet.value = 0;
} else {
this.wet.value = 0.5;
}
/**
* Clamp the input to a range
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
EchoEffect.prototype.clamp = function (input, min, max) {
return Math.min(Math.max(input, min), max);
};
const feedback = (this.value / 100) * 0.75;
this.delay.feedback.rampTo(feedback, 1 / 60);
}
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
changeBy (val) {
this.set(this.value + val);
}
/**
* Clamp the input to a range
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
clamp (input, min, max) {
return Math.min(Math.max(input, min), max);
}
}
module.exports = EchoEffect;
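
The set / changeBy / clamp pattern here is shared by the fuzz, pan, reverb and wobble effects further down, so one illustrative sketch stands in for all of them. It is not part of the diff; the require path and the Tone.Synth source are assumptions.

const Tone = require('tone');
const EchoEffect = require('./effects/EchoEffect'); // path assumed

// Route a synth through the echo effect to the master output.
const echo = new EchoEffect();
echo.connect(Tone.Master);

const synth = new Tone.Synth();
synth.connect(echo);

echo.set(50);      // clamped to 0-100; 0 mutes the effect entirely
echo.changeBy(25); // relative change, now 75
synth.triggerAttackRelease('C4', '8n');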


@@ -5,48 +5,41 @@ const Tone = require('tone');
* Effect value controls the wet/dry amount:
* 0 passes through none of the effect, 100 passes through all effect
* Clamped 0-100
* @constructor
*/
const FuzzEffect = function () {
Tone.Effect.call(this);
class FuzzEffect extends Tone.Effect {
constructor () {
super();
this.value = 0;
this.distortion = new Tone.Distortion(1);
this.effectSend.chain(this.distortion, this.effectReturn);
}
this.value = 0;
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
set (val) {
this.value = this.clamp(val, 0, 100);
this.distortion.wet.value = this.value / 100;
}
this.distortion = new Tone.Distortion(1);
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
changeBy (val) {
this.set(this.value + val);
}
this.effectSend.chain(this.distortion, this.effectReturn);
};
Tone.extend(FuzzEffect, Tone.Effect);
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
FuzzEffect.prototype.set = function (val) {
this.value = val;
this.value = this.clamp(this.value, 0, 100);
this.distortion.wet.value = this.value / 100;
};
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
FuzzEffect.prototype.changeBy = function (val) {
this.set(this.value + val);
};
/**
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
FuzzEffect.prototype.clamp = function (input, min, max) {
return Math.min(Math.max(input, min), max);
};
/**
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
clamp (input, min, max) {
return Math.min(Math.max(input, min), max);
}
}
module.exports = FuzzEffect;


@@ -5,49 +5,42 @@ const Tone = require('tone');
* Effect value of -100 puts the audio entirely on the left channel,
* 0 centers it, 100 puts it on the right.
* Clamped -100 to 100
* @constructor
*/
const PanEffect = function () {
Tone.Effect.call(this);
class PanEffect extends Tone.Effect {
constructor () {
super();
this.value = 0;
this.panner = new Tone.Panner();
this.effectSend.chain(this.panner, this.effectReturn);
}
this.value = 0;
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
set (val) {
this.value = this.clamp(val, -100, 100);
this.panner.pan.value = this.value / 100;
}
this.panner = new Tone.Panner();
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
changeBy (val) {
this.set(this.value + val);
}
this.effectSend.chain(this.panner, this.effectReturn);
};
Tone.extend(PanEffect, Tone.Effect);
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
PanEffect.prototype.set = function (val) {
this.value = val;
this.value = this.clamp(this.value, -100, 100);
this.panner.pan.value = this.value / 100;
};
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
PanEffect.prototype.changeBy = function (val) {
this.set(this.value + val);
};
/**
* Clamp the input to a range
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
PanEffect.prototype.clamp = function (input, min, max) {
return Math.min(Math.max(input, min), max);
};
/**
* Clamp the input to a range
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
clamp (input, min, max) {
return Math.min(Math.max(input, min), max);
}
}
module.exports = PanEffect;


@@ -16,66 +16,66 @@ const Tone = require('tone');
* Note that this effect functions differently from the other audio effects. It is
* not part of a chain of audio nodes. Instead, it provides a way to set the playback
* on one SoundPlayer or a group of them.
* @constructor
*/
const PitchEffect = function () {
this.value = 0; // effect value
this.ratio = 1; // the playback rate ratio
class PitchEffect {
constructor () {
this.value = 0; // effect value
this.ratio = 1; // the playback rate ratio
this.tone = new Tone();
}
this.tone = new Tone();
};
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
* @param {object} players - a dictionary of SoundPlayer objects to apply the effect to, indexed by md5
*/
set (val, players) {
this.value = val;
this.ratio = this.getRatio(this.value);
this.updatePlayers(players);
}
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
* @param {object} players - a dictionary of SoundPlayer objects to apply the effect to, indexed by md5
*/
PitchEffect.prototype.set = function (val, players) {
this.value = val;
this.ratio = this.getRatio(this.value);
this.updatePlayers(players);
};
/**
* Change the effect value
* @param {number} val - the value to change the effect by
* @param {object} players - a dictionary of SoundPlayer objects indexed by md5
*/
changeBy (val, players) {
this.set(this.value + val, players);
}
/**
* Change the effect value
* @param {number} val - the value to change the effect by
* @param {object} players - a dictionary of SoundPlayer objects indexed by md5
*/
PitchEffect.prototype.changeBy = function (val, players) {
this.set(this.value + val, players);
};
/**
* Compute the playback ratio for an effect value.
* The playback ratio is scaled so that a change of 10 in the effect value
* gives a change of 1 semitone in the ratio.
* @param {number} val - an effect value
* @returns {number} a playback ratio
*/
getRatio (val) {
return this.tone.intervalToFrequencyRatio(val / 10);
}
/**
* Compute the playback ratio for an effect value.
* The playback ratio is scaled so that a change of 10 in the effect value
* gives a change of 1 semitone in the ratio.
* @param {number} val - an effect value
* @returns {number} a playback ratio
*/
PitchEffect.prototype.getRatio = function (val) {
return this.tone.intervalToFrequencyRatio(val / 10);
};
/**
* Update a sound player's playback rate using the current ratio for the effect
* @param {object} player - a SoundPlayer object
*/
updatePlayer (player) {
player.setPlaybackRate(this.ratio);
}
/**
* Update a sound player's playback rate using the current ratio for the effect
* @param {object} player - a SoundPlayer object
*/
PitchEffect.prototype.updatePlayer = function (player) {
player.setPlaybackRate(this.ratio);
};
/**
 * Update the playback rate of a group of sound players using the current ratio for the effect
* @param {object} players - a dictionary of SoundPlayer objects to update, indexed by md5
*/
updatePlayers (players) {
if (!players) return;
/**
 * Update the playback rate of a group of sound players using the current ratio for the effect
* @param {object} players - a dictionary of SoundPlayer objects to update, indexed by md5
*/
PitchEffect.prototype.updatePlayers = function (players) {
if (!players) return;
for (const md5 in players) {
if (players.hasOwnProperty(md5)) {
this.updatePlayer(players[md5]);
for (const md5 in players) {
if (players.hasOwnProperty(md5)) {
this.updatePlayer(players[md5]);
}
}
}
};
}
module.exports = PitchEffect;
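
Unlike the node-based effects, PitchEffect never joins the audio graph; it adjusts the playback rate of each player it is handed. A minimal sketch, not part of the diff, with assumed require paths and an invented placeholder md5 key:

const PitchEffect = require('./effects/PitchEffect'); // paths assumed
const SoundPlayer = require('./SoundPlayer');

const pitchEffect = new PitchEffect();

// A dictionary of SoundPlayer objects indexed by md5, mirroring AudioPlayer.activeSoundPlayers.
const players = {'some-md5.wav': new SoundPlayer()};

pitchEffect.set(10, players);        // +10 units = one semitone up on every listed player
pitchEffect.changeBy(-20, players);  // net -10 units = one semitone down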


@@ -5,49 +5,42 @@ const Tone = require('tone');
* Effect value controls the wet/dry amount:
* 0 passes through none of the effect, 100 passes through all effect
* Clamped 0 to 100
* @constructor
*/
const ReverbEffect = function () {
Tone.Effect.call(this);
class ReverbEffect extends Tone.Effect {
constructor () {
super();
this.value = 0;
this.reverb = new Tone.Freeverb();
this.effectSend.chain(this.reverb, this.effectReturn);
}
this.value = 0;
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
set (val) {
this.value = this.clamp(val, 0, 100);
this.reverb.wet.value = this.value / 100;
}
this.reverb = new Tone.Freeverb();
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
changeBy (val) {
this.set(this.value + val);
}
this.effectSend.chain(this.reverb, this.effectReturn);
};
Tone.extend(ReverbEffect, Tone.Effect);
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
ReverbEffect.prototype.set = function (val) {
this.value = val;
this.value = this.clamp(this.value, 0, 100);
this.reverb.wet.value = this.value / 100;
};
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
ReverbEffect.prototype.changeBy = function (val) {
this.set(this.value + val);
};
/**
* Clamp the input to a range
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
ReverbEffect.prototype.clamp = function (input, min, max) {
return Math.min(Math.max(input, min), max);
};
/**
* Clamp the input to a range
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
clamp (input, min, max) {
return Math.min(Math.max(input, min), max);
}
}
module.exports = ReverbEffect;


@@ -9,59 +9,58 @@ const Tone = require('tone');
* Other values change the pitch of the effect, in units of 10 steps per semitone.
* The effect value is not clamped (but probably should be).
* Exterminate.
* @constructor
*/
const RoboticEffect = function () {
Tone.Effect.call(this);
class RoboticEffect extends Tone.Effect {
constructor () {
super();
this.value = 0;
this.value = 0;
const time = this._delayTimeForValue(100);
this.feedbackCombFilter = new Tone.FeedbackCombFilter(time, 0.9);
const time = this._delayTimeForValue(100);
this.feedbackCombFilter = new Tone.FeedbackCombFilter(time, 0.9);
this.effectSend.chain(this.feedbackCombFilter, this.effectReturn);
};
Tone.extend(RoboticEffect, Tone.Effect);
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
RoboticEffect.prototype.set = function (val) {
this.value = val;
// mute the effect if value is 0
if (this.value === 0) {
this.wet.value = 0;
} else {
this.wet.value = 1;
this.effectSend.chain(this.feedbackCombFilter, this.effectReturn);
}
// set delay time using the value
const time = this._delayTimeForValue(this.value);
this.feedbackCombFilter.delayTime.rampTo(time, 1 / 60);
};
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
set (val) {
this.value = val;
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
RoboticEffect.prototype.changeBy = function (val) {
this.set(this.value + val);
};
// mute the effect if value is 0
if (this.value === 0) {
this.wet.value = 0;
} else {
this.wet.value = 1;
}
/**
* Compute the delay time for an effect value.
* Convert the effect value to a musical note (in units of 10 per semitone),
* and return the period (single cycle duration) of the frequency of that note.
* @param {number} val - the effect value
* @returns {number} a delay time in seconds
*/
RoboticEffect.prototype._delayTimeForValue = function (val) {
const midiNote = ((val - 100) / 10) + 36;
const freq = Tone.Frequency(midiNote, 'midi').eval();
return 1 / freq;
};
// set delay time using the value
const time = this._delayTimeForValue(this.value);
this.feedbackCombFilter.delayTime.rampTo(time, 1 / 60);
}
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
changeBy (val) {
this.set(this.value + val);
}
/**
* Compute the delay time for an effect value.
* Convert the effect value to a musical note (in units of 10 per semitone),
* and return the period (single cycle duration) of the frequency of that note.
* @param {number} val - the effect value
* @returns {number} a delay time in seconds
*/
_delayTimeForValue (val) {
const midiNote = ((val - 100) / 10) + 36;
const freq = Tone.Frequency(midiNote, 'midi').eval();
return 1 / freq;
}
}
module.exports = RoboticEffect;


@@ -9,53 +9,52 @@ const Tone = require('tone');
* 0 passes through none of the effect, 100 passes through all effect
* Effect value also controls the frequency of the LFO.
* Clamped 0 to 100
* @constructor
*/
const WobbleEffect = function () {
Tone.Effect.call(this);
class WobbleEffect extends Tone.Effect {
constructor () {
super();
this.value = 0;
this.value = 0;
this.wobbleLFO = new Tone.LFO(10, 0, 1).start();
this.wobbleGain = new Tone.Gain();
this.wobbleLFO.connect(this.wobbleGain.gain);
this.wobbleLFO = new Tone.LFO(10, 0, 1).start();
this.wobbleGain = new Tone.Gain();
this.wobbleLFO.connect(this.wobbleGain.gain);
this.effectSend.chain(this.wobbleGain, this.effectReturn);
};
this.effectSend.chain(this.wobbleGain, this.effectReturn);
}
Tone.extend(WobbleEffect, Tone.Effect);
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
set (val) {
this.value = val;
/**
* Set the effect value
* @param {number} val - the new value to set the effect to
*/
WobbleEffect.prototype.set = function (val) {
this.value = val;
this.value = this.clamp(this.value, 0, 100);
this.value = this.clamp(this.value, 0, 100);
this.wet.value = this.value / 100;
this.wet.value = this.value / 100;
this.wobbleLFO.frequency.rampTo(this.value / 10, 1 / 60);
}
this.wobbleLFO.frequency.rampTo(this.value / 10, 1 / 60);
};
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
changeBy (val) {
this.set(this.value + val);
}
/**
* Change the effect value
* @param {number} val - the value to change the effect by
*/
WobbleEffect.prototype.changeBy = function (val) {
this.set(this.value + val);
};
/**
* Clamp the input to a range
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
WobbleEffect.prototype.clamp = function (input, min, max) {
return Math.min(Math.max(input, min), max);
};
/**
* Clamp the input to a range
* @param {number} input - the input to clamp
* @param {number} min - the min value to clamp to
* @param {number} max - the max value to clamp to
* @return {number} the clamped value
*/
clamp (input, min, max) {
return Math.min(Math.max(input, min), max);
}
}
module.exports = WobbleEffect;


@@ -19,329 +19,331 @@ const DrumPlayer = require('./DrumPlayer');
* that handles global functionality, and AudioPlayers, belonging to individual sprites and clones.
*/
/**
* Each sprite or clone has an audio player
* the audio player handles sound playback, volume, and the sprite-specific audio effects:
* pitch and pan
* @param {AudioEngine} audioEngine AudioEngine for player
* @constructor
*/
const AudioPlayer = function (audioEngine) {
class AudioPlayer {
/**
* Each sprite or clone has an audio player
* the audio player handles sound playback, volume, and the sprite-specific audio effects:
* pitch and pan
* @param {AudioEngine} audioEngine AudioEngine for player
* @constructor
*/
constructor (audioEngine) {
this.audioEngine = audioEngine;
this.audioEngine = audioEngine;
// effects setup
this.pitchEffect = new PitchEffect();
this.panEffect = new PanEffect();
// effects setup
this.pitchEffect = new PitchEffect();
this.panEffect = new PanEffect();
// the effects are chained to an effects node for this player, then to the main audio engine
// audio is sent from each soundplayer, through the effects in order, then to the global effects
// note that the pitch effect works differently - it sets the playback rate for each soundplayer
this.effectsNode = new Tone.Gain();
this.effectsNode.chain(this.panEffect, this.audioEngine.input);
// the effects are chained to an effects node for this player, then to the main audio engine
// audio is sent from each soundplayer, through the effects in order, then to the global effects
// note that the pitch effect works differently - it sets the playback rate for each soundplayer
this.effectsNode = new Tone.Gain();
this.effectsNode.chain(this.panEffect, this.audioEngine.input);
// reset effects to their default parameters
this.clearEffects();
// reset effects to their default parameters
this.clearEffects();
// sound players that are currently playing, indexed by the sound's md5
this.activeSoundPlayers = {};
};
/**
* Play a sound
* @param {string} md5 - the md5 id of a sound file
* @return {Promise} a Promise that resolves when the sound finishes playing
*/
AudioPlayer.prototype.playSound = function (md5) {
// if this sound is not in the audio engine, return
if (!this.audioEngine.audioBuffers[md5]) {
return;
// sound players that are currently playing, indexed by the sound's md5
this.activeSoundPlayers = {};
}
// if this sprite or clone is already playing this sound, stop it first
if (this.activeSoundPlayers[md5]) {
this.activeSoundPlayers[md5].stop();
}
/**
* Play a sound
* @param {string} md5 - the md5 id of a sound file
* @return {Promise} a Promise that resolves when the sound finishes playing
*/
playSound (md5) {
// if this sound is not in the audio engine, return
if (!this.audioEngine.audioBuffers[md5]) {
return;
}
// create a new soundplayer to play the sound
const player = new SoundPlayer();
player.setBuffer(this.audioEngine.audioBuffers[md5]);
player.connect(this.effectsNode);
this.pitchEffect.updatePlayer(player);
player.start();
// if this sprite or clone is already playing this sound, stop it first
if (this.activeSoundPlayers[md5]) {
this.activeSoundPlayers[md5].stop();
}
// add it to the list of active sound players
this.activeSoundPlayers[md5] = player;
// create a new soundplayer to play the sound
const player = new SoundPlayer();
player.setBuffer(this.audioEngine.audioBuffers[md5]);
player.connect(this.effectsNode);
this.pitchEffect.updatePlayer(player);
player.start();
// remove sounds that are not playing from the active sound players array
for (const id in this.activeSoundPlayers) {
if (this.activeSoundPlayers.hasOwnProperty(id)) {
if (!this.activeSoundPlayers[id].isPlaying) {
delete this.activeSoundPlayers[id];
// add it to the list of active sound players
this.activeSoundPlayers[md5] = player;
// remove sounds that are not playing from the active sound players array
for (const id in this.activeSoundPlayers) {
if (this.activeSoundPlayers.hasOwnProperty(id)) {
if (!this.activeSoundPlayers[id].isPlaying) {
delete this.activeSoundPlayers[id];
}
}
}
return player.finished();
}
/**
* Play a drum sound. The AudioEngine contains the DrumPlayer, but the AudioPlayer
* calls this function so that it can pass a reference to its own effects node.
* @param {number} drum - a drum number (0-indexed)
* @param {number} beats - a duration in beats
* @return {Promise} a Promise that resolves after the duration has elapsed
*/
playDrumForBeats (drum, beats) {
this.audioEngine.drumPlayer.play(drum, this.effectsNode);
return this.audioEngine.waitForBeats(beats);
}
/**
* Stop all sounds, notes and drums that are playing
*/
stopAllSounds () {
// stop all active sound players
for (const md5 in this.activeSoundPlayers) {
this.activeSoundPlayers[md5].stop();
}
// stop all instruments
this.audioEngine.instrumentPlayer.stopAll();
// stop drum notes
this.audioEngine.drumPlayer.stopAll();
}
/**
* Set an audio effect to a value
* @param {string} effect - the name of the effect
* @param {number} value - the value to set the effect to
*/
setEffect (effect, value) {
switch (effect) {
case this.audioEngine.EFFECT_NAMES.pitch:
this.pitchEffect.set(value, this.activeSoundPlayers);
break;
case this.audioEngine.EFFECT_NAMES.pan:
this.panEffect.set(value);
break;
case this.audioEngine.EFFECT_NAMES.echo:
this.audioEngine.echoEffect.set(value);
break;
case this.audioEngine.EFFECT_NAMES.reverb:
this.audioEngine.reverbEffect.set(value);
break;
case this.audioEngine.EFFECT_NAMES.fuzz:
this.audioEngine.fuzzEffect.set(value);
break;
case this.audioEngine.EFFECT_NAMES.robot:
this.audioEngine.roboticEffect.set(value);
break;
}
}
return player.finished();
};
/**
* Clear all audio effects
*/
clearEffects () {
this.panEffect.set(0);
this.pitchEffect.set(0, this.activeSoundPlayers);
this.effectsNode.gain.value = 1;
/**
* Play a drum sound. The AudioEngine contains the DrumPlayer, but the AudioPlayer
* calls this function so that it can pass a reference to its own effects node.
* @param {number} drum - a drum number (0-indexed)
* @param {number} beats - a duration in beats
* @return {Promise} a Promise that resolves after the duration has elapsed
*/
AudioPlayer.prototype.playDrumForBeats = function (drum, beats) {
this.audioEngine.drumPlayer.play(drum, this.effectsNode);
return this.audioEngine.waitForBeats(beats);
};
/**
* Stop all sounds, notes and drums that are playing
*/
AudioPlayer.prototype.stopAllSounds = function () {
// stop all active sound players
for (const md5 in this.activeSoundPlayers) {
this.activeSoundPlayers[md5].stop();
this.audioEngine.echoEffect.set(0);
this.audioEngine.reverbEffect.set(0);
this.audioEngine.fuzzEffect.set(0);
this.audioEngine.roboticEffect.set(0);
}
// stop all instruments
this.audioEngine.instrumentPlayer.stopAll();
// stop drum notes
this.audioEngine.drumPlayer.stopAll();
};
/**
* Set an audio effect to a value
* @param {string} effect - the name of the effect
* @param {number} value - the value to set the effect to
*/
AudioPlayer.prototype.setEffect = function (effect, value) {
switch (effect) {
case this.audioEngine.EFFECT_NAMES.pitch:
this.pitchEffect.set(value, this.activeSoundPlayers);
break;
case this.audioEngine.EFFECT_NAMES.pan:
this.panEffect.set(value);
break;
case this.audioEngine.EFFECT_NAMES.echo:
this.audioEngine.echoEffect.set(value);
break;
case this.audioEngine.EFFECT_NAMES.reverb:
this.audioEngine.reverbEffect.set(value);
break;
case this.audioEngine.EFFECT_NAMES.fuzz:
this.audioEngine.fuzzEffect.set(value);
break;
case this.audioEngine.EFFECT_NAMES.robot:
this.audioEngine.roboticEffect.set(value);
break;
/**
* Set the volume for sounds played by this AudioPlayer
* @param {number} value - the volume in range 0-100
*/
setVolume (value) {
this.effectsNode.gain.value = value / 100;
}
};
/**
* Clear all audio effects
*/
AudioPlayer.prototype.clearEffects = function () {
this.panEffect.set(0);
this.pitchEffect.set(0, this.activeSoundPlayers);
this.effectsNode.gain.value = 1;
this.audioEngine.echoEffect.set(0);
this.audioEngine.reverbEffect.set(0);
this.audioEngine.fuzzEffect.set(0);
this.audioEngine.roboticEffect.set(0);
};
/**
* Set the volume for sounds played by this AudioPlayer
* @param {number} value - the volume in range 0-100
*/
AudioPlayer.prototype.setVolume = function (value) {
this.effectsNode.gain.value = value / 100;
};
}
/**
* There is a single instance of the AudioEngine. It handles global audio properties and effects,
* loads all the audio buffers for sounds belonging to sprites, and creates a single instrument player
* and a drum player, used by all play note and play drum blocks.
* @constructor
*/
const AudioEngine = function () {
class AudioEngine {
constructor () {
// create the global audio effects
this.roboticEffect = new RoboticEffect();
this.fuzzEffect = new FuzzEffect();
this.echoEffect = new EchoEffect();
this.reverbEffect = new ReverbEffect();
// create the global audio effects
this.roboticEffect = new RoboticEffect();
this.fuzzEffect = new FuzzEffect();
this.echoEffect = new EchoEffect();
this.reverbEffect = new ReverbEffect();
// chain the global effects to the output
this.input = new Tone.Gain();
this.input.chain(
this.roboticEffect, this.fuzzEffect, this.echoEffect, this.reverbEffect,
Tone.Master
);
// chain the global effects to the output
this.input = new Tone.Gain();
this.input.chain(
this.roboticEffect, this.fuzzEffect, this.echoEffect, this.reverbEffect,
Tone.Master
);
// global tempo in bpm (beats per minute)
this.currentTempo = 60;
// global tempo in bpm (beats per minute)
this.currentTempo = 60;
// instrument player for play note blocks
this.instrumentPlayer = new InstrumentPlayer(this.input);
this.numInstruments = this.instrumentPlayer.instrumentNames.length;
// instrument player for play note blocks
this.instrumentPlayer = new InstrumentPlayer(this.input);
this.numInstruments = this.instrumentPlayer.instrumentNames.length;
// drum player for play drum blocks
this.drumPlayer = new DrumPlayer(this.input);
this.numDrums = this.drumPlayer.drumSounds.length;
// drum player for play drum blocks
this.drumPlayer = new DrumPlayer(this.input);
this.numDrums = this.drumPlayer.drumSounds.length;
// a map of md5s to audio buffers, holding sounds for all sprites
this.audioBuffers = {};
// a map of md5s to audio buffers, holding sounds for all sprites
this.audioBuffers = {};
// microphone, for measuring loudness, with a level meter analyzer
this.mic = null;
this.micMeter = null;
};
/**
* Decode a sound, decompressing it into audio samples.
 * Store a reference to the sound in the audioBuffers dictionary, indexed by md5
* @param {object} sound - an object containing audio data and metadata for a sound
* @property {Buffer} data - sound data loaded from scratch-storage.
* @property {string} format - format type, either empty or adpcm.
* @property {string} md5 - the MD5 and extension of the sound.
* @returns {?Promise} - a promise which will resolve after the audio buffer is stored, or null on error.
*/
AudioEngine.prototype.decodeSound = function (sound) {
let loaderPromise = null;
switch (sound.format) {
case '':
loaderPromise = Tone.context.decodeAudioData(sound.data.buffer);
break;
case 'adpcm':
loaderPromise = (new ADPCMSoundDecoder()).decode(sound.data.buffer);
break;
default:
return log.warn('unknown sound format', sound.format);
// microphone, for measuring loudness, with a level meter analyzer
this.mic = null;
this.micMeter = null;
}
const storedContext = this;
return loaderPromise.then(
decodedAudio => {
storedContext.audioBuffers[sound.md5] = new Tone.Buffer(decodedAudio);
},
error => {
log.warn('audio data could not be decoded', error);
/**
* Names of the audio effects.
     * @readonly
     * @enum {string}
*/
static get EFFECT_NAMES () {
return {
pitch: 'pitch',
pan: 'pan',
echo: 'echo',
reverb: 'reverb',
fuzz: 'fuzz',
robot: 'robot'
};
}
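    // A short sketch of reading the effect names through the static getter:
    //   AudioEngine.EFFECT_NAMES.pitch;        // 'pitch'
    //   Object.keys(AudioEngine.EFFECT_NAMES); // ['pitch', 'pan', 'echo', 'reverb', 'fuzz', 'robot']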
/**
* Decode a sound, decompressing it into audio samples.
     * Store a reference to the sound in the audioBuffers dictionary, indexed by md5
* @param {object} sound - an object containing audio data and metadata for a sound
* @property {Buffer} data - sound data loaded from scratch-storage.
* @property {string} format - format type, either empty or adpcm.
* @property {string} md5 - the MD5 and extension of the sound.
* @returns {?Promise} - a promise which will resolve after the audio buffer is stored, or null on error.
*/
decodeSound (sound) {
let loaderPromise = null;
switch (sound.format) {
case '':
loaderPromise = Tone.context.decodeAudioData(sound.data.buffer);
break;
case 'adpcm':
loaderPromise = (new ADPCMSoundDecoder()).decode(sound.data.buffer);
break;
default:
return log.warn('unknown sound format', sound.format);
}
        const storedContext = this;
        return loaderPromise.then(
            decodedAudio => {
                storedContext.audioBuffers[sound.md5] = new Tone.Buffer(decodedAudio);
            },
            error => {
                log.warn('audio data could not be decoded', error);
            }
        );
    }
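    // Usage sketch for decodeSound (illustrative; `engine` and `soundAsset` are assumed
    // to come from the caller, e.g. a runtime loading sounds via scratch-storage):
    //   engine.decodeSound({md5: 'abc123.wav', format: 'adpcm', data: soundAsset.data})
    //       .then(() => {
    //           // the decoded Tone.Buffer is now stored at engine.audioBuffers['abc123.wav']
    //       });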
/**
     * An older version of the AudioEngine had this function to load all sounds.
     * This is a stub to provide a warning when it is called.
* @todo remove this
*/
loadSounds () {
log.warn('The loadSounds function is no longer available. Please use Scratch Storage.');
}
/**
* Play a note for a duration on an instrument with a volume
* @param {number} note - a MIDI note number
* @param {number} beats - a duration in beats
* @param {number} inst - an instrument number (0-indexed)
* @param {number} vol - a volume level (0-100%)
* @return {Promise} a Promise that resolves after the duration has elapsed
*/
playNoteForBeatsWithInstAndVol (note, beats, inst, vol) {
const sec = this.beatsToSec(beats);
this.instrumentPlayer.playNoteForSecWithInstAndVol(note, sec, inst, vol);
return this.waitForBeats(beats);
}
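    // Sketch: play middle C (MIDI note 60) for half a beat on the first instrument
    // at full volume, continuing once the duration has elapsed (`engine` is an
    // assumed AudioEngine instance):
    //   engine.playNoteForBeatsWithInstAndVol(60, 0.5, 0, 100).then(() => {
    //       // next note or block goes here
    //   });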
/**
* Convert a number of beats to a number of seconds, using the current tempo
* @param {number} beats number of beats to convert to secs
* @return {number} seconds number of seconds `beats` will last
*/
beatsToSec (beats) {
return (60 / this.currentTempo) * beats;
}
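    // Worked example: at the default tempo of 60 bpm, beatsToSec(2) is (60 / 60) * 2 = 2
    // seconds; after setTempo(120) the same call would return 1 second.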
/**
* Wait for some number of beats
* @param {number} beats number of beats to wait for
* @return {Promise} a Promise that resolves after the duration has elapsed
*/
waitForBeats (beats) {
const storedContext = this;
return new Promise(resolve => {
setTimeout(() => {
resolve();
}, storedContext.beatsToSec(beats) * 1000);
});
}
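    // Sketch: wait one beat before playing another note (`engine` is an assumed instance):
    //   engine.waitForBeats(1).then(() => engine.playNoteForBeatsWithInstAndVol(64, 1, 0, 100));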
/**
* Set the global tempo in bpm (beats per minute)
* @param {number} value - the new tempo to set
*/
setTempo (value) {
this.currentTempo = value;
}
/**
* Change the tempo by some number of bpm (beats per minute)
* @param {number} value - the number of bpm to change the tempo by
*/
changeTempo (value) {
this.setTempo(this.currentTempo + value);
}
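    // Sketch of the two tempo helpers together (assumed instance `engine`):
    //   engine.setTempo(100);    // currentTempo is now 100 bpm
    //   engine.changeTempo(-40); // currentTempo is now 60 bpm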
/**
* Get the current loudness of sound received by the microphone.
* Sound is measured in RMS and smoothed.
* @return {number} loudness scaled 0 to 100
*/
getLoudness () {
if (!this.mic) {
this.mic = new Tone.UserMedia();
this.micMeter = new Tone.Meter('level', 0.5);
this.mic.open();
this.mic.connect(this.micMeter);
}
if (this.mic && this.mic.state === 'started') {
return this.micMeter.value * 100;
}
return -1;
}
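    // Sketch: poll the microphone loudness (assumed instance `engine`). The first call
    // opens the mic and returns -1 until the Tone.UserMedia stream reports 'started':
    //   const loudness = engine.getLoudness(); // -1 at first, later a value scaled 0 to 100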
/**
* Create an AudioPlayer. Each sprite or clone has an AudioPlayer.
* It includes a reference to the AudioEngine so it can use global
* functionality such as playing notes.
* @return {AudioPlayer} new AudioPlayer instance
*/
createPlayer () {
return new AudioPlayer(this);
}
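    // Sketch: each sprite or clone gets its own player (assumed instance `engine`):
    //   const player = engine.createPlayer();
    //   player.setVolume(100);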
}
module.exports = AudioEngine;
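// Usage sketch for consumers of this module (the require path is assumed and may differ
// depending on where this file lives in the package):
//   const AudioEngine = require('./AudioEngine');
//   const engine = new AudioEngine();
//   const player = engine.createPlayer();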