Skip to content
Snippets Groups Projects
Commit ee45508c authored by Paul Kaplan's avatar Paul Kaplan
Browse files

Classify and cleanup

parent ba706f24
No related branches found
No related tags found
No related merge requests found
...@@ -49,10 +49,7 @@ class RecordModal extends React.Component { ...@@ -49,10 +49,7 @@ class RecordModal extends React.Component {
this.setState({playing: false, samples: null}); this.setState({playing: false, samples: null});
} }
handleSubmit () { handleSubmit () {
this.setState({encoding: true}); this.setState({encoding: true}, () => {
// Allow enough time for UI refresh before starting encoding
setTimeout(() => {
WavEncoder.encode({ WavEncoder.encode({
sampleRate: this.state.sampleRate, sampleRate: this.state.sampleRate,
channelData: [this.state.samples] channelData: [this.state.samples]
...@@ -76,7 +73,7 @@ class RecordModal extends React.Component { ...@@ -76,7 +73,7 @@ class RecordModal extends React.Component {
this.props.vm.addSound(vmSound); this.props.vm.addSound(vmSound);
this.handleCancel(); this.handleCancel();
}); });
}, 100); });
} }
handleCancel () { handleCancel () {
this.props.onClose(); this.props.onClose();
......
const SharedAudioContext = require('./shared-audio-context.js');

/**
 * Plays a single mono buffer of raw audio samples through the shared
 * Web Audio context. The sample data is loaded once in the constructor;
 * playback can be started repeatedly and stopped early.
 */
class AudioBufferPlayer {
    /**
     * @param {Float32Array} samples - mono samples, played at the context's sample rate.
     */
    constructor (samples) {
        this.audioContext = new SharedAudioContext();
        const sampleCount = samples.length;
        this.buffer = this.audioContext.createBuffer(1, sampleCount, this.audioContext.sampleRate);
        this.buffer.getChannelData(0).set(samples);
        // Holds the most recently created source node so stop() can cancel it.
        this.source = null;
    }
    /**
     * Start playback from the beginning of the buffer.
     * @param {Function} onEnded - invoked when playback runs to completion
     *     (but not when stopped manually via stop()).
     */
    play (onEnded) {
        // An AudioBufferSourceNode is single-use, so a fresh one is built per play.
        this.source = this.audioContext.createBufferSource();
        const source = this.source;
        source.onended = onEnded;
        source.buffer = this.buffer;
        source.connect(this.audioContext.destination);
        source.start();
    }
    /**
     * Halt any in-progress playback without firing the onEnded callback.
     */
    stop () {
        if (!this.source) return;
        this.source.onended = null; // manual stop must not trigger the completion callback
        this.source.stop();
    }
}

module.exports = AudioBufferPlayer;
const SharedAudioContext = require('./shared-audio-context.js');

/**
 * Records mono audio from the user's microphone via the Web Audio API.
 * While listening it reports input loudness every animation frame; while
 * recording it retains fixed-size sample chunks. stop() assembles the
 * chunks into one buffer, trimming leading/trailing quiet chunks.
 */
class AudioRecorder {
    constructor () {
        this.audioContext = new SharedAudioContext();
        this.bufferLength = 1024; // samples per ScriptProcessor chunk

        // Audio graph pieces, created lazily in attachUserMediaStream.
        this.userMediaStream = null;
        this.mediaStreamSource = null;
        this.sourceNode = null;
        this.scriptProcessorNode = null;

        // NOTE(review): recordedSamples is initialized but never updated in
        // this file — possibly vestigial; confirm before relying on it.
        this.recordedSamples = 0;
        this.recording = false; // chunks are kept only while this is true
        this.buffers = []; // Float32Array chunks captured while recording

        this.disposed = false; // terminates the level-meter rAF loop once true
    }
    /**
     * Request microphone access and start the input-level meter.
     * @param {Function} onUpdate - called each animation frame with a scaled
     *     loudness value (see calculateRMS).
     * @param {Function} onError - called if access is denied or getUserMedia throws.
     */
    startListening (onUpdate, onError) {
        try {
            // NOTE(review): navigator.getUserMedia is the deprecated callback
            // API — presumably a polyfill/adapter supplies it here; confirm.
            navigator.getUserMedia({audio: true}, userMediaStream => {
                this.attachUserMediaStream(userMediaStream, onUpdate);
            }, e => {
                onError(e);
            });
        } catch (e) {
            onError(e);
        }
    }
    /** Begin retaining audio chunks as they arrive from the script processor. */
    startRecording () {
        this.recording = true;
    }
    /**
     * Compute a scaled root-mean-square loudness for a chunk of samples.
     * @param {Float32Array} samples - audio samples, nominally in [-1, 1].
     * @returns {number} sqrt(rms / 0.35) — a perceptual-ish 0..~1 level.
     */
    calculateRMS (samples) {
        // Calculate RMS, adapted from https://github.com/Tonejs/Tone.js/blob/master/Tone/component/Meter.js#L88
        const sum = samples.reduce((acc, v) => acc + Math.pow(v, 2), 0);
        const rms = Math.sqrt(sum / samples.length);
        // Scale it
        const unity = 0.35;
        const val = rms / unity;
        // Scale the output curve
        return Math.sqrt(val);
    }
    /**
     * Wire the microphone stream into the audio graph and start the
     * per-frame level meter loop.
     * @param {MediaStream} userMediaStream - stream from getUserMedia.
     * @param {Function} onUpdate - receives the scaled RMS level each frame.
     */
    attachUserMediaStream (userMediaStream, onUpdate) {
        this.userMediaStream = userMediaStream;
        this.mediaStreamSource = this.audioContext.createMediaStreamSource(userMediaStream);
        this.sourceNode = this.audioContext.createGain();
        this.scriptProcessorNode = this.audioContext.createScriptProcessor(this.bufferLength, 2, 2);

        // Capture each chunk (copied, since the input buffer is reused by the
        // browser) but only while recording has been started.
        this.scriptProcessorNode.onaudioprocess = processEvent => {
            if (this.recording) {
                this.buffers.push(new Float32Array(processEvent.inputBuffer.getChannelData(0)));
            }
        };

        // Analyser feeds the visual level meter; it does not affect the samples.
        this.analyserNode = this.audioContext.createAnalyser();
        this.analyserNode.fftSize = 2048;
        const bufferLength = this.analyserNode.frequencyBinCount;
        const dataArray = new Float32Array(bufferLength);

        // Self-rescheduling rAF loop; dispose() flips `disposed` to end it.
        const update = () => {
            if (this.disposed) return;
            requestAnimationFrame(update);
            this.analyserNode.getFloatTimeDomainData(dataArray);
            onUpdate(this.calculateRMS(dataArray));
        };

        requestAnimationFrame(update);

        // Wire everything together, ending in the destination
        this.mediaStreamSource.connect(this.sourceNode);
        this.sourceNode.connect(this.analyserNode);
        this.analyserNode.connect(this.scriptProcessorNode);
        this.scriptProcessorNode.connect(this.audioContext.destination);
    }
    /**
     * Finish recording: trim chunks quieter than 1/8 of the peak level from
     * both ends and concatenate the rest into a single sample buffer.
     * @returns {{levels: Array<number>, samples: Float32Array, sampleRate: number}}
     *     per-chunk levels of the kept chunks, the concatenated samples, and
     *     the context sample rate.
     */
    stop () {
        const chunkLevels = this.buffers.map(buffer => this.calculateRMS(buffer));
        const maxRMS = Math.max.apply(null, chunkLevels);
        const threshold = maxRMS / 8; // chunks quieter than 1/8 of peak count as silence
        // Locate the first and last chunks that rise above the threshold.
        let firstChunkAboveThreshold = null;
        let lastChunkAboveThreshold = null;
        for (let i = 0; i < chunkLevels.length; i++) {
            if (chunkLevels[i] > threshold) {
                if (firstChunkAboveThreshold === null) firstChunkAboveThreshold = i;
                lastChunkAboveThreshold = i;
            }
        }
        // Size for the loud span plus one chunk of padding on each side —
        // hence the +2 here and the exclusive (first-2, last+1) window below.
        // NOTE(review): if no chunk exceeds the threshold (e.g. pure silence),
        // first/last remain null and the arithmetic coerces null to 0 — confirm
        // this degenerate case is acceptable to callers.
        const usedSamples = lastChunkAboveThreshold - firstChunkAboveThreshold + 2;
        const buffer = new Float32Array(usedSamples * this.bufferLength);

        const usedChunkLevels = [];
        let offset = 0;
        for (let i = 0; i < this.buffers.length; i++) {
            const bufferChunk = this.buffers[i];
            if (i > firstChunkAboveThreshold - 2 && i < lastChunkAboveThreshold + 1) {
                usedChunkLevels.push(chunkLevels[i]);
                buffer.set(bufferChunk, offset);
                offset += bufferChunk.length;
            }
        }

        return {
            levels: usedChunkLevels,
            samples: buffer,
            sampleRate: this.audioContext.sampleRate
        };
    }
    /**
     * Tear down the audio graph, release the microphone track, and stop the
     * level-meter loop. The instance is not reusable afterwards.
     */
    dispose () {
        this.scriptProcessorNode.onaudioprocess = null;
        this.scriptProcessorNode.disconnect();
        this.analyserNode.disconnect();
        this.sourceNode.disconnect();
        this.mediaStreamSource.disconnect();
        this.userMediaStream.getAudioTracks()[0].stop();
        this.disposed = true;
    }
}

module.exports = AudioRecorder;
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment