From ee45508cc0119094251754f871fd93740914df44 Mon Sep 17 00:00:00 2001
From: Paul Kaplan <pkaplan@media.mit.edu>
Date: Mon, 26 Jun 2017 17:06:57 -0400
Subject: [PATCH] Convert audio recorder and player to ES6 classes and clean up
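
Convert AudioBufferPlayer and AudioRecorder from prototype-based
constructors to ES6 class syntax; no behavior change is intended in either
module. In RecordModal.handleSubmit, replace the 100 ms setTimeout that
delayed WAV encoding with a setState callback, so encoding starts as soon
as React has applied the "encoding" state rather than after an arbitrary
delay.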

---
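Note: a rough usage sketch of the two converted classes for reviewers; the
calling code below is illustrative only (it is not part of this patch, and
the require paths assume a caller living directly under src/):

    const AudioRecorder = require('./lib/audio/audio-recorder.js');
    const AudioBufferPlayer = require('./lib/audio/audio-buffer-player.js');

    const recorder = new AudioRecorder();
    // Request microphone access and stream scaled RMS levels to the callback.
    recorder.startListening(
        level => console.log('input level', level),
        err => console.error('microphone unavailable', err)
    );
    recorder.startRecording(); // start buffering samples

    // Later: trim leading/trailing silence and hand the samples to a player.
    const {samples, sampleRate, levels} = recorder.stop();
    recorder.dispose();
    console.log(`captured ${samples.length} samples at ${sampleRate} Hz,`,
        levels.length, 'chunks kept');
    const player = new AudioBufferPlayer(samples);
    player.play(() => console.log('playback finished'));
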
 src/containers/record-modal.jsx      |   7 +-
 src/lib/audio/audio-buffer-player.js |  40 ++---
 src/lib/audio/audio-recorder.js      | 222 ++++++++++++++-------------
 3 files changed, 135 insertions(+), 134 deletions(-)

diff --git a/src/containers/record-modal.jsx b/src/containers/record-modal.jsx
index b5fdad91c..f7ad70097 100644
--- a/src/containers/record-modal.jsx
+++ b/src/containers/record-modal.jsx
@@ -49,10 +49,7 @@ class RecordModal extends React.Component {
         this.setState({playing: false, samples: null});
     }
     handleSubmit () {
-        this.setState({encoding: true});
-
-        // Allow enough time for UI refresh before starting encoding
-        setTimeout(() => {
+        this.setState({encoding: true}, () => {
             WavEncoder.encode({
                 sampleRate: this.state.sampleRate,
                 channelData: [this.state.samples]
@@ -76,7 +73,7 @@ class RecordModal extends React.Component {
                 this.props.vm.addSound(vmSound);
                 this.handleCancel();
             });
-        }, 100);
+        });
     }
     handleCancel () {
         this.props.onClose();
diff --git a/src/lib/audio/audio-buffer-player.js b/src/lib/audio/audio-buffer-player.js
index 76fc3ffac..ab71b7a52 100644
--- a/src/lib/audio/audio-buffer-player.js
+++ b/src/lib/audio/audio-buffer-player.js
@@ -1,26 +1,28 @@
 const SharedAudioContext = require('./shared-audio-context.js');
 
-const AudioBufferPlayer = function (samples) {
-    this.audioContext = new SharedAudioContext();
-    this.buffer = this.audioContext.createBuffer(1, samples.length, this.audioContext.sampleRate);
-    this.buffer.getChannelData(0).set(samples);
-    this.source = null;
-};
+class AudioBufferPlayer {
+    constructor (samples) {
+        this.audioContext = new SharedAudioContext();
+        this.buffer = this.audioContext.createBuffer(1, samples.length, this.audioContext.sampleRate);
+        this.buffer.getChannelData(0).set(samples);
+        this.source = null;
+    }
 
-AudioBufferPlayer.prototype.play = function (onEnded) {
-    // Buffer source nodes are one time use only. Must do this every play.
-    this.source = this.audioContext.createBufferSource();
-    this.source.onended = onEnded;
-    this.source.buffer = this.buffer;
-    this.source.connect(this.audioContext.destination);
-    this.source.start();
-};
+    play (onEnded) {
+        // Buffer source nodes are one time use only. Must do this every play.
+        this.source = this.audioContext.createBufferSource();
+        this.source.onended = onEnded;
+        this.source.buffer = this.buffer;
+        this.source.connect(this.audioContext.destination);
+        this.source.start();
+    }
 
-AudioBufferPlayer.prototype.stop = function () {
-    if (this.source) {
-        this.source.onended = null; // Do not call onEnded callback if manually stopped
-        this.source.stop();
+    stop () {
+        if (this.source) {
+            this.source.onended = null; // Do not call onEnded callback if manually stopped
+            this.source.stop();
+        }
     }
-};
+}
 
 module.exports = AudioBufferPlayer;
diff --git a/src/lib/audio/audio-recorder.js b/src/lib/audio/audio-recorder.js
index 12f13c825..f3a073d81 100644
--- a/src/lib/audio/audio-recorder.js
+++ b/src/lib/audio/audio-recorder.js
@@ -1,128 +1,130 @@
 const SharedAudioContext = require('./shared-audio-context.js');
 
-const AudioRecorder = function () {
-    this.audioContext = new SharedAudioContext();
-    this.bufferLength = 1024;
-
-    this.userMediaStream = null;
-    this.mediaStreamSource = null;
-    this.sourceNode = null;
-    this.scriptProcessorNode = null;
-
-    this.recordedSamples = 0;
-    this.recording = false;
-    this.buffers = [];
-
-    this.disposed = false;
-};
-
-AudioRecorder.prototype.startListening = function (onUpdate, onError) {
-    try {
-        navigator.getUserMedia({audio: true}, userMediaStream => {
-            this.attachUserMediaStream(userMediaStream, onUpdate);
-        }, e => {
-            onError(e);
-        });
-    } catch (e) {
-        onError(e);
+class AudioRecorder {
+    constructor () {
+        this.audioContext = new SharedAudioContext();
+        this.bufferLength = 1024;
+
+        this.userMediaStream = null;
+        this.mediaStreamSource = null;
+        this.sourceNode = null;
+        this.scriptProcessorNode = null;
+
+        this.recordedSamples = 0;
+        this.recording = false;
+        this.buffers = [];
+
+        this.disposed = false;
     }
-};
-
-AudioRecorder.prototype.startRecording = function () {
-    this.recording = true;
-};
-
-AudioRecorder.prototype.calculateRMS = function (samples) {
-    // Calculate RMS, adapted from https://github.com/Tonejs/Tone.js/blob/master/Tone/component/Meter.js#L88
-    const sum = samples.reduce((acc, v) => acc + Math.pow(v, 2), 0);
-    const rms = Math.sqrt(sum / samples.length);
-    // Scale it
-    const unity = 0.35;
-    const val = rms / unity;
-    // Scale the output curve
-    return Math.sqrt(val);
-};
-
-AudioRecorder.prototype.attachUserMediaStream = function (userMediaStream, onUpdate) {
-    this.userMediaStream = userMediaStream;
-    this.mediaStreamSource = this.audioContext.createMediaStreamSource(userMediaStream);
-    this.sourceNode = this.audioContext.createGain();
-    this.scriptProcessorNode = this.audioContext.createScriptProcessor(this.bufferLength, 2, 2);
-
-
-    this.scriptProcessorNode.onaudioprocess = processEvent => {
-        if (this.recording) {
-            this.buffers.push(new Float32Array(processEvent.inputBuffer.getChannelData(0)));
+
+    startListening (onUpdate, onError) {
+        try {
+            navigator.getUserMedia({audio: true}, userMediaStream => {
+                this.attachUserMediaStream(userMediaStream, onUpdate);
+            }, e => {
+                onError(e);
+            });
+        } catch (e) {
+            onError(e);
         }
-    };
+    }
+
+    startRecording () {
+        this.recording = true;
+    }
+
+    calculateRMS (samples) {
+        // Calculate RMS, adapted from https://github.com/Tonejs/Tone.js/blob/master/Tone/component/Meter.js#L88
+        const sum = samples.reduce((acc, v) => acc + Math.pow(v, 2), 0);
+        const rms = Math.sqrt(sum / samples.length);
+        // Scale it
+        const unity = 0.35;
+        const val = rms / unity;
+        // Scale the output curve
+        return Math.sqrt(val);
+    }
 
-    this.analyserNode = this.audioContext.createAnalyser();
+    attachUserMediaStream (userMediaStream, onUpdate) {
+        this.userMediaStream = userMediaStream;
+        this.mediaStreamSource = this.audioContext.createMediaStreamSource(userMediaStream);
+        this.sourceNode = this.audioContext.createGain();
+        this.scriptProcessorNode = this.audioContext.createScriptProcessor(this.bufferLength, 2, 2);
 
-    this.analyserNode.fftSize = 2048;
 
-    const bufferLength = this.analyserNode.frequencyBinCount;
-    const dataArray = new Float32Array(bufferLength);
+        this.scriptProcessorNode.onaudioprocess = processEvent => {
+            if (this.recording) {
+                this.buffers.push(new Float32Array(processEvent.inputBuffer.getChannelData(0)));
+            }
+        };
+
+        this.analyserNode = this.audioContext.createAnalyser();
+
+        this.analyserNode.fftSize = 2048;
+
+        const bufferLength = this.analyserNode.frequencyBinCount;
+        const dataArray = new Float32Array(bufferLength);
+
+        const update = () => {
+            if (this.disposed) return;
+            requestAnimationFrame(update);
+            this.analyserNode.getFloatTimeDomainData(dataArray);
+            onUpdate(this.calculateRMS(dataArray));
+        };
 
-    const update = () => {
-        if (this.disposed) return;
         requestAnimationFrame(update);
-        this.analyserNode.getFloatTimeDomainData(dataArray);
-        onUpdate(this.calculateRMS(dataArray));
-    };
-
-    requestAnimationFrame(update);
-
-    // Wire everything together, ending in the destination
-    this.mediaStreamSource.connect(this.sourceNode);
-    this.sourceNode.connect(this.analyserNode);
-    this.analyserNode.connect(this.scriptProcessorNode);
-    this.scriptProcessorNode.connect(this.audioContext.destination);
-};
-
-AudioRecorder.prototype.stop = function () {
-    const chunkLevels = this.buffers.map(buffer => this.calculateRMS(buffer));
-    const maxRMS = Math.max.apply(null, chunkLevels);
-    const threshold = maxRMS / 8;
-
-    let firstChunkAboveThreshold = null;
-    let lastChunkAboveThreshold = null;
-    for (let i = 0; i < chunkLevels.length; i++) {
-        if (chunkLevels[i] > threshold) {
-            if (firstChunkAboveThreshold === null) firstChunkAboveThreshold = i;
-            lastChunkAboveThreshold = i;
-        }
+
+        // Wire everything together, ending in the destination
+        this.mediaStreamSource.connect(this.sourceNode);
+        this.sourceNode.connect(this.analyserNode);
+        this.analyserNode.connect(this.scriptProcessorNode);
+        this.scriptProcessorNode.connect(this.audioContext.destination);
     }
 
-    const usedSamples = lastChunkAboveThreshold - firstChunkAboveThreshold + 2;
-    const buffer = new Float32Array(usedSamples * this.bufferLength);
+    stop () {
+        const chunkLevels = this.buffers.map(buffer => this.calculateRMS(buffer));
+        const maxRMS = Math.max.apply(null, chunkLevels);
+        const threshold = maxRMS / 8;
+
+        let firstChunkAboveThreshold = null;
+        let lastChunkAboveThreshold = null;
+        for (let i = 0; i < chunkLevels.length; i++) {
+            if (chunkLevels[i] > threshold) {
+                if (firstChunkAboveThreshold === null) firstChunkAboveThreshold = i;
+                lastChunkAboveThreshold = i;
+            }
+        }
+
+        const usedSamples = lastChunkAboveThreshold - firstChunkAboveThreshold + 2;
+        const buffer = new Float32Array(usedSamples * this.bufferLength);
 
-    const usedChunkLevels = [];
+        const usedChunkLevels = [];
 
-    let offset = 0;
-    for (let i = 0; i < this.buffers.length; i++) {
-        const bufferChunk = this.buffers[i];
-        if (i > firstChunkAboveThreshold - 2 && i < lastChunkAboveThreshold + 1) {
-            usedChunkLevels.push(chunkLevels[i]);
-            buffer.set(bufferChunk, offset);
-            offset += bufferChunk.length;
+        let offset = 0;
+        for (let i = 0; i < this.buffers.length; i++) {
+            const bufferChunk = this.buffers[i];
+            if (i > firstChunkAboveThreshold - 2 && i < lastChunkAboveThreshold + 1) {
+                usedChunkLevels.push(chunkLevels[i]);
+                buffer.set(bufferChunk, offset);
+                offset += bufferChunk.length;
+            }
         }
+
+        return {
+            levels: usedChunkLevels,
+            samples: buffer,
+            sampleRate: this.audioContext.sampleRate
+        };
     }
 
-    return {
-        levels: usedChunkLevels,
-        samples: buffer,
-        sampleRate: this.audioContext.sampleRate
-    };
-};
-
-AudioRecorder.prototype.dispose = function () {
-    this.scriptProcessorNode.onaudioprocess = null;
-    this.scriptProcessorNode.disconnect();
-    this.analyserNode.disconnect();
-    this.sourceNode.disconnect();
-    this.mediaStreamSource.disconnect();
-    this.userMediaStream.getAudioTracks()[0].stop();
-    this.disposed = true;
-};
+    dispose () {
+        this.scriptProcessorNode.onaudioprocess = null;
+        this.scriptProcessorNode.disconnect();
+        this.analyserNode.disconnect();
+        this.sourceNode.disconnect();
+        this.mediaStreamSource.disconnect();
+        this.userMediaStream.getAudioTracks()[0].stop();
+        this.disposed = true;
+    }
+}
 
 module.exports = AudioRecorder;
-- 
GitLab