|
| 1 | +// Much inspired by http://typedarray.org/from-microphone-to-wav-with-getusermedia-and-web-audio |
| 2 | + |
| 3 | +// @flow |
export default class Recorder {
  /**
   * Records stereo audio flowing through a Web Audio graph and encodes it
   * as a 16-bit PCM WAV Blob when stopped.
   *
   * @param {AudioContext} context - audio context; its sampleRate is used
   *   for the WAV header.
   * @param {function(Blob): void} onRecordStop - invoked with the finished
   *   WAV Blob when stop() is called.
   */
  constructor(context, onRecordStop) {
    const bufferSize = 2048;

    this.sampleRate = context.sampleRate;
    this.onRecordStop = onRecordStop;
    this.recording = true;
    // Per-callback sample chunks live on the Recorder itself instead of
    // being monkey-patched onto the platform ScriptProcessorNode.
    this.leftChannel = [];
    this.rightChannel = [];
    this.recordingLength = 0;

    this.processor = context.createScriptProcessor(bufferSize, 2, 2);
    this.processor.onaudioprocess = (e) => {
      // Honor the recording flag so no samples are captured after stop().
      if (!this.recording) return;
      // Clone each chunk — the audio engine reuses the underlying buffers.
      this.leftChannel.push(new Float32Array(e.inputBuffer.getChannelData(0)));
      this.rightChannel.push(new Float32Array(e.inputBuffer.getChannelData(1)));
      this.recordingLength += bufferSize;
    };
  }

  /** Stop capturing, detach the processor, and hand the WAV Blob to the callback. */
  stop() {
    this.recording = false;
    const left = mergeBuffers(this.leftChannel, this.recordingLength);
    const right = mergeBuffers(this.rightChannel, this.recordingLength);
    const blob = getWAV(interleave(left, right), this.sampleRate);
    this.processor.disconnect();
    this.onRecordStop(blob);
  }

  /** Connect the recorder's processor node to a destination node. */
  connect(node) {
    this.processor.connect(node);
  }
}
| 46 | + |
/**
 * Concatenate per-callback sample chunks into one flat Float32Array.
 *
 * @param {Float32Array[]} channelBuffer - chunks in capture order.
 * @param {number} recordingLength - total sample count across all chunks.
 * @returns {Float32Array} single contiguous buffer of length recordingLength.
 */
function mergeBuffers(channelBuffer, recordingLength) {
  const merged = new Float32Array(recordingLength);
  let writePos = 0;
  for (const chunk of channelBuffer) {
    merged.set(chunk, writePos);
    writePos += chunk.length;
  }
  return merged;
}
| 58 | + |
/**
 * Weave left/right samples into a single L,R,L,R... stream for stereo PCM.
 * Assumes both channels have the same length.
 *
 * @param {Float32Array} leftChannel
 * @param {Float32Array} rightChannel
 * @returns {Float32Array} interleaved buffer of combined length.
 */
function interleave(leftChannel, rightChannel) {
  const total = leftChannel.length + rightChannel.length;
  const result = new Float32Array(total);

  let src = 0;
  let dst = 0;
  while (dst < total) {
    result[dst++] = leftChannel[src];
    result[dst++] = rightChannel[src];
    src++;
  }
  return result;
}
| 72 | + |
/**
 * Write an ASCII tag (e.g. 'RIFF', 'data') into a DataView, byte by byte.
 *
 * @param {DataView} view - destination view.
 * @param {number} offset - byte offset at which to start writing.
 * @param {string} string - ASCII text to write.
 */
function writeUTFBytes(view, offset, string) {
  for (let pos = 0; pos < string.length; pos += 1) {
    view.setUint8(offset + pos, string.charCodeAt(pos));
  }
}
| 79 | + |
/**
 * Encode interleaved stereo Float32 samples ([-1, 1]) as a 16-bit PCM WAV Blob.
 * Container layout per https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
 *
 * @param {Float32Array} interleaved - L,R,L,R... samples.
 * @param {number} sampleRate - sample rate in Hz for the header.
 * @returns {Blob} WAV file blob with MIME type 'audio/wav'.
 */
function getWAV(interleaved, sampleRate) {
  const bytesPerSample = 2;
  const dataSize = interleaved.length * bytesPerSample;
  const buffer = new ArrayBuffer(44 + dataSize);
  const view = new DataView(buffer);

  // Local helper for the four-character ASCII chunk tags.
  const writeTag = (offset, tag) => {
    for (let i = 0; i < tag.length; i++) {
      view.setUint8(offset + i, tag.charCodeAt(i));
    }
  };

  // RIFF chunk descriptor. ChunkSize is the total file size minus the
  // 8 bytes of the 'RIFF' tag and this field itself: 36 + dataSize.
  // (Was incorrectly written as 44 + dataSize.)
  writeTag(0, 'RIFF');
  view.setUint32(4, 36 + dataSize, true);
  writeTag(8, 'WAVE');
  // 'fmt ' sub-chunk: PCM, stereo, 16 bits per sample.
  writeTag(12, 'fmt ');
  view.setUint32(16, 16, true); // sub-chunk size for PCM
  view.setUint16(20, 1, true); // audio format: 1 = linear PCM
  view.setUint16(22, 2, true); // channel count: stereo
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * 4, true); // byte rate = rate * channels * 2
  view.setUint16(32, 4, true); // block align = channels * 2
  view.setUint16(34, 16, true); // bits per sample
  // 'data' sub-chunk
  writeTag(36, 'data');
  view.setUint32(40, dataSize, true);

  // PCM samples: clamp to [-1, 1] before scaling so out-of-range input
  // cannot wrap around when truncated to a signed 16-bit value.
  let offset = 44;
  for (let i = 0; i < interleaved.length; i++) {
    const sample = Math.max(-1, Math.min(1, interleaved[i]));
    view.setInt16(offset, sample * 0x7FFF, true);
    offset += bytesPerSample;
  }

  return new Blob([view], { type: 'audio/wav' });
}
| 116 | + |
| 117 | +// found on stackoverflow |
/**
 * Convert a Blob to a base64 string via a data URL (browser FileReader API).
 *
 * @param {Blob} blob - the blob to encode.
 * @param {function(string): void} cb - receives the base64 payload
 *   (data-URL prefix stripped).
 */
function blobToBase64(blob, cb) {
  const reader = new FileReader();
  reader.onload = () => {
    // reader.result is 'data:<mime>;base64,<payload>' — keep the payload.
    const [, base64] = reader.result.split(',');
    cb(base64);
  };
  reader.readAsDataURL(blob);
}
0 commit comments