How to get a mono-channel .wav from this library:
https://github.com/mattdiamond/Recorderjs
Judging by what I see around, that is a question many people are searching for. In short, this is the answer.
To have the library produce a single mono-channel .wav file, open the recorderWorker.js file and search for:
function exportWAV(type){
  var bufferL = mergeBuffers(recBuffersL, recLength);
  var bufferR = mergeBuffers(recBuffersR, recLength);
  var interleaved = interleave(bufferL, bufferR);
  var dataview = encodeWAV(interleaved);
  var audioBlob = new Blob([dataview], { type: type });
  this.postMessage(audioBlob);
}
and replace it with:
function exportWAV(type){
  var bufferL = mergeBuffers(recBuffersL, recLength);
  // var bufferR = mergeBuffers(recBuffersR, recLength);
  // var interleaved = interleave(bufferL, bufferR);
  // var dataview = encodeWAV(interleaved);
  var dataview = encodeWAV(bufferL);
  var audioBlob = new Blob([dataview], { type: type });
  this.postMessage(audioBlob);
}
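Note that this change simply discards the right channel, which is usually fine for a microphone source because both channels tend to carry the same signal. If your source is genuinely stereo, one alternative is to average the two channels before encoding. The mixDown helper below is only a hypothetical sketch, not part of Recorder.js or of the change described above:

function mixDown(bufferL, bufferR){
  // Average the left and right Float32Array buffers into one mono buffer.
  var mono = new Float32Array(bufferL.length);
  for (var i = 0; i < bufferL.length; i++){
    mono[i] = 0.5 * (bufferL[i] + bufferR[i]);
  }
  return mono;
}

// If you go this route, keep the bufferR line uncommented and call:
// var dataview = encodeWAV(mixDown(bufferL, bufferR));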
Then, still in recorderWorker.js, find the encodeWAV function:
function encodeWAV(samples, mono){
  var buffer = new ArrayBuffer(44 + samples.length * 2);
  var view = new DataView(buffer);
  /* RIFF identifier */
  writeString(view, 0, 'RIFF');
  /* file length */
  view.setUint32(4, 32 + samples.length * 2, true);
  /* RIFF type */
  writeString(view, 8, 'WAVE');
  /* format chunk identifier */
  writeString(view, 12, 'fmt ');
  /* format chunk length */
  view.setUint32(16, 16, true);
  /* sample format (raw) */
  view.setUint16(20, 1, true);
  /* channel count */
  view.setUint16(22, mono?1:2, true);
  /* sample rate */
  view.setUint32(24, sampleRate, true);
  /* byte rate (sample rate * block align) */
  view.setUint32(28, sampleRate * 4, true);
  /* block align (channel count * bytes per sample) */
  view.setUint16(32, 4, true);
  /* bits per sample */
  view.setUint16(34, 16, true);
  /* data chunk identifier */
  writeString(view, 36, 'data');
  /* data chunk length */
  view.setUint32(40, samples.length * 2, true);
  floatTo16BitPCM(view, 44, samples);
  return view;
}
and change it so the header always describes a mono stream (exportWAV now calls it without the mono flag):
function encodeWAV(samples){
  var buffer = new ArrayBuffer(44 + samples.length * 2);
  var view = new DataView(buffer);
  /* RIFF identifier */
  writeString(view, 0, 'RIFF');
  /* file length */
  view.setUint32(4, 32 + samples.length * 2, true);
  /* RIFF type */
  writeString(view, 8, 'WAVE');
  /* format chunk identifier */
  writeString(view, 12, 'fmt ');
  /* format chunk length */
  view.setUint32(16, 16, true);
  /* sample format (raw) */
  view.setUint16(20, 1, true);
  /* channel count */
  //view.setUint16(22, 2, true); /*STEREO*/
  view.setUint16(22, 1, true); /*MONO*/
  /* sample rate */
  view.setUint32(24, sampleRate, true);
  /* byte rate (sample rate * block align) */
  //view.setUint32(28, sampleRate * 4, true); /*STEREO*/
  view.setUint32(28, sampleRate * 2, true); /*MONO*/
  /* block align (channel count * bytes per sample) */
  //view.setUint16(32, 4, true); /*STEREO*/
  view.setUint16(32, 2, true); /*MONO*/
  /* bits per sample */
  view.setUint16(34, 16, true);
  /* data chunk identifier */
  writeString(view, 36, 'data');
  /* data chunk length */
  view.setUint32(40, samples.length * 2, true);
  floatTo16BitPCM(view, 44, samples);
  return view;
}
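For 16-bit mono the block align is 1 channel x 2 bytes per sample = 2, and the byte rate is sampleRate x 2, which is why those two header fields are halved compared to the stereo version.

With both changes in place, the blob posted back by the worker is a mono 16-bit PCM .wav. A minimal usage sketch from the main page could look like the following; it assumes the usual Recorder.js API (new Recorder(source, config), record(), stop(), exportWAV(callback)), a workerPath pointing at the modified recorderWorker.js, and an arbitrary 3-second recording window:

// Record a few seconds from the microphone and export a mono .wav blob.
var audioContext = new AudioContext();
navigator.mediaDevices.getUserMedia({ audio: true }).then(function(stream){
  var source = audioContext.createMediaStreamSource(stream);
  // workerPath should point at the modified recorderWorker.js
  var recorder = new Recorder(source, { workerPath: 'recorderWorker.js' });
  recorder.record();
  setTimeout(function(){
    recorder.stop();
    recorder.exportWAV(function(blob){
      // blob is now single-channel; e.g. create a playback / download URL
      var url = URL.createObjectURL(blob);
      console.log('mono wav:', url);
    });
  }, 3000);
});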
I've also worked out encoding the result to .flac, so I will put the whole thing on GitHub if I can find the time.
Since this question does not seem to be answered anywhere (or at least I could not find an answer), I hope this helps anyone searching for it, and saves me from losing time on it again.