Generating Blob after appendAudioFromTrack
WebAudioTrack is awesome!
This is my first time with the Web Audio API, and this library made it simple to understand.
I now need some help; thanks in advance!
I am implementing a Record -> Pause -> Record -> Pause sequence, in which the recording can be paused and resumed any number of times. After that, I send the final blob to a remote server for storage.
I am having trouble generating a valid blob after two audioTracks are appended. This is what I tried with the code below:
- With `appendAudioFromTrack` I call a function `getView` to get the view data. I use the view data to generate the blob of type `audio/wav`.
- I cloned code from the existing function `mergeAudioBuffers` to create the `getView` function.
- Once the blob is created, I invoke a newly added callback so I can fetch the blob from my code (rough usage sketch below).
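To make the intended usage concrete, this is roughly how I call it from my code. `track1`, `track2`, and `uploadToServer` are placeholder names of mine, not part of the library:

    // track1 holds what was recorded so far; track2 is the segment recorded after resume
    track1.appendAudioFromTrack(track2, function() {
        // by the time the callback fires, track1.blob should hold the audio/wav Blob
        uploadToServer(track1.blob); // uploadToServer is a placeholder for my server call
    });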
With these changes, a blob is generated, but it does not contain the expected audio (mostly silence). The appended audioData, on the other hand, plays fine.
Below are the changes.
    appendAudioFromTrack: function(audioTrack, callback) {
        var buffer1 = this.audioData;
        var buffer2 = audioTrack.audioData;

        // If either side is empty, keep whichever buffer exists and bail out
        // early (note: no blob is built and no callback fires on this path).
        if (!buffer1) {
            if (buffer2) {
                this.audioData = buffer2;
            }
            return;
        } else if (!buffer2) {
            this.audioData = buffer1;
            return;
        }

        // Concatenate the two buffers channel by channel into a new AudioBuffer.
        var numberOfChannels = Math.min(buffer1.numberOfChannels, buffer2.numberOfChannels);
        var bufferNew = this.context.createBuffer(numberOfChannels, (buffer1.length + buffer2.length), buffer1.sampleRate);
        for (var i = 0; i < numberOfChannels; i++) {
            var channel = bufferNew.getChannelData(i);
            channel.set(buffer1.getChannelData(i), 0);
            channel.set(buffer2.getChannelData(i), buffer1.length);
        }
        this.audioData = bufferNew;

        // Build the WAV view from the merged buffer and wrap it in a Blob.
        var view = getView(this.audioData, this.context.sampleRate);
        this.blob = new Blob([view], {
            type: 'audio/wav'
        });
        callback();
    },
The `getView` function, cloned with minor changes from `mergeAudioBuffers`:
    function getView(audioBuffer, sampleRate) {
        function writeUTFBytes(view, offset, string) {
            var lng = string.length;
            for (var i = 0; i < lng; i++) {
                view.setUint8(offset + i, string.charCodeAt(i));
            }
        }

        var numberOfAudioChannels = 1;

        // create wav file: 44-byte header + 16-bit PCM samples
        var resultingBufferLength = 44 + audioBuffer.length * 2;
        var buffer = new ArrayBuffer(resultingBufferLength);
        var view = new DataView(buffer);

        // RIFF chunk descriptor/identifier
        writeUTFBytes(view, 0, 'RIFF');
        // RIFF chunk length (total size minus the 8 bytes of this descriptor)
        view.setUint32(4, 36 + audioBuffer.length * 2, true);
        // RIFF type
        writeUTFBytes(view, 8, 'WAVE');

        // FMT sub-chunk
        // format chunk identifier
        writeUTFBytes(view, 12, 'fmt ');
        // format chunk length
        view.setUint32(16, 16, true);
        // sample format (1 = uncompressed PCM)
        view.setUint16(20, 1, true);
        // channel count (mono here)
        view.setUint16(22, numberOfAudioChannels, true);
        // sample rate
        view.setUint32(24, sampleRate, true);
        // byte rate (sample rate * block align)
        view.setUint32(28, sampleRate * 2, true);
        // block align (channel count * bytes per sample)
        view.setUint16(32, numberOfAudioChannels * 2, true);
        // bits per sample
        view.setUint16(34, 16, true);

        // data sub-chunk
        // data chunk identifier
        writeUTFBytes(view, 36, 'data');
        // data chunk length
        view.setUint32(40, audioBuffer.length * 2, true);

        // write the PCM samples
        var lng = audioBuffer.length;
        console.log("length is", lng);
        var index = 44;
        var volume = 1;
        for (var i = 0; i < lng; i++) {
            view.setInt16(index, audioBuffer[i] * (0x7FFF * volume), true);
            index += 2;
        }
        return view;
    }
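One thing I am unsure about: `getView` receives a whole AudioBuffer, yet the write loop indexes it directly (`audioBuffer[i]`). I believe that yields `undefined` (and therefore `NaN`, which `setInt16` writes as 0, i.e. silence), since an AudioBuffer is not indexable like an array. Could this be the problem? As a sanity check, here is a minimal sketch of the write loop I am considering instead, assuming mono audio and that `audioBuffer` is a standard AudioBuffer; only the loop changes, the header code stays the same:

    // Sketch: write PCM from a Float32Array obtained via getChannelData,
    // instead of indexing the AudioBuffer object itself.
    var samples = audioBuffer.getChannelData(0); // Float32Array of raw samples
    var index = 44;
    for (var i = 0; i < samples.length; i++) {
        // clamp to [-1, 1] before scaling to the 16-bit signed range
        var s = Math.max(-1, Math.min(1, samples[i]));
        view.setInt16(index, s * 0x7FFF, true);
        index += 2;
    }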
Finally, if this works, should it ideally be run in a Web Worker?
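In case it helps frame that question, this is roughly how I picture the worker version: inside `appendAudioFromTrack`, the main thread copies the raw samples out of the AudioBuffer (the AudioBuffer itself cannot be posted to a worker) and the worker builds the WAV view and the Blob, so the encoding loop does not block the UI. `wav-worker.js` and its message format are a sketch of mine, not library code:

    // main thread (sketch): ship the raw samples to the worker, get a Blob back
    var samples = new Float32Array(this.audioData.getChannelData(0)); // copy out
    var worker = new Worker('wav-worker.js'); // hypothetical worker file
    worker.onmessage = function(e) {
        // e.data is the finished audio/wav Blob, ready to upload
    };
    // transfer the copy's buffer to avoid cloning it a second time
    worker.postMessage({ samples: samples, sampleRate: this.context.sampleRate }, [samples.buffer]);

    // wav-worker.js (sketch): getView would need to be defined here as well,
    // e.g. pulled in via importScripts; fed a Float32Array, its indexing works.
    self.onmessage = function(e) {
        var view = getView(e.data.samples, e.data.sampleRate);
        self.postMessage(new Blob([view], { type: 'audio/wav' }));
    };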