On my webpage, I have an audio file inside of an <audio> tag.
<!DOCTYPE html>
<html>
<audio src="myTrack.mp3" controls preload="auto"></audio>
</html>
I want to chop up the file stored in this <audio> tag into multiple 10 second audio files that I could then insert into the webpage as their own audio files in separate <audio> tags.
Is it possible to do this in JavaScript?
Yes, of course this is possible! :)
The trick is to load the file into memory as an ArrayBuffer and decode it with AudioContext's decodeAudioData(). The decoded AudioBuffer can then be sliced into segments, each segment wrapped as a WAVE file in a Blob, and each Blob referenced through an object URL that you set as the source of a new audio element (release the URLs with URL.revokeObjectURL() when you no longer need them). One drawback is of course that you would have to load the entire file into memory before processing it.
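The demo below never releases the object URLs it creates; if you later remove the players again you would want to do that yourself. A minimal sketch, assuming you collect the URLs in an array (segmentUrls and cleanupSegments are only illustrative, they are not part of the demo):

// Collect every object URL created in split() so it can be released later.
var segmentUrls = [];

// Call this once the generated <audio> elements are no longer needed.
function cleanupSegments() {
  segmentUrls.forEach(function(url) {
    URL.revokeObjectURL(url);   // free the Blob reference held by the URL
  });
  segmentUrls.length = 0;
}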
Hopefully the file I'm using for the demo stays available on the CDN, which serves it with CORS headers so it can be fetched cross-origin (I own the copyright, feel free to use it for testing, but only testing!! :) ). The loading and decoding can take some time depending on your system and connection, so please be patient...
Ideally you should split the buffers asynchronously, but the demo only covers the steps needed to make the buffer segments available as new file fragments.
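For long files the synchronous loop below can block the page while it converts segments. A rough sketch of an asynchronous variant (splitOneSegment() is a hypothetical helper standing in for the per-segment work done in the demo, not an existing function):

// Sketch: yield to the event loop between segments so the UI stays responsive.
async function splitAsync(abuffer, segmentLen) {
  var rate = abuffer.sampleRate,
      count = Math.floor(abuffer.duration / segmentLen),
      offset = 0,
      block = segmentLen * rate;

  for (var i = 0; i < count; i++) {
    splitOneSegment(abuffer, offset, block);              // hypothetical per-segment helper
    offset += block;
    await new Promise(function(r) { setTimeout(r, 0); }); // let the browser breathe
  }
}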
Also note that I did not take into consideration that the last segment may be shorter than the others (I use floor; you should use ceil for the segment count and cut the last block length short). I'll leave that as an exercise for the reader... (a rough sketch follows after the demo).
var actx = new (AudioContext || webkitAudioContext)(),
    url = "//dl.dropboxusercontent.com/s/7ttdz6xsoaqbzdl/war_demo.mp3";

// STEP 1: Load the audio file ------------------------------------------
fetch(url).then(function(resp) { return resp.arrayBuffer(); }).then(decode);

// STEP 2: Decode the audio file ------------------------------------------
function decode(buffer) {
  actx.decodeAudioData(buffer, split);
}

// STEP 3: Split the buffer -------------------------------------------------
function split(abuffer) {
  // calc number of segments and segment length
  var duration = abuffer.duration,
      rate = abuffer.sampleRate,
      segmentLen = 10,                             // segment length in seconds
      count = Math.floor(duration / segmentLen),   // number of full segments
      offset = 0,
      block = segmentLen * rate;                   // segment length in samples

  while (count--) {
    // wrap the segment in a WAVE Blob and hook it up to a new <audio> element
    var url = URL.createObjectURL(bufferToWave(abuffer, offset, block));
    var audio = new Audio(url);
    audio.controls = true;
    audio.volume = 0.75;
    document.body.appendChild(audio);
    offset += block;
  }
}
// Convert an AudioBuffer segment to a Blob using the WAVE representation
function bufferToWave(abuffer, offset, len) {
  var numOfChan = abuffer.numberOfChannels,
      length = len * numOfChan * 2 + 44,           // bytes: 16-bit samples + header
      buffer = new ArrayBuffer(length),
      view = new DataView(buffer),
      channels = [], i, sample,
      pos = 0;

  // write WAVE header
  setUint32(0x46464952);                           // "RIFF"
  setUint32(length - 8);                           // file length - 8
  setUint32(0x45564157);                           // "WAVE"

  setUint32(0x20746d66);                           // "fmt " chunk
  setUint32(16);                                   // length = 16
  setUint16(1);                                    // PCM (uncompressed)
  setUint16(numOfChan);
  setUint32(abuffer.sampleRate);
  setUint32(abuffer.sampleRate * 2 * numOfChan);   // avg. bytes/sec
  setUint16(numOfChan * 2);                        // block-align
  setUint16(16);                                   // 16-bit (hardcoded in this demo)

  setUint32(0x61746164);                           // "data" - chunk
  setUint32(length - pos - 4);                     // chunk length

  // write interleaved data
  for (i = 0; i < numOfChan; i++)
    channels.push(abuffer.getChannelData(i));

  while (pos < length) {
    for (i = 0; i < numOfChan; i++) {              // interleave channels
      sample = Math.max(-1, Math.min(1, channels[i][offset]));       // clamp
      sample = (sample < 0 ? sample * 0x8000 : sample * 0x7FFF) | 0; // scale to 16-bit signed int
      view.setInt16(pos, sample, true);            // write sample (little-endian)
      pos += 2;
    }
    offset++;                                      // next source sample
  }

  // create Blob
  return new Blob([buffer], {type: "audio/wav"});

  function setUint16(data) {
    view.setUint16(pos, data, true);
    pos += 2;
  }

  function setUint32(data) {
    view.setUint32(pos, data, true);
    pos += 4;
  }
}
The CSS used in the demo simply stacks the generated players:

audio {display: block; margin-bottom: 1px}
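For the exercise mentioned above, a rough sketch of the ceil variant (it reuses bufferToWave() from the demo and simply clamps the final block; splitWithTail is just an illustrative name):

// Sketch: include the final, shorter segment by rounding the count up
// and clamping the last block to the samples actually left in the buffer.
function splitWithTail(abuffer) {
  var rate = abuffer.sampleRate,
      segmentLen = 10,
      count = Math.ceil(abuffer.duration / segmentLen),   // ceil instead of floor
      offset = 0,
      block = segmentLen * rate;

  while (count--) {
    var len = Math.min(block, abuffer.length - offset);   // last block may be shorter
    var url = URL.createObjectURL(bufferToWave(abuffer, offset, len));
    var audio = new Audio(url);
    audio.controls = true;
    document.body.appendChild(audio);
    offset += block;
  }
}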