Wavesurfer processing large audio file waveform rendering (2)

Wavesurfer processing large audio file waveform rendering (2)

In the previous article, we obtained all the "complete" audio segments. Now we will use these "complete" segments to perform the last two steps of segmented audio loading:

  1. Wavesurfer processes each segment of the buffer obtained before to generate each segment of waveform information
  2. When all the bytes of the wav resource have been requested and every buffer has been processed by wavesurfer into waveform information, the waveform data of all the requested segments is spliced together and handed to wavesurfer for rendering. At the same time as rendering, a waveform-information file is generated and uploaded to the server for storage, so that the next time the same wav resource is requested, the waveform-information file can be fetched directly, avoiding a repeated decode.

How to make wavesurfer have the ability to generate only waveform information

Wavesurfer does not provide a public method for generating only the peaks waveform data, so a small trick is needed.

/**
 * Get the correct peaks for current wave view-port and render wave
 *
 * @private
 * @emits WaveSurfer#redraw
 */
drawBuffer() {
    // Natural pixel width of the full waveform:
    // duration (s) * pixels-per-second * device pixel ratio.
    const nominalWidth = Math.round(
        this.getDuration() *
            this.params.minPxPerSec *
            this.params.pixelRatio
    );
    const parentWidth = this.drawer.getWidth();
    let width = nominalWidth;
    //always start at 0 after zooming for scrolling : issue redraw left part
    let start = 0;
    let end = Math.max(start + parentWidth, width);
    //Fill container
    if (
        this.params.fillParent &&
        (!this.params.scrollParent || nominalWidth < parentWidth)
    ) {
        // Waveform narrower than the container (or scrolling disabled):
        // stretch it to fill the parent; note `end` is re-synced here.
        width = parentWidth;
        start = 0;
        end = width;
    }

    let peaks;
    if (this.params.partialRender) {
        /* something */
    } else {
        // Compute peaks for the visible range and hand them to the drawer.
        peaks = this.backend.getPeaks(width, start, end);
        this.drawer.drawPeaks(peaks, width, start, end);
    }
    this.fireEvent('redraw', peaks, width);
}
 

The above code comes from the wavesurfer GitHub source code. You can see that wavesurfer obtains the correct peaks for the current view inside drawBuffer: it derives the width, start, and end parameters of the getPeaks method from the current container width, minPxPerSec, pixelRatio, and resource duration, and then calls the drawer's drawPeaks method to draw.

Based on the above analysis — and since all we need is the peaks waveform data — we can borrow the code above to extend wavesurfer with the following methods:

//  WaveSurfer  
//  buffer   peaks  
getPeaks(arraybuffer, callback) {
    this.backend.decodeArrayBuffer(
        arraybuffer,
        buffer => {
            if (!this.isDestroyed) {
                //https://github.com/katspaugh/wavesurfer.js/blob/832e114b7be6436458fc351a57699ba169d08676/src/wavesurfer.js#L1395-L1396
                //decodeArrayBuffer  
                this.backend.buffer = buffer;
                this.backend.setPeaks(null);
                const nominalWidth = Math.round(
                    this.getDuration() *
                        this.params.minPxPerSec *
                        this.params.pixelRatio
                );
                const parentWidth = this.drawer.getWidth();
                let width = nominalWidth;
                let start = 0;
                //  end   width
                //  let end = Math.max(start + parentWidth, width)  
                // ( 2M ) 
                //  this.backend.getPeaks   0 
                // canvas 
                // 0 
                let end = width;

                if (
                    this.params.fillParent
                    && (!this.params.scrollParent || nominalWidth < parentWidth)
                ) {
                    width = parentWidth;
                }

                const peaks = this.backend.getPeaks(width, start, end);
                //  peaks  
                callback(peaks);
                //  arraybuffer  
                this.arraybuffer = null;
                this.backend.buffer = null;
            }
        },
        () => this.fireEvent('error', 'Error decoding audiobuffer')
    );
}

// Render pre-computed peaks onto the canvas without decoding any audio.
loadPeaks(peaks) {
    // Clear any decoded buffer so the drawer uses the supplied peaks.
    this.backend.buffer = null;
    this.backend.setPeaks(peaks);
    this.drawBuffer();
    // Mirrors wavesurfer's own ready lifecycle for peak-only loading.
    this.fireEvent('waveform-ready');
    this.isReady = true;
}
 

After extending wavesurfer's capabilities, we need to invoke them from the business code.

Invoke extended capabilities, integrate waveform information, render and upload and save

import _ from 'lodash';
import pako from 'pako'; //JS 
import WaveSurfer from 'wavesurfer.js';
import requestWav from 'requestWav';

// BUG FIX: `waveSurfer` is reassigned in initWaveSurfer(), so it must be
// declared with `let` — `const` would throw
// "TypeError: Assignment to constant variable" at runtime.
let waveSurfer = null;
// One peaks array per requested byte range, indexed by request order.
const peaksList = [];
// Cached waveform texture (null = not cached yet); declared with `let`
// so it can be populated once the server-side copy is fetched.
let texture = null;

/**
 * Create the WaveSurfer instance and kick off rendering.
 * Uses the MediaElement backend so playback starts without decoding,
 * and fillParent: false so the waveform keeps its natural width.
 */
function initWaveSurfer() {
    const options = {
        container: '#waveform',
        backend: 'MediaElement',
        fillParent: false, // keep natural width; do not stretch to container
        height: 200,
        barHeight: 10,
        normalize: true,
        minPxPerSec: 100,
    }; // BUG FIX: terminate the statement explicitly instead of relying on ASI
    waveSurfer = WaveSurfer.create(options);
    renderWaveSurfer();
}

// Load the media element with no peaks and preload 'none', then compute
// the peaks ourselves unless a cached waveform texture already exists.
function renderWaveSurfer() {
    waveSurfer.load(source, [], 'none');
    if (texture) {
        return; // cached texture available — skip the decode pipeline
    }
    decodePeaks();
}

/**
 * Load the wav resource in byte-range blocks, convert each block's
 * ArrayBuffer into peaks via the extended WaveSurfer#getPeaks, then
 * merge, compress, and upload the complete waveform texture.
 */
function decodePeaks() {
    // (removed unused `const that = this;` — nothing in here used it)
    requestWav.loadBlocks(' Url', {
        // Called once per successfully loaded byte range.
        loadRangeSucess(data, index) {
            // Reserve the slot first so block order is preserved even if
            // decode callbacks complete out of order.
            peaksList[index - 1] = [];
            // Use the extended WaveSurfer#getPeaks to decode this block.
            waveSurfer.getPeaks(data, (peaks) => {
                peaksList[index - 1] = peaks;
            });
        },
        // Called once every byte range has been loaded.
        loadAllSucess() {
            // Splice the per-block peak arrays into one flat texture.
            let texture = _.flatten(peaksList);
            // BUG FIX: _.flatten always returns an array, so `!texture`
            // could never be true; test for emptiness instead.
            if (texture.length === 0) {
                return;
            }
            // Compress before upload (pako 1.x `to: 'string'` option).
            waveSurfer.texture = pako.deflate(JSON.stringify(texture), { level: 9, to: 'string' });

            // Drop the local reference so the merged array can be GC'd.
            texture = null;
            // Build the multipart payload.
            const peaksFile = new FormData();
            peaksFile.append('sourceUrl',  URL );
            // Wrap the compressed texture as a Blob.
            const blob = new Blob([waveSurfer.texture], { type: 'application/json' });
            peaksFile.append('sourcePeaks', blob, 'sourcePeaks');
            // NOTE(review): `axios` is used here but never imported in this
            // file — confirm it is available globally or add an import.
            // The returned promise is also unhandled; consider a .catch().
            axios({
                method: 'post',
                url: ' ',
                data: peaksFile,
                headers: {
                    'Content-Type': 'multipart/form-data',
                },
                timeout: 1000000, // generous timeout for large uploads
            });
        },
    });
}
 

So far all 5 steps are complete. The only thing left is to check, when the same resource is requested a second time, whether the waveform information for that wav resource has already been stored; if it has, there is no need to decode and generate the waveform again.