export default class AudioManager{

	static get BUFFER_SIZE(){ return 4096; }

	constructor(){

		/* (1) Initialise our AudioNodes
		---------------------------------------------------------*/
		/* (1) Build audio context */
		this.ctx = new (window.AudioContext || window.webkitAudioContext)();

		/* (2) Create the MASTER gain + input volume gain */
		this.master = this.ctx.createGain();
		this.volume = this.ctx.createGain();
		this.volume.gain.setValueAtTime(.65, 0); // input volume 65%

		/* (3) Initialise input (typically bound from recorder) */
		this.input = null;

		/* (4) Initialise analyser (fed from input) + drawing callbacks */
		this.analyser = this.ctx.createAnalyser();
		this.freq_drawer = null;
		this.wave_drawer = null;

		/* (5) Shortcut our output */
		this.output = this.ctx.destination;

		/* (6) Connect MASTER gain to output */
		this.master.connect(this.output);


		/* (2) Initialise processing attributes
		---------------------------------------------------------*/
		/* (1) Container for our recorder */
		this.recorder = null;

		/* (2) Initialise filters */
		this.filters = {
			voice_clarity:  this.ctx.createBiquadFilter(),
			voice_fullness: this.ctx.createBiquadFilter(),
			voice_presence: this.ctx.createBiquadFilter(),
			voice_sss:      this.ctx.createBiquadFilter()
		};

		/* (3) Create network I/O controller (feeds the WebSocket) */
		this.network = {
			out: this.ctx.createScriptProcessor(AudioManager.BUFFER_SIZE, 1, 1)
		};

		/* (4) Initialise websocket */
		this.ws = null;

		/* (5) Bind network controller to send() function */
		this.network.out.onaudioprocess = this.send.bind(this);

		/* (6) Set up our filters' parameters */
		this.setUpFilters();

		/* (7) Initialise stack used to chain received chunks */
		this.stack = [];

		/* (8) Debug data */
		this.dbg = {
			interval: 10, // debug every ... seconds
			def:  { packets_received: 0, packets_sent: 0, kB_received: 0, kB_sent: 0 },
			data: { packets_received: 0, packets_sent: 0, kB_received: 0, kB_sent: 0 }
		};

		this.debug = () => setInterval(function(){

			console.group('debug');

			for( let k in this.data ){
				console.log(`${this.data[k]} ${k}`);
				this.data[k] = this.def[k];
			}

			console.groupEnd('debug');

		}.bind(this.dbg), this.dbg.interval*1000);

	}


	/* (2) Set up filters
	*
	---------------------------------------------------------*/
	setUpFilters(){

		/* (1) Set up filter parameters
		---------------------------------------------------------*/
		/* (1) EQ#1 -> boost voice clarity */
		this.filters.voice_clarity.type = 'peaking';
		this.filters.voice_clarity.frequency.setValueAtTime(3000, this.ctx.currentTime);
		this.filters.voice_clarity.Q.setValueAtTime(.8, this.ctx.currentTime);
		this.filters.voice_clarity.gain.setValueAtTime(2, this.ctx.currentTime);

		/* (2) EQ#2 -> boost voice fullness */
		this.filters.voice_fullness.type = 'peaking';
		this.filters.voice_fullness.frequency.setValueAtTime(200, this.ctx.currentTime);
		this.filters.voice_fullness.Q.setValueAtTime(.8, this.ctx.currentTime);
		this.filters.voice_fullness.gain.setValueAtTime(2, this.ctx.currentTime);

		/* (3) EQ#3 -> reduce voice presence */
		this.filters.voice_presence.type = 'peaking';
		this.filters.voice_presence.frequency.setValueAtTime(5000, this.ctx.currentTime);
		this.filters.voice_presence.Q.setValueAtTime(.8, this.ctx.currentTime);
		this.filters.voice_presence.gain.setValueAtTime(-2, this.ctx.currentTime);

		/* (4) EQ#4 -> reduce 'sss' metallic sound */
		this.filters.voice_sss.type = 'peaking';
		this.filters.voice_sss.frequency.setValueAtTime(7000, this.ctx.currentTime);
		this.filters.voice_sss.Q.setValueAtTime(.8, this.ctx.currentTime);
		this.filters.voice_sss.gain.setValueAtTime(-8, this.ctx.currentTime);
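
		/* Note: the four peaking EQs configured above are chained in series
		 * below (clarity -> fullness -> presence -> sss); when filters are
		 * linked, linkFilters() routes input -> volume -> chain -> network.out. */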

		/* (2) Connect filters
		---------------------------------------------------------*/
		/* (1) Connect clarity to fullness */
		this.filters.voice_clarity.connect( this.filters.voice_fullness );

		/* (2) Connect fullness to presence reduction */
		this.filters.voice_fullness.connect( this.filters.voice_presence );

		/* (3) Connect presence reduction to 'sss' removal */
		this.filters.voice_presence.connect( this.filters.voice_sss );

	}


	/* (3) Filter toggle
	*
	* @unlink Whether to unlink filters (directly bind to network output)
	*
	---------------------------------------------------------*/
	linkFilters(unlink=false){

		/* (1) Disconnect all by default */
		this.input.disconnect();

		/* (2) Get filter-chain ends */
		let first_filter = this.filters.voice_clarity;
		let last_filter  = this.filters.voice_sss;

		/* (3) If unlink -> connect directly to NETWORK output */
		if( unlink === true )
			return this.input.connect(this.network.out);

		/* (4) If linking -> connect input to volume */
		this.input.connect(this.volume);

		/* (5) If linking -> connect volume to filter chain */
		this.volume.connect(first_filter);

		/* (6) If linking -> connect chain end to network.out */
		last_filter.connect(this.network.out);

	}


	/* (4) Bind an input stream
	*
	---------------------------------------------------------*/
	bindRecorderStream(_stream){

		/* (1) Bind audio stream
		---------------------------------------------------------*/
		/* (1) Bind our audio stream to our source */
		console.log(_stream);
		this.input = this.ctx.createMediaStreamSource(_stream);


		/* (2) By default: link through filters to output
		---------------------------------------------------------*/
		/* (1) Link through filters */
		this.linkFilters();

		/* (2) Also link to analyser */
		this.input.connect(this.analyser);

		gs.get.audio_conn = 2; // voice connected

	}


	/* (5) Send chunks (Float32Array)
	*
	---------------------------------------------------------*/
	send(_audioprocess){

		/* (1) Manage analyser
		---------------------------------------------------------*/
		/* (1) Process only if 'freq_drawer' is set */
		if( this.freq_drawer instanceof Function ){

			// 1. Prepare array
			let freqArray = new Uint8Array(this.analyser.frequencyBinCount);

			// 2. Get frequency array
			this.analyser.getByteFrequencyData(freqArray);

			// 3. Send to callback
			this.freq_drawer(freqArray);

		}

		/* (2) Else, process only if 'wave_drawer' is set */
		else if( this.wave_drawer instanceof Function ){

			// 1. Prepare array
			let waveArray = new Uint8Array(this.analyser.fftSize);

			// 2. Get wave array
			this.analyser.getByteTimeDomainData(waveArray);

			// 3. Send to callback
			this.wave_drawer(waveArray);

		}


		/* (2) WebSocket send packet
		---------------------------------------------------------*/
		/* (1) Exit here if not connected */
		if( this.ws === null || this.ws.readyState !== 1 )
			return;

		/* (2) Initialise buffer (Float32Array) */
		let buf32 = new Float32Array(AudioManager.BUFFER_SIZE);

		/* (3) Extract stream into buffer */
		_audioprocess.inputBuffer.copyFromChannel(buf32, 0);

		/* (4) Convert for WS connection (Int16Array) */
		let buf16 = this.f32toi16(buf32);

		/* (5) Send buffer through websocket */
		this.ws.send(buf16);

		// DEBUG
		this.dbg.data.packets_sent++;
		this.dbg.data.kB_sent += buf16.length * 16 / 8 / 1024; // 16-bit samples -> kB

	}
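
	/* Wire-format note: each packet sent above is one BUFFER_SIZE-sample chunk
	 * of signed 16-bit PCM at ctx.sampleRate (4096 samples * 2 bytes = 8 kB per
	 * packet); receive() below assumes the peer relays chunks with the same
	 * framing and sample rate. */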


	/* (6) Play received chunks (Int16Array)
	*
	---------------------------------------------------------*/
	receive(_buffer){

		/* (1) Convert to Float32Array */
		let buf32 = this.i16tof32(_buffer);

		/* (2) Create source node */
		let source = this.ctx.createBufferSource();

		/* (3) Create buffer and dump data into it */
		let input_buffer = this.ctx.createBuffer(1, AudioManager.BUFFER_SIZE, this.ctx.sampleRate);
		input_buffer.getChannelData(0).set(buf32);

		/* (4) Bind buffer to source node */
		source.buffer = input_buffer;

		/* (5) Create a dedicated gain for this chunk */
		let gain = this.ctx.createGain();

		/* (6) source -> gain -> MASTER */
		source.connect(gain);
		gain.connect(this.master);

		/* (7) If stack not empty -> play oldest stacked chunk + stack this one */
		if( this.stack.length > 0 ){
			this.stack.shift().start(this.ctx.currentTime);
			return this.stack.push(source);
		}

		/* (8) If stack empty -> play now + chain the stack */
		// chain stack
		source.onended = function(){

			this.dbg.data.packets_received++;
			this.dbg.data.kB_received += _buffer.length * 16 / 8 / 1024; // 16-bit samples -> kB

			return this.stack.length > 0 && this.stack.shift().start(this.ctx.currentTime);

		}.bind(this);

		// start playing
		source.start(this.ctx.currentTime);

	}


	/* (7) Convert Float32Array to Int16Array
	*
	* @buf32 Input
	*
	* @return buf16 Converted output
	*
	---------------------------------------------------------*/
	f32toi16(buf32){

		/* (1) Initialise output */
		let buf16 = new Int16Array(buf32.length);

		/* (2) Initialise loop */
		let i = 0,
			l = buf32.length;

		/* (3) Convert each value (scale negatives by 0x8000, positives by 0x7FFF) */
		for( ; i < l ; i++ )
			buf16[i] = (buf32[i] < 0) ? 0x8000 * buf32[i] : 0x7FFF * buf32[i];

		return buf16;
	}


	/* (8) Convert Int16Array to Float32Array
	*
	* @buf16 Input
	*
	* @return buf32 Converted output
	*
	---------------------------------------------------------*/
	i16tof32(buf16){

		/* (1) Initialise output */
		let buf32 = new Float32Array(buf16.length);

		/* (2) Initialise loop */
		let i = 0,
			l = buf16.length;

		/* (3) Convert each value (Int16Array values are already signed, so
		 *     mirror f32toi16: divide negatives by 0x8000, positives by 0x7FFF) */
		for( ; i < l ; i++ )
			buf32[i] = (buf16[i] < 0) ? buf16[i] / 0x8000 : buf16[i] / 0x7FFF;

		return buf32;
	}
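
	/* Conversion note: the two helpers above use asymmetric scaling (0x8000 for
	 * negative samples, 0x7FFF for positive ones) so the full Int16 range maps
	 * into [-1, 1]; round-tripping a sample through f32toi16() then i16tof32()
	 * only loses quantisation precision (e.g. 0.5 -> 16383 -> ≈0.49997). */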


	/* (9) Connect websocket
	*
	* @address Websocket address
	*
	---------------------------------------------------------*/
	wsconnect(_addr){

		/* (1) Create websocket connection */
		this.ws = new WebSocket(_addr);
		gs.get.audio_conn = 0; // connecting

		/* (2) Manage websocket responses */
		this.ws.onmessage = function(_msg){

			if( !(_msg.data instanceof Blob) )
				return console.warn('[NaB] Not A Blob');

			let fr = new FileReader();

			fr.onload = function(){
				let buf16 = new Int16Array(fr.result);
				this.receive(buf16);
			}.bind(this);

			fr.readAsArrayBuffer(_msg.data);

		}.bind(this);

		/* (3) Debug */
		this.ws.onopen  = () => ( gs.get.audio_conn = 1 );    // listening
		this.ws.onclose = () => ( gs.get.audio_conn = null ); // disconnected

	}


	/* (10) Access microphone + launch all
	*
	---------------------------------------------------------*/
	launch(wsAddress='wss://ws.douscord.xdrm.io/audio/2'){

		/* (1) Start websocket */
		this.wsconnect(wsAddress);

		/* (2) Access microphone and start recording */
		if( navigator.mediaDevices && navigator.mediaDevices.getUserMedia ){

			navigator.mediaDevices.getUserMedia({ audio: true })
			.then( stream => {

				this.recorder = new MediaRecorder(stream);
				this.bindRecorderStream(stream);

				this.recorder.onstart = () => console.warn('[audio] recording');

				this.recorder.onstop = () => {
					this.recorder.stream.getTracks().map( t => t.stop() );
					this.recorder = null;
					console.warn('[audio] stopped recording');
				};

				// start recording
				this.recorder.start();

			})
			.catch( e => console.warn('[audio] microphone permission issue', e) );

		}else
			console.warn('[audio] microphone not supported');

	}


	/* (11) Shut down microphone + kill all
	*
	---------------------------------------------------------*/
	kill(){

		/* (1) Close websocket (if any) */
		if( this.ws !== null )
			this.ws.close();

		/* (2) Stop recording (if running) */
		if( this.recorder !== null )
			this.recorder.stop();

	}


	/* (12) Play a POP notification
	*
	---------------------------------------------------------*/
	pop(){

		/* (1) Base data */
		let base_freq  = 150;
		let mods       = [0, 75, 75]; // freq modulations (from base_freq)
		let time_range = 0.05;        // time between each modulation
		let start      = this.ctx.currentTime + 0.1;

		/* (2) Build oscillator */
		let osc = this.ctx.createOscillator();
		osc.type = 'triangle';

		/* (3) Create local gain to lower volume */
		let local = this.ctx.createGain();
		local.gain.setValueAtTime(0.3, 0);

		/* (4) Connect all nodes to output */
		osc.connect(local);
		local.connect(this.master);

		/* (5) Bind frequencies over time */
		mods.forEach( (mod, i) => osc.frequency.setValueAtTime(base_freq + mod, start + i*time_range) );

		/* (6) Start playing */
		osc.start( start );

		/* (7) Set when to stop playing */
		osc.stop( start + time_range*mods.length );

	}

}
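
/* Usage sketch (illustrative, not part of the module): assumes a browser
 * context with microphone access, a reachable websocket relay and the global
 * `gs` state object referenced above; the import path and `drawSpectrum`
 * callback are hypothetical.
 *
 *   import AudioManager from './AudioManager';
 *
 *   const audio = new AudioManager();
 *   audio.freq_drawer = bins => drawSpectrum(bins); // optional analyser hook
 *   audio.launch();   // asks for the microphone and streams to the default relay
 *   audio.pop();      // short notification blip through the master gain
 *   // ... later
 *   audio.kill();     // closes the websocket and stops recording
 */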