export default class AudioManager{

  static get BUFFER_SIZE(){ return 4096; }

  constructor(){

    /* (1) Initialise our AudioNodes
    ---------------------------------------------------------*/
    /* (1) Build Audio Context */
    this.ctx = new (window.AudioContext || window.webkitAudioContext)();

    /* (2) Create the MASTER gain */
    this.master = this.ctx.createGain();
    this.volume = this.ctx.createGain();
    this.peaks = { low: 0, high: 0 };
    this.volume_value = 1;

    /* (3) Initialise input (typically bound from recorder) */
    this.input = null;

    /* (4) Initialize analyser (fed from input) + drawing callbacks */
    this.analyser = this.ctx.createAnalyser();
    this.freq_drawer = null;
    this.wave_drawer = null;

    /* (5) Shortcut our output */
    this.output = this.ctx.destination;

    /* (6) Connect MASTER gain to output */
    this.master.connect(this.output);


    /* (2) Initialise processing attributes
    ---------------------------------------------------------*/
    /* (1) Container for our recorder */
    this.recorder = null;

    /* (2) Initialise filters */
    this.filters = {
      voice_clarity:  this.ctx.createBiquadFilter(),
      voice_fullness: this.ctx.createBiquadFilter(),
      voice_presence: this.ctx.createBiquadFilter(),
      voice_sss:      this.ctx.createBiquadFilter()
    };

    /* (3) Create network I/O controller (WebSocket) */
    this.network = {
      out: this.ctx.createScriptProcessor(AudioManager.BUFFER_SIZE, 1, 1)
    };

    /* (4) Initialise websocket */
    this.ws = null;

    /* (5) Bind network controller to send() function */
    this.network.out.onaudioprocess = this.send.bind(this);

    /* (6) Set up our filters' parameters */
    this.setUpFilters();

    /* (7) Initialise coordinator to manage received chunks */
    this.stack = [];
    this.stack_size = 2;
    this.fade_in = 0.1;
    this.fade_out = 0.1;

    /* (8) Debug data */
    this.dbg = {
      interval: 10, // debug every `interval` seconds
      def:  { packets_received: 0, packets_sent: 0, kB_received: 0, kB_sent: 0 },
      data: { packets_received: 0, packets_sent: 0, kB_received: 0, kB_sent: 0 }
    };

    this.debug = () => setInterval(function(){

      console.group('debug');

      for( let k in this.data ){
        console.log(`${this.data[k]} ${k}`);
        this.data[k] = this.def[k];
      }

      console.groupEnd();

    }.bind(this.dbg), this.dbg.interval*1000);

  }


  /* (2) Setup filters
  *
  ---------------------------------------------------------*/
  setUpFilters(){

    /* (1) Setup filter parameters
    ---------------------------------------------------------*/
    /* (1) Setup EQ#1 -> voice clarity */
    this.filters.voice_clarity.type = 'peaking';
    this.filters.voice_clarity.frequency.setValueAtTime(3000, this.ctx.currentTime);
    this.filters.voice_clarity.Q.setValueAtTime(.8, this.ctx.currentTime);
    this.filters.voice_clarity.gain.setValueAtTime(2, this.ctx.currentTime);

    /* (2) Setup EQ#2 -> voice fullness */
    this.filters.voice_fullness.type = 'peaking';
    this.filters.voice_fullness.frequency.setValueAtTime(200, this.ctx.currentTime);
    this.filters.voice_fullness.Q.setValueAtTime(.8, this.ctx.currentTime);
    this.filters.voice_fullness.gain.setValueAtTime(2, this.ctx.currentTime);

    /* (3) Setup EQ#3 -> reduce voice presence */
    this.filters.voice_presence.type = 'peaking';
    this.filters.voice_presence.frequency.setValueAtTime(5000, this.ctx.currentTime);
    this.filters.voice_presence.Q.setValueAtTime(.8, this.ctx.currentTime);
    this.filters.voice_presence.gain.setValueAtTime(-2, this.ctx.currentTime);

    /* (4) Setup EQ#4 -> reduce 'sss' metallic sound */
    this.filters.voice_sss.type = 'peaking';
    this.filters.voice_sss.frequency.setValueAtTime(7000, this.ctx.currentTime);
    this.filters.voice_sss.Q.setValueAtTime(.8, this.ctx.currentTime);
    this.filters.voice_sss.gain.setValueAtTime(-8, this.ctx.currentTime);


    /* (2) Connect filters
    ---------------------------------------------------------*/
    /* (1) Connect clarity to fullness */
    this.filters.voice_clarity.connect( this.filters.voice_fullness );

    /* (2) Connect fullness to presence reduction */
    this.filters.voice_fullness.connect( this.filters.voice_presence );

    /* (3) Connect presence reduction to 'sss' removal */
    this.filters.voice_presence.connect( this.filters.voice_sss );

  }


  /* (3) Filter toggle
  *
  * @unlink  Whether to unlink filters (directly bind to output)
  *
  ---------------------------------------------------------*/
  linkFilters(unlink=false){

    /* (1) Disconnect all by default */
    this.input.disconnect();

    /* (2) Also link to analyser */
    this.input.connect(this.analyser);

    /* Chrome fix */
    this.network.out.connect(this.output);

    /* (3) Get first and last filters */
    let first_filter = this.filters.voice_clarity;
    let last_filter  = this.filters.voice_sss;

    /* (4) If unlink -> connect directly to NETWORK output */
    if( unlink === true )
      return this.input.connect(this.network.out);

    /* (5) If linking -> connect input to volume */
    this.input.connect(this.volume);

    /* (6) If linking -> connect volume to filter stack */
    this.volume.connect(first_filter);

    /* (7) If linking -> connect stack end to network.out */
    last_filter.connect(this.network.out);

  }


  /* (4) Binds an input stream
  *
  ---------------------------------------------------------*/
  bindRecorderStream(_stream){

    /* (1) Bind audio stream
    ---------------------------------------------------------*/
    /* (1) bind our audio stream to our source */
    this.input = this.ctx.createMediaStreamSource(_stream);


    /* (2) By default: link through filters to output
    ---------------------------------------------------------*/
    /* (1) Link through filters */
    this.linkFilters();

    gs.get.audio_conn = 2; // voice connected

  }


  /* (5) Send chunks (Float32Array)
  *
  ---------------------------------------------------------*/
  send(_audioprocess){

    /* Exit here if not connected */
    if( this.ws === null || this.ws.readyState !== 1 )
      return;

    /* (1) WebSocket send packet
    ---------------------------------------------------------*/
    /* (1) Initialize buffer (Float32Array) */
    let buf32 = new Float32Array(AudioManager.BUFFER_SIZE);

    /* (2) Extract stream into buffer */
    _audioprocess.inputBuffer.copyFromChannel(buf32, 0);

    /* (3) Convert for WS connection (Int16Array) */
    this.peaks.low = 0;
    this.peaks.high = 0;
    let buf16 = this.f32toi16(buf32);

    /* (4) Send buffer through websocket */
    this.ws.send(buf16);

    /* (5) Adapt microphone volume according to peaks */
    if( this.peaks.high > .01 ) // more than 1% of samples near saturation -> decrease
      this.volume_value *= .8;
    else if( this.peaks.low > .99 && this.volume_value*1.01 < 1 ) // more than 99% of samples too quiet (and gain stays below 1) -> increase
      this.volume_value *= 1.01;

    // apply new volume
    this.volume.gain.setValueAtTime(this.volume_value, this.ctx.currentTime);


    /* (2) WebSocket buffer stack read
    ---------------------------------------------------------*/
    setTimeout(function(){

      /* (1) Pop too large stack */
      this.stack.length > this.stack_size && this.stack.pop();

      /* (2) Read input buffer stack */
      if( this.stack.length > 0 ){

        // 1. extract our source
        let source_node = this.stack.shift();

        // 2. Play source node
        source_node.start();
      }

    }.bind(this), 0);


    /* (3) Manage analyser
    ---------------------------------------------------------*/
    /* (1) Process only if 'freq_drawer' is set */
    if( this.freq_drawer instanceof Function ){
      // 1. Prepare array
      let freqArray = new Uint8Array(this.analyser.frequencyBinCount);

      // 2. Get frequency array
      this.analyser.getByteFrequencyData(freqArray);

      // 3. Send to callback
      setTimeout(this.freq_drawer.bind(this, freqArray), 0);

    }

    /* (2) Process only if 'wave_drawer' is set */
    else if( this.wave_drawer instanceof Function ){

      // 1. Prepare array
      let waveArray = new Uint8Array(this.analyser.fftSize);

      // 2. Get wave array
      this.analyser.getByteTimeDomainData(waveArray);

      // 3. Send to callback
      setTimeout(this.wave_drawer.bind(this, waveArray), 0);

    }

    // DEBUG
    this.dbg.data.packets_sent++;
    this.dbg.data.kB_sent += buf16.length * 16 / 8 / 1024;

  }


  /* (6) Play received chunks (Int16Array)
  *
  ---------------------------------------------------------*/
  receive(_buffer){

    /* (1) Convert to Float32Array */
    let buf32 = this.i16tof32(_buffer);

    /* (2) Create source node */
    let source = this.ctx.createBufferSource();

    /* (3) Create buffer and dump data */
    let input_buffer = this.ctx.createBuffer(1, AudioManager.BUFFER_SIZE, this.ctx.sampleRate);
    input_buffer.getChannelData(0).set(buf32);

    /* (4) Bind buffer to source node */
    source.buffer = input_buffer;

    /* (5) Create a dedicated gain */
    let gain = this.ctx.createGain();

    /* (6) source -> gain -> MASTER (start() is scheduled from send()) */
    source.connect(gain);
    gain.connect(this.master);

    /* (7) Push in buffer stack */
    this.stack.push(source);

  }


  /* (7) Convert Float32Array to Int16Array
  *
  * @buf32   Input
  *
  * @return buf16  Converted output
  *
  ---------------------------------------------------------*/
  f32toi16(buf32){

    /* (1) Initialise output */
    let buf16 = new Int16Array(buf32.length);

    /* (2) Initialize loop */
    let i = 0,
        l = buf32.length;

    /* (3) Convert each value + count peaks (absolute amplitude) */
    for( ; i < l ; i++ ){

      buf16[i] = (buf32[i] < 0) ? 0x8000 * buf32[i] : 0x7FFF * buf32[i];

      ( Math.abs(buf32[i]) > 0.9 ) && ( this.peaks.high++ );
      ( Math.abs(buf32[i]) < 0.1 ) && ( this.peaks.low++ );

    }

    /* (4) Report peaks as a ratio of the buffer length */
    this.peaks.high /= l;
    this.peaks.low /= l;

    return buf16;

  }


  /* (8) Convert Int16Array to Float32Array
  *
  * @buf16   Input
  *
  * @return buf32  Converted output
  *
  ---------------------------------------------------------*/
  i16tof32(buf16){

    /* (1) Initialise output */
    let buf32 = new Float32Array(buf16.length);

    /* (2) Initialize loop */
    let i = 0,
        l = buf16.length;

    /* (3) Convert each value (Int16Array values are signed, so negatives scale by 0x8000) */
    for( ; i < l ; i++ )
      buf32[i] = (buf16[i] < 0) ? buf16[i] / 0x8000 : buf16[i] / 0x7FFF;
    return buf32;

  }


  /* (9) Connect websocket
  *
  * @address  Websocket address
  *
  ---------------------------------------------------------*/
  wsconnect(_addr){

    /* (1) Create websocket connection */
    this.ws = new WebSocket(_addr);
    gs.get.audio_conn = 0; // connecting

    /* (2) Manage websocket responses */
    this.ws.onmessage = function(_msg){

      if( !(_msg.data instanceof Blob) )
        return console.warn('[NaB] Not A Blob');

      let fr = new FileReader();

      fr.onload = function(){
        let buf16 = new Int16Array(fr.result);
        this.receive(buf16);
      }.bind(this);

      fr.readAsArrayBuffer(_msg.data);

    }.bind(this);

    /* (3) Manage connection state */
    this.ws.onopen  = () => ( gs.get.audio_conn !== 2 && (gs.get.audio_conn = 1) ); // listening
    this.ws.onclose = () => ( gs.get.audio_conn = null );                           // disconnected

  }


  /* (10) Access microphone + launch all
  *
  ---------------------------------------------------------*/
  launch(room_id=0){

    /* (1) Start websocket */
    this.wsconnect(`wss://ws.douscord.xdrm.io/audio/${room_id}`);

    /* (2) Set our streaming binding function */
    let streaming_binding = function(stream){

      this.recorder = new MediaRecorder(stream);

      this.recorder.onstart = function(){
        this.bindRecorderStream(stream);
        console.warn('[audio] recording');
      }.bind(this);

      this.recorder.onstop = () => {
        this.recorder.stream.getTracks().map( t => t.stop() );
        this.recorder = null;
        console.warn('[audio] stopped recording');
      };

      // start recording
      this.recorder.start();

    }.bind(this);

    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;

    /* (3) If navigator.mediaDevices.getUserMedia */
    if( navigator.mediaDevices && navigator.mediaDevices.getUserMedia ){

      console.log('[audio] using "navigator.mediaDevices.getUserMedia"');

      return navigator.mediaDevices.getUserMedia({ audio: true })
        .then(streaming_binding)
        .catch( (e) => console.warn('[audio] microphone recorder issue', e) );
    }

    /* (4) If old version */
    if( navigator.getUserMedia ){

      console.log('[audio] using "navigator.getUserMedia"');

      return navigator.getUserMedia(
        { audio: true },
        streaming_binding,
        (e) => console.warn('[audio] microphone recorder issue', e)
      );
    }

    console.warn('[audio] recorder not supported');

  }


  /* (11) Shut down microphone + kill all
  *
  ---------------------------------------------------------*/
  kill(){

    /* (1) Close websocket */
    this.ws && this.ws.close();

    /* (2) Stop recording */
    this.recorder && this.recorder.stop();

  }


  /* (12) Play a POP notification
  *
  ---------------------------------------------------------*/
  pop(){

    /* (1) Base data */
    let base_freq  = 150;
    let mods       = [0, 75, 75]; // freq modulations (from base_freq)
    let time_range = 0.05;        // time between each modulation
    let start      = this.ctx.currentTime + 0.1;

    /* (2) Build oscillator */
    let osc = this.ctx.createOscillator();
    osc.type = 'triangle';

    /* (3) Create local gain to lower volume */
    let local = this.ctx.createGain();
    local.gain.setValueAtTime(0.3, 0);

    /* (4) Connect all nodes to output */
    osc.connect(local);
    local.connect(this.master);

    /* (5) Bind frequencies over time */
    for( let i in mods )
      osc.frequency.setValueAtTime(base_freq + mods[i], start + i*time_range);

    /* (6) Start playing */
    osc.start( start );

    /* (7) Set when to stop playing */
    osc.stop( start + time_range*mods.length );

  }

}
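
/* Usage sketch (illustrative only, not part of the original module).
 * It assumes the surrounding app provides the `gs.get.audio_conn` state store
 * referenced above and a reachable websocket server at ws.douscord.xdrm.io;
 * the room id is whatever the caller supplies.
 *
 *   import AudioManager from './AudioManager';
 *
 *   const audio = new AudioManager();
 *   audio.launch(42);   // request the microphone and join audio room #42
 *   audio.pop();        // play the notification blip
 *   // ...later
 *   audio.kill();       // stop recording and close the websocket
 */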