discord-client/parcel/lib/audio-manager.js

548 lines
14 KiB
JavaScript
Raw Permalink Normal View History

2018-04-05 22:54:13 +00:00
export default class AudioManager{

	/* Number of samples per packet exchanged over the websocket
	 * (also the ScriptProcessor buffer size). */
	static get BUFFER_SIZE(){ return 4096; }

	/* (1) Build the audio graph + processing state
	*
	* Graph (when filters are linked — see linkFilters()):
	*   input -> volume -> clarity -> fullness -> presence -> sss -> network.out
	*   input -> analyser                     (visualisation tap)
	*   received packets -> gain -> master -> output (speakers)
	*
	---------------------------------------------------------*/
	constructor(){

		/* (1) Build Audio Context (webkit prefix for Safari) */
		this.ctx = new (window.AudioContext || window.webkitAudioContext)();

		/* (2) MASTER gain (playback) + 'volume' gain (auto microphone level) */
		this.master = this.ctx.createGain();
		this.volume = this.ctx.createGain();
		this.peaks = { low: 0, high: 0 }; // ratios of quiet/saturated samples in the last sent packet
		this.volume_value = 1;            // current microphone gain

		/* (3) Initialise input (bound later from the recorder stream) */
		this.input = null;

		/* (4) Initialise analyser (visualisation) + drawing callbacks */
		this.analyser = this.ctx.createAnalyser();
		this.freq_drawer = null; // callback fed with byte frequency data
		this.wave_drawer = null; // callback fed with byte time-domain data

		/* (5) Shortcut our output + connect MASTER gain to it */
		this.output = this.ctx.destination;
		this.master.connect(this.output);

		/* (6) Container for our recorder */
		this.recorder = null;

		/* (7) Initialise voice EQ filters (parameters set in setUpFilters) */
		this.filters = {
			voice_clarity: this.ctx.createBiquadFilter(),
			voice_fullness: this.ctx.createBiquadFilter(),
			voice_presence: this.ctx.createBiquadFilter(),
			voice_sss: this.ctx.createBiquadFilter()
		};

		/* (8) Network output controller: each processed chunk triggers send() */
		this.network = {
			out: this.ctx.createScriptProcessor(AudioManager.BUFFER_SIZE, 1, 1)
		};

		/* (9) Initialise websocket (created in wsconnect) */
		this.ws = null;

		/* (10) Bind network controller to send() function */
		this.network.out.onaudioprocess = this.send.bind(this);

		/* (11) Set up our filters' parameters + chaining */
		this.setUpFilters();

		/* (12) Jitter buffer for received packets (drained in send()) */
		this.stack = [];
		this.stack_size = 2;
		this.fade_in = 0.1;
		this.fade_out = 0.1;

		/* (13) Debug counters, reset every `interval` seconds */
		this.dbg = {
			interval: 10, // debug every ... second
			def: {
				packets_received: 0,
				packets_sent: 0,
				kB_received: 0,
				kB_sent: 0
			},
			data: {
				packets_received: 0,
				packets_sent: 0,
				kB_received: 0,
				kB_sent: 0
			}
		};

		/* call this.debug() to start periodic logging of the counters above */
		this.debug = () => setInterval(function(){

			console.group('debug');

			for( let k in this.data ){
				console.log(`${this.data[k]} ${k}`)
				this.data[k] = this.def[k] // reset counter for next period
			}

			console.groupEnd();

		}.bind(this.dbg), this.dbg.interval*1000);

	}


	/* (2) Setup filters
	*
	* Sets each biquad's parameters then chains them:
	*   clarity -> fullness -> presence -> sss
	*
	---------------------------------------------------------*/
	setUpFilters(){

		const now = this.ctx.currentTime;

		/* (1) Filter parameters: [node, frequency(Hz), Q, gain(dB)] */
		const settings = [
			[ this.filters.voice_clarity,  3000, .8,  2 ], // EQ#1 boost voice clarity
			[ this.filters.voice_fullness,  200, .8,  2 ], // EQ#2 boost voice fullness
			[ this.filters.voice_presence, 5000, .8, -2 ], // EQ#3 reduce harsh presence
			[ this.filters.voice_sss,      7000, .8, -8 ]  // EQ#4 tame 'sss' metallic sound
		];

		for( const [node, freq, q, gain] of settings ){
			node.type = 'peaking';
			node.frequency.setValueAtTime(freq, now);
			node.Q.setValueAtTime(q, now);
			node.gain.setValueAtTime(gain, now);
		}

		/* (2) Chain filters together */
		this.filters.voice_clarity.connect( this.filters.voice_fullness );
		this.filters.voice_fullness.connect( this.filters.voice_presence );
		this.filters.voice_presence.connect( this.filters.voice_sss );

	}


	/* (3) Filter toggle
	*
	* @unlink<boolean> Whether to unlink filters (directly bind to output)
	*
	---------------------------------------------------------*/
	linkFilters(unlink=false){

		/* (1) Guard: nothing to link before bindRecorderStream() */
		if( this.input === null )
			return;

		/* (2) Disconnect all by default */
		this.input.disconnect();

		/* (3) Also tap the input into the analyser */
		this.input.connect(this.analyser);

		/* Chrome fix: a ScriptProcessor needs a downstream connection
		 * for onaudioprocess to fire */
		this.network.out.connect(this.output);

		/* (4) If unlink -> connect directly to NETWORK output */
		if( unlink === true )
			return this.input.connect(this.network.out);

		/* (5) If linking -> input -> volume -> filter stack -> network.out */
		this.input.connect(this.volume);
		this.volume.connect(this.filters.voice_clarity);
		this.filters.voice_sss.connect(this.network.out);

	}


	/* (4) Binds an input stream
	*
	* @_stream<MediaStream> Microphone stream from the recorder
	*
	---------------------------------------------------------*/
	bindRecorderStream(_stream){

		/* (1) Bind our audio stream to our source */
		this.input = this.ctx.createMediaStreamSource(_stream);

		/* (2) By default: link through filters to output */
		this.linkFilters();

		gs.get.audio_conn = 2; // voice connected

	}


	/* (5) Send chunks (Float32Array)
	*
	* @_audioprocess<AudioProcessingEvent> Event from network.out
	*
	---------------------------------------------------------*/
	send(_audioprocess){

		/* Exit here if the websocket is not OPEN */
		if( this.ws === null || this.ws.readyState !== 1 )
			return;

		/* (1) WebSocket send packet
		---------------------------------------------------------*/
		/* (1) Initialize buffer (Float32Array) */
		let buf32 = new Float32Array(AudioManager.BUFFER_SIZE);

		/* (2) Extract stream into buffer */
		_audioprocess.inputBuffer.copyFromChannel(buf32, 0);

		/* (3) Convert for WS connection (Int16Array); also fills this.peaks */
		this.peaks.low = 0;
		this.peaks.high = 0;
		let buf16 = this.f32toi16(buf32);

		/* (4) Send buffer through websocket */
		this.ws.send(buf16);

		/* (5) Auto microphone gain:
		 *  - more than 1% near-saturated samples -> decrease
		 *  - more than 99% quiet samples -> increase (capped at 1) */
		if( this.peaks.high > .01 )
			this.volume_value *= .8;
		else if( this.peaks.low > .99 && this.volume_value*1.01 < 1 )
			this.volume_value *= 1.01;

		// apply new volume
		this.volume.gain.setValueAtTime(this.volume_value, this.ctx.currentTime);

		/* (2) WebSocket buffer stack read
		---------------------------------------------------------*/
		/* drained asynchronously to keep this audio callback fast */
		setTimeout(function(){

			/* (1) Pop too large stack */
			this.stack.length > this.stack_size && this.stack.pop();

			/* (2) Play the oldest buffered source node, if any */
			if( this.stack.length > 0 )
				this.stack.shift().start();

		}.bind(this), 0);

		/* (3) Manage analyser
		---------------------------------------------------------*/
		/* (1) Process only if 'freq_drawer' is set */
		if( this.freq_drawer instanceof Function ){

			// 1. Prepare array
			let freqArray = new Uint8Array(this.analyser.frequencyBinCount);

			// 2. Get frequency array
			this.analyser.getByteFrequencyData(freqArray);

			// 3. Send to callback
			setTimeout(this.freq_drawer.bind(this,freqArray), 0);

		/* (2) Process only if 'wave_drawer' is set */
		}else if( this.wave_drawer instanceof Function ){

			// 1. Prepare array
			let waveArray = new Uint8Array(this.analyser.fftSize);

			// 2. Get wave array
			this.analyser.getByteTimeDomainData(waveArray);

			// 3. Send to callback
			setTimeout(this.wave_drawer.bind(this,waveArray), 0);

		}

		// DEBUG (Int16 = 2 bytes per sample)
		this.dbg.data.packets_sent++;
		this.dbg.data.kB_sent += buf16.length * 2 / 1024;

	}


	/* (6) Play received chunks (Int16Array)
	*
	* @_buffer<Int16Array> Received packet
	*
	---------------------------------------------------------*/
	receive(_buffer){

		/* (1) Convert to Float32Array */
		let buf32 = this.i16tof32(_buffer);

		/* (2) Create source node */
		let source = this.ctx.createBufferSource();

		/* (3) Create buffer and dump data */
		let input_buffer = this.ctx.createBuffer(1, AudioManager.BUFFER_SIZE, this.ctx.sampleRate);
		input_buffer.getChannelData(0).set(buf32);

		/* (4) Bind buffer to source node */
		source.buffer = input_buffer;

		/* (5) Create a dedicated gain */
		let gain = this.ctx.createGain();

		/* (6) source -> gain -> MASTER */
		source.connect(gain);
		gain.connect(this.master);

		/* (7) Push in buffer stack (played from send()'s drain timer) */
		this.stack.push(source);

	}


	/* (7) Convert Float32Array to Int16Array
	*
	* @buf32<Float32Array> Input samples in [-1;1]
	*
	* @return buf16<Int16Array> Converted output
	*
	* Side effect: fills this.peaks with the ratio of quiet (|v|<.1)
	* and near-saturated (|v|>.9) samples, used by the auto mic gain.
	*
	* Fixes vs. previous version:
	*  - samples are clamped to [-1;1]: out-of-range values used to wrap
	*    around in the Int16Array (gross distortion)
	*  - peak detection uses the magnitude: negative samples used to
	*    always count as 'low' and never as 'high'
	*
	---------------------------------------------------------*/
	f32toi16(buf32){

		/* (1) Initialise output */
		let buf16 = new Int16Array(buf32.length);
		let l = buf32.length;

		/* (2) Convert each value */
		for( let i = 0 ; i < l ; i++ ){

			// clamp to avoid Int16 wraparound
			let v = Math.max(-1, Math.min(1, buf32[i]));
			buf16[i] = (v < 0) ? 0x8000 * v : 0x7FFF * v;

			// peak detection on magnitude
			let mag = Math.abs(buf32[i]);
			( mag > 0.9 ) && ( this.peaks.high++ );
			( mag < 0.1 ) && ( this.peaks.low++ );

		}

		/* (3) Report peaks as ratios (guard empty input) */
		if( l > 0 ){
			this.peaks.high /= l;
			this.peaks.low /= l;
		}

		return buf16;
	}


	/* (8) Convert Int16Array to Float32Array
	*
	* @buf16<Int16Array> Input
	*
	* @return buf32<Float32Array> Converted output in [-1;1]
	*
	* Note: Int16Array elements are already signed (-0x8000..0x7FFF), so
	* the previous `>= 0x8000` unsigned check could never match; negative
	* values simply scale by 0x8000 (exact inverse of f32toi16).
	*
	---------------------------------------------------------*/
	i16tof32(buf16){

		/* (1) Initialise output */
		let buf32 = new Float32Array(buf16.length);

		/* (2) Convert each value */
		for( let i = 0, l = buf16.length ; i < l ; i++ )
			buf32[i] = (buf16[i] < 0) ? buf16[i] / 0x8000 : buf16[i] / 0x7FFF;

		return buf32;
	}


	/* (9) Connect websocket
	*
	* @_addr<String> Websocket address
	*
	---------------------------------------------------------*/
	wsconnect(_addr){

		/* (1) Create websocket connection */
		this.ws = new WebSocket(_addr);
		gs.get.audio_conn = 0; // connecting

		/* (2) Manage websocket responses: Blob -> Int16Array -> receive() */
		this.ws.onmessage = function(_msg){

			if( !(_msg.data instanceof Blob) )
				return console.warn('[NaB] Not A Blob');

			let fr = new FileReader();

			fr.onload = function(){
				let buf16 = new Int16Array(fr.result);
				this.receive(buf16);
			}.bind(this);

			fr.readAsArrayBuffer(_msg.data);

		}.bind(this);

		/* (3) Track connection state */
		this.ws.onopen = () => (gs.get.audio_conn !== 2 && (gs.get.audio_conn = 1)); // listening
		this.ws.onclose = () => ( gs.get.audio_conn = null ); // disconnected

	}


	/* (10) Access microphone + launch all
	*
	* @room_id<Number> Audio room identifier
	*
	---------------------------------------------------------*/
	launch(room_id=0){

		/* (1) Start websocket */
		this.wsconnect(`wss://ws.douscord.xdrm.io/audio/${room_id}`);

		/* (2) Set our streaming binding function */
		let streaming_binding = function(stream){

			this.recorder = new MediaRecorder(stream);

			this.recorder.onstart = function(){
				this.bindRecorderStream(stream);
				console.warn('[audio] recording');
			}.bind(this);

			this.recorder.onstop = () => {
				// release every microphone track
				this.recorder.stream.getTracks().map( t => t.stop() );
				this.recorder = null;
				console.warn('[audio] stopped recording');
			};

			// start recording
			this.recorder.start();

		}.bind(this);

		navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;

		/* (3) If navigator.mediaDevices.getUserMedia (modern API) */
		if( navigator.mediaDevices && navigator.mediaDevices.getUserMedia ){
			console.log('[audio] using "navigator.mediaDevices.getUserMedia"')
			return navigator.mediaDevices.getUserMedia({ audio: true })
				.then(streaming_binding)
				.catch((e) => console.warn('[audio] microphone recorder issue', e));
		}

		/* (4) If old version */
		if( navigator.getUserMedia ){
			console.log('[audio] using "navigator.getUserMedia"')
			return navigator.getUserMedia({ audio: true },
				streaming_binding,
				(e) => console.warn('[audio] microphone recorder issue', e));
		}

		console.warn('[audio] recorder not supported');

	}


	/* (11) Shut down microphone + kill all
	*
	---------------------------------------------------------*/
	kill(){

		/* (1) Close websocket */
		this.ws && this.ws.close();

		/* (2) Stop recording (onstop releases the tracks) */
		this.recorder && this.recorder.stop();

	}


	/* (12) Play a POP notification
	*
	---------------------------------------------------------*/
	pop(){

		/* (1) Base data */
		let base_freq = 150;
		let mods = [0, 75, 75]; // freq modulations (from base_freq)
		let time_range = 0.05;  // time between each modulation
		let start = this.ctx.currentTime + 0.1;

		/* (2) Build oscillator */
		let osc = this.ctx.createOscillator();
		osc.type = 'triangle';

		/* (3) Create local gain to lower volume */
		let local = this.ctx.createGain();
		local.gain.setValueAtTime(0.3, 0);

		/* (4) Connect all nodes to output */
		osc.connect(local);
		local.connect(this.master);

		/* (5) Bind frequencies over time */
		mods.forEach((mod, i) => osc.frequency.setValueAtTime(base_freq + mod, start + i*time_range));

		/* (6) Start playing + (7) set when to stop */
		osc.start( start );
		osc.stop( start + time_range*mods.length );

	}

}