var audioEvents = [];
var midiEvents = [];
var audioContext = null;
var meter = null;
var analyser = null;
var rafID = null;
var buflen = 1024;
var buf = new Float32Array(buflen);
var audio = false;
var midi = false;
var audioSample = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; // rolling window of recent volume samples, used for attack detection
var record = false;
var records = []; // array of recorded values
var recording = {
time: new Date().getTime(),
events: [
[1.0, 'controlchange', '1', 0],
[2.0, 'controlchange', '1', 1],
]
}; // current recording: each event is [time offset (s), event type, event name, value]
var time;
var playLoop = false;
var parameterSave = {};
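/**
 * Apply a template's option to a value.
 * 'i:<step>' increments a counter stored in parameterSave by <step>, wrapping from max back to min;
 * 'r' picks a random value (from template.array if set, otherwise between min and max,
 * with '#'-prefixed bounds treated as hex colors);
 * 'd' maps a 0.0-1.0 input linearly onto [min, max].
 * The template shape below is inferred from how the fields are used in this file (an assumption,
 * not a documented format), e.g.:
 * { name: 'size', type: 'midi', eventType: 'cc', eventName: 1,
 *   min: '0', max: '100', options: 'd', array: '', templateFull: '...' }
 * @param {object} template template descriptor
 * @param {number} value normalized input value (0.0 - 1.0)
 */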
function range(template, value) {
if (template.options.includes('i')) {
var increment = parseInt(template.options.split(':')[1]);
if (template.name in parameterSave) {
parameterSave[template.name] = (parameterSave[template.name] + increment) > parseInt(template.max) ? parseInt(template.min) : (parameterSave[template.name] + increment);
return parameterSave[template.name];
}
else {
parameterSave[template.name] = parseInt(template.min);
return parseInt(template.min);
}
}
if (template.options == 'r') {
if (template.array != '') {
return template.array[Math.floor(Math.random() * template.array.length)];
} else {
if (template.min[0] == '#') {
template.min = parseInt(template.min.replace('#', ''), 16);
template.max = parseInt(template.max.replace('#', ''), 16);
return '#' + ((Math.random() * (template.max - template.min) + template.min).toString(16)).split('.')[0];
}
else {
return (Math.random() * (parseInt(template.max) - parseInt(template.min)) + parseInt(template.min));
}
}
}
if (template.options == 'd') {
if (template.min[0] == '#') {
template.min = parseInt(template.min.replace('#', ''), 16);
template.max = parseInt(template.max.replace('#', ''), 16);
return '#' + ((template.min + Math.round(template.max - template.min) * value).toString(16)).split('.')[0];
}
else {
return (parseInt(template.min) + Math.round(parseInt(template.max) - parseInt(template.min)) * value);
}
}
}
/**
* Initialize MIDI input, or disable it when the midi flag is false
*/
function midiApi() {
if (midi) {
WebMidi
.enable()
.then(launchMidi)
.catch(err => alert(err));
}
else {
WebMidi.disable();
chrome.storage.sync.set({ midiI: false });
chrome.runtime.sendMessage({ type: 'updateUi', data: false });
}
}
/**
* Launch MIDI after WebMidi has been enabled by midiApi()
*/
function launchMidi() {
// Display available MIDI input devices
if (WebMidi.inputs.length < 1) {
chrome.storage.sync.set({ midiI: false });
chrome.runtime.sendMessage({ type: 'updateUi', data: false });
chrome.runtime.sendMessage({ type: 'midiDevices', data: 'No device found.' });
} else {
chrome.storage.sync.set({ midiI: true });
chrome.runtime.sendMessage({ type: 'updateUi', data: true });
let devices = '';
WebMidi.inputs.forEach((device, index) => {
devices += `${index}: ${device.name} ${device.manufacturer}<br>`;
});
chrome.runtime.sendMessage({ type: 'midiDevices', data: devices });
const mySynth = WebMidi.inputs[0];
mySynth.addListener("noteon", e => {
midiEvent('noteon', e);
}, { channels: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] });
mySynth.addListener("controlchange", e => {
midiEvent('controlchange', e);
}, { channels: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] });
}
}
/**
* Triggered when a MIDI event is received. Values are normalized between 0.0 and 1.0.
* @param {string} type type of MIDI event ('noteon', 'controlchange' or 'pitchbend')
* @param {object} data WebMidi event object
*/
function midiEvent(type, data) {
chrome.storage.sync.get(['popup'], function (result) {
if (result.popup) {
switch (type) {
case 'noteon':
chrome.runtime.sendMessage({ type: 'midiEvent', data: data.note.number });
break;
case 'controlchange':
chrome.runtime.sendMessage({ type: 'midiEvent', data: data.controller.number });
break;
case 'pitchbend':
chrome.runtime.sendMessage({ type: 'midiEvent', data: data.type });
break;
default:
break;
}
}
});
chrome.storage.sync.get(['all'], function (result) {
result.all.forEach(element => {
let value = element.value;
let midiValue = data.value;
let eventIn = false;
element.templates.forEach(template => {
if (template.type == 'midi') {
midiValue = range(template, midiValue);
if (type == 'noteon' && template.eventType == 'note' && template.eventName == data.note.number) {
value = element.value.replace('[' + template.templateFull + ']', midiValue);
eventIn = true;
}
if (type == 'controlchange' && template.eventType == 'cc' && template.eventName == data.controller.number) {
value = element.value.replace('[' + template.templateFull + ']', midiValue);
eventIn = true;
}
if (type == 'pitchbend' && template.eventType == 'pb') {
value = element.value.replace('[' + template.templateFull + ']', midiValue);
eventIn = true;
}
}
});
if (eventIn) {
let els = document.querySelectorAll(element.element);
els.forEach(e => {
e.style[element.property] = value;
});
}
});
});
var newTime = new Date().getTime();
var delta = (newTime - time) / 1000; // seconds elapsed since the previous event
var bpm = (1 / delta) * 60;
time = newTime;
if (record && bpm == 120) {
switch (type) {
case 'controlchange':
recording['events'].push([delta, type, data.controller.name, data.value]);
break;
case 'noteon':
recording['events'].push([delta, type, data.note.number, data.value]);
break;
case 'pitchbend':
recording['events'].push([delta, type, type, data.value]);
break;
}
}
}
// Play back a recorded MIDI loop (the playback logic below is still a commented-out draft)
function playMidiLoop(loopId) {
chrome.runtime.sendMessage({ data: 'loop' });
// if (records[loopId]) {
// playLoopInfo = true;
// while (playLoopInfo && playLoop && records[loopId]['events'].length > 0) {
// chrome.storage.sync.get(['loopPlay'], function (result) {
// playLoopInfo = result.loopPlay;
// });
// console.log(playLoopInfo);
// for (let index = 0; index < records[loopId]['events'].length; index++) {
// const event = records[loopId]['events'][index];
// const eventNext = records[loopId]['events'][(index + 1) % records[loopId]['events'].length];
// // !!! TODO: add a way to play midi events
// audioEvents.forEach(event => {
// midiValue = event[3];
// console.log(event[3]);
// templates = event['templates'];
// templates.forEach(template => {
// midiValue = (template['min'] + ((template['max'] - template['min']) * midiValue));
// if (event[1] == 'noteon' && template['eventType'] == 'note' && template['eventName'] == event[2]) {
// value = event['value'].replace('[' + template['templateFull'] + ']', midiValue);
// let els = document.querySelectorAll(event['selector']);
// els.forEach(e => {
// e.style[event['property']] = value; // Note velocity
// });
// }
// if (event[1] == 'controlchange' && template['eventType'] == 'cc' && template['eventName'] == event[2]) {
// value = event['value'].replace('[' + template['templateFull'] + ']', midiValue);
// let els = document.querySelectorAll(event['selector']);
// els.forEach(e => {
// e.style[event['property']] = value; // Control value
// });
// }
// if (event[1] == 'pitchbend' && template['eventType'] == 'pb') {
// value = event['value'].replace('[' + template['templateFull'] + ']', midiValue);
// let els = document.querySelectorAll(event['selector']);
// els.forEach(e => {
// e.style[event['property']] = value; // Pitchbend value
// });
// }
// if (event[1] == 'loud' && template['eventType'] == 'loud') {
// value = event['value'].replace('[' + template['templateFull'] + ']', midiValue);
// let els = document.querySelectorAll(event['selector']);
// els.forEach(e => {
// e.style[event['property']] = value; // Pitchbend value
// });
// }
// });
// });
// sleepFor((event[0] - eventNext[0]) * 1000);
// }
// }
// }
}
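/**
* Create a ScriptProcessorNode that tracks the RMS volume of the audio routed through it
* and flags clipping. The returned node exposes .volume, .checkClipping() and .shutdown().
* @param {AudioContext} audioContext context to create the node on
* @param {number} [clipLevel=0.98] absolute sample value considered clipping
* @param {number} [averaging=0.95] decay factor applied to the volume between buffers
* @param {number} [clipLag=750] time in ms the clipping flag stays raised
*/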
function createAudioMeter(audioContext, clipLevel, averaging, clipLag) {
var processor = audioContext.createScriptProcessor(512);
processor.onaudioprocess = volumeAudioProcess;
processor.clipping = false;
processor.lastClip = 0;
processor.volume = 0;
processor.clipLevel = clipLevel || 0.98;
processor.averaging = averaging || 0.95;
processor.clipLag = clipLag || 750;
processor.connect(audioContext.destination);
processor.checkClipping =
function () {
if (!this.clipping)
return false;
if ((this.lastClip + this.clipLag) < window.performance.now())
this.clipping = false;
return this.clipping;
};
processor.shutdown =
function () {
this.disconnect();
this.onaudioprocess = null;
};
return processor;
}
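// onaudioprocess handler: computes the RMS of the current buffer, raises the clipping flag
// when a sample exceeds clipLevel, and keeps a smoothed volume using the averaging factor.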
function volumeAudioProcess(event) {
var buf = event.inputBuffer.getChannelData(0);
var bufLength = buf.length;
var sum = 0;
var x;
for (var i = 0; i < bufLength; i++) {
x = buf[i];
if (Math.abs(x) >= this.clipLevel) {
this.clipping = true;
this.lastClip = window.performance.now();
}
sum += x * x;
}
var rms = Math.sqrt(sum / bufLength);
this.volume = Math.max(rms, this.volume * this.averaging);
}
/**
* Initialize audio and request microphone access from the user
*/
function audioApi() {
chrome.storage.sync.get(['audioI'], function (result) {
audio = result.audioI;
if (audio) {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audioContext = new AudioContext();
try {
navigator.getUserMedia =
navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia;
navigator.getUserMedia(
{
"audio": {
"mandatory": {
"googEchoCancellation": "false",
"googAutoGainControl": "false",
"googNoiseSuppression": "false",
"googHighpassFilter": "false"
},
"optional": []
},
}, gotStream, didntGetStream);
} catch (e) {
alert('getUserMedia threw exception: ' + e);
}
}
else {
console.info('Audio suspended');
if (audioContext) audioContext.suspend();
chrome.storage.sync.set({ audioI: false });
audio = false;
chrome.runtime.sendMessage({ type: 'updateUi', data: false });
}
});
};
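// Busy-wait for sleepDuration milliseconds (blocks the thread, so only suitable for short pauses).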
function sleepFor(sleepDuration) {
var now = new Date().getTime();
while (new Date().getTime() < now + sleepDuration) { /* Do nothing */ }
}
function didntGetStream() {
chrome.storage.sync.set({ audioI: false });
audio = false;
chrome.runtime.sendMessage({ type: 'updateUi', data: false });
console.error('Stream generation failed.');
}
var mediaStreamSource = null;
function gotStream(stream) {
// Create an AudioNode from the stream.
mediaStreamSource = audioContext.createMediaStreamSource(stream);
analyser = audioContext.createAnalyser();
analyser.fftSize = 2048;
analyser.maxDecibels = -25;
analyser.minDecibels = -85;
mediaStreamSource.connect(analyser);
// Create a new volume meter and connect it.
meter = createAudioMeter(audioContext);
mediaStreamSource.connect(meter);
audioContext.resume();
console.info('Audio start');
chrome.storage.sync.set({ audioI: true });
chrome.runtime.sendMessage({ type: 'updateUi', data: true });
// kick off the visual updating
audioEvent();
}
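// Pitch helpers: MIDI note number = 69 + 12 * log2(frequency / 440 Hz), and its inverse.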
var noteStrings = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"];
function noteFromPitch(frequency) {
var noteNum = 12 * (Math.log(frequency / 440) / Math.log(2));
return Math.round(noteNum) + 69;
}
function frequencyFromNoteNumber(note) {
return 440 * Math.pow(2, (note - 69) / 12);
}
function centsOffFromPitch(frequency, note) {
return Math.floor(1200 * Math.log(frequency / frequencyFromNoteNumber(note)) / Math.log(2));
}
var MIN_SAMPLES = 0; // lowest lag (in samples) tried by autoCorrelate
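// Estimate the fundamental frequency of the buffer by autocorrelation: try every lag (offset),
// keep the best correlation above 0.9, then interpolate around the peak.
// Returns the frequency in Hz, or -1 when the signal is too quiet or no pitch is found.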
function autoCorrelate(buf, sampleRate) {
var SIZE = buf.length;
var MAX_SAMPLES = Math.floor(SIZE / 2);
var best_offset = -1;
var best_correlation = 0;
var rms = 0;
var foundGoodCorrelation = false;
var correlations = new Array(MAX_SAMPLES);
for (var i = 0; i < SIZE; i++) {
var val = buf[i];
rms += val * val;
}
rms = Math.sqrt(rms / SIZE);
if (rms < 0.01) // not enough signal
return -1;
var lastCorrelation = 1;
for (var offset = MIN_SAMPLES; offset < MAX_SAMPLES; offset++) {
var correlation = 0;
for (var i = 0; i < MAX_SAMPLES; i++) {
correlation += Math.abs((buf[i]) - (buf[i + offset]));
}
correlation = 1 - (correlation / MAX_SAMPLES);
correlations[offset] = correlation; // store it, for the tweaking we need to do below.
if ((correlation > 0.9) && (correlation > lastCorrelation)) {
foundGoodCorrelation = true;
if (correlation > best_correlation) {
best_correlation = correlation;
best_offset = offset;
}
} else if (foundGoodCorrelation) {
var shift = (correlations[best_offset + 1] - correlations[best_offset - 1]) / correlations[best_offset];
return sampleRate / (best_offset + (8 * shift));
}
lastCorrelation = correlation;
}
if (best_correlation > 0.01) {
return sampleRate / best_offset;
}
return -1;
}
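// Convert a frequency in Hz to the index of the matching FFT bin, clamped to the analyser's bin count.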
function freqToBin(freq, rounding = 'round') {
// each FFT bin spans sampleRate / fftSize Hz
const max = analyser.frequencyBinCount - 1,
bin = Math[rounding](freq * analyser.fftSize / audioContext.sampleRate);
return bin < max ? bin : max;
}
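// Main audio loop (re-scheduled with requestAnimationFrame): detects the current pitch,
// measures the energy of a few frequency bands, then applies the matching 'audio' templates
// to the page and optionally records a 'loud' event.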
function audioEvent() {
analyser.getFloatTimeDomainData(buf);
var ac = autoCorrelate(buf, audioContext.sampleRate);
var note = -1;
var detune = 0;
if (ac != -1) {
// $('#analyser1').text(Math.round(ac));
note = noteFromPitch(ac);
// $('#analyser2').text(noteStrings[note % 12]);
detune = centsOffFromPitch(ac, note);
// if (detune == 0) {
// $('#analyser3').text("--");
// } else {
// $('#analyser3').text(Math.abs(detune));
// }
}
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
analyser.getByteFrequencyData(dataArray);
const presets = {
low: [20, 250],
lomi: [250, 500],
mid: [500, 2e3],
mihi: [2e3, 4e3],
hi: [4e3, 16e3]
}
var startFreq, endFreq, startBin, endBin, energy;
var energies = {};
Object.keys(presets).forEach(key => {
[startFreq, endFreq] = presets[key];
startBin = freqToBin(startFreq);
endBin = endFreq ? freqToBin(endFreq) : startBin;
energy = 0;
for (let i = startBin; i <= endBin; i++)
energy += dataArray[i];
energies[key] = energy / (endBin - startBin + 1) / 255;
});
const sum = audioSample.reduce((a, b) => a + b, 0);
const avg = (sum / audioSample.length) || 0;
var attack = false;
if (avg + 0.05 < meter.volume) {
attack = true;
}
audioSample.shift();
audioSample.push(meter.volume);
chrome.storage.sync.get(['all'], function (result) {
result.all.forEach(element => {
let value = element.value;
let eventIn = false;
element.templates.forEach(template => {
if (template.type == 'audio') {
Object.keys(presets).forEach(preset => {
if (template.eventType == preset) {
eventIn = true;
value = value.replace('[' + template.templateFull + ']', range(template, energies[preset]));
}
});
if (template.eventType == 'loud') {
eventIn = true;
var volume = meter.volume * 1.4;
value = value.replace('[' + template.templateFull + ']', (volume < 1 ? range(template, volume) : 0));
}
if (template.eventType == 'onset' && template.eventName == noteStrings[note % 12]) {
eventIn = true;
value = value.replace('[' + template.templateFull + ']', range(template, 1));
}
if (attack && template.eventType == 'attack') {
console.log('Attack');
eventIn = true;
value = value.replace('[' + template.templateFull + ']', '');
}
}
});
if (eventIn) {
let els = document.querySelectorAll(element.element);
els.forEach(e => {
e.style[element.property] = value;
});
}
});
});
var newTime = new Date().getTime();
var delta = (newTime - time) / 1000; // seconds elapsed since the previous event
var bpm = (1 / delta) * 60;
time = newTime;
if (record && bpm == 120) {
recording['events'].push([delta, 'loud', 'data.controller.name', meter.volume * 1.4]);
}
// sleepFor(20);
if (audio) {
rafID = window.requestAnimationFrame(audioEvent);
}
}
/**
* Triggered when a new message is received from the extension.
* @param {{type: string, data: *}} message message type and payload
*/
function onMessage({ type, data }) {
console.info('Message received from CSSLSD back:', type);
switch (type) {
case 'update': {
const css = data.cssStr;
audioEvents = [];
midiEvents = [];
// parseCSS(css)
break;
}
case 'start': {
chrome.storage.sync.get(['activate'], function (result) {
if (result.activate) {
// zzz
}
else {
audioApi();
midiApi();
}
});
break;
}
case 'audio': {
audioApi();
break;
}
case 'midi': {
midi = data;
midiApi();
break;
}
case 'playLoop': {
playLoop = !playLoop;
playMidiLoop(data);
break;
}
case 'getLoop': {
chrome.runtime.sendMessage({ type: 'midiRecords', data: records });
break;
}
case 'record': {
record = data;
// If recording has stopped, add it to the list of records.
if (!record) {
recording['time'] = (new Date().getTime() - recording['time']) / 1000;
records.push(recording);
recording = {
time: new Date().getTime(),
events: []
};
chrome.runtime.sendMessage({ type: 'midiRecords', data: records });
}
else {
time = new Date().getTime();
recording['time'] = new Date().getTime();
}
break;
}
}
}
chrome.storage.sync.set({ popup: true });
chrome.runtime.onMessage.addListener(onMessage);
chrome.storage.sync.get(['activate'], function (result) {
if (result.activate) {
audioApi();
}
});