From 79664f56a63122da87ad91eaebcdf85083b7f0a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Hru=C5=A1ka?= Date: Sun, 10 Sep 2017 13:47:29 +0200 Subject: [PATCH] eslint all the things --- .eslintrc | 10 +- build.sh | 36 +- build_html.php => compile_html.php | 0 dump_js_lang.php | 5 +- jssrc/appcommon.js | 195 ++--- jssrc/lang.js | 10 +- jssrc/{ => lib}/chibi.js | 0 jssrc/{ => lib}/keymaster.js | 0 jssrc/lib/polyfills.js | 63 ++ jssrc/modal.js | 48 +- jssrc/notif.js | 43 +- jssrc/soft_keyboard.js | 82 +- jssrc/td/WebAudio.d.ts | 1144 ++++++++++++++++++++++++++++ jssrc/term.js | 8 +- jssrc/term_conn.js | 152 ++-- jssrc/term_input.js | 449 ++++++----- jssrc/term_screen.js | 893 +++++++++++----------- jssrc/term_upload.js | 138 ++-- jssrc/utils.js | 134 ++-- jssrc/wifi.js | 181 +++-- pages/term.php | 4 + 21 files changed, 2371 insertions(+), 1224 deletions(-) rename build_html.php => compile_html.php (100%) rename jssrc/{ => lib}/chibi.js (100%) rename jssrc/{ => lib}/keymaster.js (100%) create mode 100644 jssrc/lib/polyfills.js create mode 100644 jssrc/td/WebAudio.d.ts diff --git a/.eslintrc b/.eslintrc index 14983b6..eecb50b 100644 --- a/.eslintrc +++ b/.eslintrc @@ -45,7 +45,7 @@ "curly": ["error", "multi-line"], "dot-location": ["error", "property"], "eol-last": "error", - "eqeqeq": ["error", "always", { "null": "ignore" }], + "eqeqeq": ["off", "always", { "null": "ignore" }], "func-call-spacing": ["error", "never"], "generator-star-spacing": ["error", { "before": true, "after": true }], "handle-callback-err": ["error", "^(err|error)$" ], @@ -72,7 +72,7 @@ "no-empty-pattern": "error", "no-eval": "error", "no-ex-assign": "error", - "no-extend-native": "error", + "no-extend-native": "warn", "no-extra-bind": "error", "no-extra-boolean-cast": "error", "no-extra-parens": ["error", "functions"], @@ -87,7 +87,7 @@ "no-iterator": "error", "no-label-var": "error", "no-labels": ["error", { "allowLoop": false, "allowSwitch": false }], - "no-lone-blocks": "error", + "no-lone-blocks": "warn", "no-mixed-operators": ["error", { "groups": [ ["==", "!=", "===", "!==", ">", ">=", "<", "<="], @@ -134,8 +134,8 @@ "no-unreachable": "error", "no-unsafe-finally": "error", "no-unsafe-negation": "error", - "no-unused-expressions": ["error", { "allowShortCircuit": true, "allowTernary": true, "allowTaggedTemplates": true }], - "no-unused-vars": ["error", { "vars": "all", "args": "none", "ignoreRestSiblings": true }], + "no-unused-expressions": ["warn", { "allowShortCircuit": true, "allowTernary": true, "allowTaggedTemplates": true }], + "no-unused-vars": ["off", { "vars": "local", "args": "none", "ignoreRestSiblings": true }], "no-use-before-define": ["error", { "functions": false, "classes": false, "variables": false }], "no-useless-call": "error", "no-useless-computed-key": "error", diff --git a/build.sh b/build.sh index 6e5bbaa..1471371 100755 --- a/build.sh +++ b/build.sh @@ -1,25 +1,29 @@ #!/bin/bash -echo "Packing JS..." +echo 'Packing JS...' 
-cat jssrc/chibi.js \ - jssrc/keymaster.js \ - jssrc/utils.js \ - jssrc/modal.js \ - jssrc/notif.js \ - jssrc/appcommon.js \ - jssrc/lang.js \ - jssrc/wifi.js \ - jssrc/term_* \ - jssrc/term.js \ - jssrc/soft_keyboard.js | npm run --silent minify > js/app.js +echo ';' > ';' +cat jssrc/lib/chibi.js ';' \ + jssrc/lib/keymaster.js ';' \ + jssrc/lib/polyfills.js ';' \ + jssrc/utils.js ';' \ + jssrc/modal.js ';' \ + jssrc/notif.js ';' \ + jssrc/appcommon.js ';' \ + jssrc/lang.js ';' \ + jssrc/wifi.js ';' \ + jssrc/term_* ';' \ + jssrc/term.js ';' \ + jssrc/soft_keyboard.js | npm run --silent minify > js/app.js +rm ';' -echo "Building CSS..." +echo 'Building CSS...' npm run sass -- --output-style compressed sass/app.scss css/app.css -echo "Building HTML..." +echo 'Building HTML...' -php ./build_html.php +php ./dump_js_lang.php +php ./compile_html.php -echo "ESPTerm front-end ready" +echo 'ESPTerm front-end ready' diff --git a/build_html.php b/compile_html.php similarity index 100% rename from build_html.php rename to compile_html.php diff --git a/dump_js_lang.php b/dump_js_lang.php index ab2f976..47eee0a 100755 --- a/dump_js_lang.php +++ b/dump_js_lang.php @@ -8,7 +8,6 @@ $selected = [ 'wifi.connected_ip_is', 'wifi.not_conn', 'wifi.enter_passwd', - 'wifi.passwd_saved', ]; $out = []; @@ -18,6 +17,6 @@ foreach ($selected as $key) { file_put_contents(__DIR__. '/jssrc/lang.js', "// Generated from PHP locale file\n" . - 'var _tr = ' . json_encode($out, JSON_PRETTY_PRINT|JSON_UNESCAPED_UNICODE) . ";\n\n" . - "function tr(key) { return _tr[key] || '?'+key+'?'; }\n" + 'let _tr = ' . json_encode($out, JSON_PRETTY_PRINT|JSON_UNESCAPED_UNICODE) . ";\n\n" . + "function tr (key) { return _tr[key] || '?' + key + '?' }\n" ); diff --git a/jssrc/appcommon.js b/jssrc/appcommon.js index b5d09d5..2927c89 100644 --- a/jssrc/appcommon.js +++ b/jssrc/appcommon.js @@ -2,189 +2,124 @@ $.ready(function () { // Checkbox UI (checkbox CSS and hidden input with int value) $('.Row.checkbox').forEach(function (x) { - var inp = x.querySelector('input'); - var box = x.querySelector('.box'); + let inp = x.querySelector('input') + let box = x.querySelector('.box') - $(box).toggleClass('checked', inp.value); + $(box).toggleClass('checked', inp.value) - var hdl = function () { - inp.value = 1 - inp.value; + let hdl = function () { + inp.value = 1 - inp.value $(box).toggleClass('checked', inp.value) - }; + } - $(x).on('click', hdl).on('keypress', cr(hdl)); - }); + $(x).on('click', hdl).on('keypress', cr(hdl)) + }) // Expanding boxes on mobile $('.Box.mobcol,.Box.fold').forEach(function (x) { - var h = x.querySelector('h2'); + let h = x.querySelector('h2') - var hdl = function () { - $(x).toggleClass('expanded'); - }; - $(h).on('click', hdl).on('keypress', cr(hdl)); - }); + let hdl = function () { + $(x).toggleClass('expanded') + } + $(h).on('click', hdl).on('keypress', cr(hdl)) + }) $('form').forEach(function (x) { $(x).on('keypress', function (e) { if ((e.keyCode == 10 || e.keyCode == 13) && e.ctrlKey) { - x.submit(); + x.submit() } }) - }); + }) // loader dots... setInterval(function () { $('.anim-dots').each(function (x) { - var $x = $(x); - var dots = $x.html() + '.'; - if (dots.length == 5) dots = '.'; - $x.html(dots); - }); - }, 1000); + let $x = $(x) + let dots = $x.html() + '.' + if (dots.length == 5) dots = '.' 
+ $x.html(dots) + }) + }, 1000) // flipping number boxes with the mouse wheel $('input[type=number]').on('mousewheel', function (e) { - var $this = $(this); - var val = +$this.val(); - if (isNaN(val)) val = 1; + let $this = $(this) + let val = +$this.val() + if (isNaN(val)) val = 1 - var step = +($this.attr('step') || 1); - var min = +$this.attr('min'); - var max = +$this.attr('max'); + const step = +($this.attr('step') || 1) + const min = +$this.attr('min') + const max = +$this.attr('max') if (e.wheelDelta > 0) { - val += step; + val += step } else { - val -= step; + val -= step } - if (typeof min != 'undefined') val = Math.max(val, +min); - if (typeof max != 'undefined') val = Math.min(val, +max); - $this.val(val); + if (typeof min != 'undefined') val = Math.max(val, +min) + if (typeof max != 'undefined') val = Math.min(val, +max) + $this.val(val) - if ("createEvent" in document) { - var evt = document.createEvent("HTMLEvents"); - evt.initEvent("change", false, true); - $this[0].dispatchEvent(evt); + if ('createEvent' in document) { + let evt = document.createEvent('HTMLEvents') + evt.initEvent('change', false, true) + $this[0].dispatchEvent(evt) } else { - $this[0].fireEvent("onchange"); + $this[0].fireEvent('onchange') } - e.preventDefault(); - }); + e.preventDefault() + }) - var errAt = location.search.indexOf('err='); + let errAt = location.search.indexOf('err=') if (errAt !== -1 && qs('.Box.errors')) { - var errs = location.search.substr(errAt + 4).split(','); - var hres = []; + let errs = location.search.substr(errAt + 4).split(',') + let hres = [] errs.forEach(function (er) { - var lbl = qs('label[for="' + er + '"]'); + let lbl = qs('label[for="' + er + '"]') if (lbl) { - lbl.classList.add('error'); - hres.push(lbl.childNodes[0].textContent.trim().replace(/: ?$/, '')); + lbl.classList.add('error') + hres.push(lbl.childNodes[0].textContent.trim().replace(/: ?$/, '')) } else { - hres.push(er); + hres.push(er) } - }); + }) - qs('.Box.errors .list').innerHTML = hres.join(', '); - qs('.Box.errors').classList.remove('hidden'); + qs('.Box.errors .list').innerHTML = hres.join(', ') + qs('.Box.errors').classList.remove('hidden') } - Modal.init(); - Notify.init(); + Modal.init() + Notify.init() // remove tabindixes from h2 if wide if (window.innerWidth > 550) { $('.Box h2').forEach(function (x) { - x.removeAttribute('tabindex'); - }); + x.removeAttribute('tabindex') + }) // brand works as a link back to term in widescreen mode - var br = qs('#brand'); + let br = qs('#brand') br && br.addEventListener('click', function () { - location.href = '/'; // go to terminal - }); + location.href = '/' // go to terminal + }) } -}); +}) $._loader = function (vis) { - $('#loader').toggleClass('show', vis); -}; + $('#loader').toggleClass('show', vis) +} -function showPage() { - $('#content').addClass('load'); +function showPage () { + $('#content').addClass('load') } $.ready(function () { if (window.noAutoShow !== true) { setTimeout(function () { - showPage(); - }, 1); + showPage() + }, 1) } -}); - - -/*! 
http://mths.be/fromcodepoint v0.1.0 by @mathias */ -if (!String.fromCodePoint) { - (function () { - var defineProperty = (function () { - // IE 8 only supports `Object.defineProperty` on DOM elements - try { - var object = {}; - var $defineProperty = Object.defineProperty; - var result = $defineProperty(object, object, object) && $defineProperty; - } catch (error) { - } - return result; - }()); - var stringFromCharCode = String.fromCharCode; - var floor = Math.floor; - var fromCodePoint = function () { - var MAX_SIZE = 0x4000; - var codeUnits = []; - var highSurrogate; - var lowSurrogate; - var index = -1; - var length = arguments.length; - if (!length) { - return ''; - } - var result = ''; - while (++index < length) { - var codePoint = Number(arguments[index]); - if ( - !isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity` - codePoint < 0 || // not a valid Unicode code point - codePoint > 0x10FFFF || // not a valid Unicode code point - floor(codePoint) != codePoint // not an integer - ) { - throw RangeError('Invalid code point: ' + codePoint); - } - if (codePoint <= 0xFFFF) { // BMP code point - codeUnits.push(codePoint); - } else { // Astral code point; split in surrogate halves - // http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae - codePoint -= 0x10000; - highSurrogate = (codePoint >> 10) + 0xD800; - lowSurrogate = (codePoint % 0x400) + 0xDC00; - codeUnits.push(highSurrogate, lowSurrogate); - } - if (index + 1 == length || codeUnits.length > MAX_SIZE) { - result += stringFromCharCode.apply(null, codeUnits); - codeUnits.length = 0; - } - } - return result; - }; - if (defineProperty) { - defineProperty(String, 'fromCodePoint', { - 'value': fromCodePoint, - 'configurable': true, - 'writable': true - }); - } else { - String.fromCodePoint = fromCodePoint; - } - }()); -} +}) diff --git a/jssrc/lang.js b/jssrc/lang.js index 866025c..bce4adb 100644 --- a/jssrc/lang.js +++ b/jssrc/lang.js @@ -1,8 +1,8 @@ // Generated from PHP locale file -var _tr = { - "wifi.connected_ip_is": "Connected, IP is ", - "wifi.not_conn": "Not connected.", - "wifi.enter_passwd": "Enter password for \":ssid:\"" +let _tr = { + "wifi.connected_ip_is": "Connected, IP is ", + "wifi.not_conn": "Not connected.", + "wifi.enter_passwd": "Enter password for \":ssid:\"" }; -function tr(key) { return _tr[key] || '?'+key+'?'; } +function tr (key) { return _tr[key] || '?' + key + '?' } diff --git a/jssrc/chibi.js b/jssrc/lib/chibi.js similarity index 100% rename from jssrc/chibi.js rename to jssrc/lib/chibi.js diff --git a/jssrc/keymaster.js b/jssrc/lib/keymaster.js similarity index 100% rename from jssrc/keymaster.js rename to jssrc/lib/keymaster.js diff --git a/jssrc/lib/polyfills.js b/jssrc/lib/polyfills.js new file mode 100644 index 0000000..bfd2b31 --- /dev/null +++ b/jssrc/lib/polyfills.js @@ -0,0 +1,63 @@ +/*! 
http://mths.be/fromcodepoint v0.1.0 by @mathias */ +if (!String.fromCodePoint) { + (function () { + var defineProperty = (function () { + // IE 8 only supports `Object.defineProperty` on DOM elements + try { + var object = {}; + var $defineProperty = Object.defineProperty; + var result = $defineProperty(object, object, object) && $defineProperty; + } catch (error) { + } + return result; + }()); + var stringFromCharCode = String.fromCharCode; + var floor = Math.floor; + var fromCodePoint = function () { + var MAX_SIZE = 0x4000; + var codeUnits = []; + var highSurrogate; + var lowSurrogate; + var index = -1; + var length = arguments.length; + if (!length) { + return ''; + } + var result = ''; + while (++index < length) { + var codePoint = Number(arguments[index]); + if ( + !isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity` + codePoint < 0 || // not a valid Unicode code point + codePoint > 0x10FFFF || // not a valid Unicode code point + floor(codePoint) != codePoint // not an integer + ) { + throw RangeError('Invalid code point: ' + codePoint); + } + if (codePoint <= 0xFFFF) { // BMP code point + codeUnits.push(codePoint); + } else { // Astral code point; split in surrogate halves + // http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + codePoint -= 0x10000; + highSurrogate = (codePoint >> 10) + 0xD800; + lowSurrogate = (codePoint % 0x400) + 0xDC00; + codeUnits.push(highSurrogate, lowSurrogate); + } + if (index + 1 == length || codeUnits.length > MAX_SIZE) { + result += stringFromCharCode.apply(null, codeUnits); + codeUnits.length = 0; + } + } + return result; + }; + if (defineProperty) { + defineProperty(String, 'fromCodePoint', { + 'value': fromCodePoint, + 'configurable': true, + 'writable': true + }); + } else { + String.fromCodePoint = fromCodePoint; + } + }()); +} diff --git a/jssrc/modal.js b/jssrc/modal.js index d012ef1..9965469 100644 --- a/jssrc/modal.js +++ b/jssrc/modal.js @@ -1,44 +1,44 @@ /** Module for toggling a modal overlay */ (function () { - var modal = {}; - var curCloseCb = null; + let modal = {} + let curCloseCb = null modal.show = function (sel, closeCb) { - var $m = $(sel); - $m.removeClass('hidden visible'); + let $m = $(sel) + $m.removeClass('hidden visible') setTimeout(function () { - $m.addClass('visible'); - }, 1); - curCloseCb = closeCb; - }; + $m.addClass('visible') + }, 1) + curCloseCb = closeCb + } modal.hide = function (sel) { - var $m = $(sel); - $m.removeClass('visible'); + let $m = $(sel) + $m.removeClass('visible') setTimeout(function () { - $m.addClass('hidden'); - if (curCloseCb) curCloseCb(); - }, 500); // transition time - }; + $m.addClass('hidden') + if (curCloseCb) curCloseCb() + }, 500) // transition time + } modal.init = function () { // close modal by click outside the dialog $('.Modal').on('click', function () { - if ($(this).hasClass('no-close')) return; // this is a no-close modal - modal.hide(this); - }); + if ($(this).hasClass('no-close')) return // this is a no-close modal + modal.hide(this) + }) $('.Dialog').on('click', function (e) { - e.stopImmediatePropagation(); - }); + e.stopImmediatePropagation() + }) // Hide all modals on esc $(window).on('keydown', function (e) { if (e.which == 27) { - modal.hide('.Modal'); + modal.hide('.Modal') } - }); - }; + }) + } - window.Modal = modal; -})(); + window.Modal = modal +})() diff --git a/jssrc/notif.js b/jssrc/notif.js index ca19224..a3ea609 100644 --- a/jssrc/notif.js +++ b/jssrc/notif.js @@ -1,32 +1,35 @@ -(function (nt) { - var sel = '#notif'; +window.Notify = 
(function () { + let nt = {} + const sel = '#notif' - var hideTmeo1; // timeout to start hiding (transition) - var hideTmeo2; // timeout to add the hidden class + let hideTmeo1 // timeout to start hiding (transition) + let hideTmeo2 // timeout to add the hidden class nt.show = function (message, timeout) { - $(sel).html(message); - Modal.show(sel); + $(sel).html(message) + Modal.show(sel) - clearTimeout(hideTmeo1); - clearTimeout(hideTmeo2); + clearTimeout(hideTmeo1) + clearTimeout(hideTmeo2) - if (undef(timeout)) timeout = 2500; + if (undef(timeout)) timeout = 2500 - hideTmeo1 = setTimeout(nt.hide, timeout); - }; + hideTmeo1 = setTimeout(nt.hide, timeout) + } nt.hide = function () { - var $m = $(sel); - $m.removeClass('visible'); + let $m = $(sel) + $m.removeClass('visible') hideTmeo2 = setTimeout(function () { - $m.addClass('hidden'); - }, 250); // transition time - }; + $m.addClass('hidden') + }, 250) // transition time + } nt.init = function () { $(sel).on('click', function () { - nt.hide(this); - }); - }; -})(window.Notify = {}); + nt.hide(this) + }) + } + + return nt +})() diff --git a/jssrc/soft_keyboard.js b/jssrc/soft_keyboard.js index b21b9e4..006590b 100644 --- a/jssrc/soft_keyboard.js +++ b/jssrc/soft_keyboard.js @@ -1,87 +1,87 @@ $.ready(() => { - const input = qs('#softkb-input'); - if (!input) return; // abort, we're not on the terminal page + const input = qs('#softkb-input') + if (!input) return // abort, we're not on the terminal page - let keyboardOpen = false; + let keyboardOpen = false let updateInputPosition = function () { - if (!keyboardOpen) return; + if (!keyboardOpen) return - let [x, y] = Screen.gridToScreen(Screen.cursor.x, Screen.cursor.y); + let [x, y] = Screen.gridToScreen(Screen.cursor.x, Screen.cursor.y) input.style.transform = `translate(${x}px, ${y}px)` - }; + } input.addEventListener('focus', () => { - keyboardOpen = true; + keyboardOpen = true updateInputPosition() - }); - input.addEventListener('blur', () => (keyboardOpen = false)); - Screen.on('cursor-moved', updateInputPosition); + }) + input.addEventListener('blur', () => (keyboardOpen = false)) + Screen.on('cursor-moved', updateInputPosition) window.kbOpen = function openSoftKeyboard (open) { - keyboardOpen = open; - updateInputPosition(); - if (open) input.focus(); + keyboardOpen = open + updateInputPosition() + if (open) input.focus() else input.blur() - }; + } - let lastCompositionString = ''; - let compositing = false; + let lastCompositionString = '' + let compositing = false let sendInputDelta = function (newValue) { - let resend = false; + let resend = false if (newValue.length > lastCompositionString.length) { if (newValue.startsWith(lastCompositionString)) { // characters have been added at the end Input.sendString(newValue.substr(lastCompositionString.length)) - } else resend = true; + } else resend = true } else if (newValue.length < lastCompositionString.length) { if (lastCompositionString.startsWith(newValue)) { // characters have been removed at the end Input.sendString('\b'.repeat(lastCompositionString.length - newValue.length)) - } else resend = true; - } else if (newValue !== lastCompositionString) resend = true; + } else resend = true + } else if (newValue !== lastCompositionString) resend = true if (resend) { // the entire string changed; resend everything Input.sendString('\b'.repeat(lastCompositionString.length) + newValue) } - lastCompositionString = newValue; - }; + lastCompositionString = newValue + } input.addEventListener('keydown', e => { - if (e.key === 'Unidentified') 
return; + if (e.key === 'Unidentified') return - e.preventDefault(); - input.value = ''; + e.preventDefault() + input.value = '' - if (e.key === 'Backspace') Input.sendString('\b'); + if (e.key === 'Backspace') Input.sendString('\b') else if (e.key === 'Enter') Input.sendString('\x0d') - }); + }) input.addEventListener('input', e => { - e.stopPropagation(); + e.stopPropagation() if (e.isComposing) { - sendInputDelta(e.data); + sendInputDelta(e.data) } else { - if (e.data) Input.sendString(e.data); + if (e.data) Input.sendString(e.data) else if (e.inputType === 'deleteContentBackward') { - lastCompositionString = ''; - sendInputDelta(''); + lastCompositionString = '' + sendInputDelta('') } } - }); + }) input.addEventListener('compositionstart', e => { - lastCompositionString = ''; - compositing = true; - }); + lastCompositionString = '' + compositing = true + }) input.addEventListener('compositionend', e => { - lastCompositionString = ''; - compositing = false; - input.value = ''; - }); + lastCompositionString = '' + compositing = false + input.value = '' + }) Screen.on('open-soft-keyboard', () => input.focus()) -}); +}) diff --git a/jssrc/td/WebAudio.d.ts b/jssrc/td/WebAudio.d.ts new file mode 100644 index 0000000..b923676 --- /dev/null +++ b/jssrc/td/WebAudio.d.ts @@ -0,0 +1,1144 @@ +// Type definitions for Web Audio API +// Project: http://www.w3.org/TR/webaudio/ +// Definitions by: Baruch Berger , Kon +// Definitions: https://github.com/borisyankov/DefinitelyTyped + +/** + * This interface represents a set of AudioNode objects and their connections. It allows for arbitrary routing of signals to the AudioDestinationNode (what the user ultimately hears). Nodes are created from the context and are then connected together. In most use cases, only a single AudioContext is used per document. An AudioContext is constructed as follows: + * + * var context = new AudioContext(); + */ +interface AudioContext { + /** + * An AudioDestinationNode with a single input representing the final destination for all audio (to be rendered to the audio hardware). All AudioNodes actively rendering audio will directly or indirectly connect to destination. + */ + destination: AudioDestinationNode; + + /** + * The sample rate (in sample-frames per second) at which the AudioContext handles audio. It is assumed that all AudioNodes in the context run at this rate. In making this assumption, sample-rate converters or "varispeed" processors are not supported in real-time processing. + */ + sampleRate: number; + + /** + * This is a time in seconds which starts at zero when the context is created and increases in real-time. All scheduled times are relative to it. This is not a "transport" time which can be started, paused, and re-positioned. It is always moving forward. A GarageBand-like timeline transport system can be very easily built on top of this (in JavaScript). This time corresponds to an ever-increasing hardware timestamp. + */ + currentTime: number; + + /** + * An AudioListener which is used for 3D spatialization. + */ + listener: AudioListener; + + /** + * The number of AudioBufferSourceNodes that are currently playing. + */ + activeSourceCount: number; + + /** + * Creates an AudioBuffer of the given size. The audio data in the buffer will be zero-initialized (silent). An exception will be thrown if the numberOfChannels or sampleRate are out-of-bounds. + * @param numberOfChannels how many channels the buffer will have. An implementation must support at least 32 channels. 
+ * @param length the size of the buffer in sample-frames. + * @param sampleRate the sample-rate of the linear PCM audio data in the buffer in sample-frames per second. An implementation must support sample-rates in at least the range 22050 to 96000. + */ + createBuffer(numberOfChannels: number, length: number, sampleRate: number): AudioBuffer; + + /** + * Creates an AudioBuffer given the audio file data contained in the ArrayBuffer. The ArrayBuffer can, for example, be loaded from an XMLHttpRequest's response attribute after setting the responseType to "arraybuffer". Audio file data can be in any of the formats supported by the audio element. + * The following steps must be performed: + * 1. Decode the encoded buffer from the AudioBuffer into linear PCM. If a decoding error is encountered due to the audio format not being recognized or supported, or because of corrupted/unexpected/inconsistent data then return NULL (and these steps will be terminated). + * 2. If mixToMono is true, then mixdown the decoded linear PCM data to mono. + * 3. Take the decoded (possibly mixed-down) linear PCM audio data, and resample it to the sample-rate of the AudioContext if it is different from the sample-rate of buffer. The final result will be stored in an AudioBuffer and returned as the result of this method. + * @param buffer the audio file data (for example from a .wav file). + * @param mixToMono if a mixdown to mono will be performed. Normally, this would not be set. + */ + createBuffer(buffer: ArrayBuffer, mixToMono: boolean): AudioBuffer; + + /** + * Asynchronously decodes the audio file data contained in the ArrayBuffer. The ArrayBuffer can, for example, be loaded from an XMLHttpRequest's response attribute after setting the responseType to "arraybuffer". Audio file data can be in any of the formats supported by the audio element. + * The decodeAudioData() method is preferred over the createBuffer() from ArrayBuffer method because it is asynchronous and does not block the main JavaScript thread. + * + * The following steps must be performed: + * 1. Temporarily neuter the audioData ArrayBuffer in such a way that JavaScript code may not access or modify the data. + * 2. Queue a decoding operation to be performed on another thread. + * 3. The decoding thread will attempt to decode the encoded audioData into linear PCM. If a decoding error is encountered due to the audio format not being recognized or supported, or because of corrupted/unexpected/inconsistent data then the audioData neutered state will be restored to normal and the errorCallback will be scheduled to run on the main thread's event loop and these steps will be terminated. + * 4. The decoding thread will take the result, representing the decoded linear PCM audio data, and resample it to the sample-rate of the AudioContext if it is different from the sample-rate of audioData. The final result (after possibly sample-rate converting) will be stored in an AudioBuffer. + * 5. The audioData neutered state will be restored to normal + * 6. The successCallback function will be scheduled to run on the main thread's event loop given the AudioBuffer from step (4) as an argument. + * + * @param ArrayBuffer containing audio file data. + * @param callback function which will be invoked when the decoding is finished. The single argument to this callback is an AudioBuffer representing the decoded PCM audio data. + * @param callback function which will be invoked if there is an error decoding the audio file data. 
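// A minimal sketch of the decodeAudioData() flow documented above. The URL 'sound.wav'
// is a placeholder; the two-callback signature matches these definitions (current
// browsers additionally return a Promise from decodeAudioData()).
let ctx = new AudioContext()
let xhr = new XMLHttpRequest()
xhr.open('GET', 'sound.wav')
xhr.responseType = 'arraybuffer'
xhr.onload = function () {
  ctx.decodeAudioData(xhr.response, function (audioBuffer) {
    // play the decoded buffer once through the context's destination
    let src = ctx.createBufferSource()
    src.buffer = audioBuffer
    src.connect(ctx.destination)
    src.start(0)
  }, function () {
    console.error('decoding failed')
  })
}
xhr.send()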
+ */ + decodeAudioData(audioData: ArrayBuffer, successCallback: any, errorCallback?: any): void; + + /** + * Creates an AudioBufferSourceNode. + */ + createBufferSource(): AudioBufferSourceNode; + + /** + * Creates a MediaElementAudioSourceNode given an HTMLMediaElement. As a consequence of calling this method, audio playback from the HTMLMediaElement will be re-routed into the processing graph of the AudioContext. + */ + createMediaElementSource(mediaElement: HTMLMediaElement): MediaElementAudioSourceNode; + + /** + * Creates a MediaStreamAudioSourceNode given a MediaStream. As a consequence of calling this method, audio playback from the MediaStream will be re-routed into the processing graph of the AudioContext. + */ + createMediaStreamSource(mediaStream: any): MediaStreamAudioSourceNode; + + /** + * Creates a ScriptProcessorNode for direct audio processing using JavaScript. An exception will be thrown if bufferSize or numberOfInputChannels or numberOfOutputChannels are outside the valid range. + * It is invalid for both numberOfInputChannels and numberOfOutputChannels to be zero. + * @param bufferSize the buffer size in units of sample-frames. It must be one of the following values: 256, 512, 1024, 2048, 4096, 8192, 16384. This value controls how frequently the onaudioprocess event handler is called and how many sample-frames need to be processed each call. Lower values for bufferSize will result in a lower (better) latency. Higher values will be necessary to avoid audio breakup and glitches. The value chosen must carefully balance between latency and audio quality. + * @param numberOfInputChannels (defaults to 2) the number of channels for this node's input. Values of up to 32 must be supported. + * @param numberOfOutputChannels (defaults to 2) the number of channels for this node's output. Values of up to 32 must be supported. + */ + createScriptProcessor(bufferSize: number, numberOfInputChannels?: number, numberOfOutputChannels?: number): ScriptProcessorNode; + + /** + * Creates a AnalyserNode. + */ + createAnalyser(): AnalyserNode; + + /** + * Creates a GainNode. + */ + createGain(): GainNode; + + /** + * Creates a DelayNode representing a variable delay line. The initial default delay time will be 0 seconds. + * @param maxDelayTime the maximum delay time in seconds allowed for the delay line. If specified, this value must be greater than zero and less than three minutes or a NOT_SUPPORTED_ERR exception will be thrown. + */ + createDelay(maxDelayTime?: number): DelayNode; + //createDelayNode(maxDelayTime?: number): DelayNode; + + /** + * Creates a BiquadFilterNode representing a second order filter which can be configured as one of several common filter types. + */ + createBiquadFilter(): BiquadFilterNode; + + /** + * Creates a WaveShaperNode representing a non-linear distortion. + */ + createWaveShaper(): WaveShaperNode; + + /** + * Creates an PannerNode. + */ + createPanner(): PannerNode; + + /** + * Creates a ConvolverNode. + */ + createConvolver(): ConvolverNode; + + /** + * Creates an ChannelSplitterNode representing a channel splitter. An exception will be thrown for invalid parameter values. + * @param numberOfOutputs the number of outputs. Values of up to 32 must be supported. If not specified, then 6 will be used. + */ + createChannelSplitter(numberOfOutputs?: number): ChannelSplitterNode; + + /** + * Creates an ChannelMergerNode representing a channel merger. An exception will be thrown for invalid parameter values. + * @param numberOfInputs the number of inputs. 
Values of up to 32 must be supported. If not specified, then 6 will be used. + */ + createChannelMerger(numberOfInputs?: number): ChannelMergerNode; + + /** + * Creates a DynamicsCompressorNode. + */ + createDynamicsCompressor(): DynamicsCompressorNode; + + /** + * Creates an OscillatorNode. + */ + createOscillator(): OscillatorNode; + + /** + * Creates a WaveTable representing a waveform containing arbitrary harmonic content. The real and imag parameters must be of type Float32Array of equal lengths greater than zero and less than or equal to 4096 or an exception will be thrown. These parameters specify the Fourier coefficients of a Fourier series representing the partials of a periodic waveform. The created WaveTable will be used with an OscillatorNode and will represent a normalized time-domain waveform having maximum absolute peak value of 1. Another way of saying this is that the generated waveform of an OscillatorNode will have maximum peak value at 0dBFS. Conveniently, this corresponds to the full-range of the signal values used by the Web Audio API. Because the WaveTable will be normalized on creation, the real and imag parameters represent relative values. + * @param real an array of cosine terms (traditionally the A terms). In audio terminology, the first element (index 0) is the DC-offset of the periodic waveform and is usually set to zero. The second element (index 1) represents the fundamental frequency. The third element represents the first overtone, and so on. + * @param imag an array of sine terms (traditionally the B terms). The first element (index 0) should be set to zero (and will be ignored) since this term does not exist in the Fourier series. The second element (index 1) represents the fundamental frequency. The third element represents the first overtone, and so on. + */ + createWaveTable(real: any,imag: any): WaveTable; +} + +declare var AudioContext: { + new (): AudioContext; +} + +declare var webkitAudioContext: { + new (): AudioContext; +} + +interface OfflineRenderSuccessCallback{ + (renderedData: AudioBuffer): void; +} + +/** + * OfflineAudioContext is a particular type of AudioContext for rendering/mixing-down (potentially) faster than real-time. It does not render to the audio hardware, but instead renders as quickly as possible, calling a render callback function upon completion with the result provided as an AudioBuffer. It is constructed by specifying the numberOfChannels, length, and sampleRate as follows: + * + * var offlineContext = new OfflineAudioContext(unsigned long numberOfChannels, unsigned long length, float sampleRate); + */ +interface OfflineAudioContext extends AudioContext{ + startRendering(): void; + oncomplete: OfflineRenderSuccessCallback; +} + +declare var webkitOfflineAudioContext: { + new (numberOfChannels: number, length: number, sampleRate: number): OfflineAudioContext; +} + +/** + * AudioNodes are the building blocks of an AudioContext. This interface represents audio sources, the audio destination, and intermediate processing modules. These modules can be connected together to form processing graphs for rendering audio to the audio hardware. Each node can have inputs and/or outputs. An AudioSourceNode has no inputs and a single output. An AudioDestinationNode has one input and no outputs and represents the final destination to the audio hardware. Most processing nodes such as filters will have one input and one output. Each type of AudioNode differs in the details of how it processes or synthesizes audio. 
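// A small sketch of the offline rendering flow described above: render one second of a
// default oscillator tone faster than real time. The oncomplete callback here takes the
// AudioBuffer directly, as declared in these definitions; in current browsers it receives
// an OfflineAudioCompletionEvent and the result sits in its renderedBuffer property
// (OscillatorNode's own members are outside this excerpt).
let offline = new OfflineAudioContext(2, 44100, 44100) // 2 channels, 44100 frames at 44.1 kHz = 1 s
let osc = offline.createOscillator()
osc.connect(offline.destination)
osc.start(0)
offline.oncomplete = function (renderedData) {
  // renderedData is an AudioBuffer holding the mixed-down result
}
offline.startRendering()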
But, in general, AudioNodes will process its inputs (if it has any), and generate audio for its outputs (if it has any). + * + * An output may connect to one or more AudioNode inputs, thus fanout is supported. An input may be connected from one or more AudioNode outputs, thus fanin is supported. + * + * In order to handle this fanin, any AudioNode with inputs performs an up-mixing of all connections for each input: + * + * 1. Calculate N: the maximum number of channels of all the connections to the input. For example, if an input has a mono connection and a stereo connection then this number will be 2. + * 2. For each connection to the input, up-mix to N channels. + * 3. Mix together all the up-mixed streams from (2). This is a straight-forward mixing together of each of the corresponding channels from each connection. + * + * Please see Mixer Gain Structure for more informative details. + * + * For performance reasons, practical implementations will need to use block processing, with each AudioNode processing a fixed number of sample-frames of size block-size. In order to get uniform behavior across implementations, we will define this value explicitly. block-size is defined to be 128 sample-frames which corresponds to roughly 3ms at a sample-rate of 44.1KHz. + */ +interface AudioNode { + /** + * Connects the AudioNode to another AudioNode. + * + * It is possible to connect an AudioNode output to more than one input with multiple calls to connect(). Thus, "fanout" is supported. + * + * It is possible to connect an AudioNode to another AudioNode which creates a cycle. In other words, an AudioNode may connect to another AudioNode, which in turn connects back to the first AudioNode. This is allowed only if there is at least one DelayNode in the cycle or an exception will be thrown. + * + * There can only be one connection between a given output of one specific node and a given input of another specific node. Multiple connections with the same termini are ignored. For example: + * + * nodeA.connect(nodeB); + * nodeA.connect(nodeB); + * + * will have the same effect as + * + * nodeA.connect(nodeB); + * + * @param destination the AudioNode to connect to. + * @param output an index describing which output of the AudioNode from which to connect. An out-of-bound value throws an exception. + * @param input an index describing which input of the destination AudioNode to connect to. An out-of-bound value throws an exception. + */ + connect(destination: AudioNode, output?: number, input?: number): void; + + /** + * Connects the AudioNode to an AudioParam, controlling the parameter value with an audio-rate signal. + * + * It is possible to connect an AudioNode output to more than one AudioParam with multiple calls to connect(). Thus, "fanout" is supported. + * + * It is possible to connect more than one AudioNode output to a single AudioParam with multiple calls to connect(). Thus, "fanin" is supported. + * + * An AudioParam will take the rendered audio data from any AudioNode output connected to it and convert it to mono by down-mixing if it is not already mono, then mix it together with other such outputs and finally will mix with the intrinsic parameter value (the value the AudioParam would normally have without any audio connections), including any timeline changes scheduled for the parameter. + * + * There can only be one connection between a given output of one specific node and a specific AudioParam. Multiple connections with the same termini are ignored. 
For example: + * + * nodeA.connect(param); + * nodeA.connect(param); + * + * will have the same effect as + * + * nodeA.connect(param); + * + * @param destination the AudioParam to connect to. + * @param output an index describing which output of the AudioNode from which to connect. An out-of-bound value throws an exception. + */ + connect(destination: AudioParam, output?: number): void; + + /** + * Disconnects an AudioNode's output. + * @param output an index describing which output of the AudioNode to disconnect. An out-of-bound value throws an exception. + */ + disconnect(output?: number): void; + + /** + * The AudioContext which owns this AudioNode. + */ + context: AudioContext; + + /** + * The number of inputs feeding into the AudioNode. This will be 0 for an AudioSourceNode. + */ + numberOfInputs: number; + + /** + * The number of outputs coming out of the AudioNode. This will be 0 for an AudioDestinationNode. + */ + numberOfOutputs: number; +} + + +/** + * This is an abstract interface representing an audio source, an AudioNode which has no inputs and a single output: + * + * numberOfInputs : 0 + * numberOfOutputs : 1 + * + * Subclasses of AudioSourceNode will implement specific types of audio sources. + */ +interface AudioSourceNode extends AudioNode { + +} + +/** + * This is an AudioNode representing the final audio destination and is what the user will ultimately hear. It can be considered as an audio output device which is connected to speakers. All rendered audio to be heard will be routed to this node, a "terminal" node in the AudioContext's routing graph. There is only a single AudioDestinationNode per AudioContext, provided through the destination attribute of AudioContext. + * + * numberOfInputs : 1 + * numberOfOutputs : 0 + */ +interface AudioDestinationNode extends AudioNode { + /** + * The maximum number of channels that the numberOfChannels attribute can be set to. An AudioDestinationNode representing the audio hardware end-point (the normal case) can potentially output more than 2 channels of audio if the audio hardware is multi-channel. maxNumberOfChannels is the maximum number of channels that this hardware is capable of supporting. If this value is 0, then this indicates that maxNumberOfChannels may not be changed. This will be the case for an AudioDestinationNode in an OfflineAudioContext. + * @readonly + */ + maxNumberOfChannels: number; + + /** + * The number of channels of the destination's input. This value will default to 2, and may be set to any non-zero value less than or equal to maxNumberOfChannels. An exception will be thrown if this value is not within the valid range. Giving a concrete example, if the audio hardware supports 8-channel output, then we may set numberOfChannels to 8, and render 8-channels of output. + */ + numberOfChannels: number; +} + +/** + * AudioParam controls an individual aspect of an AudioNode's functioning, such as volume. The parameter can be set immediately to a particular value using the "value" attribute. Or, value changes can be scheduled to happen at very precise times (in the coordinate system of AudioContext.currentTime), for envelopes, volume fades, LFOs, filter sweeps, grain windows, etc. In this way, arbitrary timeline-based automation curves can be set on any AudioParam. Additionally, audio signals from the outputs of AudioNodes can be connected to an AudioParam, summing with the intrinsic parameter value. 
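// A routing sketch for the connect() variants documented above, assuming a fresh
// AudioContext: a tone runs through a GainNode to the destination, and a slow second
// oscillator is connected to the GainNode's gain AudioParam, where its signal sums with
// the parameter's intrinsic value (a simple tremolo). OscillatorNode members such as
// frequency and start() are outside this excerpt but part of the same API.
let ctx = new AudioContext()
let tone = ctx.createOscillator()
let amp = ctx.createGain()
tone.connect(amp)              // AudioNode output -> AudioNode input
amp.connect(ctx.destination)

let lfo = ctx.createOscillator()
let lfoDepth = ctx.createGain()
lfo.frequency.value = 4        // 4 Hz wobble
lfoDepth.gain.value = 0.5      // modulation depth
lfo.connect(lfoDepth)
lfoDepth.connect(amp.gain)     // AudioNode output -> AudioParam

tone.start(0)
lfo.start(0)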
+ * + * Some synthesis and processing AudioNodes have AudioParams as attributes whose values must be taken into account on a per-audio-sample basis. For other AudioParams, sample-accuracy is not important and the value changes can be sampled more coarsely. Each individual AudioParam will specify that it is either an a-rate parameter which means that its values must be taken into account on a per-audio-sample basis, or it is a k-rate parameter. + * + * Implementations must use block processing, with each AudioNode processing 128 sample-frames in each block. + * + * For each 128 sample-frame block, the value of a k-rate parameter must be sampled at the time of the very first sample-frame, and that value must be used for the entire block. a-rate parameters must be sampled for each sample-frame of the block. + */ +interface AudioParam { + /** + * The parameter's floating-point value. This attribute is initialized to the defaultValue. If a value is set outside the allowable range described by minValue and maxValue no exception is thrown, because these limits are just nominal and may be exceeded. If a value is set during a time when there are any automation events scheduled then it will be ignored and no exception will be thrown. + */ + value: number; + + /** + * Nominal minimum value. This attribute is informational and value may be set lower than this value. + */ + minValue: number; + + /** + * Nominal maximum value. This attribute is informational and value may be set higher than this value. + */ + maxValue: number; + + /** + * Initial value for the value attribute + */ + defaultValue: number; + + /** + * Schedules a parameter value change at the given time. + * + * If there are no more events after this SetValue event, then for t >= startTime, v(t) = value. In other words, the value will remain constant. + * + * If the next event (having time T1) after this SetValue event is not of type LinearRampToValue or ExponentialRampToValue, then, for t: startTime <= t < T1, v(t) = value. In other words, the value will remain constant during this time interval, allowing the creation of "step" functions. + * + * If the next event after this SetValue event is of type LinearRampToValue or ExponentialRampToValue then please see details below. + * + * @param value the value the parameter will change to at the given time + * @param startTime parameter is the time in the same time coordinate system as AudioContext.currentTime. + */ + setValueAtTime(value: number, startTime: number): void; + + /** + * Schedules a linear continuous change in parameter value from the previous scheduled parameter value to the given value. + * + * The value during the time interval T0 <= t < T1 (where T0 is the time of the previous event and T1 is the endTime parameter passed into this method) will be calculated as: + * + * v(t) = V0 + (V1 - V0) * ((t - T0) / (T1 - T0)) + * + * Where V0 is the value at the time T0 and V1 is the value parameter passed into this method. + * + * If there are no more events after this LinearRampToValue event then for t >= T1, v(t) = V1 + * + * @param value the value the parameter will linearly ramp to at the given time. + * @param endTime the time in the same time coordinate system as AudioContext.currentTime. + */ + linearRampToValueAtTime(value: number, time: number): void; + + /** + * Schedules an exponential continuous change in parameter value from the previous scheduled parameter value to the given value. 
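// A short automation sketch for the two methods documented above, assuming an
// AudioContext `ctx` and a GainNode `amp` already wired into the graph: pin the gain
// to 0, then ramp linearly to 1 over two seconds, i.e.
// v(t) = V0 + (V1 - V0) * ((t - T0) / (T1 - T0)) with V0 = 0, V1 = 1, T1 - T0 = 2 s.
let t0 = ctx.currentTime
amp.gain.setValueAtTime(0, t0)
amp.gain.linearRampToValueAtTime(1, t0 + 2)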
Parameters representing filter frequencies and playback rate are best changed exponentially because of the way humans perceive sound. + * + * The value during the time interval T0 <= t < T1 (where T0 is the time of the previous event and T1 is the endTime parameter passed into this method) will be calculated as: + * + * v(t) = V0 * (V1 / V0) ^ ((t - T0) / (T1 - T0)) + * + * Where V0 is the value at the time T0 and V1 is the value parameter passed into this method. + * + * If there are no more events after this ExponentialRampToValue event then for t >= T1, v(t) = V1 + * + * @param value the value the parameter will exponentially ramp to at the given time. An exception will be thrown if this value is less than or equal to 0, or if the value at the time of the previous event is less than or equal to 0. + * @param endTime the time in the same time coordinate system as AudioContext.currentTime. + */ + exponentialRampToValueAtTime(value: number, endTime: number): void; + + /** + * Start exponentially approaching the target value at the given time with a rate having the given time constant. Among other uses, this is useful for implementing the "decay" and "release" portions of an ADSR envelope. Please note that the parameter value does not immediately change to the target value at the given time, but instead gradually changes to the target value. + * + * More precisely, timeConstant is the time it takes a first-order linear continuous time-invariant system to reach the value 1 - 1/e (around 63.2%) given a step input response (transition from 0 to 1 value). + * + * During the time interval: T0 <= t < T1, where T0 is the startTime parameter and T1 represents the time of the event following this event (or infinity if there are no following events): + * + * v(t) = V1 + (V0 - V1) * exp(-(t - T0) / timeConstant) + * + * Where V0 is the initial value (the .value attribute) at T0 (the startTime parameter) and V1 is equal to the target parameter. + * + * @param target the value the parameter will start changing to at the given time. + * @param startTime the time in the same time coordinate system as AudioContext.currentTime. + * @param timeConstant the time-constant value of first-order filter (exponential) approach to the target value. The larger this value is, the slower the transition will be. + */ + setTargetValueAtTime(target: number, startTime: number, timeConstant: number): void; + + /** + * Sets an array of arbitrary parameter values starting at the given time for the given duration. The number of values will be scaled to fit into the desired duration. + * + * During the time interval: startTime <= t < startTime + duration, values will be calculated: + * + * v(t) = values[N * (t - startTime) / duration], where N is the length of the values array. + * + * After the end of the curve time interval (t >= startTime + duration), the value will remain constant at the final curve value, until there is another automation event (if any). + * + * @param values a Float32Array representing a parameter value curve. These values will apply starting at the given time and lasting for the given duration. + * @param startTime the time in the same time coordinate system as AudioContext.currentTime. + * @param duration the amount of time in seconds (after the time parameter) where values will be calculated according to the values parameter.. 
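// A sketch of the "decay/release" use case mentioned above, again assuming `ctx` and a
// GainNode `amp`: jump to full level, then let the gain fall exponentially toward 0 with
// a 0.3 s time constant. These definitions declare setTargetValueAtTime(); current
// browsers expose the same behaviour as AudioParam.setTargetAtTime().
let t0 = ctx.currentTime
amp.gain.setValueAtTime(1, t0)
amp.gain.setTargetValueAtTime(0, t0 + 0.1, 0.3) // v(t) = 0 + (1 - 0) * exp(-(t - T0) / 0.3)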
+ * + */ + setValueCurveAtTime(values: Float32Array, time: number, duration: number): void; + + /** + * Cancels all scheduled parameter changes with times greater than or equal to startTime. + * + * @param startTime the starting time at and after which any previously scheduled parameter changes will be cancelled. It is a time in the same time coordinate system as AudioContext.currentTime. + */ + cancelScheduledValues(startTime: number): void; +} + +/** + * Changing the gain of an audio signal is a fundamental operation in audio applications. The GainNode is one of the building blocks for creating mixers. This interface is an AudioNode with a single input and single output: + * + * numberOfInputs : 1 + * numberOfOutputs : 1 + * + * which multiplies the input audio signal by the (possibly time-varying) gain attribute, copying the result to the output. By default, it will take the input and pass it through to the output unchanged, which represents a constant gain change of 1. + * + * As with other AudioParams, the gain parameter represents a mapping from time (in the coordinate system of AudioContext.currentTime) to floating-point value. Every PCM audio sample in the input is multiplied by the gain parameter's value for the specific time corresponding to that audio sample. This multiplied value represents the PCM audio sample for the output. + * + * The number of channels of the output will always equal the number of channels of the input, with each channel of the input being multiplied by the gain values and being copied into the corresponding channel of the output. + * + * The implementation must make gain changes to the audio stream smoothly, without introducing noticeable clicks or glitches. This process is called "de-zippering". + */ +interface GainNode extends AudioNode { + /** + * Represents the amount of gain to apply. Its default value is 1 (no gain change). The nominal minValue is 0, but may be set negative for phase inversion. The nominal maxValue is 1, but higher values are allowed (no exception thrown).This parameter is a-rate + */ + gain: AudioParam; +} + +/** + * A delay-line is a fundamental building block in audio applications. This interface is an AudioNode with a single input and single output: + * + * numberOfInputs : 1 + * numberOfOutputs : 1 + * + * which delays the incoming audio signal by a certain amount. The default amount is 0 seconds (no delay). When the delay time is changed, the implementation must make the transition smoothly, without introducing noticeable clicks or glitches to the audio stream. + */ +interface DelayNode extends AudioNode { + /** + * An AudioParam object representing the amount of delay (in seconds) to apply. The default value (delayTime.value) is 0 (no delay). The minimum value is 0 and the maximum value is determined by the maxDelayTime argument to the AudioContext method createDelay. This parameter is k-rate + */ + delayTime: AudioParam; +} + +/** + * This interface represents a memory-resident audio asset (for one-shot sounds and other short audio clips). Its format is non-interleaved IEEE 32-bit linear PCM with a nominal range of -1 -> +1. It can contain one or more channels. It is analogous to a WebGL texture. Typically, it would be expected that the length of the PCM data would be fairly short (usually somewhat less than a minute). For longer sounds, such as music soundtracks, streaming should be used with the audio element and MediaElementAudioSourceNode. + * + * An AudioBuffer may be used by one or more AudioContexts. 
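// A small echo sketch combining the GainNode and DelayNode described above: the delay
// output feeds back into itself through a gain below 1, which is exactly the kind of
// cycle the connect() rules permit because a DelayNode sits in the loop. The oscillator
// source and its stop() call come from the live API rather than this excerpt.
let ctx = new AudioContext()
let src = ctx.createOscillator()
let delay = ctx.createDelay(1.0)     // delay line of up to 1 s
let feedback = ctx.createGain()
delay.delayTime.value = 0.25         // 250 ms between repeats
feedback.gain.value = 0.4            // each repeat at 40 % of the previous level

src.connect(ctx.destination)         // dry path
src.connect(delay)
delay.connect(feedback)
feedback.connect(delay)              // feedback cycle (legal: contains a DelayNode)
delay.connect(ctx.destination)       // wet path

src.start(0)
src.stop(ctx.currentTime + 0.2)      // a short blip, so the repeats are audible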
+ */ +interface AudioBuffer { + /** + * The sample-rate for the PCM audio data in samples per second. + * @readonly + */ + sampleRate: number; + + /** + * Length of the PCM audio data in sample-frames. + * @readonly + */ + length: number; + + /** + * Duration of the PCM audio data in seconds. + * @readonly + */ + duration: number; + + /** + * The number of discrete audio channels. + * @readonly + */ + numberOfChannels: number; + + /** + * Returns the Float32Array representing the PCM audio data for the specific channel. + * + * The channel parameter is an index representing the particular channel to get data for. An index value of 0 represents the first channel. This index value MUST be less than numberOfChannels or an exception will be thrown. + */ + getChannelData(channel: number): Float32Array; + +} + +/** + * This interface represents an audio source from an in-memory audio asset in an AudioBuffer. It generally will be used for short audio assets which require a high degree of scheduling flexibility (can playback in rhythmically perfect ways). The playback state of an AudioBufferSourceNode goes through distinct stages during its lifetime in this order: UNSCHEDULED_STATE, SCHEDULED_STATE, PLAYING_STATE, FINISHED_STATE. The start() method causes a transition from the UNSCHEDULED_STATE to SCHEDULED_STATE. Depending on the time argument passed to start(), a transition is made from the SCHEDULED_STATE to PLAYING_STATE, at which time sound is first generated. Following this, a transition from the PLAYING_STATE to FINISHED_STATE happens when either the buffer's audio data has been completely played (if the loop attribute is false), or when the stop() method has been called and the specified time has been reached. Please see more details in the start() and stop() description. Once an AudioBufferSourceNode has reached the FINISHED state it will no longer emit any sound. Thus start() and stop() may not be issued multiple times for a given AudioBufferSourceNode. + * + * numberOfInputs : 0 + * numberOfOutputs : 1 + */ +interface AudioBufferSourceNode extends AudioSourceNode { + + /** + * The playback state, initialized to UNSCHEDULED_STATE. + */ + playbackState: number; + + /** + * Represents the audio asset to be played. + */ + buffer: AudioBuffer; + + /** + * The speed at which to render the audio stream. The default playbackRate.value is 1. This parameter is a-rate + */ + playbackRate: AudioParam; + + /** + * Indicates if the audio data should play in a loop. The default value is false. + */ + loop: boolean; + + /** + * An optional value in seconds where looping should begin if the loop attribute is true. Its default value is 0, and it may usefully be set to any value between 0 and the duration of the buffer. + */ + loopStart: number; + + /** + * An optional value in seconds where looping should end if the loop attribute is true. Its default value is 0, and it may usefully be set to any value between 0 and the duration of the buffer. + */ + loopEnd: number; + + /** + * A property used to set the EventHandler for the ended event that is dispatched to AudioBufferSourceNode node types. When the playback of the buffer for an AudioBufferSourceNode is finished, an event of type Event will be dispatched to the event handler. + */ + onended: EventListener; + + /** + * Schedules a sound to playback at an exact time. + * + * @param when time (in seconds) the sound should start playing. It is in the same time coordinate system as AudioContext.currentTime. 
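// A sketch tying AudioBuffer and AudioBufferSourceNode together: build half a second of
// mono white noise via getChannelData(), then schedule it for immediate playback.
let ctx = new AudioContext()
let frames = Math.floor(ctx.sampleRate / 2)
let buf = ctx.createBuffer(1, frames, ctx.sampleRate)
let data = buf.getChannelData(0)     // Float32Array, nominal range -1 .. +1
for (let i = 0; i < frames; i++) data[i] = Math.random() * 2 - 1

let src = ctx.createBufferSource()
src.buffer = buf
src.onended = function () { console.log('noise burst finished') }
src.connect(ctx.destination)
src.start(0)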
If 0 is passed in for this value or if the value is less than currentTime, then the sound will start playing immediately. start may only be called one time and must be called before stop is called or an exception will be thrown. + * @param offset the offset time in the buffer (in seconds) where playback will begin. This parameter is optional with a default value of 0 (playing back from the beginning of the buffer). + * @param duration the duration of the portion (in seconds) to be played. This parameter is optional, with the default value equal to the total duration of the AudioBuffer minus the offset parameter. Thus if neither offset nor duration are specified then the implied duration is the total duration of the AudioBuffer. + */ + start(when: number, offset?: number, duration?: number): void; + + /** + * Schedules a sound to stop playback at an exact time. Please see deprecation section for the old method name. + * + * The when parameter describes at what time (in seconds) the sound should stop playing. It is in the same time coordinate system as AudioContext.currentTime. If 0 is passed in for this value or if the value is less than currentTime, then the sound will stop playing immediately. stop must only be called one time and only after a call to start or stop, or an exception will be thrown. + */ + stop(when: number): void; +} + +/* + * This interface represents an audio source from an audio or video element. + * + * numberOfInputs : 0 + * numberOfOutputs : 1 + */ +interface MediaElementAudioSourceNode extends AudioSourceNode { +} + +/** + * This interface is an AudioNode which can generate, process, or analyse audio directly using JavaScript. + * + * numberOfInputs : 1 + * numberOfOutputs : 1 + * + * The ScriptProcessorNode is constructed with a bufferSize which must be one of the following values: 256, 512, 1024, 2048, 4096, 8192, 16384. This value controls how frequently the onaudioprocess event handler is called and how many sample-frames need to be processed each call. Lower numbers for bufferSize will result in a lower (better) latency. Higher numbers will be necessary to avoid audio breakup and glitches. The value chosen must carefully balance between latency and audio quality. + * + * numberOfInputChannels and numberOfOutputChannels determine the number of input and output channels. It is invalid for both numberOfInputChannels and numberOfOutputChannels to be zero. + * + * var node = context.createScriptProcessor(bufferSize, numberOfInputChannels, numberOfOutputChannels); + */ +interface ScriptProcessorNode extends AudioNode { + /** + * An event listener which is called periodically for audio processing. An event of type AudioProcessingEvent will be passed to the event handler. + */ + onaudioprocess: EventListener; + + /** + * The size of the buffer (in sample-frames) which needs to be processed each time onprocessaudio is called. Legal values are (256, 512, 1024, 2048, 4096, 8192, 16384). + */ + bufferSize: number; +} + +/** + * This interface is a type of Event which is passed to the onaudioprocess event handler used by ScriptProcessorNode. + * + * The event handler processes audio from the input (if any) by accessing the audio data from the inputBuffer attribute. The audio data which is the result of the processing (or the synthesized data if there are no inputs) is then placed into the outputBuffer. + */ +interface AudioProcessingEvent extends Event { + /** + * The ScriptProcessorNode associated with this processing event. 
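// A minimal onaudioprocess sketch for the ScriptProcessorNode described above, assuming an
// AudioContext `ctx` and a source node `src` from one of the earlier sketches: halve the
// level of whatever flows through it, in 256-frame blocks, stereo in and out. The node
// only runs while it is connected into the graph.
let proc = ctx.createScriptProcessor(256, 2, 2)
proc.onaudioprocess = function (e) {
  for (let ch = 0; ch < e.outputBuffer.numberOfChannels; ch++) {
    let input = e.inputBuffer.getChannelData(ch)
    let output = e.outputBuffer.getChannelData(ch)
    for (let i = 0; i < input.length; i++) output[i] = input[i] * 0.5
  }
}
src.connect(proc)
proc.connect(ctx.destination)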
+ */ + node: ScriptProcessorNode; + + /** + * The time when the audio will be played in the same time coordinate system as AudioContext.currentTime. playbackTime allows for very tight synchronization between processing directly in JavaScript with the other events in the context's rendering graph. + */ + playbackTime: number; + + /** + * An AudioBuffer containing the input audio data. It will have a number of channels equal to the numberOfInputChannels parameter of the createScriptProcessor() method. This AudioBuffer is only valid while in the scope of the onaudioprocess function. Its values will be meaningless outside of this scope. + */ + inputBuffer: AudioBuffer; + + /** + * An AudioBuffer where the output audio data should be written. It will have a number of channels equal to the numberOfOutputChannels parameter of the createScriptProcessor() method. Script code within the scope of the onaudioprocess function is expected to modify the Float32Array arrays representing channel data in this AudioBuffer. Any script modifications to this AudioBuffer outside of this scope will not produce any audible effects. + */ + outputBuffer: AudioBuffer; +} + +declare enum PanningModelType { + /** + * A simple and efficient spatialization algorithm using equal-power panning. + */ + equalpower, + + /** + * A higher quality spatialization algorithm using a convolution with measured impulse responses from human subjects. This panning method renders stereo output. + */ + HRTF, + + /** + * An algorithm which spatializes multi-channel audio using sound field algorithms. + */ + soundfield +} + +declare enum DistanceModelType { + /** + * A linear distance model which calculates distanceGain according to: + * 1 - rolloffFactor * (distance - refDistance) / (maxDistance - refDistance) + */ + linear, + + /** + * An inverse distance model which calculates distanceGain according to: + * refDistance / (refDistance + rolloffFactor * (distance - refDistance)) + */ + inverse, + + /** + * An exponential distance model which calculates distanceGain according to: + * pow(distance / refDistance, -rolloffFactor) + */ + exponential +} + +/** + * This interface represents a processing node which positions / spatializes an incoming audio stream in three-dimensional space. The spatialization is in relation to the AudioContext's AudioListener (listener attribute). + * + * numberOfInputs : 1 + * numberOfOutputs : 1 + * + * The audio stream from the input will be either mono or stereo, depending on the connection(s) to the input. + * + * The output of this node is hard-coded to stereo (2 channels) and currently cannot be configured. + */ +interface PannerNode extends AudioNode { + /** + * Determines which spatialization algorithm will be used to position the audio in 3D space. The default is "HRTF". + */ + panningModel: PanningModelType; + + /** + * Sets the position of the audio source relative to the listener attribute. A 3D cartesian coordinate system is used. + * + * The default value is (0,0,0) + * + * @param x the x coordinates in 3D space. + * @param y the y coordinates in 3D space. + * @param z the z coordinates in 3D space. + */ + setPosition(x: number, y: number, z: number): void; + + /** + * Describes which direction the audio source is pointing in the 3D cartesian coordinate space. Depending on how directional the sound is (controlled by the cone attributes), a sound pointing away from the listener can be very quiet or completely silent. 
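 *
 * A non-normative pass-through sketch for the ScriptProcessorNode / AudioProcessingEvent
 * pair documented above. `ctx` (an AudioContext) and `source` (any upstream AudioNode)
 * are assumed; createScriptProcessor() is the factory call quoted in the interface comment:
 *
 *     const node = ctx.createScriptProcessor(1024, 1, 1)   // bufferSize, in/out channels
 *     node.onaudioprocess = (e) => {
 *       const input = e.inputBuffer.getChannelData(0)
 *       const output = e.outputBuffer.getChannelData(0)
 *       for (let i = 0; i < input.length; i++) output[i] = input[i]  // copy input to output
 *     }
 *     source.connect(node)
 *     node.connect(ctx.destination)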
+ * + * The default value is (1,0,0) + * + * @param x + * @param y + * @param z + */ + setOrientation(x: number, y: number, z: number): void; + + /** + * Sets the velocity vector of the audio source. This vector controls both the direction of travel and the speed in 3D space. This velocity relative to the listener's velocity is used to determine how much doppler shift (pitch change) to apply. The units used for this vector is meters / second and is independent of the units used for position and orientation vectors. + * + * The default value is (0,0,0) + * + * @param x a direction vector indicating direction of travel and intensity. + * @param y + * @param z + */ + setVelocity(x: number, y: number, z: number): void; + + /** + * Determines which algorithm will be used to reduce the volume of an audio source as it moves away from the listener. The default is "inverse". + */ + distanceModel: DistanceModelType; + + /** + * A reference distance for reducing volume as source move further from the listener. The default value is 1. + */ + refDistance: number; + + /** + * The maximum distance between source and listener, after which the volume will not be reduced any further. The default value is 10000. + */ + maxDistance: number; + + /** + * Describes how quickly the volume is reduced as source moves away from listener. The default value is 1. + */ + rolloffFactor: number; + + /** + * A parameter for directional audio sources, this is an angle, inside of which there will be no volume reduction. The default value is 360. + */ + coneInnerAngle: number; + + /** + * A parameter for directional audio sources, this is an angle, outside of which the volume will be reduced to a constant value of coneOuterGain. The default value is 360. + */ + coneOuterAngle: number; + + /** + * A parameter for directional audio sources, this is the amount of volume reduction outside of the coneOuterAngle. The default value is 0. + */ + coneOuterGain: number; +} + +/** + * This interface represents the position and orientation of the person listening to the audio scene. All PannerNode objects spatialize in relation to the AudioContext's listener. See this section for more details about spatialization. + */ +interface AudioListener { + /** + * A constant used to determine the amount of pitch shift to use when rendering a doppler effect. The default value is 1. + */ + dopplerFactor: number; + + /** + * The speed of sound used for calculating doppler shift. The default value is 343.3 meters / second. + */ + speedOfSound: number; + + /** + * Sets the position of the listener in a 3D cartesian coordinate space. PannerNode objects use this position relative to individual audio sources for spatialization. + * + * The default value is (0,0,0) + * + * @param x + * @param y + * @param z + */ + setPosition(x: number, y: number, z: number): void; + + /** + * Describes which direction the listener is pointing in the 3D cartesian coordinate space. Both a front vector and an up vector are provided. In simple human terms, the front vector represents which direction the person's nose is pointing. The up vector represents the direction the top of a person's head is pointing. These values are expected to be linearly independent (at right angles to each other). For normative requirements of how these values are to be interpreted, see the spatialization section. 
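 *
 * A non-normative spatialization sketch combining the PannerNode attributes and the
 * DistanceModelType formulas above. `ctx` (an AudioContext) and `source` (any AudioNode)
 * are assumed; createPanner() is the standard factory method:
 *
 *     const panner = ctx.createPanner()
 *     panner.distanceModel = 'inverse'   // refDistance / (refDistance + rolloffFactor * (distance - refDistance))
 *     panner.refDistance = 1
 *     panner.rolloffFactor = 1
 *     panner.setPosition(5, 0, 0)        // 5 units to the listener's right
 *     ctx.listener.setPosition(0, 0, 0)
 *     source.connect(panner)
 *     panner.connect(ctx.destination)
 *
 * With these numbers the inverse model gives distanceGain = 1 / (1 + 1 * (5 - 1)) = 0.2,
 * i.e. the source is attenuated to one fifth of its level at the reference distance.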
+ * + * @param x x coordinate of a front direction vector in 3D space, with the default value being 0 + * @param y y coordinate of a front direction vector in 3D space, with the default value being 0 + * @param z z coordinate of a front direction vector in 3D space, with the default value being -1 + * @param xUp x coodinate of an up direction vector in 3D space, with the default value being 0 + * @param yUp y coodinate of an up direction vector in 3D space, with the default value being 1 + * @param zUp z coodinate of an up direction vector in 3D space, with the default value being 0 + */ + setOrientation(x: number, y: number, z: number, xUp: number, yUp: number, zUp: number): void; + + /** + * Sets the velocity vector of the listener. This vector controls both the direction of travel and the speed in 3D space. This velocity relative to an audio source's velocity is used to determine how much doppler shift (pitch change) to apply. The units used for this vector is meters / second and is independent of the units used for position and orientation vectors. + * + * @param x x coordinate of a direction vector indicating direction of travel and intensity. The default value is 0 + * @param y y coordinate of a direction vector indicating direction of travel and intensity. The default value is 0 + * @param z z coordinate of a direction vector indicating direction of travel and intensity. The default value is 0 + */ + setVelocity(x: number, y: number, z: number): void; +} + + +/** + * This interface represents a processing node which applies a linear convolution effect given an impulse response. Normative requirements for multi-channel convolution matrixing are described [here](http://www.w3.org/TR/2012/WD-webaudio-20121213/#Convolution-reverb-effect). + * + * numberOfInputs : 1 + * numberOfOutputs : 1 + */ +interface ConvolverNode extends AudioNode { + /** + * A mono, stereo, or 4-channel AudioBuffer containing the (possibly multi-channel) impulse response used by the ConvolverNode. At the time when this attribute is set, the buffer and the state of the normalize attribute will be used to configure the ConvolverNode with this impulse response having the given normalization. + */ + buffer: AudioBuffer; + + /** + * Controls whether the impulse response from the buffer will be scaled by an equal-power normalization when the buffer atttribute is set. Its default value is true in order to achieve a more uniform output level from the convolver when loaded with diverse impulse responses. If normalize is set to false, then the convolution will be rendered with no pre-processing/scaling of the impulse response. Changes to this value do not take effect until the next time the buffer attribute is set. + */ + normalize: boolean; +} + +/** + * This interface represents a node which is able to provide real-time frequency and time-domain analysis information. The audio stream will be passed un-processed from input to output. + * + * numberOfInputs : 1 + * numberOfOutputs : 1 Note that this output may be left unconnected. + */ +interface AnalyserNode extends AudioNode { + /** + * Copies the current frequency data into the passed floating-point array. If the array has fewer elements than the frequencyBinCount, the excess elements will be dropped. + * @param array where frequency-domain analysis data will be copied. + */ + getFloatFrequencyData(array: any): void; + + /** + * Copies the current frequency data into the passed unsigned byte array. 
If the array has fewer elements than the frequencyBinCount, the excess elements will be dropped. + * @param Tarray where frequency-domain analysis data will be copied. + */ + getByteFrequencyData(array: any): void; + + /** + * Copies the current time-domain (waveform) data into the passed unsigned byte array. If the array has fewer elements than the frequencyBinCount, the excess elements will be dropped. + * @param array where time-domain analysis data will be copied. + */ + getByteTimeDomainData(array: any): void; + + /** + * The size of the FFT used for frequency-domain analysis. This must be a power of two. + */ + fftSize: number; + + /** + * Half the FFT size. + */ + frequencyBinCount: number; + + /** + * The minimum power value in the scaling range for the FFT analysis data for conversion to unsigned byte values. + */ + minDecibels: number; + + /** + * The maximum power value in the scaling range for the FFT analysis data for conversion to unsigned byte values. + */ + maxDecibels: number; + + /** + * A value from 0 -> 1 where 0 represents no time averaging with the last analysis frame. + */ + smoothingTimeConstant: number; +} + +/** + * The ChannelSplitterNode is for use in more advanced applications and would often be used in conjunction with ChannelMergerNode. + * + * numberOfInputs : 1 + * numberOfOutputs : Variable N (defaults to 6) // number of "active" (non-silent) outputs is determined by number of channels in the input + */ +interface ChannelSplitterNode extends AudioNode { +} + +/** + * The ChannelMergerNode is for use in more advanced applications and would often be used in conjunction with ChannelSplitterNode. + * + * numberOfInputs : Variable N (default to 6) // number of connected inputs may be less than this + * numberOfOutputs : 1 + */ +interface ChannelMergerNode extends AudioNode { +} + +/** + * DynamicsCompressorNode is an AudioNode processor implementing a dynamics compression effect. + * + * Dynamics compression is very commonly used in musical production and game audio. It lowers the volume of the loudest parts of the signal and raises the volume of the softest parts. Overall, a louder, richer, and fuller sound can be achieved. It is especially important in games and musical applications where large numbers of individual sounds are played simultaneous to control the overall signal level and help avoid clipping (distorting) the audio output to the speakers. + * + * numberOfInputs : 1 + * numberOfOutputs : 1 + */ +interface DynamicsCompressorNode extends AudioNode { + /** + * The decibel value above which the compression will start taking effect. Its default value is -24, with a nominal range of -100 to 0. + */ + threshold: AudioParam; + + /** + * A decibel value representing the range above the threshold where the curve smoothly transitions to the "ratio" portion. Its default value is 30, with a nominal range of 0 to 40. + */ + knee: AudioParam; + + /** + * The amount of dB change in input for a 1 dB change in output. Its default value is 12, with a nominal range of 1 to 20. + */ + ratio: AudioParam; + + /** + * A read-only decibel value for metering purposes, representing the current amount of gain reduction that the compressor is applying to the signal. If fed no signal the value will be 0 (no gain reduction). The nominal range is -20 to 0. + */ + reduction: AudioParam; + + /** + * The amount of time (in seconds) to reduce the gain by 10dB. Its default value is 0.003, with a nominal range of 0 to 1. 
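 *
 * A non-normative sketch for the AnalyserNode and DynamicsCompressorNode attributes above.
 * `ctx` (an AudioContext) and `source` (any AudioNode) are assumed; createAnalyser() and
 * createDynamicsCompressor() are the standard factory methods:
 *
 *     const analyser = ctx.createAnalyser()
 *     analyser.fftSize = 2048                         // frequencyBinCount becomes 1024 (half the FFT size)
 *     const bins = new Uint8Array(analyser.frequencyBinCount)
 *     source.connect(analyser)
 *     analyser.getByteFrequencyData(bins)             // fills `bins` with the current spectrum
 *
 *     const comp = ctx.createDynamicsCompressor()
 *     comp.threshold.value = -24                      // dB above which compression starts
 *     comp.ratio.value = 12                           // 12 dB of input change per 1 dB of output change
 *     comp.attack.value = 0.003                       // seconds to reduce the gain by 10 dB
 *     source.connect(comp)
 *     comp.connect(ctx.destination)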
+ */ + attack: AudioParam; + + /** + * The amount of time (in seconds) to increase the gain by 10dB. Its default value is 0.250, with a nominal range of 0 to 1. + */ + release: AudioParam; + +} + +declare enum BiquadFilterType { + /** + * A lowpass filter allows frequencies below the cutoff frequency to pass through and attenuates frequencies above the cutoff. It implements a standard second-order resonant lowpass filter with 12dB/octave rolloff. + * + * ## frequency + * The cutoff frequency + * ## Q + * Controls how peaked the response will be at the cutoff frequency. A large value makes the response more peaked. Please note that for this filter type, this value is not a traditional Q, but is a resonance value in decibels. + * ## gain + * Not used in this filter type + */ + lowpass, + + /** + * A highpass filter is the opposite of a lowpass filter. Frequencies above the cutoff frequency are passed through, but frequencies below the cutoff are attenuated. It implements a standard second-order resonant highpass filter with 12dB/octave rolloff. + * + * ## frequency + * The cutoff frequency below which the frequencies are attenuated + * ## Q + * Controls how peaked the response will be at the cutoff frequency. A large value makes the response more peaked. Please note that for this filter type, this value is not a traditional Q, but is a resonance value in decibels. + * ## gain + * Not used in this filter type + */ + highpass, + + /** + * A bandpass filter allows a range of frequencies to pass through and attenuates the frequencies below and above this frequency range. It implements a second-order bandpass filter. + * + * ## frequency + * The center of the frequency band + * ## Q + * Controls the width of the band. The width becomes narrower as the Q value increases. + * ## gain + * Not used in this filter type + */ + bandpass, + + /** + * The lowshelf filter allows all frequencies through, but adds a boost (or attenuation) to the lower frequencies. It implements a second-order lowshelf filter. + * + * ## frequency + * The upper limit of the frequences where the boost (or attenuation) is applied. + * ## Q + * Not used in this filter type. + * ## gain + * The boost, in dB, to be applied. If the value is negative, the frequencies are attenuated. + */ + lowshelf, + + /** + * The highshelf filter is the opposite of the lowshelf filter and allows all frequencies through, but adds a boost to the higher frequencies. It implements a second-order highshelf filter + * + * ## frequency + * The lower limit of the frequences where the boost (or attenuation) is applied. + * ## Q + * Not used in this filter type. + * ## gain + * The boost, in dB, to be applied. If the value is negative, the frequencies are attenuated. + */ + highshelf, + + /** + * The peaking filter allows all frequencies through, but adds a boost (or attenuation) to a range of frequencies. + * + * ## frequency + * The center frequency of where the boost is applied. + * ## Q + * Controls the width of the band of frequencies that are boosted. A large value implies a narrow width. + * ## gain + * The boost, in dB, to be applied. If the value is negative, the frequencies are attenuated. + */ + peaking, + + /** + * The notch filter (also known as a band-stop or band-rejection filter) is the opposite of a bandpass filter. It allows all frequencies through, except for a set of frequencies. + * + * ## frequency + * The center frequency of where the notch is applied. + * ## Q + * Controls the width of the band of frequencies that are attenuated. 
A large value implies a narrow width. + * ## gain + * Not used in this filter type. + */ + notch, + + /** + * An allpass filter allows all frequencies through, but changes the phase relationship between the various frequencies. It implements a second-order allpass filter + * + * ## frequency + * The frequency where the center of the phase transition occurs. Viewed another way, this is the frequency with maximal group delay. + * ## Q + * Controls how sharp the phase transition is at the center frequency. A larger value implies a sharper transition and a larger group delay. + * ## gain + * Not used in this filter type. + */ + allpass +} + +/** + * BiquadFilterNode is an AudioNode processor implementing very common low-order filters. + * + * Low-order filters are the building blocks of basic tone controls (bass, mid, treble), graphic equalizers, and more advanced filters. Multiple BiquadFilterNode filters can be combined to form more complex filters. The filter parameters such as "frequency" can be changed over time for filter sweeps, etc. Each BiquadFilterNode can be configured as one of a number of common filter types as shown in the IDL below. The default filter type is "lowpass" + * + * numberOfInputs : 1 + * numberOfOutputs : 1 + * + * The filter types are briefly described below. We note that all of these filters are very commonly used in audio processing. In terms of implementation, they have all been derived from standard analog filter prototypes. For more technical details, we refer the reader to the excellent reference by Robert Bristow-Johnson. + * + * All parameters are k-rate with the following default parameter values: + * + * ## frequency + * 350Hz, with a nominal range of 10 to the Nyquist frequency (half the sample-rate). + * ## Q + * 1, with a nominal range of 0.0001 to 1000. + * ## gain + * 0, with a nominal range of -40 to 40. + */ +interface BiquadFilterNode extends AudioNode { + + type: BiquadFilterType; + frequency: AudioParam; + Q: AudioParam; + gain: AudioParam; + + /** + * Given the current filter parameter settings, calculates the frequency response for the specified frequencies. + * @param frequencyHz an array of frequencies at which the response values will be calculated. + * @param magResponse an output array receiving the linear magnitude response values. + * @param phaseResponse an output array receiving the phase response values in radians. + */ + getFrequencyResponse(frequencyHz: any, magResponse: any, phaseResponse: any): void; +} + +/** + * WaveShaperNode is an AudioNode processor implementing non-linear distortion effects. + * + * Non-linear waveshaping distortion is commonly used for both subtle non-linear warming, or more obvious distortion effects. Arbitrary non-linear shaping curves may be specified. + * + * numberOfInputs : 1 + * numberOfOutputs : 1 + */ +interface WaveShaperNode extends AudioNode { + /** + * The shaping curve used for the waveshaping effect. The input signal is nominally within the range -1 -> +1. Each input sample within this range will index into the shaping curve with a signal level of zero corresponding to the center value of the curve array. Any sample value less than -1 will correspond to the first value in the curve array. Any sample value less greater than +1 will correspond to the last value in the curve array. + */ + curve: Float32Array; +} + +declare enum OscillatorType { + sine, + square, + sawtooth, + triangle, + custom +} + +/** + * OscillatorNode represents an audio source generating a periodic waveform. 
It can be set to a few commonly used waveforms. Additionally, it can be set to an arbitrary periodic waveform through the use of a WaveTable object. + * + * Oscillators are common foundational building blocks in audio synthesis. An OscillatorNode will start emitting sound at the time specified by the start() method. + * + * Mathematically speaking, a continuous-time periodic waveform can have very high (or infinitely high) frequency information when considered in the frequency domain. When this waveform is sampled as a discrete-time digital audio signal at a particular sample-rate, then care must be taken to discard (filter out) the high-frequency information higher than the Nyquist frequency (half the sample-rate) before converting the waveform to a digital form. If this is not done, then aliasing of higher frequencies (than the Nyquist frequency) will fold back as mirror images into frequencies lower than the Nyquist frequency. In many cases this will cause audibly objectionable artifacts. This is a basic and well understood principle of audio DSP. + * + * There are several practical approaches that an implementation may take to avoid this aliasing. But regardless of approach, the idealized discrete-time digital audio signal is well defined mathematically. The trade-off for the implementation is a matter of implementation cost (in terms of CPU usage) versus fidelity to achieving this ideal. + * + * It is expected that an implementation will take some care in achieving this ideal, but it is reasonable to consider lower-quality, less-costly approaches on lower-end hardware. + * + * Both .frequency and .detune are a-rate parameters and are used together to determine a computedFrequency value: + * + * computedFrequency(t) = frequency(t) * pow(2, detune(t) / 1200) + * + * The OscillatorNode's instantaneous phase at each time is the time integral of computedFrequency. + * + * numberOfInputs : 0 + * numberOfOutputs : 1 (mono output) + */ +interface OscillatorNode extends AudioSourceNode { + /** + * The shape of the periodic waveform. It may directly be set to any of the type constant values except for "custom". The setWaveTable() method can be used to set a custom waveform, which results in this attribute being set to "custom". The default value is "sine". + */ + type: OscillatorType; + + /** + * defined as in AudioBufferSourceNode. + * @readonly + */ + playbackState: number; + + /** + * The frequency (in Hertz) of the periodic waveform. This parameter is a-rate + * @readonly + */ + frequency: AudioParam; + + /** + * A detuning value (in Cents) which will offset the frequency by the given amount. This parameter is a-rate + */ + detune: AudioParam; // in Cents + + /** + * defined as in AudioBufferSourceNode. + */ + start(when: number): void; + + /** + * defined as in AudioBufferSourceNode. + */ + stop(when: number): void; + + /** + * Sets an arbitrary custom periodic waveform given a WaveTable. + */ + setWaveTable(waveTable: WaveTable): void; +} + +/** + * WaveTable represents an arbitrary periodic waveform to be used with an OscillatorNode. Please see createWaveTable() and setWaveTable() and for more details. + */ +interface WaveTable { +} + +/** + * This interface represents an audio source from a MediaStream. The first AudioMediaStreamTrack from the MediaStream will be used as a source of audio. 
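 *
 * A non-normative sketch of the OscillatorNode declared above, fed through a BiquadFilterNode,
 * including the computedFrequency relation. `ctx` (an AudioContext) is assumed; createOscillator()
 * and createBiquadFilter() are the standard factory methods:
 *
 *     const osc = ctx.createOscillator()
 *     osc.type = 'sine'
 *     osc.frequency.value = 440          // Hz
 *     osc.detune.value = 1200            // cents; +1200 cents is one octave up, so
 *                                        // computedFrequency = 440 * pow(2, 1200 / 1200) = 880 Hz
 *     const filter = ctx.createBiquadFilter()
 *     filter.type = 'lowpass'            // 12 dB/octave rolloff above the cutoff
 *     filter.frequency.value = 350       // the default cutoff noted above
 *     osc.connect(filter)
 *     filter.connect(ctx.destination)
 *     osc.start(ctx.currentTime)
 *     osc.stop(ctx.currentTime + 1)      // play for one second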
+ * + * numberOfInputs : 0 + * numberOfOutputs : 1 + */ +interface MediaStreamAudioSourceNode extends AudioSourceNode { +} diff --git a/jssrc/term.js b/jssrc/term.js index 2211c60..dd2a51a 100644 --- a/jssrc/term.js +++ b/jssrc/term.js @@ -1,6 +1,6 @@ /** Init the terminal sub-module - called from HTML */ window.termInit = function () { - Conn.init(); - Input.init(); - TermUpl.init(); -}; + Conn.init() + Input.init() + TermUpl.init() +} diff --git a/jssrc/term_conn.js b/jssrc/term_conn.js index 1de154f..b4255d5 100644 --- a/jssrc/term_conn.js +++ b/jssrc/term_conn.js @@ -1,31 +1,31 @@ /** Handle connections */ -var Conn = (function () { - var ws; - var heartbeatTout; - var pingIv; - var xoff = false; - var autoXoffTout; - var reconTout; +window.Conn = (function () { + let ws + let heartbeatTout + let pingIv + let xoff = false + let autoXoffTout + let reconTout - var pageShown = false; + let pageShown = false - function onOpen(evt) { - console.log("CONNECTED"); - heartbeat(); - doSend("i"); + function onOpen (evt) { + console.log('CONNECTED') + heartbeat() + doSend('i') } - function onClose(evt) { - console.warn("SOCKET CLOSED, code " + evt.code + ". Reconnecting..."); - clearTimeout(reconTout); + function onClose (evt) { + console.warn('SOCKET CLOSED, code ' + evt.code + '. Reconnecting...') + clearTimeout(reconTout) reconTout = setTimeout(function () { - init(); - }, 2000); + init() + }, 2000) // this happens when the buffer gets fucked up via invalid unicode. // we basically use polling instead of socket then } - function onMessage(evt) { + function onMessage (evt) { try { // . = heartbeat switch (evt.data.charAt(0)) { @@ -33,104 +33,104 @@ var Conn = (function () { case 'T': case 'S': case 'G': - Screen.load(evt.data); - if(!pageShown) { - showPage(); - pageShown = true; + Screen.load(evt.data) + if (!pageShown) { + showPage() + pageShown = true } - break; + break case '-': - //console.log('xoff'); - xoff = true; + // console.log('xoff'); + xoff = true autoXoffTout = setTimeout(function () { - xoff = false; - }, 250); - break; + xoff = false + }, 250) + break case '+': - //console.log('xon'); - xoff = false; - clearTimeout(autoXoffTout); - break; + // console.log('xon'); + xoff = false + clearTimeout(autoXoffTout) + break } - heartbeat(); + heartbeat() } catch (e) { - console.error(e); + console.error(e) } } - function canSend() { - return !xoff; + function canSend () { + return !xoff } - function doSend(message) { + function doSend (message) { if (_demo) { - console.log("TX: ", message); - return true; // Simulate success + console.log('TX: ', message) + return true // Simulate success } if (xoff) { // TODO queue - console.log("Can't send, flood control."); - return false; + console.log("Can't send, flood control.") + return false } - if (!ws) return false; // for dry testing + if (!ws) return false // for dry testing if (ws.readyState != 1) { - console.error("Socket not ready"); - return false; + console.error('Socket not ready') + return false } - if (typeof message != "string") { - message = JSON.stringify(message); + if (typeof message != 'string') { + message = JSON.stringify(message) } - ws.send(message); - return true; + ws.send(message) + return true } - function init() { + function init () { if (_demo) { - console.log("Demo mode!"); - Screen.load(_demo_screen); - showPage(); - return; + console.log('Demo mode!') + Screen.load(_demo_screen) + showPage() + return } - clearTimeout(reconTout); - clearTimeout(heartbeatTout); + clearTimeout(reconTout) + clearTimeout(heartbeatTout) - 
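// How the pieces of this module fit together — a non-normative sketch ('ls -l\r' is only
// example input; the 's' prefix is the literal-text frame used by Input.sendString below):
//
//   Conn.init()                      // opens ws://<host>/term/update.ws and starts the heartbeat
//   if (Conn.canSend()) {            // false while the server has sent '-' (XOFF)
//     Conn.send('s' + 'ls -l\r')     // non-strings are JSON.stringify'd before sending
//   }
//
// Every server frame ('T'/'S'/'G' screen updates, '.', '-', '+') re-arms the 2 s heartbeat;
// if it lapses, the client polls /system/ping once a second and reloads when the server answers.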
ws = new WebSocket("ws://" + _root + "/term/update.ws"); - ws.onopen = onOpen; - ws.onclose = onClose; - ws.onmessage = onMessage; - console.log("Opening socket."); - heartbeat(); + ws = new WebSocket('ws://' + _root + '/term/update.ws') + ws.onopen = onOpen + ws.onclose = onClose + ws.onmessage = onMessage + console.log('Opening socket.') + heartbeat() } - function heartbeat() { - clearTimeout(heartbeatTout); - heartbeatTout = setTimeout(heartbeatFail, 2000); + function heartbeat () { + clearTimeout(heartbeatTout) + heartbeatTout = setTimeout(heartbeatFail, 2000) } - function heartbeatFail() { - console.error("Heartbeat lost, probing server..."); + function heartbeatFail () { + console.error('Heartbeat lost, probing server...') pingIv = setInterval(function () { - console.log("> ping"); + console.log('> ping') $.get('http://' + _root + '/system/ping', function (resp, status) { if (status == 200) { - clearInterval(pingIv); - console.info("Server ready, reloading page..."); - location.reload(); + clearInterval(pingIv) + console.info('Server ready, reloading page...') + location.reload() } }, { - timeout: 100, - }); - }, 1000); + timeout: 100 + }) + }, 1000) } return { ws: null, init: init, send: doSend, - canSend: canSend, // check flood control - }; -})(); + canSend: canSend // check flood control + } +})() diff --git a/jssrc/term_input.js b/jssrc/term_input.js index 3fee7f0..31141ce 100644 --- a/jssrc/term_input.js +++ b/jssrc/term_input.js @@ -14,253 +14,252 @@ * r - mb release * m - mouse move */ -var Input = (function() { - var opts = { - np_alt: false, - cu_alt: false, - fn_alt: false, - mt_click: false, - mt_move: false, - no_keys: false, - crlf_mode: false, - }; +window.Input = (function () { + let opts = { + np_alt: false, + cu_alt: false, + fn_alt: false, + mt_click: false, + mt_move: false, + no_keys: false, + crlf_mode: false + } - /** Send a literal message */ - function sendStrMsg(str) { - return Conn.send("s"+str); - } + /** Send a literal message */ + function sendStrMsg (str) { + return Conn.send('s' + str) + } - /** Send a button event */ - function sendBtnMsg(n) { - Conn.send("b"+Chr(n)); - } + /** Send a button event */ + function sendBtnMsg (n) { + Conn.send('b' + Chr(n)) + } - /** Fn alt choice for key message */ - function fa(alt, normal) { - return opts.fn_alt ? alt : normal; - } + /** Fn alt choice for key message */ + function fa (alt, normal) { + return opts.fn_alt ? alt : normal + } - /** Cursor alt choice for key message */ - function ca(alt, normal) { - return opts.cu_alt ? alt : normal; - } + /** Cursor alt choice for key message */ + function ca (alt, normal) { + return opts.cu_alt ? alt : normal + } - /** Numpad alt choice for key message */ - function na(alt, normal) { - return opts.np_alt ? alt : normal; - } + /** Numpad alt choice for key message */ + function na (alt, normal) { + return opts.np_alt ? alt : normal + } - function _bindFnKeys() { - var keymap = { - 'tab': '\x09', - 'backspace': '\x08', - 'enter': opts.crlf_mode ? 
'\x0d\x0a' : '\x0d', - 'ctrl+enter': '\x0a', - 'esc': '\x1b', - 'up': ca('\x1bOA', '\x1b[A'), - 'down': ca('\x1bOB', '\x1b[B'), - 'right': ca('\x1bOC', '\x1b[C'), - 'left': ca('\x1bOD', '\x1b[D'), - 'home': ca('\x1bOH', fa('\x1b[H', '\x1b[1~')), - 'insert': '\x1b[2~', - 'delete': '\x1b[3~', - 'end': ca('\x1bOF', fa('\x1b[F', '\x1b[4~')), - 'pageup': '\x1b[5~', - 'pagedown': '\x1b[6~', - 'f1': fa('\x1bOP', '\x1b[11~'), - 'f2': fa('\x1bOQ', '\x1b[12~'), - 'f3': fa('\x1bOR', '\x1b[13~'), - 'f4': fa('\x1bOS', '\x1b[14~'), - 'f5': '\x1b[15~', // note the disconnect - 'f6': '\x1b[17~', - 'f7': '\x1b[18~', - 'f8': '\x1b[19~', - 'f9': '\x1b[20~', - 'f10': '\x1b[21~', // note the disconnect - 'f11': '\x1b[23~', - 'f12': '\x1b[24~', - 'shift+f1': fa('\x1bO1;2P', '\x1b[25~'), - 'shift+f2': fa('\x1bO1;2Q', '\x1b[26~'), // note the disconnect - 'shift+f3': fa('\x1bO1;2R', '\x1b[28~'), - 'shift+f4': fa('\x1bO1;2S', '\x1b[29~'), // note the disconnect - 'shift+f5': fa('\x1b[15;2~', '\x1b[31~'), - 'shift+f6': fa('\x1b[17;2~', '\x1b[32~'), - 'shift+f7': fa('\x1b[18;2~', '\x1b[33~'), - 'shift+f8': fa('\x1b[19;2~', '\x1b[34~'), - 'shift+f9': fa('\x1b[20;2~', '\x1b[35~'), // 35-38 are not standard - but what is? - 'shift+f10': fa('\x1b[21;2~', '\x1b[36~'), - 'shift+f11': fa('\x1b[22;2~', '\x1b[37~'), - 'shift+f12': fa('\x1b[23;2~', '\x1b[38~'), - 'np_0': na('\x1bOp', '0'), - 'np_1': na('\x1bOq', '1'), - 'np_2': na('\x1bOr', '2'), - 'np_3': na('\x1bOs', '3'), - 'np_4': na('\x1bOt', '4'), - 'np_5': na('\x1bOu', '5'), - 'np_6': na('\x1bOv', '6'), - 'np_7': na('\x1bOw', '7'), - 'np_8': na('\x1bOx', '8'), - 'np_9': na('\x1bOy', '9'), - 'np_mul': na('\x1bOR', '*'), - 'np_add': na('\x1bOl', '+'), - 'np_sub': na('\x1bOS', '-'), - 'np_point': na('\x1bOn', '.'), - 'np_div': na('\x1bOQ', '/'), - // we don't implement numlock key (should change in numpad_alt mode, but it's even more useless than the rest) - }; + function _bindFnKeys () { + const keymap = { + 'tab': '\x09', + 'backspace': '\x08', + 'enter': opts.crlf_mode ? '\x0d\x0a' : '\x0d', + 'ctrl+enter': '\x0a', + 'esc': '\x1b', + 'up': ca('\x1bOA', '\x1b[A'), + 'down': ca('\x1bOB', '\x1b[B'), + 'right': ca('\x1bOC', '\x1b[C'), + 'left': ca('\x1bOD', '\x1b[D'), + 'home': ca('\x1bOH', fa('\x1b[H', '\x1b[1~')), + 'insert': '\x1b[2~', + 'delete': '\x1b[3~', + 'end': ca('\x1bOF', fa('\x1b[F', '\x1b[4~')), + 'pageup': '\x1b[5~', + 'pagedown': '\x1b[6~', + 'f1': fa('\x1bOP', '\x1b[11~'), + 'f2': fa('\x1bOQ', '\x1b[12~'), + 'f3': fa('\x1bOR', '\x1b[13~'), + 'f4': fa('\x1bOS', '\x1b[14~'), + 'f5': '\x1b[15~', // note the disconnect + 'f6': '\x1b[17~', + 'f7': '\x1b[18~', + 'f8': '\x1b[19~', + 'f9': '\x1b[20~', + 'f10': '\x1b[21~', // note the disconnect + 'f11': '\x1b[23~', + 'f12': '\x1b[24~', + 'shift+f1': fa('\x1bO1;2P', '\x1b[25~'), + 'shift+f2': fa('\x1bO1;2Q', '\x1b[26~'), // note the disconnect + 'shift+f3': fa('\x1bO1;2R', '\x1b[28~'), + 'shift+f4': fa('\x1bO1;2S', '\x1b[29~'), // note the disconnect + 'shift+f5': fa('\x1b[15;2~', '\x1b[31~'), + 'shift+f6': fa('\x1b[17;2~', '\x1b[32~'), + 'shift+f7': fa('\x1b[18;2~', '\x1b[33~'), + 'shift+f8': fa('\x1b[19;2~', '\x1b[34~'), + 'shift+f9': fa('\x1b[20;2~', '\x1b[35~'), // 35-38 are not standard - but what is? 
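      // (illustrative note: fa() picks the first, SS3-style code when opts.fn_alt is set and the
      // CSI fallback otherwise — e.g. 'f1' sends '\x1bOP' vs '\x1b[11~'; ca() and na() make the
      // same choice for the cursor and numpad keys)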
+ 'shift+f10': fa('\x1b[21;2~', '\x1b[36~'), + 'shift+f11': fa('\x1b[22;2~', '\x1b[37~'), + 'shift+f12': fa('\x1b[23;2~', '\x1b[38~'), + 'np_0': na('\x1bOp', '0'), + 'np_1': na('\x1bOq', '1'), + 'np_2': na('\x1bOr', '2'), + 'np_3': na('\x1bOs', '3'), + 'np_4': na('\x1bOt', '4'), + 'np_5': na('\x1bOu', '5'), + 'np_6': na('\x1bOv', '6'), + 'np_7': na('\x1bOw', '7'), + 'np_8': na('\x1bOx', '8'), + 'np_9': na('\x1bOy', '9'), + 'np_mul': na('\x1bOR', '*'), + 'np_add': na('\x1bOl', '+'), + 'np_sub': na('\x1bOS', '-'), + 'np_point': na('\x1bOn', '.'), + 'np_div': na('\x1bOQ', '/') + // we don't implement numlock key (should change in numpad_alt mode, but it's even more useless than the rest) + } - for (var k in keymap) { - if (keymap.hasOwnProperty(k)) { - bind(k, keymap[k]); - } - } - } + for (let k in keymap) { + if (keymap.hasOwnProperty(k)) { + bind(k, keymap[k]) + } + } + } - /** Bind a keystroke to message */ - function bind(combo, str) { - // mac fix - allow also cmd - if (combo.indexOf('ctrl+') !== -1) { - combo += ',' + combo.replace('ctrl', 'command'); - } + /** Bind a keystroke to message */ + function bind (combo, str) { + // mac fix - allow also cmd + if (combo.indexOf('ctrl+') !== -1) { + combo += ',' + combo.replace('ctrl', 'command') + } - // unbind possible old binding - key.unbind(combo); + // unbind possible old binding + key.unbind(combo) - key(combo, function (e) { - if (opts.no_keys) return; - e.preventDefault(); - sendStrMsg(str) - }); - } + key(combo, function (e) { + if (opts.no_keys) return + e.preventDefault() + sendStrMsg(str) + }) + } - /** Bind/rebind key messages */ - function _initKeys() { - // This takes care of text characters typed - window.addEventListener('keypress', function(evt) { - if (opts.no_keys) return; - var str = ''; - if (evt.key) str = evt.key; - else if (evt.which) str = String.fromCodePoint(evt.which); - if (str.length>0 && str.charCodeAt(0) >= 32) { -// console.log("Typed ", str); - // prevent space from scrolling - if (evt.which === 32) evt.preventDefault(); - sendStrMsg(str); - } - }); + /** Bind/rebind key messages */ + function _initKeys () { + // This takes care of text characters typed + window.addEventListener('keypress', function (evt) { + if (opts.no_keys) return + let str = '' + if (evt.key) str = evt.key + else if (evt.which) str = String.fromCodePoint(evt.which) + if (str.length > 0 && str.charCodeAt(0) >= 32) { + // console.log("Typed ", str); + // prevent space from scrolling + if (evt.which === 32) evt.preventDefault() + sendStrMsg(str) + } + }) - // ctrl-letter codes are sent as simple low ASCII codes - for (var i = 1; i<=26;i++) { - bind('ctrl+' + String.fromCharCode(96+i), String.fromCharCode(i)); - } - bind('ctrl+]', '\x1b'); // alternate way to enter ESC - bind('ctrl+\\', '\x1c'); - bind('ctrl+[', '\x1d'); - bind('ctrl+^', '\x1e'); - bind('ctrl+_', '\x1f'); + // ctrl-letter codes are sent as simple low ASCII codes + for (let i = 1; i <= 26; i++) { + bind('ctrl+' + String.fromCharCode(96 + i), String.fromCharCode(i)) + } + bind('ctrl+]', '\x1b') // alternate way to enter ESC + bind('ctrl+\\', '\x1c') + bind('ctrl+[', '\x1d') + bind('ctrl+^', '\x1e') + bind('ctrl+_', '\x1f') - _bindFnKeys(); - } + _bindFnKeys() + } - // mouse button states - var mb1 = 0; - var mb2 = 0; - var mb3 = 0; + // mouse button states + let mb1 = 0 + let mb2 = 0 + let mb3 = 0 - /** Init the Input module */ - function init() { - _initKeys(); + /** Init the Input module */ + function init () { + _initKeys() - // Button presses - $('#action-buttons 
button').forEach(function(s) { - s.addEventListener('click', function() { - sendBtnMsg(+this.dataset['n']); - }); - }); + // Button presses + $('#action-buttons button').forEach(function (s) { + s.addEventListener('click', function () { + sendBtnMsg(+this.dataset['n']) + }) + }) - // global mouse state tracking - for motion reporting - window.addEventListener('mousedown', function(evt) { - if (evt.button == 0) mb1 = 1; - if (evt.button == 1) mb2 = 1; - if (evt.button == 2) mb3 = 1; - }); + // global mouse state tracking - for motion reporting + window.addEventListener('mousedown', function (evt) { + if (evt.button == 0) mb1 = 1 + if (evt.button == 1) mb2 = 1 + if (evt.button == 2) mb3 = 1 + }) - window.addEventListener('mouseup', function(evt) { - if (evt.button == 0) mb1 = 0; - if (evt.button == 1) mb2 = 0; - if (evt.button == 2) mb3 = 0; - }); - } + window.addEventListener('mouseup', function (evt) { + if (evt.button == 0) mb1 = 0 + if (evt.button == 1) mb2 = 0 + if (evt.button == 2) mb3 = 0 + }) + } - /** Prepare modifiers byte for mouse message */ - function packModifiersForMouse() { - return (key.isModifier('ctrl')?1:0) | - (key.isModifier('shift')?2:0) | - (key.isModifier('alt')?4:0) | - (key.isModifier('meta')?8:0); - } + /** Prepare modifiers byte for mouse message */ + function packModifiersForMouse () { + return (key.isModifier('ctrl') ? 1 : 0) | + (key.isModifier('shift') ? 2 : 0) | + (key.isModifier('alt') ? 4 : 0) | + (key.isModifier('meta') ? 8 : 0) + } - return { - /** Init the Input module */ - init: init, + return { + /** Init the Input module */ + init: init, - /** Send a literal string message */ - sendString: sendStrMsg, + /** Send a literal string message */ + sendString: sendStrMsg, - /** Enable alternate key modes (cursors, numpad, fn) */ - setAlts: function(cu, np, fn, crlf) { - if (opts.cu_alt != cu || opts.np_alt != np || opts.fn_alt != fn || opts.crlf_mode != crlf) { - opts.cu_alt = cu; - opts.np_alt = np; - opts.fn_alt = fn; - opts.crlf_mode = crlf; + /** Enable alternate key modes (cursors, numpad, fn) */ + setAlts: function (cu, np, fn, crlf) { + if (opts.cu_alt != cu || opts.np_alt != np || opts.fn_alt != fn || opts.crlf_mode != crlf) { + opts.cu_alt = cu + opts.np_alt = np + opts.fn_alt = fn + opts.crlf_mode = crlf - // rebind keys - codes have changed - _bindFnKeys(); - } - }, + // rebind keys - codes have changed + _bindFnKeys() + } + }, - setMouseMode: function(click, move) { - opts.mt_click = click; - opts.mt_move = move; - }, - - // Mouse events - onMouseMove: function (x, y) { - if (!opts.mt_move) return; - var b = mb1 ? 1 : mb2 ? 2 : mb3 ? 3 : 0; - var m = packModifiersForMouse(); - Conn.send("m" + encode2B(y) + encode2B(x) + encode2B(b) + encode2B(m)); - }, - onMouseDown: function (x, y, b) { - if (!opts.mt_click) return; - if (b > 3 || b < 1) return; - var m = packModifiersForMouse(); - Conn.send("p" + encode2B(y) + encode2B(x) + encode2B(b) + encode2B(m)); - // console.log("B ",b," M ",m); - }, - onMouseUp: function (x, y, b) { - if (!opts.mt_click) return; - if (b > 3 || b < 1) return; - var m = packModifiersForMouse(); - Conn.send("r" + encode2B(y) + encode2B(x) + encode2B(b) + encode2B(m)); - // console.log("B ",b," M ",m); - }, - onMouseWheel: function (x, y, dir) { - if (!opts.mt_click) return; - // -1 ... btn 4 (away from user) - // +1 ... btn 5 (towards user) - var m = packModifiersForMouse(); - var b = (dir < 0 ? 
4 : 5); - Conn.send("p" + encode2B(y) + encode2B(x) + encode2B(b) + encode2B(m)); - // console.log("B ",b," M ",m); - }, - mouseTracksClicks: function() { - return opts.mt_click; - }, - blockKeys: function(yes) { - opts.no_keys = yes; - } - }; -})(); + setMouseMode: function (click, move) { + opts.mt_click = click + opts.mt_move = move + }, + // Mouse events + onMouseMove: function (x, y) { + if (!opts.mt_move) return + const b = mb1 ? 1 : mb2 ? 2 : mb3 ? 3 : 0; + const m = packModifiersForMouse() + Conn.send('m' + encode2B(y) + encode2B(x) + encode2B(b) + encode2B(m)) + }, + onMouseDown: function (x, y, b) { + if (!opts.mt_click) return + if (b > 3 || b < 1) return + const m = packModifiersForMouse() + Conn.send('p' + encode2B(y) + encode2B(x) + encode2B(b) + encode2B(m)) + // console.log("B ",b," M ",m); + }, + onMouseUp: function (x, y, b) { + if (!opts.mt_click) return + if (b > 3 || b < 1) return + const m = packModifiersForMouse() + Conn.send('r' + encode2B(y) + encode2B(x) + encode2B(b) + encode2B(m)) + // console.log("B ",b," M ",m); + }, + onMouseWheel: function (x, y, dir) { + if (!opts.mt_click) return + // -1 ... btn 4 (away from user) + // +1 ... btn 5 (towards user) + const m = packModifiersForMouse() + const b = (dir < 0 ? 4 : 5) + Conn.send('p' + encode2B(y) + encode2B(x) + encode2B(b) + encode2B(m)) + // console.log("B ",b," M ",m); + }, + mouseTracksClicks: function () { + return opts.mt_click + }, + blockKeys: function (yes) { + opts.no_keys = yes + } + } +})() diff --git a/jssrc/term_screen.js b/jssrc/term_screen.js index 6c3db08..9331ff9 100644 --- a/jssrc/term_screen.js +++ b/jssrc/term_screen.js @@ -5,47 +5,47 @@ const frakturExceptions = { 'I': '\u2111', 'R': '\u211c', 'Z': '\u2128' -}; +} // constants for decoding the update blob -const SEQ_SET_COLOR_ATTR = 1; -const SEQ_REPEAT = 2; -const SEQ_SET_COLOR = 3; -const SEQ_SET_ATTR = 4; +const SEQ_SET_COLOR_ATTR = 1 +const SEQ_REPEAT = 2 +const SEQ_SET_COLOR = 3 +const SEQ_SET_ATTR = 4 -const SELECTION_BG = '#b2d7fe'; -const SELECTION_FG = '#333'; +const SELECTION_BG = '#b2d7fe' +const SELECTION_FG = '#333' const themes = [ [ // Tango '#111213', '#CC0000', '#4E9A06', '#C4A000', '#3465A4', '#75507B', '#06989A', '#D3D7CF', - '#555753', '#EF2929', '#8AE234', '#FCE94F', '#729FCF', '#AD7FA8', '#34E2E2', '#EEEEEC', + '#555753', '#EF2929', '#8AE234', '#FCE94F', '#729FCF', '#AD7FA8', '#34E2E2', '#EEEEEC' ], [ // Linux '#000000', '#aa0000', '#00aa00', '#aa5500', '#0000aa', '#aa00aa', '#00aaaa', '#aaaaaa', - '#555555', '#ff5555', '#55ff55', '#ffff55', '#5555ff', '#ff55ff', '#55ffff', '#ffffff', + '#555555', '#ff5555', '#55ff55', '#ffff55', '#5555ff', '#ff55ff', '#55ffff', '#ffffff' ], [ // xterm '#000000', '#cd0000', '#00cd00', '#cdcd00', '#0000ee', '#cd00cd', '#00cdcd', '#e5e5e5', - '#7f7f7f', '#ff0000', '#00ff00', '#ffff00', '#5c5cff', '#ff00ff', '#00ffff', '#ffffff', + '#7f7f7f', '#ff0000', '#00ff00', '#ffff00', '#5c5cff', '#ff00ff', '#00ffff', '#ffffff' ], [ // rxvt '#000000', '#cd0000', '#00cd00', '#cdcd00', '#0000cd', '#cd00cd', '#00cdcd', '#faebd7', - '#404040', '#ff0000', '#00ff00', '#ffff00', '#0000ff', '#ff00ff', '#00ffff', '#ffffff', + '#404040', '#ff0000', '#00ff00', '#ffff00', '#0000ff', '#ff00ff', '#00ffff', '#ffffff' ], [ // Ambience '#2e3436', '#cc0000', '#4e9a06', '#c4a000', '#3465a4', '#75507b', '#06989a', '#d3d7cf', - '#555753', '#ef2929', '#8ae234', '#fce94f', '#729fcf', '#ad7fa8', '#34e2e2', '#eeeeec', + '#555753', '#ef2929', '#8ae234', '#fce94f', '#729fcf', '#ad7fa8', '#34e2e2', '#eeeeec' ], [ // Solarized 
'#073642', '#dc322f', '#859900', '#b58900', '#268bd2', '#d33682', '#2aa198', '#eee8d5', - '#002b36', '#cb4b16', '#586e75', '#657b83', '#839496', '#6c71c4', '#93a1a1', '#fdf6e3', + '#002b36', '#cb4b16', '#586e75', '#657b83', '#839496', '#6c71c4', '#93a1a1', '#fdf6e3' ] -]; +] // 256color lookup table // should not be used to look up 0-15 (will return transparent) -const colorTable256 = new Array(16).fill('rgba(0, 0, 0, 0)'); +const colorTable256 = new Array(16).fill('rgba(0, 0, 0, 0)') { // fill color table @@ -71,8 +71,8 @@ const colorTable256 = new Array(16).fill('rgba(0, 0, 0, 0)'); class TermScreen { constructor () { - this.canvas = document.createElement('canvas'); - this.ctx = this.canvas.getContext('2d'); + this.canvas = document.createElement('canvas') + this.ctx = this.canvas.getContext('2d') if ('AudioContext' in window || 'webkitAudioContext' in window) { this.audioCtx = new (window.AudioContext || window.webkitAudioContext)() @@ -92,10 +92,10 @@ class TermScreen { hanging: false, style: 'block', blinkEnable: true, - blinkInterval: 0, - }; + blinkInterval: 0 + } - this._palette = themes[0]; + this._palette = themes[0] this._window = { width: 0, @@ -108,8 +108,8 @@ class TermScreen { blinkStyleOn: true, blinkInterval: null, fitIntoWidth: 0, - fitIntoHeight: 0, - }; + fitIntoHeight: 0 + } // properties of this.window that require updating size and redrawing this.windowState = { @@ -121,8 +121,8 @@ class TermScreen { fontFamily: '', fontSize: 0, fitIntoWidth: 0, - fitIntoHeight: 0, - }; + fitIntoHeight: 0 + } // current selection this.selection = { @@ -131,57 +131,57 @@ class TermScreen { selectable: true, start: [0, 0], - end: [0, 0], - }; + end: [0, 0] + } - this.mouseMode = { clicks: false, movement: false }; + this.mouseMode = { clicks: false, movement: false } // event listeners - this._listeners = {}; + this._listeners = {} - const self = this; + const self = this this.window = new Proxy(this._window, { set (target, key, value, receiver) { - target[key] = value; - self.scheduleSizeUpdate(); - self.scheduleDraw(); + target[key] = value + self.scheduleSizeUpdate() + self.scheduleDraw() return true } - }); + }) - this.screen = []; - this.screenFG = []; - this.screenBG = []; - this.screenAttrs = []; + this.screen = [] + this.screenFG = [] + this.screenBG = [] + this.screenAttrs = [] // used to determine if a cell should be redrawn - this.drawnScreen = []; - this.drawnScreenFG = []; - this.drawnScreenBG = []; - this.drawnScreenAttrs = []; + this.drawnScreen = [] + this.drawnScreenFG = [] + this.drawnScreenBG = [] + this.drawnScreenAttrs = [] - this.resetBlink(); - this.resetCursorBlink(); + this.resetBlink() + this.resetCursorBlink() - let selecting = false; + let selecting = false let selectStart = (x, y) => { - if (selecting) return; - selecting = true; - this.selection.start = this.selection.end = this.screenToGrid(x, y); - this.scheduleDraw(); - }; + if (selecting) return + selecting = true + this.selection.start = this.selection.end = this.screenToGrid(x, y) + this.scheduleDraw() + } let selectMove = (x, y) => { - if (!selecting) return; - this.selection.end = this.screenToGrid(x, y); - this.scheduleDraw(); - }; + if (!selecting) return + this.selection.end = this.screenToGrid(x, y) + this.scheduleDraw() + } let selectEnd = (x, y) => { - if (!selecting) return; - selecting = false; - this.selection.end = this.screenToGrid(x, y); - this.scheduleDraw(); - Object.assign(this.selection, this.getNormalizedSelection()); - }; + if (!selecting) return + selecting = false + 
this.selection.end = this.screenToGrid(x, y) + this.scheduleDraw() + Object.assign(this.selection, this.getNormalizedSelection()) + } this.canvas.addEventListener('mousedown', e => { if ((this.selection.selectable || e.altKey) && e.button === 0) { @@ -190,60 +190,60 @@ class TermScreen { Input.onMouseDown(...this.screenToGrid(e.offsetX, e.offsetY), e.button + 1) } - }); + }) window.addEventListener('mousemove', e => { selectMove(e.offsetX, e.offsetY) - }); + }) window.addEventListener('mouseup', e => { selectEnd(e.offsetX, e.offsetY) - }); + }) - let touchPosition = null; - let touchDownTime = 0; - let touchSelectMinTime = 500; - let touchDidMove = false; + let touchPosition = null + let touchDownTime = 0 + let touchSelectMinTime = 500 + let touchDidMove = false let getTouchPositionOffset = touch => { - let rect = this.canvas.getBoundingClientRect(); - return [touch.clientX - rect.left, touch.clientY - rect.top]; - }; + let rect = this.canvas.getBoundingClientRect() + return [touch.clientX - rect.left, touch.clientY - rect.top] + } this.canvas.addEventListener('touchstart', e => { - touchPosition = getTouchPositionOffset(e.touches[0]); - touchDidMove = false; - touchDownTime = Date.now(); - }); + touchPosition = getTouchPositionOffset(e.touches[0]) + touchDidMove = false + touchDownTime = Date.now() + }) this.canvas.addEventListener('touchmove', e => { - touchPosition = getTouchPositionOffset(e.touches[0]); + touchPosition = getTouchPositionOffset(e.touches[0]) if (!selecting && touchDidMove === false) { if (touchDownTime < Date.now() - touchSelectMinTime) { - selectStart(...touchPosition); + selectStart(...touchPosition) } } else if (selecting) { - e.preventDefault(); - selectMove(...touchPosition); + e.preventDefault() + selectMove(...touchPosition) } - touchDidMove = true; - }); + touchDidMove = true + }) this.canvas.addEventListener('touchend', e => { if (e.touches[0]) { - touchPosition = getTouchPositionOffset(e.touches[0]); + touchPosition = getTouchPositionOffset(e.touches[0]) } if (selecting) { - e.preventDefault(); - selectEnd(...touchPosition); + e.preventDefault() + selectEnd(...touchPosition) - let touchSelectMenu = qs('#touch-select-menu'); - touchSelectMenu.classList.add('open'); - let rect = touchSelectMenu.getBoundingClientRect(); + let touchSelectMenu = qs('#touch-select-menu') + touchSelectMenu.classList.add('open') + let rect = touchSelectMenu.getBoundingClientRect() // use middle position for x and one line above for y let selectionPos = this.gridToScreen( (this.selection.start[0] + this.selection.end[0]) / 2, this.selection.start[1] - 1 - ); - selectionPos[0] -= rect.width / 2; - selectionPos[1] -= rect.height / 2; + ) + selectionPos[0] -= rect.width / 2 + selectionPos[1] -= rect.height / 2 touchSelectMenu.style.transform = `translate(${selectionPos[0]}px, ${ selectionPos[1]}px)` } @@ -251,164 +251,164 @@ class TermScreen { if (!touchDidMove) { this.emit('tap', Object.assign(e, { x: touchPosition[0], - y: touchPosition[1], + y: touchPosition[1] })) } - touchPosition = null; - }); + touchPosition = null + }) this.on('tap', e => { if (this.selection.start[0] !== this.selection.end[0] || this.selection.start[1] !== this.selection.end[1]) { // selection is not empty // reset selection - this.selection.start = this.selection.end = [0, 0]; - qs('#touch-select-menu').classList.remove('open'); - this.scheduleDraw(); + this.selection.start = this.selection.end = [0, 0] + qs('#touch-select-menu').classList.remove('open') + this.scheduleDraw() } else { - e.preventDefault(); - 
this.emit('open-soft-keyboard'); + e.preventDefault() + this.emit('open-soft-keyboard') } - }); + }) $.ready(() => { - let copyButton = qs('#touch-select-copy-btn'); + let copyButton = qs('#touch-select-copy-btn') if (copyButton) { copyButton.addEventListener('click', () => { - this.copySelectionToClipboard(); - }); + this.copySelectionToClipboard() + }) } - }); + }) this.canvas.addEventListener('mousemove', e => { if (!selecting) { Input.onMouseMove(...this.screenToGrid(e.offsetX, e.offsetY)) } - }); + }) this.canvas.addEventListener('mouseup', e => { if (!selecting) { Input.onMouseUp(...this.screenToGrid(e.offsetX, e.offsetY), e.button + 1) } - }); + }) this.canvas.addEventListener('wheel', e => { if (this.mouseMode.clicks) { Input.onMouseWheel(...this.screenToGrid(e.offsetX, e.offsetY), - e.deltaY > 0 ? 1 : -1); + e.deltaY > 0 ? 1 : -1) // prevent page scrolling - e.preventDefault(); + e.preventDefault() } - }); + }) this.canvas.addEventListener('contextmenu', e => { if (this.mouseMode.clicks) { // prevent mouse keys getting stuck - e.preventDefault(); + e.preventDefault() } - selectEnd(e.offsetX, e.offsetY); - }); + selectEnd(e.offsetX, e.offsetY) + }) // bind ctrl+shift+c to copy key('⌃+⇧+c', e => { - e.preventDefault(); + e.preventDefault() this.copySelectionToClipboard() - }); + }) } on (event, listener) { - if (!this._listeners[event]) this._listeners[event] = []; - this._listeners[event].push({ listener }); + if (!this._listeners[event]) this._listeners[event] = [] + this._listeners[event].push({ listener }) } once (event, listener) { - if (!this._listeners[event]) this._listeners[event] = []; - this._listeners[event].push({ listener, once: true }); + if (!this._listeners[event]) this._listeners[event] = [] + this._listeners[event].push({ listener, once: true }) } off (event, listener) { - let listeners = this._listeners[event]; + let listeners = this._listeners[event] if (listeners) { for (let i in listeners) { if (listeners[i].listener === listener) { - listeners.splice(i, 1); - break; + listeners.splice(i, 1) + break } } } } emit (event, ...args) { - let listeners = this._listeners[event]; + let listeners = this._listeners[event] if (listeners) { - let remove = []; + let remove = [] for (let listener of listeners) { try { - listener.listener(...args); - if (listener.once) remove.push(listener); + listener.listener(...args) + if (listener.once) remove.push(listener) } catch (err) { - console.error(err); + console.error(err) } } // this needs to be done in this roundabout way because for loops // do not like arrays with changing lengths for (let listener of remove) { - listeners.splice(listeners.indexOf(listener), 1); + listeners.splice(listeners.indexOf(listener), 1) } } } get palette () { return this._palette } set palette (palette) { - this._palette = palette; - this.scheduleDraw(); + this._palette = palette + this.scheduleDraw() } getColor (i) { // return palette color if it exists - if (this.palette[i]) return this.palette[i]; + if (this.palette[i]) return this.palette[i] // -1 for selection foreground, -2 for selection background - if (i === -1) return SELECTION_FG; - if (i === -2) return SELECTION_BG; + if (i === -1) return SELECTION_FG + if (i === -2) return SELECTION_BG // 256 color - if (i > 15 && i < 256) return colorTable256[i]; + if (i > 15 && i < 256) return colorTable256[i] // true color, encoded as (hex) + 256 (such that #000 == 256) if (i > 255) { - i -= 256; - let red = (i >> 16) & 0xFF; - let green = (i >> 8) & 0xFF; - let blue = i & 0xFF; - return `rgb(${red}, 
${green}, ${blue})`; + i -= 256 + let red = (i >> 16) & 0xFF + let green = (i >> 8) & 0xFF + let blue = i & 0xFF + return `rgb(${red}, ${green}, ${blue})` } // default to transparent - return 'rgba(0, 0, 0, 0)'; + return 'rgba(0, 0, 0, 0)' } // schedule a size update in the next tick scheduleSizeUpdate () { - clearTimeout(this._scheduledSizeUpdate); + clearTimeout(this._scheduledSizeUpdate) this._scheduledSizeUpdate = setTimeout(() => this.updateSize(), 1) } // schedule a draw in the next tick scheduleDraw (aggregateTime = 1) { - clearTimeout(this._scheduledDraw); + clearTimeout(this._scheduledDraw) this._scheduledDraw = setTimeout(() => this.draw(), aggregateTime) } getFont (modifiers = {}) { - let fontStyle = modifiers.style || 'normal'; - let fontWeight = modifiers.weight || 'normal'; + let fontStyle = modifiers.style || 'normal' + let fontWeight = modifiers.weight || 'normal' return `${fontStyle} normal ${fontWeight} ${this.window.fontSize}px ${this.window.fontFamily}` } getCharSize () { - this.ctx.font = this.getFont(); + this.ctx.font = this.getFont() return { width: Math.floor(this.ctx.measureText(' ').width), @@ -417,7 +417,7 @@ class TermScreen { } getCellSize () { - let charSize = this.getCharSize(); + let charSize = this.getCharSize() return { width: Math.ceil(charSize.width * this.window.gridScaleX), @@ -426,13 +426,13 @@ class TermScreen { } updateSize () { - this._window.devicePixelRatio = window.devicePixelRatio || 1; + this._window.devicePixelRatio = window.devicePixelRatio || 1 - let didChange = false; + let didChange = false for (let key in this.windowState) { if (this.windowState.hasOwnProperty(key) && this.windowState[key] !== this.window[key]) { - didChange = true; - this.windowState[key] = this.window[key]; + didChange = true + this.windowState[key] = this.window[key] } } @@ -445,294 +445,291 @@ class TermScreen { gridScaleY, fitIntoWidth, fitIntoHeight - } = this.window; - const cellSize = this.getCellSize(); + } = this.window + const cellSize = this.getCellSize() // real height of the canvas element in pixels - let realWidth = width * cellSize.width; - let realHeight = height * cellSize.height; + let realWidth = width * cellSize.width + let realHeight = height * cellSize.height if (fitIntoWidth && fitIntoHeight) { if (realWidth > fitIntoWidth || realHeight > fitIntoHeight) { - let terminalAspect = realWidth / realHeight; - let fitAspect = fitIntoWidth / fitIntoHeight; + let terminalAspect = realWidth / realHeight + let fitAspect = fitIntoWidth / fitIntoHeight if (terminalAspect < fitAspect) { // align heights - realHeight = fitIntoHeight; + realHeight = fitIntoHeight realWidth = realHeight * terminalAspect - } - else { + } else { // align widths - realWidth = fitIntoWidth; + realWidth = fitIntoWidth realHeight = realWidth / terminalAspect } } - } - else if (fitIntoWidth && realWidth > fitIntoWidth) { - realHeight = fitIntoWidth / (realWidth / realHeight); + } else if (fitIntoWidth && realWidth > fitIntoWidth) { + realHeight = fitIntoWidth / (realWidth / realHeight) realWidth = fitIntoWidth - } - else if (fitIntoHeight && realHeight > fitIntoHeight) { - realWidth = fitIntoHeight * (realWidth / realHeight); + } else if (fitIntoHeight && realHeight > fitIntoHeight) { + realWidth = fitIntoHeight * (realWidth / realHeight) realHeight = fitIntoHeight } - this.canvas.width = width * devicePixelRatio * cellSize.width; - this.canvas.style.width = `${realWidth}px`; - this.canvas.height = height * devicePixelRatio * cellSize.height; - this.canvas.style.height = 
`${realHeight}px`; + this.canvas.width = width * devicePixelRatio * cellSize.width + this.canvas.style.width = `${realWidth}px` + this.canvas.height = height * devicePixelRatio * cellSize.height + this.canvas.style.height = `${realHeight}px` // the screen has been cleared (by changing canvas width) - this.drawnScreen = []; - this.drawnScreenFG = []; - this.drawnScreenBG = []; - this.drawnScreenAttrs = []; + this.drawnScreen = [] + this.drawnScreenFG = [] + this.drawnScreenBG = [] + this.drawnScreenAttrs = [] // draw immediately; the canvas shouldn't flash - this.draw(); + this.draw() } } resetCursorBlink () { - this.cursor.blinkOn = true; - clearInterval(this.cursor.blinkInterval); + this.cursor.blinkOn = true + clearInterval(this.cursor.blinkInterval) this.cursor.blinkInterval = setInterval(() => { this.cursor.blinkOn = this.cursor.blinking ? !this.cursor.blinkOn - : true; - this.scheduleDraw(); - }, 500); + : true + this.scheduleDraw() + }, 500) } resetBlink () { - this.window.blinkStyleOn = true; - clearInterval(this.window.blinkInterval); - let intervals = 0; + this.window.blinkStyleOn = true + clearInterval(this.window.blinkInterval) + let intervals = 0 this.window.blinkInterval = setInterval(() => { - intervals++; + intervals++ if (intervals >= 4 && this.window.blinkStyleOn) { - this.window.blinkStyleOn = false; - intervals = 0; + this.window.blinkStyleOn = false + intervals = 0 } else if (intervals >= 1 && !this.window.blinkStyleOn) { - this.window.blinkStyleOn = true; - intervals = 0; + this.window.blinkStyleOn = true + intervals = 0 } - }, 200); + }, 200) } getNormalizedSelection () { - let { start, end } = this.selection; + let { start, end } = this.selection // if the start line is after the end line, or if they're both on the same // line but the start column comes after the end column, swap if (start[1] > end[1] || (start[1] === end[1] && start[0] > end[0])) { - [start, end] = [end, start]; + [start, end] = [end, start] } - return { start, end }; + return { start, end } } isInSelection (col, line) { - let { start, end } = this.getNormalizedSelection(); - let colAfterStart = start[0] <= col; - let colBeforeEnd = col < end[0]; - let onStartLine = line === start[1]; - let onEndLine = line === end[1]; - - if (onStartLine && onEndLine) return colAfterStart && colBeforeEnd; - else if (onStartLine) return colAfterStart; - else if (onEndLine) return colBeforeEnd; - else return start[1] < line && line < end[1]; + let { start, end } = this.getNormalizedSelection() + let colAfterStart = start[0] <= col + let colBeforeEnd = col < end[0] + let onStartLine = line === start[1] + let onEndLine = line === end[1] + + if (onStartLine && onEndLine) return colAfterStart && colBeforeEnd + else if (onStartLine) return colAfterStart + else if (onEndLine) return colBeforeEnd + else return start[1] < line && line < end[1] } getSelectedText () { - const screenLength = this.window.width * this.window.height; - let lines = []; - let previousLineIndex = -1; + const screenLength = this.window.width * this.window.height + let lines = [] + let previousLineIndex = -1 for (let cell = 0; cell < screenLength; cell++) { - let x = cell % this.window.width; - let y = Math.floor(cell / this.window.width); + let x = cell % this.window.width + let y = Math.floor(cell / this.window.width) if (this.isInSelection(x, y)) { if (previousLineIndex !== y) { - previousLineIndex = y; - lines.push(''); + previousLineIndex = y + lines.push('') } - lines[lines.length - 1] += this.screen[cell]; + lines[lines.length - 1] += 
this.screen[cell] } } - return lines.join('\n'); + return lines.join('\n') } copySelectionToClipboard () { - let selectedText = this.getSelectedText(); + let selectedText = this.getSelectedText() // don't copy anything if nothing is selected - if (!selectedText) return; - let textarea = document.createElement('textarea'); - document.body.appendChild(textarea); - textarea.value = selectedText; - textarea.select(); + if (!selectedText) return + let textarea = document.createElement('textarea') + document.body.appendChild(textarea) + textarea.value = selectedText + textarea.select() if (document.execCommand('copy')) { - Notify.show('Copied to clipboard'); + Notify.show('Copied to clipboard') } else { - Notify.show('Failed to copy'); + Notify.show('Failed to copy') // unsuccessful copy } - document.body.removeChild(textarea); + document.body.removeChild(textarea) } screenToGrid (x, y) { - let cellSize = this.getCellSize(); + let cellSize = this.getCellSize() return [ Math.floor((x + cellSize.width / 2) / cellSize.width), - Math.floor(y / cellSize.height), - ]; + Math.floor(y / cellSize.height) + ] } gridToScreen (x, y) { - let cellSize = this.getCellSize(); + let cellSize = this.getCellSize() - return [ x * cellSize.width, y * cellSize.height ]; + return [ x * cellSize.width, y * cellSize.height ] } drawCell ({ x, y, charSize, cellWidth, cellHeight, text, fg, bg, attrs }) { - const ctx = this.ctx; - ctx.fillStyle = this.getColor(bg); + const ctx = this.ctx + ctx.fillStyle = this.getColor(bg) ctx.fillRect(x * cellWidth, y * cellHeight, - Math.ceil(cellWidth), Math.ceil(cellHeight)); + Math.ceil(cellWidth), Math.ceil(cellHeight)) - if (!text) return; + if (!text) return - let underline = false; - let blink = false; - let strike = false; - let overline = false; - if (attrs & (1 << 1)) ctx.globalAlpha = 0.5; - if (attrs & (1 << 3)) underline = true; - if (attrs & (1 << 4)) blink = true; - if (attrs & (1 << 5)) text = TermScreen.alphaToFraktur(text); - if (attrs & (1 << 6)) strike = true; - if (attrs & (1 << 7)) overline = true; + let underline = false + let blink = false + let strike = false + let overline = false + if (attrs & (1 << 1)) ctx.globalAlpha = 0.5 + if (attrs & (1 << 3)) underline = true + if (attrs & (1 << 4)) blink = true + if (attrs & (1 << 5)) text = TermScreen.alphaToFraktur(text) + if (attrs & (1 << 6)) strike = true + if (attrs & (1 << 7)) overline = true if (!blink || this.window.blinkStyleOn) { - ctx.fillStyle = this.getColor(fg); - ctx.fillText(text, (x + 0.5) * cellWidth, (y + 0.5) * cellHeight); + ctx.fillStyle = this.getColor(fg) + ctx.fillText(text, (x + 0.5) * cellWidth, (y + 0.5) * cellHeight) if (underline || strike || overline) { - ctx.strokeStyle = this.getColor(fg); - ctx.lineWidth = 1; - ctx.lineCap = 'round'; - ctx.beginPath(); + ctx.strokeStyle = this.getColor(fg) + ctx.lineWidth = 1 + ctx.lineCap = 'round' + ctx.beginPath() if (underline) { - let lineY = Math.round(y * cellHeight + charSize.height) + 0.5; - ctx.moveTo(x * cellWidth, lineY); - ctx.lineTo((x + 1) * cellWidth, lineY); + let lineY = Math.round(y * cellHeight + charSize.height) + 0.5 + ctx.moveTo(x * cellWidth, lineY) + ctx.lineTo((x + 1) * cellWidth, lineY) } if (strike) { - let lineY = Math.round((y + 0.5) * cellHeight) + 0.5; - ctx.moveTo(x * cellWidth, lineY); - ctx.lineTo((x + 1) * cellWidth, lineY); + let lineY = Math.round((y + 0.5) * cellHeight) + 0.5 + ctx.moveTo(x * cellWidth, lineY) + ctx.lineTo((x + 1) * cellWidth, lineY) } if (overline) { - let lineY = Math.round(y * cellHeight) + 0.5; - 
ctx.moveTo(x * cellWidth, lineY); - ctx.lineTo((x + 1) * cellWidth, lineY); + let lineY = Math.round(y * cellHeight) + 0.5 + ctx.moveTo(x * cellWidth, lineY) + ctx.lineTo((x + 1) * cellWidth, lineY) } - ctx.stroke(); + ctx.stroke() } } - ctx.globalAlpha = 1; + ctx.globalAlpha = 1 } draw () { - const ctx = this.ctx; + const ctx = this.ctx const { width, height, devicePixelRatio, gridScaleX, gridScaleY - } = this.window; + } = this.window - const charSize = this.getCharSize(); - const { width: cellWidth, height: cellHeight } = this.getCellSize(); - const screenWidth = width * cellWidth; - const screenHeight = height * cellHeight; - const screenLength = width * height; + const charSize = this.getCharSize() + const { width: cellWidth, height: cellHeight } = this.getCellSize() + const screenWidth = width * cellWidth + const screenHeight = height * cellHeight + const screenLength = width * height - ctx.setTransform(devicePixelRatio, 0, 0, devicePixelRatio, 0, 0); + ctx.setTransform(devicePixelRatio, 0, 0, devicePixelRatio, 0, 0) - ctx.font = this.getFont(); - ctx.textAlign = 'center'; - ctx.textBaseline = 'middle'; + ctx.font = this.getFont() + ctx.textAlign = 'center' + ctx.textBaseline = 'middle' // bits in the attr value that affect the font - const FONT_MASK = 0b101; + const FONT_MASK = 0b101 // Map of (attrs & FONT_MASK) -> Array of cell indices - const fontGroups = new Map(); + const fontGroups = new Map() // Map of (cell index) -> boolean, whether or not a cell needs to be redrawn - const updateMap = new Map(); + const updateMap = new Map() for (let cell = 0; cell < screenLength; cell++) { - let x = cell % width; - let y = Math.floor(cell / width); - let isCursor = !this.cursor.hanging - && this.cursor.x === x - && this.cursor.y === y; + let x = cell % width + let y = Math.floor(cell / width) + let isCursor = !this.cursor.hanging && + this.cursor.x === x && + this.cursor.y === y - let invertForCursor = isCursor && this.cursor.blinkOn - && this.cursor.style === 'block'; + let invertForCursor = isCursor && this.cursor.blinkOn && + this.cursor.style === 'block' - let inSelection = this.isInSelection(x, y); + let inSelection = this.isInSelection(x, y) - let text = this.screen[cell]; - let fg = invertForCursor ? this.screenBG[cell] : this.screenFG[cell]; - let bg = invertForCursor ? this.screenFG[cell] : this.screenBG[cell]; - let attrs = this.screenAttrs[cell]; + let text = this.screen[cell] + let fg = invertForCursor ? this.screenBG[cell] : this.screenFG[cell] + let bg = invertForCursor ? this.screenFG[cell] : this.screenBG[cell] + let attrs = this.screenAttrs[cell] // HACK: ensure cursor is visible - if (invertForCursor && fg === bg) bg = fg === 0 ? 7 : 0; + if (invertForCursor && fg === bg) bg = fg === 0 ? 
7 : 0 if (inSelection) { - fg = -1; - bg = -2; + fg = -1 + bg = -2 } let needsUpdate = text !== this.drawnScreen[cell] || fg !== this.drawnScreenFG[cell] || bg !== this.drawnScreenBG[cell] || attrs !== this.drawnScreenAttrs[cell] || - isCursor; + isCursor - let font = attrs & FONT_MASK; - if (!fontGroups.has(font)) fontGroups.set(font, []); + let font = attrs & FONT_MASK + if (!fontGroups.has(font)) fontGroups.set(font, []) - fontGroups.get(font).push([cell, x, y, text, fg, bg, attrs, isCursor]); - updateMap.set(cell, needsUpdate); + fontGroups.get(font).push([cell, x, y, text, fg, bg, attrs, isCursor]) + updateMap.set(cell, needsUpdate) } for (let font of fontGroups.keys()) { // set font once because in Firefox, this is a really slow action for some // reason - let modifiers = {}; - if (font & 1) modifiers.weight = 'bold'; - if (font & 1 << 2) modifiers.style = 'italic'; - ctx.font = this.getFont(modifiers); + let modifiers = {} + if (font & 1) modifiers.weight = 'bold' + if (font & 1 << 2) modifiers.style = 'italic' + ctx.font = this.getFont(modifiers) for (let data of fontGroups.get(font)) { - let [cell, x, y, text, fg, bg, attrs, isCursor] = data; + let [cell, x, y, text, fg, bg, attrs, isCursor] = data // check if this cell or any adjacent cells updated - let needsUpdate = false; + let needsUpdate = false let updateCells = [ cell, cell - 1, @@ -744,50 +741,50 @@ class TermScreen { cell - width + 1, cell + width - 1, cell + width + 1 - ]; + ] for (let index of updateCells) { if (updateMap.has(index) && updateMap.get(index)) { - needsUpdate = true; - break; + needsUpdate = true + break } } if (needsUpdate) { this.drawCell({ x, y, charSize, cellWidth, cellHeight, text, fg, bg, attrs - }); + }) - this.drawnScreen[cell] = text; - this.drawnScreenFG[cell] = fg; - this.drawnScreenBG[cell] = bg; - this.drawnScreenAttrs[cell] = attrs; + this.drawnScreen[cell] = text + this.drawnScreenFG[cell] = fg + this.drawnScreenBG[cell] = bg + this.drawnScreenAttrs[cell] = attrs } if (isCursor && this.cursor.blinkOn && this.cursor.style !== 'block') { - ctx.save(); - ctx.beginPath(); + ctx.save() + ctx.beginPath() if (this.cursor.style === 'bar') { // vertical bar - let barWidth = 2; + let barWidth = 2 ctx.rect(x * cellWidth, y * cellHeight, barWidth, cellHeight) } else if (this.cursor.style === 'line') { // underline - let lineHeight = 2; + let lineHeight = 2 ctx.rect(x * cellWidth, y * cellHeight + charSize.height, cellWidth, lineHeight) } - ctx.clip(); + ctx.clip() // swap foreground/background - fg = this.screenBG[cell]; - bg = this.screenFG[cell]; + fg = this.screenBG[cell] + bg = this.screenFG[cell] // HACK: ensure cursor is visible - if (fg === bg) bg = fg === 0 ? 7 : 0; + if (fg === bg) bg = fg === 0 ? 
7 : 0 this.drawCell({ x, y, charSize, cellWidth, cellHeight, text, fg, bg, attrs - }); - ctx.restore(); + }) + ctx.restore() } } } @@ -795,237 +792,237 @@ class TermScreen { loadContent (str) { // current index - let i = 0; + let i = 0 // window size - this.window.height = parse2B(str, i); - this.window.width = parse2B(str, i + 2); - this.updateSize(); - i += 4; + this.window.height = parse2B(str, i) + this.window.width = parse2B(str, i + 2) + this.updateSize() + i += 4 // cursor position - let [cursorY, cursorX] = [parse2B(str, i), parse2B(str, i + 2)]; - i += 4; - let cursorMoved = (cursorX !== this.cursor.x || cursorY !== this.cursor.y); - this.cursor.x = cursorX; - this.cursor.y = cursorY; + let [cursorY, cursorX] = [parse2B(str, i), parse2B(str, i + 2)] + i += 4 + let cursorMoved = (cursorX !== this.cursor.x || cursorY !== this.cursor.y) + this.cursor.x = cursorX + this.cursor.y = cursorY if (cursorMoved) { - this.resetCursorBlink(); - this.emit('cursor-moved'); + this.resetCursorBlink() + this.emit('cursor-moved') } // attributes - let attributes = parse3B(str, i); - i += 3; + let attributes = parse3B(str, i) + i += 3 - this.cursor.visible = !!(attributes & 1); - this.cursor.hanging = !!(attributes & (1 << 1)); + this.cursor.visible = !!(attributes & 1) + this.cursor.hanging = !!(attributes & (1 << 1)) Input.setAlts( !!(attributes & (1 << 2)), // cursors alt !!(attributes & (1 << 3)), // numpad alt !!(attributes & (1 << 4)), // fn keys alt !!(attributes & (1 << 12)) // crlf mode - ); + ) - let trackMouseClicks = !!(attributes & (1 << 5)); - let trackMouseMovement = !!(attributes & (1 << 6)); + let trackMouseClicks = !!(attributes & (1 << 5)) + let trackMouseMovement = !!(attributes & (1 << 6)) // 0 - Block blink 2 - Block steady (1 is unused) // 3 - Underline blink 4 - Underline steady // 5 - I-bar blink 6 - I-bar steady - let cursorShape = (attributes >> 9) & 0x07; + let cursorShape = (attributes >> 9) & 0x07 // if it's not zero, decrement such that the two most significant bits // are the type and the least significant bit is the blink state - if (cursorShape > 0) cursorShape--; + if (cursorShape > 0) cursorShape-- - let cursorStyle = cursorShape >> 1; - let cursorBlinking = !(cursorShape & 1); + let cursorStyle = cursorShape >> 1 + let cursorBlinking = !(cursorShape & 1) - if (cursorStyle === 0) this.cursor.style = 'block'; - else if (cursorStyle === 1) this.cursor.style = 'line'; - else if (cursorStyle === 2) this.cursor.style = 'bar'; + if (cursorStyle === 0) this.cursor.style = 'block' + else if (cursorStyle === 1) this.cursor.style = 'line' + else if (cursorStyle === 2) this.cursor.style = 'bar' if (this.cursor.blinking !== cursorBlinking) { - this.cursor.blinking = cursorBlinking; - this.resetCursorBlink(); + this.cursor.blinking = cursorBlinking + this.resetCursorBlink() } - Input.setMouseMode(trackMouseClicks, trackMouseMovement); - this.selection.selectable = !trackMouseMovement; - $(this.canvas).toggleClass('selectable', !trackMouseMovement); + Input.setMouseMode(trackMouseClicks, trackMouseMovement) + this.selection.selectable = !trackMouseMovement + $(this.canvas).toggleClass('selectable', !trackMouseMovement) this.mouseMode = { clicks: trackMouseClicks, movement: trackMouseMovement - }; + } - let showButtons = !!(attributes & (1 << 7)); - let showConfigLinks = !!(attributes & (1 << 8)); + let showButtons = !!(attributes & (1 << 7)) + let showConfigLinks = !!(attributes & (1 << 8)) - $('.x-term-conf-btn').toggleClass('hidden', !showConfigLinks); - 
$('#action-buttons').toggleClass('hidden', !showButtons); + $('.x-term-conf-btn').toggleClass('hidden', !showConfigLinks) + $('#action-buttons').toggleClass('hidden', !showButtons) // content - let fg = 7; - let bg = 0; - let attrs = 0; - let cell = 0; // cell index - let lastChar = ' '; - let screenLength = this.window.width * this.window.height; - - this.screen = new Array(screenLength).fill(' '); - this.screenFG = new Array(screenLength).fill(' '); - this.screenBG = new Array(screenLength).fill(' '); - this.screenAttrs = new Array(screenLength).fill(' '); + let fg = 7 + let bg = 0 + let attrs = 0 + let cell = 0 // cell index + let lastChar = ' ' + let screenLength = this.window.width * this.window.height + + this.screen = new Array(screenLength).fill(' ') + this.screenFG = new Array(screenLength).fill(' ') + this.screenBG = new Array(screenLength).fill(' ') + this.screenAttrs = new Array(screenLength).fill(' ') let strArray = typeof Array.from !== 'undefined' ? Array.from(str) - : str.split(''); + : str.split('') while (i < strArray.length && cell < screenLength) { - let character = strArray[i++]; - let charCode = character.codePointAt(0); + let character = strArray[i++] + let charCode = character.codePointAt(0) - let data; + let data switch (charCode) { case SEQ_SET_COLOR_ATTR: - data = parse3B(strArray[i] + strArray[i + 1] + strArray[i + 2]); - i += 3; - fg = data & 0xF; - bg = data >> 4 & 0xF; - attrs = data >> 8 & 0xFF; - break; + data = parse3B(strArray[i] + strArray[i + 1] + strArray[i + 2]) + i += 3 + fg = data & 0xF + bg = data >> 4 & 0xF + attrs = data >> 8 & 0xFF + break case SEQ_SET_COLOR: - data = parse2B(strArray[i] + strArray[i + 1]); - i += 2; - fg = data & 0xF; - bg = data >> 4 & 0xF; - break; + data = parse2B(strArray[i] + strArray[i + 1]) + i += 2 + fg = data & 0xF + bg = data >> 4 & 0xF + break case SEQ_SET_ATTR: - data = parse2B(strArray[i] + strArray[i + 1]); - i += 2; - attrs = data & 0xFF; - break; + data = parse2B(strArray[i] + strArray[i + 1]) + i += 2 + attrs = data & 0xFF + break case SEQ_REPEAT: - let count = parse2B(strArray[i] + strArray[i + 1]); - i += 2; + let count = parse2B(strArray[i] + strArray[i + 1]) + i += 2 for (let j = 0; j < count; j++) { - this.screen[cell] = lastChar; - this.screenFG[cell] = fg; - this.screenBG[cell] = bg; - this.screenAttrs[cell] = attrs; + this.screen[cell] = lastChar + this.screenFG[cell] = fg + this.screenBG[cell] = bg + this.screenAttrs[cell] = attrs - if (++cell > screenLength) break; + if (++cell > screenLength) break } - break; + break default: // safety replacement - if (charCode < 32) character = '\ufffd'; + if (charCode < 32) character = '\ufffd' // unique cell character - this.screen[cell] = lastChar = character; - this.screenFG[cell] = fg; - this.screenBG[cell] = bg; - this.screenAttrs[cell] = attrs; - cell++; + this.screen[cell] = lastChar = character + this.screenFG[cell] = fg + this.screenBG[cell] = bg + this.screenAttrs[cell] = attrs + cell++ } } - this.scheduleDraw(16); - this.emit('load'); + this.scheduleDraw(16) + this.emit('load') } /** Apply labels to buttons and screen title (leading T removed already) */ loadLabels (str) { - let pieces = str.split('\x01'); - qs('h1').textContent = pieces[0]; + let pieces = str.split('\x01') + qs('h1').textContent = pieces[0] $('#action-buttons button').forEach((button, i) => { - let label = pieces[i + 1].trim(); + let label = pieces[i + 1].trim() // if empty string, use the "dim" effect and put nbsp instead to // stretch the button vertically - button.innerHTML = label 
? e(label) : ' '; - button.style.opacity = label ? 1 : 0.2; + button.innerHTML = label ? e(label) : ' ' + button.style.opacity = label ? 1 : 0.2 }) } showNotification (text) { - console.log(`Notification: ${text}`); + console.log(`Notification: ${text}`) // TODO: request permission earlier // the requestPermission should be user-triggered; asking upfront seems // a little awkward if (Notification && Notification.permission === 'granted') { let notification = new Notification('ESPTerm', { body: text - }); - notification.addEventListener('click', () => window.focus()); + }) + notification.addEventListener('click', () => window.focus()) } else { - Notify.show(text); + Notify.show(text) } } load (str) { - const content = str.substr(1); + const content = str.substr(1) switch (str[0]) { case 'S': - this.loadContent(content); - break; + this.loadContent(content) + break case 'T': - this.loadLabels(content); - break; + this.loadLabels(content) + break case 'B': - this.beep(); - break; + this.beep() + break case 'G': - this.showNotification(content); - break; + this.showNotification(content) + break default: console.warn(`Bad data message type; ignoring.\n${JSON.stringify(content)}`) } } beep () { - const audioCtx = this.audioCtx; - if (!audioCtx) return; + const audioCtx = this.audioCtx + if (!audioCtx) return // prevent screeching - if (this._lastBeep && this._lastBeep > Date.now() - 50) return; - this._lastBeep = Date.now(); + if (this._lastBeep && this._lastBeep > Date.now() - 50) return + this._lastBeep = Date.now() - let osc, gain; + let osc, gain // main beep - osc = audioCtx.createOscillator(); - gain = audioCtx.createGain(); - osc.connect(gain); - gain.connect(audioCtx.destination); - gain.gain.value = 0.5; - osc.frequency.value = 750; - osc.type = 'sine'; - osc.start(); - osc.stop(audioCtx.currentTime + 0.05); + osc = audioCtx.createOscillator() + gain = audioCtx.createGain() + osc.connect(gain) + gain.connect(audioCtx.destination) + gain.gain.value = 0.5 + osc.frequency.value = 750 + osc.type = 'sine' + osc.start() + osc.stop(audioCtx.currentTime + 0.05) // surrogate beep (making it sound like 'oops') - osc = audioCtx.createOscillator(); - gain = audioCtx.createGain(); - osc.connect(gain); - gain.connect(audioCtx.destination); - gain.gain.value = 0.2; - osc.frequency.value = 400; - osc.type = 'sine'; - osc.start(audioCtx.currentTime + 0.05); - osc.stop(audioCtx.currentTime + 0.08); + osc = audioCtx.createOscillator() + gain = audioCtx.createGain() + osc.connect(gain) + gain.connect(audioCtx.destination) + gain.gain.value = 0.2 + osc.frequency.value = 400 + osc.type = 'sine' + osc.start(audioCtx.currentTime + 0.05) + osc.stop(audioCtx.currentTime + 0.08) } static alphaToFraktur (character) { - if ('a' <= character && character <= 'z') { + if (character >= 'a' && character <= 'z') { character = String.fromCodePoint(0x1d51e - 0x61 + character.charCodeAt(0)) - } else if ('A' <= character && character <= 'Z') { + } else if (character >= 'A' && character <= 'Z') { character = frakturExceptions[character] || String.fromCodePoint( 0x1d504 - 0x41 + character.charCodeAt(0)) } @@ -1033,34 +1030,34 @@ class TermScreen { } } -const Screen = new TermScreen(); +const Screen = new TermScreen() Screen.once('load', () => { - qs('#screen').appendChild(Screen.canvas); + qs('#screen').appendChild(Screen.canvas) for (let item of qs('#screen').classList) { if (item.startsWith('theme-')) { Screen.colors = themes[item.substr(6)] } } -}); +}) -let fitScreen = false; +let fitScreen = false function 
fitScreenIfNeeded () { - Screen.window.fitIntoWidth = fitScreen ? window.innerWidth : 0; + Screen.window.fitIntoWidth = fitScreen ? window.innerWidth : 0 Screen.window.fitIntoHeight = fitScreen ? window.innerHeight : 0 } -fitScreenIfNeeded(); -window.addEventListener('resize', fitScreenIfNeeded); +fitScreenIfNeeded() +window.addEventListener('resize', fitScreenIfNeeded) window.toggleFitScreen = function () { - fitScreen = !fitScreen; - const resizeButtonIcon = qs('#resize-button-icon'); + fitScreen = !fitScreen + const resizeButtonIcon = qs('#resize-button-icon') if (fitScreen) { - resizeButtonIcon.classList.remove('icn-resize-small'); + resizeButtonIcon.classList.remove('icn-resize-small') resizeButtonIcon.classList.add('icn-resize-full') } else { - resizeButtonIcon.classList.remove('icn-resize-full'); + resizeButtonIcon.classList.remove('icn-resize-full') resizeButtonIcon.classList.add('icn-resize-small') } - fitScreenIfNeeded(); -}; + fitScreenIfNeeded() +} diff --git a/jssrc/term_upload.js b/jssrc/term_upload.js index 5d138d1..5f7d720 100644 --- a/jssrc/term_upload.js +++ b/jssrc/term_upload.js @@ -1,146 +1,146 @@ /** File upload utility */ -var TermUpl = (function () { - var lines, // array of lines without newlines +window.TermUpl = (function () { + let lines, // array of lines without newlines line_i, // current line index fuTout, // timeout handle for line sending send_delay_ms, // delay between lines (ms) nl_str, // newline string to use curLine, // current line (when using fuOil) - inline_pos; // Offset in line (for long lines) + inline_pos // Offset in line (for long lines) // lines longer than this are split to chunks // sending a super-ling string through the socket is not a good idea - var MAX_LINE_LEN = 128; + const MAX_LINE_LEN = 128 - function fuOpen() { - fuStatus("Ready..."); - Modal.show('#fu_modal', onClose); - $('#fu_form').toggleClass('busy', false); - Input.blockKeys(true); + function fuOpen () { + fuStatus('Ready...') + Modal.show('#fu_modal', onClose) + $('#fu_form').toggleClass('busy', false) + Input.blockKeys(true) } - function onClose() { - console.log("Upload modal closed."); - clearTimeout(fuTout); - line_i = 0; - Input.blockKeys(false); + function onClose () { + console.log('Upload modal closed.') + clearTimeout(fuTout) + line_i = 0 + Input.blockKeys(false) } - function fuStatus(msg) { - qs('#fu_prog').textContent = msg; + function fuStatus (msg) { + qs('#fu_prog').textContent = msg } - function fuSend() { - var v = qs('#fu_text').value; + function fuSend () { + let v = qs('#fu_text').value if (!v.length) { - fuClose(); - return; + fuClose() + return } - lines = v.split('\n'); - line_i = 0; - inline_pos = 0; // offset in line - send_delay_ms = qs('#fu_delay').value; + lines = v.split('\n') + line_i = 0 + inline_pos = 0 // offset in line + send_delay_ms = qs('#fu_delay').value // sanitize - 0 causes overflows if (send_delay_ms < 0) { - send_delay_ms = 0; - qs('#fu_delay').value = send_delay_ms; + send_delay_ms = 0 + qs('#fu_delay').value = send_delay_ms } nl_str = { 'CR': '\r', 'LF': '\n', - 'CRLF': '\r\n', - }[qs('#fu_crlf').value]; + 'CRLF': '\r\n' + }[qs('#fu_crlf').value] - $('#fu_form').toggleClass('busy', true); - fuStatus("Starting..."); - fuSendLine(); + $('#fu_form').toggleClass('busy', true) + fuStatus('Starting...') + fuSendLine() } - function fuSendLine() { + function fuSendLine () { if (!$('#fu_modal').hasClass('visible')) { // Modal is closed, cancel - return; + return } if (!Conn.canSend()) { // postpone - fuTout = setTimeout(fuSendLine, 1); - 
return; + fuTout = setTimeout(fuSendLine, 1) + return } if (inline_pos == 0) { - curLine = lines[line_i++] + nl_str; + curLine = lines[line_i++] + nl_str } - var chunk; + let chunk if ((curLine.length - inline_pos) <= MAX_LINE_LEN) { - chunk = curLine.substr(inline_pos, MAX_LINE_LEN); - inline_pos = 0; + chunk = curLine.substr(inline_pos, MAX_LINE_LEN) + inline_pos = 0 } else { - chunk = curLine.substr(inline_pos, MAX_LINE_LEN); - inline_pos += MAX_LINE_LEN; + chunk = curLine.substr(inline_pos, MAX_LINE_LEN) + inline_pos += MAX_LINE_LEN } if (!Input.sendString(chunk)) { - fuStatus("FAILED!"); - return; + fuStatus('FAILED!') + return } - var all = lines.length; + let all = lines.length - fuStatus(line_i + " / " + all + " (" + (Math.round((line_i / all) * 1000) / 10) + "%)"); + fuStatus(line_i + ' / ' + all + ' (' + (Math.round((line_i / all) * 1000) / 10) + '%)') if (lines.length > line_i || inline_pos > 0) { - fuTout = setTimeout(fuSendLine, send_delay_ms); + fuTout = setTimeout(fuSendLine, send_delay_ms) } else { - closeWhenReady(); + closeWhenReady() } } - function closeWhenReady() { + function closeWhenReady () { if (!Conn.canSend()) { // stuck in XOFF still, wait to process... - fuStatus("Waiting for Tx buffer..."); - setTimeout(closeWhenReady, 100); + fuStatus('Waiting for Tx buffer...') + setTimeout(closeWhenReady, 100) } else { - fuStatus("Done."); + fuStatus('Done.') // delay to show it setTimeout(function () { - fuClose(); - }, 100); + fuClose() + }, 100) } } - function fuClose() { - Modal.hide('#fu_modal'); + function fuClose () { + Modal.hide('#fu_modal') } return { init: function () { qs('#fu_file').addEventListener('change', function (evt) { - var reader = new FileReader(); - var file = evt.target.files[0]; - console.log("Selected file type: " + file.type); + let reader = new FileReader() + let file = evt.target.files[0] + console.log('Selected file type: ' + file.type) if (!file.type.match(/text\/.*|application\/(json|csv|.*xml.*|.*script.*)/)) { // Deny load of blobs like img - can crash browser and will get corrupted anyway - if (!confirm("This does not look like a text file: " + file.type + "\nReally load?")) { - qs('#fu_file').value = ''; - return; + if (!confirm('This does not look like a text file: ' + file.type + '\nReally load?')) { + qs('#fu_file').value = '' + return } } reader.onload = function (e) { - var txt = e.target.result.replace(/[\r\n]+/, '\n'); - qs('#fu_text').value = txt; - }; - console.log("Loading file..."); - reader.readAsText(file); - }, false); + const txt = e.target.result.replace(/[\r\n]+/, '\n') + qs('#fu_text').value = txt + } + console.log('Loading file...') + reader.readAsText(file) + }, false) }, close: fuClose, start: fuSend, - open: fuOpen, + open: fuOpen } -})(); +})() diff --git a/jssrc/utils.js b/jssrc/utils.js index dcd35fa..a0c055b 100755 --- a/jssrc/utils.js +++ b/jssrc/utils.js @@ -1,21 +1,21 @@ /** Make a node */ -function mk(e) { +function mk (e) { return document.createElement(e) } /** Find one by query */ -function qs(s) { +function qs (s) { return document.querySelector(s) } /** Find all by query */ -function qsa(s) { +function qsa (s) { return document.querySelectorAll(s) } /** Convert any to bool safely */ -function bool(x) { - return (x === 1 || x === '1' || x === true || x === 'true'); +function bool (x) { + return (x === 1 || x === '1' || x === true || x === 'true') } /** @@ -23,54 +23,54 @@ function bool(x) { * and when they're pressed, fire the callback. 
* use $(...).on('keypress', cr(handler)) */ -function cr(hdl) { +function cr (hdl) { return function (e) { if (e.which == 10 || e.which == 13 || e.which == 32) { - hdl(); + hdl() } - }; + } } /** Extend an objects with options */ -function extend(defaults, options) { - var target = {}; +function extend (defaults, options) { + var target = {} Object.keys(defaults).forEach(function (k) { - target[k] = defaults[k]; - }); + target[k] = defaults[k] + }) Object.keys(options).forEach(function (k) { - target[k] = options[k]; - }); + target[k] = options[k] + }) - return target; + return target } /** Escape string for use as literal in RegExp */ -function rgxe(str) { - return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); +function rgxe (str) { + return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, '\\$&') } /** Format number to N decimal places, output as string */ -function numfmt(x, places) { - var pow = Math.pow(10, places); - return Math.round(x * pow) / pow; +function numfmt (x, places) { + var pow = Math.pow(10, places) + return Math.round(x * pow) / pow } /** Get millisecond timestamp */ -function msNow() { - return +(new Date); +function msNow () { + return +(new Date()) } /** Get ms elapsed since msNow() */ -function msElapsed(start) { - return msNow() - start; +function msElapsed (start) { + return msNow() - start } /** Shim for log base 10 */ Math.log10 = Math.log10 || function (x) { - return Math.log(x) / Math.LN10; -}; + return Math.log(x) / Math.LN10 +} /** * Perform a substitution in the given string. @@ -84,84 +84,84 @@ Math.log10 = Math.log10 || function (x) { * @returns {String} result */ String.prototype.format = function () { - var out = this; - var repl = arguments; + let out = this + let repl = arguments if (arguments.length == 1 && (Array.isArray(arguments[0]) || typeof arguments[0] == 'object')) { - repl = arguments[0]; + repl = arguments[0] } - for (var ph in repl) { + for (let ph in repl) { if (repl.hasOwnProperty(ph)) { - var ph_orig = ph; + const ph_orig = ph if (!ph.match(/^\{.*\}$/)) { - ph = '{' + ph + '}'; + ph = '{' + ph + '}' } // replace all occurrences - var pattern = new RegExp(rgxe(ph), "g"); - out = out.replace(pattern, repl[ph_orig]); + const pattern = new RegExp(rgxe(ph), 'g') + out = out.replace(pattern, repl[ph_orig]) } } - return out; -}; + return out +} /** HTML escape */ -function e(str) { - return $.htmlEscape(str); +function e (str) { + return $.htmlEscape(str) } /** Check for undefined */ -function undef(x) { - return typeof x == 'undefined'; +function undef (x) { + return typeof x == 'undefined' } /** Safe json parse */ -function jsp(str) { +function jsp (str) { try { - return JSON.parse(str); + return JSON.parse(str) } catch (e) { - console.error(e); - return null; + console.error(e) + return null } } /** Create a character from ASCII code */ -function Chr(n) { - return String.fromCharCode(n); +function Chr (n) { + return String.fromCharCode(n) } /** Decode number from 2B encoding */ -function parse2B(s, i = 0) { - return (s.charCodeAt(i++) - 1) + (s.charCodeAt(i) - 1) * 127; +function parse2B (s, i = 0) { + return (s.charCodeAt(i++) - 1) + (s.charCodeAt(i) - 1) * 127 } /** Decode number from 3B encoding */ -function parse3B(s, i = 0) { - return (s.charCodeAt(i) - 1) + (s.charCodeAt(i + 1) - 1) * 127 + (s.charCodeAt(i + 2) - 1) * 127 * 127; +function parse3B (s, i = 0) { + return (s.charCodeAt(i) - 1) + (s.charCodeAt(i + 1) - 1) * 127 + (s.charCodeAt(i + 2) - 1) * 127 * 127 } /** Encode using 2B encoding, returns string. 
*/ -function encode2B(n) { - var lsb, msb; - lsb = (n % 127); - n = ((n - lsb) / 127); - lsb += 1; - msb = (n + 1); - return Chr(lsb) + Chr(msb); +function encode2B (n) { + let lsb, msb + lsb = (n % 127) + n = ((n - lsb) / 127) + lsb += 1 + msb = (n + 1) + return Chr(lsb) + Chr(msb) } /** Encode using 3B encoding, returns string. */ -function encode3B(n) { - var lsb, msb, xsb; - lsb = (n % 127); - n = (n - lsb) / 127; - lsb += 1; - msb = (n % 127); - n = (n - msb) / 127; - msb += 1; - xsb = (n + 1); - return Chr(lsb) + Chr(msb) + Chr(xsb); +function encode3B (n) { + let lsb, msb, xsb + lsb = (n % 127) + n = (n - lsb) / 127 + lsb += 1 + msb = (n % 127) + n = (n - msb) / 127 + msb += 1 + xsb = (n + 1) + return Chr(lsb) + Chr(msb) + Chr(xsb) } diff --git a/jssrc/wifi.js b/jssrc/wifi.js index 62f7b1c..cab085b 100644 --- a/jssrc/wifi.js +++ b/jssrc/wifi.js @@ -1,163 +1,162 @@ (function (w) { - var authStr = ['Open', 'WEP', 'WPA', 'WPA2', 'WPA/WPA2']; - var curSSID; + const authStr = ['Open', 'WEP', 'WPA', 'WPA2', 'WPA/WPA2'] + let curSSID // Get XX % for a slider input - function rangePt(inp) { - return Math.round(((inp.value / inp.max) * 100)) + '%'; + function rangePt (inp) { + return Math.round(((inp.value / inp.max) * 100)) + '%' } // Display selected STA SSID etc - function selectSta(name, password, ip) { - $('#sta_ssid').val(name); - $('#sta_password').val(password); - - $('#sta-nw').toggleClass('hidden', name.length == 0); - $('#sta-nw-nil').toggleClass('hidden', name.length > 0); - - $('#sta-nw .essid').html(e(name)); - var nopw = undef(password) || password.length == 0; - $('#sta-nw .passwd').toggleClass('hidden', nopw); - $('#sta-nw .nopasswd').toggleClass('hidden', !nopw); - $('#sta-nw .ip').html(ip.length > 0 ? tr('wifi.connected_ip_is') + ip : tr('wifi.not_conn')); + function selectSta (name, password, ip) { + $('#sta_ssid').val(name) + $('#sta_password').val(password) + + $('#sta-nw').toggleClass('hidden', name.length == 0) + $('#sta-nw-nil').toggleClass('hidden', name.length > 0) + + $('#sta-nw .essid').html(e(name)) + const nopw = undef(password) || password.length == 0 + $('#sta-nw .passwd').toggleClass('hidden', nopw) + $('#sta-nw .nopasswd').toggleClass('hidden', !nopw) + $('#sta-nw .ip').html(ip.length > 0 ? tr('wifi.connected_ip_is') + ip : tr('wifi.not_conn')) } /** Update display for received response */ - function onScan(resp, status) { - //var ap_json = { - // "result": { - // "inProgress": "0", - // "APs": [ - // {"essid": "Chlivek", "bssid": "88:f7:c7:52:b3:99", "rssi": "204", "enc": "4", "channel": "1"}, - // {"essid": "TyNikdy", "bssid": "5c:f4:ab:0d:f1:1b", "rssi": "164", "enc": "3", "channel": "1"}, - // ] - // } - //}; + function onScan (resp, status) { + // var ap_json = { + // "result": { + // "inProgress": "0", + // "APs": [ + // {"essid": "Chlivek", "bssid": "88:f7:c7:52:b3:99", "rssi": "204", "enc": "4", "channel": "1"}, + // {"essid": "TyNikdy", "bssid": "5c:f4:ab:0d:f1:1b", "rssi": "164", "enc": "3", "channel": "1"}, + // ] + // } + // }; if (status != 200) { // bad response - rescan(5000); // wait 5sm then retry - return; + rescan(5000) // wait 5sm then retry + return } try { - resp = JSON.parse(resp); + resp = JSON.parse(resp) } catch (e) { - console.log(e); - rescan(5000); - return; + console.log(e) + rescan(5000) + return } - var done = !bool(resp.result.inProgress) && (resp.result.APs.length > 0); - rescan(done ? 
15000 : 1000); - if (!done) return; // no redraw yet + const done = !bool(resp.result.inProgress) && (resp.result.APs.length > 0) + rescan(done ? 15000 : 1000) + if (!done) return // no redraw yet // clear the AP list - var $list = $('#ap-list'); + let $list = $('#ap-list') // remove old APs - $('#ap-list .AP').remove(); + $('#ap-list .AP').remove() - $list.toggleClass('hidden', !done); - $('#ap-loader').toggleClass('hidden', done); + $list.toggleClass('hidden', !done) + $('#ap-loader').toggleClass('hidden', done) // scan done resp.result.APs.sort(function (a, b) { - return b.rssi - a.rssi; + return b.rssi - a.rssi }).forEach(function (ap) { - ap.enc = parseInt(ap.enc); + ap.enc = parseInt(ap.enc) - if (ap.enc > 4) return; // hide unsupported auths + if (ap.enc > 4) return // hide unsupported auths - var item = mk('div'); + let item = mk('div') - var $item = $(item) + let $item = $(item) .data('ssid', ap.essid) .data('pwd', ap.enc) .attr('tabindex', 0) - .addClass('AP'); + .addClass('AP') // mark current SSID if (ap.essid == curSSID) { - $item.addClass('selected'); + $item.addClass('selected') } - var inner = mk('div'); + let inner = mk('div') $(inner).addClass('inner') .htmlAppend('
<div class="rssi">{0}</div>'.format(ap.rssi_perc))
         .htmlAppend('<div class="essid">{0}</div>'.format($.htmlEscape(ap.essid)))
-        .htmlAppend('<div class="auth">{0}</div>'.format(authStr[ap.enc]));
+        .htmlAppend('<div class="auth">{0}</div>
'.format(authStr[ap.enc])) $item.on('click', function () { - var $th = $(this); + let $th = $(this) - var conn_ssid = $th.data('ssid'); - var conn_pass = ''; + const conn_ssid = $th.data('ssid') + let conn_pass = '' if (+$th.data('pwd')) { // this AP needs a password - conn_pass = prompt(tr("wifi.enter_passwd").replace(":ssid:", conn_ssid)); - if (!conn_pass) return; + conn_pass = prompt(tr('wifi.enter_passwd').replace(':ssid:', conn_ssid)) + if (!conn_pass) return } - $('#sta_password').val(conn_pass); - $('#sta_ssid').val(conn_ssid); - selectSta(conn_ssid, conn_pass, ''); - }); + $('#sta_password').val(conn_pass) + $('#sta_ssid').val(conn_ssid) + selectSta(conn_ssid, conn_pass, '') + }) - - item.appendChild(inner); - $list[0].appendChild(item); - }); + item.appendChild(inner) + $list[0].appendChild(item) + }) } - function startScanning() { - $('#ap-loader').removeClass('hidden'); - $('#ap-scan').addClass('hidden'); - $('#ap-loader .anim-dots').html('.'); + function startScanning () { + $('#ap-loader').removeClass('hidden') + $('#ap-scan').addClass('hidden') + $('#ap-loader .anim-dots').html('.') - scanAPs(); + scanAPs() } /** Ask the CGI what APs are visible (async) */ - function scanAPs() { + function scanAPs () { if (_demo) { - onScan(_demo_aps, 200); + onScan(_demo_aps, 200) } else { - $.get('http://' + _root + '/cfg/wifi/scan', onScan); + $.get('http://' + _root + '/cfg/wifi/scan', onScan) } } - function rescan(time) { - setTimeout(scanAPs, time); + function rescan (time) { + setTimeout(scanAPs, time) } /** Set up the WiFi page */ - function wifiInit(cfg) { + function wifiInit (cfg) { // Update slider value displays $('.Row.range').forEach(function (x) { - var inp = x.querySelector('input'); - var disp1 = x.querySelector('.x-disp1'); - var disp2 = x.querySelector('.x-disp2'); - var t = rangePt(inp); - $(disp1).html(t); - $(disp2).html(t); + let inp = x.querySelector('input') + let disp1 = x.querySelector('.x-disp1') + let disp2 = x.querySelector('.x-disp2') + let t = rangePt(inp) + $(disp1).html(t) + $(disp2).html(t) $(inp).on('input', function () { - t = rangePt(inp); - $(disp1).html(t); - $(disp2).html(t); - }); - }); + t = rangePt(inp) + $(disp1).html(t) + $(disp2).html(t) + }) + }) // Forget STA credentials $('#forget-sta').on('click', function () { - selectSta('', '', ''); - return false; - }); + selectSta('', '', '') + return false + }) - selectSta(cfg.sta_ssid, cfg.sta_password, cfg.sta_active_ip); - curSSID = cfg.sta_active_ssid; + selectSta(cfg.sta_ssid, cfg.sta_password, cfg.sta_active_ip) + curSSID = cfg.sta_active_ssid } - w.init = wifiInit; - w.startScanning = startScanning; -})(window.WiFi = {}); + w.init = wifiInit + w.startScanning = startScanning +})(window.WiFi = {}) diff --git a/pages/term.php b/pages/term.php index d2baca1..7376c3f 100644 --- a/pages/term.php +++ b/pages/term.php @@ -1,3 +1,4 @@ + +
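
For reference, a minimal, self-contained sketch of the 2B integer framing from jssrc/utils.js above (it is what loadContent in term_screen.js uses to read the screen size and cursor position). The helpers are copied from the patch; only the round-trip check at the end is new, and the rationale for the +1 offset (keeping every encoded char code non-zero) is an assumption, not stated in the source:

/** Create a character from ASCII code (as in utils.js) */
function Chr (n) { return String.fromCharCode(n) }

/** Decode number from 2B encoding (as in utils.js) */
function parse2B (s, i = 0) {
  return (s.charCodeAt(i++) - 1) + (s.charCodeAt(i) - 1) * 127
}

/** Encode using 2B encoding, returns string (as in utils.js) */
function encode2B (n) {
  let lsb, msb
  lsb = (n % 127)        // low base-127 digit
  n = ((n - lsb) / 127)  // high base-127 digit
  lsb += 1               // shift both digits by 1, giving char codes 1..127
  msb = (n + 1)
  return Chr(lsb) + Chr(msb)
}

// round trip: any value up to 126 + 126 * 127 = 16128 survives;
// the 3B variant adds a third base-127 digit for values up to 2048382
console.log(parse2B(encode2B(1000)) === 1000)   // true
console.log(parse2B(encode2B(16128)) === 16128) // true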