mirror of https://github.com/quine-global/hyper.git (synced 2026-01-12 20:18:41 -09:00)
Performance - Data batching (#3336)
* Bumping electron to 3.0.10
* Updating node version in Travis and AppVeyor
* Fixing incorrect require of electron-fetch
* Fix zoom to match previous versions. Additionally, removing a call to disable pinch-zoom; it's disabled by default since Electron 2 (https://electronjs.org/releases#2.0.0)
* Bumping electron to 4.0.0-beta.8
* Bumping electron to 4.0.0-beta.9
* Workaround for Copy accelerator not firing on Electron v4
* Batch session data before sending it to the renderer (see the sketch below)
* Fix linting issues
* Fixing header/titlebar on macOS
* Upgrading to electron 4.0.0 and node-pty 0.8.0
* Adding yarn.lock changes for electron 4.0.0
* Adding comments for the editor:copy workaround; the scaling issue only occurs on Linux
* Upgrading node-abi to support electron 4.0.0
* Adding isDestroyed check
This commit is contained in:
parent 7a40fd7c97
commit c07700af49
3 changed files with 67 additions and 8 deletions
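The central change is data batching: instead of one IPC message per pty chunk, output is accumulated for up to 16 ms (or roughly 200 KB) and forwarded as a single message. A rough sketch of that idea, using illustrative names only (the real implementation is the DataBatcher class in the diff below):

function makeBatcher(send, intervalMs = 16) {
  // Illustrative only: coalesce many small writes into one delivery per interval.
  let buffered = '';
  let timer = null;
  return chunk => {
    buffered += chunk;
    if (!timer) {
      timer = setTimeout(() => {
        const out = buffered;
        buffered = '';
        timer = null;
        send(out); // one IPC call covering every chunk seen this interval
      }, intervalMs);
    }
  };
}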
@@ -34,7 +34,11 @@ class Server extends EventEmitter {
   }
 
   emit(ch, data) {
-    this.wc.send(this.id, {ch, data});
+    // This check is needed because data-batching can cause extra data to be
+    // emitted after the window has already closed
+    if (!this.win.isDestroyed()) {
+      this.wc.send(this.id, {ch, data});
+    }
   }
 
   destroy() {
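The guard above exists because flushes are timer-driven, so Server.emit can run after the BrowserWindow backing this webContents has already been closed, and sending on a destroyed window can throw. A minimal sketch of the same pattern with hypothetical names (win is assumed to be an Electron BrowserWindow; this is not the repo's code):

// Sketch: a deferred send that becomes a no-op once the window is gone.
// The timeout stands in for a batched flush arriving late.
function sendLater(win, channel, payload, delayMs = 16) {
  setTimeout(() => {
    if (!win.isDestroyed()) {
      win.webContents.send(channel, payload);
    }
  }, delayMs);
}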
@@ -21,8 +21,60 @@ try {
 
 const envFromConfig = config.getConfig().env || {};
 
+// Max duration to batch session data before sending it to the renderer process.
+const BATCH_DURATION_MS = 16;
+
+// Max size of a session data batch. Note that this value can be exceeded by ~4k
+// (chunk sizes seem to be 4k at the most)
+const BATCH_MAX_SIZE = 200 * 1024;
+
+// Data coming from the pty is sent to the renderer process for further
+// vt parsing and rendering. This class batches data to minimize the number of
+// IPC calls. It also reduces GC pressure and CPU cost: each chunk is prefixed
+// with the window ID which is then stripped on the renderer process and this
+// overhead is reduced with batching.
+class DataBatcher extends EventEmitter {
+  constructor(uid) {
+    super();
+    this.uid = uid;
+    this.decoder = new StringDecoder('utf8');
+
+    this.reset();
+  }
+
+  reset() {
+    this.data = this.uid;
+    this.timeout = null;
+  }
+
+  write(chunk) {
+    if (this.data.length + chunk.length >= BATCH_MAX_SIZE) {
+      // We've reached the max batch size. Flush it and start another one
+      if (this.timeout) {
+        clearTimeout(this.timeout);
+        this.timeout = null;
+      }
+      this.flush();
+    }
+
+    this.data += this.decoder.write(chunk);
+
+    if (!this.timeout) {
+      this.timeout = setTimeout(() => this.flush(), BATCH_DURATION_MS);
+    }
+  }
+
+  flush() {
+    // Reset before emitting to allow for potential reentrancy
+    const data = this.data;
+    this.reset();
+
+    this.emit('flush', data);
+  }
+}
+
 module.exports = class Session extends EventEmitter {
-  constructor({rows, cols: columns, cwd, shell, shellArgs}) {
+  constructor({uid, rows, cols: columns, cwd, shell, shellArgs}) {
     const osLocale = require('os-locale');
     super();
     const baseEnv = Object.assign(
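Since DataBatcher only depends on EventEmitter and StringDecoder, its behaviour is easy to exercise on its own. A usage sketch, assuming the class above is in scope and using a made-up uid:

// Writes landing within BATCH_DURATION_MS are coalesced into one 'flush',
// prefixed with the uid handed to the constructor.
const batcher = new DataBatcher('abcd1234'); // made-up uid
batcher.on('flush', data => {
  console.log(data); // 'abcd1234helloworld' once the 16ms timer fires
});
batcher.write(Buffer.from('hello'));
batcher.write(Buffer.from('world'));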
@@ -45,8 +97,6 @@ module.exports = class Session extends EventEmitter {
       delete baseEnv.GOOGLE_API_KEY;
     }
 
-    const decoder = new StringDecoder('utf8');
-
     const defaultShellArgs = ['--login'];
 
     try {
@@ -64,11 +114,16 @@ module.exports = class Session extends EventEmitter {
       }
     }
 
-    this.pty.on('data', data => {
+    this.batcher = new DataBatcher(uid);
+    this.pty.on('data', chunk => {
       if (this.ended) {
         return;
       }
-      this.emit('data', decoder.write(data));
+      this.batcher.write(chunk);
     });
 
+    this.batcher.on('flush', data => {
+      this.emit('data', data);
+    });
+
     this.pty.on('exit', () => {
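Note that the pty handler now forwards raw chunks and leaves decoding to the batcher's StringDecoder, which matters because a multi-byte UTF-8 sequence can straddle two chunks. A small stand-alone illustration of that Node behaviour (not code from this repo):

const {StringDecoder} = require('string_decoder');
// StringDecoder buffers the partial bytes of a split code point instead of
// emitting replacement characters.
const decoder = new StringDecoder('utf8');
const bytes = Buffer.from('é', 'utf8');           // two bytes: 0xc3 0xa9
console.log(decoder.write(bytes.subarray(0, 1))); // '' (waits for the rest)
console.log(decoder.write(bytes.subarray(1)));    // 'é'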
@@ -111,7 +111,7 @@ module.exports = class Window {
     function createInitialSession() {
       let {session, options} = createSession();
       const initialEvents = [];
-      const handleData = data => initialEvents.push(['session data', options.uid + data]);
+      const handleData = data => initialEvents.push(['session data', data]);
       const handleExit = () => initialEvents.push(['session exit']);
       session.on('data', handleData);
       session.on('exit', handleExit);
@@ -148,7 +148,7 @@ module.exports = class Window {
     }
 
     session.on('data', data => {
-      rpc.emit('session data', options.uid + data);
+      rpc.emit('session data', data);
     });
 
     session.on('exit', () => {
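With the batcher prepending the uid once per batch, the window code above no longer adds it per event; the renderer is then expected to strip the prefix itself. That renderer code is not part of this diff, but a hypothetical split, assuming the uid has a known fixed length, could look like:

// Hypothetical receiving side (not from this commit): peel the uid off a batch.
const UID_LENGTH = 36; // assumption about the uid's length, purely for illustration
function splitBatch(payload) {
  return {
    uid: payload.slice(0, UID_LENGTH),
    data: payload.slice(UID_LENGTH)
  };
}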