Changes of Linux canary v0.0.102
parent 747890c6ad
commit 3185a0ea9f
3 changed files with 320 additions and 255 deletions

@@ -20,6 +20,8 @@ function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj;

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }

function versionParse(verString) {
return verString.split('.').map(i => parseInt(i));
}
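
The versionNewer helper used by checkForUpdates below is outside this hunk; a minimal sketch of what such a comparison could look like, assuming an element-wise comparison of the arrays produced by versionParse (hypothetical, not part of the diff):

// Hypothetical helper: compares parsed version arrays element-wise; missing parts count as 0.
function versionNewer(latest, current) {
  for (let i = 0; i < Math.max(latest.length, current.length); i++) {
    const a = latest[i] || 0;
    const b = current[i] || 0;
    if (a !== b) return a > b;
  }
  return false;
}

// Example: versionNewer(versionParse('0.0.103'), versionParse('0.0.102')) === true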

@@ -135,42 +137,42 @@ class AutoUpdaterLinux extends _events.EventEmitter {
}

checkForUpdates() {
const currVersion = versionParse(_electron.app.getVersion());
this.emit('checking-for-update');
var _this = this;

_request2.default.get({ url: this.updateUrl, encoding: null }, (error, response, body) => {
if (error) {
console.error('[Updates] Error fetching ' + this.updateUrl + ': ' + error);
this.emit('error', error);
return;
}
return _asyncToGenerator(function* () {
const currVersion = versionParse(_electron.app.getVersion());
_this.emit('checking-for-update');

try {
const response = yield _request2.default.get(_this.updateUrl);

if (response.statusCode === 204) {
// you are up to date
_this.emit('update-not-available');
return;
}

if (response.statusCode === 204) {
// you are up to date
this.emit('update-not-available');
} else if (response.statusCode === 200) {
let latestVerStr = '';
let latestVersion = [];
try {
const latestMetadata = JSON.parse(body);
const latestMetadata = JSON.parse(response.body);
latestVerStr = latestMetadata.name;
latestVersion = versionParse(latestVerStr);
} catch (e) {}
} catch (_) {}

if (versionNewer(latestVersion, currVersion)) {
console.log('[Updates] You are out of date!');
// you need to update
this.emit('update-manually', latestVerStr);
_this.emit('update-manually', latestVerStr);
} else {
console.log('[Updates] You are living in the future!');
this.emit('update-not-available');
_this.emit('update-not-available');
}
} else {
// something is wrong
console.error(`[Updates] Error: fetch returned: ${response.statusCode}`);
this.emit('update-not-available');
} catch (err) {
console.error('[Updates] Error fetching ' + _this.updateUrl + ': ' + err.message);
_this.emit('error', err);
}
});
})();
}
}
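
The hunk above interleaves the removed callback-based path with the added _asyncToGenerator path, which makes it hard to read in isolation; a condensed sketch of the resulting control flow, written with plain async/await for readability (illustrative only, assuming the promise-based request helper introduced in the next file and the versionNewer helper sketched above):

// Illustrative condensation of the new checkForUpdates; not the literal transpiled output.
async function checkForUpdates() {
  const currVersion = versionParse(app.getVersion());
  this.emit('checking-for-update');
  try {
    const response = await request.get(this.updateUrl);
    if (response.statusCode === 204) {
      // 204 means the running build is already current
      this.emit('update-not-available');
    } else if (response.statusCode === 200) {
      let latestVerStr = '';
      let latestVersion = [];
      try {
        latestVerStr = JSON.parse(response.body).name;
        latestVersion = versionParse(latestVerStr);
      } catch (_) {} // malformed metadata falls through as "not newer"
      if (versionNewer(latestVersion, currVersion)) {
        this.emit('update-manually', latestVerStr); // signal that the user must update manually
      } else {
        this.emit('update-not-available');
      }
    } else {
      console.error(`[Updates] Error: fetch returned: ${response.statusCode}`);
      this.emit('update-not-available');
    }
  } catch (err) {
    console.error('[Updates] Error fetching ' + this.updateUrl + ': ' + err.message);
    this.emit('error', err);
  }
}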

@@ -6,6 +6,84 @@ Object.defineProperty(exports, "__esModule", {

var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };

let electronRequest = (() => {
var _ref = _asyncToGenerator(function* ({ method, url, headers, qs, timeout, body, stream }) {
yield _electron.app.whenReady();

const { net, session } = require('electron');
const req = net.request({
method,
url: `${url}${qs != null ? `?${_querystring2.default.stringify(qs)}` : ''}`,
redirect: 'follow',
session: session.defaultSession
});

if (headers != null) {
for (const headerKey of Object.keys(headers)) {
req.setHeader(headerKey, headers[headerKey]);
}
}

if (body != null) {
yield (0, _util.promisify)(req.write)(body, 'utf-8');
}

return new Promise(function (resolve, reject) {
const reqTimeout = setTimeout(function () {
req.abort();
reject(new Error(`network timeout: ${url}`));
}, timeout != null ? timeout : DEFAULT_REQUEST_TIMEOUT);

req.on('login', function (callback) {
return callback();
});

req.on('response', function (response) {
clearTimeout(reqTimeout);
handleHTTPResponse(resolve, reject, response, stream);
});

req.on('error', function (err) {
clearTimeout(reqTimeout);
reject(err);
});

req.end();
});
});

return function electronRequest(_x) {
return _ref.apply(this, arguments);
};
})();

let requestWithMethod = (() => {
var _ref2 = _asyncToGenerator(function* (method, options) {
if (typeof options === 'string') {
options = { url: options };
}

options = _extends({}, options, { method });

try {
return yield electronRequest(options);
} catch (err) {
console.log(`Error downloading with electron net: ${err.message}`);
console.log('Falling back to node net library..');
}

return nodeRequest(options);
});

return function requestWithMethod(_x2, _x3) {
return _ref2.apply(this, arguments);
};
})();

// only supports get for now, since retrying is non-idempotent and
// we'd want to grovel the errors to make sure it's safe to retry


var _electron = require('electron');

var _querystring = require('querystring');

@@ -16,100 +94,90 @@ var _request = require('request');

var _request2 = _interopRequireDefault(_request);

var _events = require('events');

var _events2 = _interopRequireDefault(_events);
var _util = require('util');

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _log(_msg) {
// console.log('[Request] ' + _msg);
function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; }

const DEFAULT_REQUEST_TIMEOUT = 30000;

function makeHTTPResponse({ method, url, headers, statusCode, statusMessage }, body) {
return {
method,
url,
headers,
statusCode,
statusMessage,
body
};
}

function requestWithMethod(method, origOpts, origCallback) {
if (typeof origOpts == 'string') {
origOpts = { url: origOpts };
}

const opts = _extends({}, origOpts, { method });

let callback;
if (origCallback || opts.callback) {
const origOptsCallback = opts.callback;
delete opts.callback;
callback = (...args) => {
if (origCallback) {
origCallback.apply(this, args);
}
if (origOptsCallback) {
origOptsCallback.apply(this, args);
}
};
}

const strictOpts = _extends({}, opts, { strictSSL: true });
const laxOpts = _extends({}, opts, { strictSSL: false });

const rv = new _events2.default();

if (callback) {
_log('have callback, so wrapping');
rv.on('response', response => {
const chunks = [];
response.on('data', chunk => chunks.push(chunk));
response.on('end', () => {
callback(null, response, Buffer.concat(chunks));
});
});
rv.on('error', error => callback(error));
}

const requestTypes = [{
factory: function () {
return (0, _request2.default)(strictOpts);
},
method: 'node_request_strict'
}, {
factory: function () {
const qs = _querystring2.default.stringify(strictOpts.qs);
const nr = _electron.net.request(_extends({}, strictOpts, { url: `${strictOpts.url}?${qs}` }));
nr.end();
return nr;
},
method: 'electron_net_request_strict'
}, {
factory: function () {
return (0, _request2.default)(laxOpts);
},
method: 'node_request_lax'
}];

function attempt(index) {
const { factory, method } = requestTypes[index];
_log(`Attempt #${index + 1}: ${method}`);
factory().on('response', response => {
_log(`${method} success! emitting response ${response}`);
rv.emit('response', response);
}).on('error', error => {
if (index + 1 < requestTypes.length) {
_log(`${method} failure, trying next option`);
attempt(index + 1);
} else {
_log(`${method} failure, out of options`);
rv.emit('error', error);
}
});
}

attempt(0);

return rv;
function makeHTTPStatusError(response) {
const err = new Error(`HTTP Error: Status Code ${response.statusCode}`);
err.response = response;
return err;
}

function handleHTTPResponse(resolve, reject, response, stream) {
const totalBytes = parseInt(response.headers['content-length'] || 1, 10);
let receivedBytes = 0;
const chunks = [];

// don't stream response if it's a failure
if (response.statusCode >= 300) {
stream = null;
}

response.on('data', chunk => {
if (stream != null) {
receivedBytes += chunk.length;
stream.write(chunk);
stream.emit('progress', { totalBytes, receivedBytes });
return;
}

chunks.push(chunk);
});

response.on('end', () => {
if (stream != null) {
stream.on('finish', () => resolve(makeHTTPResponse(response, null)));
stream.end();
return;
}

const res = makeHTTPResponse(response, Buffer.concat(chunks));

if (res.statusCode >= 300) {
reject(makeHTTPStatusError(res));
return;
}

resolve(res);
});
}
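
handleHTTPResponse above writes chunks to the optional stream and emits a synthetic 'progress' event on it; a minimal download-to-file sketch, assuming this module is required from its own file (the path and URL below are illustrative, not taken from the diff):

// Hypothetical usage of the stream option (path and URL are illustrative).
const fs = require('fs');
const request = require('./request').default; // assumed export of this module

async function downloadTo(url, destPath) {
  const stream = fs.createWriteStream(destPath);
  // 'progress' is emitted on the write stream by handleHTTPResponse above.
  stream.on('progress', ({ receivedBytes, totalBytes }) => {
    console.log(`downloaded ${receivedBytes}/${totalBytes} bytes`);
  });
  // When a stream is supplied, the resolved response has body === null;
  // the payload has already been written to destPath.
  const response = await request.get({ url, stream });
  return response.statusCode;
}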

function nodeRequest({ method, url, headers, qs, timeout, body, stream }) {
return new Promise((resolve, reject) => {
const req = (0, _request2.default)({
method,
url,
qs,
headers,
followAllRedirects: true,
encoding: null,
timeout: timeout != null ? timeout : DEFAULT_REQUEST_TIMEOUT,
body
});

req.on('response', response => handleHTTPResponse(resolve, reject, response, stream));
req.on('error', err => reject(err));
});
}

// only supports get for now, since retrying is non-idempotent and
// we'd want to grovel the errors to make sure it's safe to retry
for (const method of ['get']) {
requestWithMethod[method] = requestWithMethod.bind(null, method);
requestWithMethod[method] = requestWithMethod.bind(null, method.toUpperCase());
}

exports.default = requestWithMethod;
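
The exported helper resolves with the full response object on success and rejects on network errors or non-2xx statuses; a minimal usage sketch (the URL and query values are illustrative, not taken from the diff):

// Hypothetical caller of the exported request module (URL and query are illustrative).
const request = require('./request').default;

async function fetchVersions() {
  const response = await request.get({
    url: 'https://example.invalid/versions.json',
    qs: { platform: 'linux' },
    timeout: 15000
  });
  // response: { method, url, headers, statusCode, statusMessage, body }
  return JSON.parse(response.body);
}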

@@ -5,11 +5,145 @@ Object.defineProperty(exports, "__esModule", {
});
exports.supportsEventObjects = exports.events = exports.NO_PENDING_UPDATES = exports.INSTALLING_MODULE_PROGRESS = exports.INSTALLING_MODULE = exports.INSTALLING_MODULES_FINISHED = exports.DOWNLOADED_MODULE = exports.UPDATE_MANUALLY = exports.DOWNLOADING_MODULES_FINISHED = exports.DOWNLOADING_MODULE_PROGRESS = exports.DOWNLOADING_MODULE = exports.UPDATE_CHECK_FINISHED = exports.INSTALLED_MODULE = exports.CHECKING_FOR_UPDATES = undefined;

var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; // Manages additional module installation and management.
// We add the module folder path to require() lookup paths here.
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };

// undocumented node API
let checkForModuleUpdates = (() => {
var _ref = _asyncToGenerator(function* () {
const query = _extends({}, remoteQuery, { _: Math.floor(Date.now() / 1000 / 60 / 5) });
const url = `${remoteBaseURL}/versions.json`;
logger.log(`Checking for module updates at ${url}`);

let response;
try {
response = yield request.get({ url, qs: query, timeout: REQUEST_TIMEOUT });
checkingForUpdates = false;
} catch (err) {
checkingForUpdates = false;
logger.log(`Failed fetching module versions: ${String(err)}`);

events.append({
type: UPDATE_CHECK_FINISHED,
succeeded: false,
updateCount: 0,
manualRequired: false
});

return;
}

remoteModuleVersions = JSON.parse(response.body);
if (settings.get(USE_LOCAL_MODULE_VERSIONS)) {
try {
remoteModuleVersions = JSON.parse(_fs2.default.readFileSync(localModuleVersionsFilePath));
console.log('Using local module versions: ', remoteModuleVersions);
} catch (err) {
console.warn('Failed to parse local module versions: ', err);
}
}

const updatesToDownload = [];
for (const moduleName of Object.keys(installedModules)) {
const installedModule = installedModules[moduleName];
const installed = installedModule.installedVersion;
if (installed === null) {
continue;
}

const update = installedModule.updateVersion || 0;
const remote = remoteModuleVersions[getRemoteModuleName(moduleName)] || 0;
if (installed !== remote && update !== remote) {
logger.log(`Module update available: ${moduleName}@${remote} [installed: ${installed}]`);
updatesToDownload.push({ name: moduleName, version: remote });
}
}

events.append({
type: UPDATE_CHECK_FINISHED,
succeeded: true,
updateCount: updatesToDownload.length,
manualRequired: false
});

if (updatesToDownload.length === 0) {
logger.log(`No module updates available.`);
} else {
updatesToDownload.forEach(function (e) {
return addModuleToDownloadQueue(e.name, e.version);
});
}
});

return function checkForModuleUpdates() {
return _ref.apply(this, arguments);
};
})();
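
The `_` query parameter above is a time-bucketed cache-buster, presumably so cached copies of versions.json are reused for at most about five minutes; a minimal illustration of the arithmetic (illustrative only):

// Date.now() is in milliseconds, so dividing by 1000 / 60 / 5 counts whole
// 5-minute intervals since the epoch; the query value therefore only changes
// every 5 minutes, forcing a fresh fetch once the interval rolls over.
const cacheBucket = Math.floor(Date.now() / 1000 / 60 / 5);
console.log(`versions.json?_=${cacheBucket}`);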

let processDownloadQueue = (() => {
var _ref2 = _asyncToGenerator(function* () {
if (download.active) return;
if (download.queue.length === 0) return;

download.active = true;

const queuedModule = download.queue[download.next];
download.next += 1;

events.append({
type: DOWNLOADING_MODULE,
name: queuedModule.name,
current: download.next,
total: download.queue.length,
foreground: !runningInBackground
});

let progress = 0;
let receivedBytes = 0;

const url = `${remoteBaseURL}/${encodeURIComponent(getRemoteModuleName(queuedModule.name))}/${encodeURIComponent(queuedModule.version)}`;
logger.log(`Fetching ${queuedModule.name}@${queuedModule.version} from ${url}`);
const headers = {};
if (queuedModule.authToken) {
headers['Authorization'] = queuedModule.authToken;
}

const moduleZipPath = _path2.default.join(moduleDownloadPath, `${queuedModule.name}-${queuedModule.version}.zip`);
const stream = _fs2.default.createWriteStream(moduleZipPath);
stream.on('progress', function ({ receivedBytes: newReceivedBytes, totalBytes }) {
receivedBytes = newReceivedBytes;
const newProgress = Math.min(Math.floor(100 * (receivedBytes / totalBytes)), 100);
if (progress !== newProgress) {
progress = newProgress;
logger.log(`Streaming ${queuedModule.name}@${queuedModule.version} to ${moduleZipPath}: ${progress}%`);
events.append({
type: DOWNLOADING_MODULE_PROGRESS,
name: queuedModule.name,
progress: progress
});
}
});

logger.log(`Streaming ${queuedModule.name}@${queuedModule.version} to ${moduleZipPath}`);

try {
const response = yield request.get({
url,
qs: remoteQuery,
headers,
timeout: REQUEST_TIMEOUT,
stream
});

finishModuleDownload(queuedModule.name, queuedModule.version, moduleZipPath, receivedBytes, response.statusCode === 200);
} catch (err) {
logger.log(`Failed fetching module ${queuedModule.name}@${queuedModule.version}: ${String(err)}`);
finishModuleDownload(queuedModule.name, queuedModule.version, null, false);
}
});

return function processDownloadQueue() {
return _ref2.apply(this, arguments);
};
})();

exports.initPathsOnly = initPathsOnly;
exports.init = init;

@@ -57,6 +191,12 @@ function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj;

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; } // Manages additional module installation and management.
// We add the module folder path to require() lookup paths here.

// undocumented node API


const originalFs = require('original-fs');

// events

@@ -443,156 +583,11 @@ function getRemoteModuleName(name) {
return name;
}

function checkForModuleUpdates() {
const query = _extends({}, remoteQuery, { _: Math.floor(Date.now() / 1000 / 60 / 5) });
const url = `${remoteBaseURL}/versions.json`;
logger.log(`Checking for module updates at ${url}`);

request.get({
url,
agent: false,
encoding: null,
qs: query,
timeout: REQUEST_TIMEOUT,
strictSSL: false
}, (err, response, body) => {
checkingForUpdates = false;

if (!err && response.statusCode !== 200) {
err = new Error(`Non-200 response code: ${response.statusCode}`);
}

if (err) {
logger.log(`Failed fetching module versions: ${String(err)}`);
events.append({
type: UPDATE_CHECK_FINISHED,
succeeded: false,
updateCount: 0,
manualRequired: false
});
return;
}

remoteModuleVersions = JSON.parse(body);
if (settings.get(USE_LOCAL_MODULE_VERSIONS)) {
try {
remoteModuleVersions = JSON.parse(_fs2.default.readFileSync(localModuleVersionsFilePath));
console.log('Using local module versions: ', remoteModuleVersions);
} catch (err) {
console.warn('Failed to parse local module versions: ', err);
}
}

const updatesToDownload = [];
for (const moduleName of Object.keys(installedModules)) {
const installedModule = installedModules[moduleName];
const installed = installedModule.installedVersion;
if (installed === null) {
continue;
}

const update = installedModule.updateVersion || 0;
const remote = remoteModuleVersions[getRemoteModuleName(moduleName)] || 0;
// TODO: strict equality?
if (installed != remote && update != remote) {
logger.log(`Module update available: ${moduleName}@${remote} [installed: ${installed}]`);
updatesToDownload.push({ name: moduleName, version: remote });
}
}

events.append({
type: UPDATE_CHECK_FINISHED,
succeeded: true,
updateCount: updatesToDownload.length,
manualRequired: false
});
if (updatesToDownload.length === 0) {
logger.log(`No module updates available.`);
} else {
updatesToDownload.forEach(e => addModuleToDownloadQueue(e.name, e.version));
}
});
}

function addModuleToDownloadQueue(name, version, authToken) {
download.queue.push({ name, version, authToken });
process.nextTick(() => processDownloadQueue());
}

function processDownloadQueue() {
if (download.active) return;
if (download.queue.length === 0) return;

download.active = true;

const queuedModule = download.queue[download.next];
download.next += 1;

events.append({
type: DOWNLOADING_MODULE,
name: queuedModule.name,
current: download.next,
total: download.queue.length,
foreground: !runningInBackground
});

let totalBytes = 1;
let receivedBytes = 0;
let progress = 0;
let hasErrored = false;

const url = `${remoteBaseURL}/${getRemoteModuleName(queuedModule.name)}/${queuedModule.version}`;
logger.log(`Fetching ${queuedModule.name}@${queuedModule.version} from ${url}`);
const headers = {};
if (queuedModule.authToken) {
headers['Authorization'] = queuedModule.authToken;
}
request.get({
url,
agent: false,
encoding: null,
followAllRedirects: true,
qs: remoteQuery,
timeout: REQUEST_TIMEOUT,
strictSSL: false,
headers
}).on('error', err => {
if (hasErrored) return;
hasErrored = true;
logger.log(`Failed fetching ${queuedModule.name}@${queuedModule.version}: ${String(err)}`);
finishModuleDownload(queuedModule.name, queuedModule.version, null, false);
}).on('response', response => {
totalBytes = response.headers['content-length'] || 1;

const moduleZipPath = _path2.default.join(moduleDownloadPath, `${queuedModule.name}-${queuedModule.version}.zip`);
logger.log(`Streaming ${queuedModule.name}@${queuedModule.version} [${totalBytes} bytes] to ${moduleZipPath}`);

const stream = _fs2.default.createWriteStream(moduleZipPath);
stream.on('finish', () => finishModuleDownload(queuedModule.name, queuedModule.version, moduleZipPath, receivedBytes, response.statusCode === 200));

response.on('data', chunk => {
receivedBytes += chunk.length;
stream.write(chunk);

const fraction = receivedBytes / totalBytes;
const newProgress = Math.min(Math.floor(100 * fraction), 100);
if (progress != newProgress) {
progress = newProgress;
events.append({
type: DOWNLOADING_MODULE_PROGRESS,
name: queuedModule.name,
progress: progress
});
}
});

// TODO: on response error
// TODO: on stream error

response.on('end', () => stream.end());
});
}

function commitInstalledModules() {
const data = JSON.stringify(installedModules, null, 2);
_fs2.default.writeFileSync(installedModulesFilePath, data);