// Bundled plugin output (generated JavaScript, ~9799 lines / ~1.1 MiB).
'use strict';
|
|||
|
|
|||
|
var obsidian = require('obsidian');
|
|||
|
var require$$0$1 = require('path');
|
|||
|
var require$$0 = require('fs');
|
|||
|
var process$2 = require('node:process');
|
|||
|
var require$$0$2 = require('child_process');
|
|||
|
var require$$0$3 = require('os');
|
|||
|
var require$$0$4 = require('assert');
|
|||
|
var require$$2 = require('events');
|
|||
|
var require$$0$6 = require('buffer');
|
|||
|
var require$$0$5 = require('stream');
|
|||
|
var require$$2$1 = require('util');
|
|||
|
var node_os = require('node:os');
|
|||
|
var node_buffer = require('node:buffer');
|
|||
|
require('electron');
|
|||
|
|
|||
|
/******************************************************************************
|
|||
|
Copyright (c) Microsoft Corporation.
|
|||
|
|
|||
|
Permission to use, copy, modify, and/or distribute this software for any
|
|||
|
purpose with or without fee is hereby granted.
|
|||
|
|
|||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
|||
|
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
|||
|
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
|||
|
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
|||
|
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
|||
|
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
|||
|
PERFORMANCE OF THIS SOFTWARE.
|
|||
|
***************************************************************************** */
|
|||
|
/* global Reflect, Promise */
|
|||
|
|
|||
|
/**
 * TypeScript `tslib` helper (bundled output): copies static members from
 * `b` onto `d`. On first call it picks the best supported strategy —
 * Object.setPrototypeOf, the legacy `__proto__` assignment, or a manual
 * own-property copy — and memoizes that choice by reassigning itself.
 */
var extendStatics = function(d, b) {
    extendStatics = Object.setPrototypeOf ||
        ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
        function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
    return extendStatics(d, b);
};

/**
 * tslib `__extends`: classic ES5 inheritance. Links `d`'s statics to `b`,
 * then sets `d.prototype` to an object whose prototype is `b.prototype`
 * while keeping `d` registered as the `constructor`.
 * Throws TypeError when `b` is neither a constructor nor null.
 */
function __extends(d, b) {
    if (typeof b !== "function" && b !== null)
        throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
    extendStatics(d, b);
    function __() { this.constructor = d; }
    d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
}
|
|||
|
|
|||
|
/**
 * tslib `__awaiter`: ES5 down-compilation of an `async` function body.
 * Drives `generator` to completion, adopting every yielded value into a
 * Promise (`P`, defaulting to the global Promise) and resuming the
 * generator with each settled value. Returns a Promise for the generator's
 * final return value; rejections are funneled back in via `throw`.
 */
function __awaiter(thisArg, _arguments, P, generator) {
    // Wrap non-Promise yields so `.then` is always available.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Re-enter the generator with an exception when an awaited value rejects.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
}
|
|||
|
|
|||
|
/**
 * tslib `__generator`: the ES5 state machine emitted for `function*`
 * bodies. `_.label` is the resume point inside `body`; `_.trys` holds the
 * active protected regions as [try, catch, finally, end] label tuples;
 * `_.ops` is a stack of pending completion records.
 * Opcodes carried in op[0]: 0 next, 1 throw, 2 return, 4 yield,
 * 5 delegated yield*, 6 exception entering catch, 7 end-of-finally.
 * (Compiler-generated helper — logic is order-critical, edit with care.)
 */
function __generator(thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    function verb(n) { return function (v) { return step([n, v]); }; }
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while (g && (g = 0, op[0] && (_ = 0)), _) try {
            // While a yield* is active, forward next/throw/return to the inner iterator `y`.
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    // Resolve jumps/returns/exceptions against the innermost try region.
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
}
|
|||
|
|
|||
|
/**
 * tslib `__values`: returns an iterator over `o`.
 * Prefers the object's native Symbol.iterator; otherwise falls back to
 * index-based iteration for array-likes (anything with a numeric `length`).
 * Throws TypeError for values that are iterable in neither sense.
 */
function __values(o) {
    var symbolIter = typeof Symbol === "function" && Symbol.iterator;
    var nativeMethod = symbolIter && o[symbolIter];
    if (nativeMethod) {
        return nativeMethod.call(o);
    }
    if (o && typeof o.length === "number") {
        var index = 0;
        return {
            next: function () {
                // Drop the reference once exhausted so `done` flips to true.
                if (o && index >= o.length) {
                    o = void 0;
                }
                return { value: o && o[index++], done: !o };
            }
        };
    }
    throw new TypeError(symbolIter ? "Object is not iterable." : "Symbol.iterator is not defined.");
}
|
|||
|
|
|||
|
/**
 * tslib `__read`: reads up to `n` values (all when `n` is undefined) from
 * an iterable into a fresh array — the ES5 lowering of array destructuring.
 * Non-iterable inputs are returned unchanged. The iterator's `return`
 * method is invoked on early exit, and any iteration error is rethrown
 * after cleanup.
 */
function __read(o, n) {
    var method = typeof Symbol === "function" && o[Symbol.iterator];
    if (!method) return o;
    var iterator = method.call(o);
    var step;
    var collected = [];
    var failure;
    try {
        while ((n === void 0 || n-- > 0) && !(step = iterator.next()).done) {
            collected.push(step.value);
        }
    }
    catch (error) { failure = { error: error }; }
    finally {
        try {
            // Close the iterator if we stopped before exhaustion.
            if (step && !step.done && (method = iterator["return"])) {
                method.call(iterator);
            }
        }
        finally { if (failure) throw failure.error; }
    }
    return collected;
}
|
|||
|
|
|||
|
/**
 * tslib `__spreadArray`: concatenates `from` onto `to` (ES5 lowering of
 * spread in array literals). When `pack` is truthy — or the two-argument
 * form is used — holes in a sparse `from` are materialized as `undefined`;
 * otherwise a plain slice-and-concat is performed.
 */
function __spreadArray(to, from, pack) {
    if (pack || arguments.length === 2) {
        var copied;
        for (var idx = 0, total = from.length; idx < total; idx++) {
            // Start copying lazily: only allocate once a hole is seen.
            if (copied || !(idx in from)) {
                if (!copied) copied = Array.prototype.slice.call(from, 0, idx);
                copied[idx] = from[idx];
            }
        }
        return to.concat(copied || Array.prototype.slice.call(from));
    }
    return to.concat(Array.prototype.slice.call(from));
}
|
|||
|
|
|||
|
/**
 * tslib `__asyncValues`: obtains an async iterator over `o` (used by
 * `for await ... of` lowerings). Prefers the native Symbol.asyncIterator;
 * otherwise adapts a synchronous iterator, resolving each step's value
 * through a Promise. Throws when the runtime lacks Symbol.asyncIterator.
 */
function __asyncValues(o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    // Each verb forwards to the sync iterator and settles via a Promise.
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
}
|
|||
|
|
|||
|
// Best-available global object across environments (modern runtimes,
// browsers, Node, workers — in that order of preference).
var commonjsGlobal =
    typeof globalThis !== 'undefined' ? globalThis :
    typeof window !== 'undefined' ? window :
    typeof global !== 'undefined' ? global :
    typeof self !== 'undefined' ? self : {};

/**
 * Unwraps a CommonJS interop namespace: when `x` is a transpiled ES module
 * carrying an own `default` export, return that export; otherwise return
 * `x` unchanged (including null/undefined).
 */
function getDefaultExportFromCjs (x) {
    if (x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default')) {
        return x['default'];
    }
    return x;
}

// Module shells for the vendored packages below (CommonJS-style exports).
var execa$2 = {exports: {}};

var crossSpawn$1 = {exports: {}};
|
|||
|
|
|||
|
var windows;
var hasRequiredWindows;

/**
 * Lazily initializes the Windows variant of `isexe` (vendored from the
 * `isexe` package). On Windows, a file is considered executable when it is
 * a regular file (or symlink) whose name ends with one of the extensions
 * in `options.pathExt` / the PATHEXT environment variable.
 * Subsequent calls return the memoized module exports.
 */
function requireWindows () {
    if (hasRequiredWindows) return windows;
    hasRequiredWindows = 1;
    windows = isexe;
    isexe.sync = sync;

    var fs = require$$0;

    // True when `path` ends with one of the `;`-separated extensions.
    // An unset list or an empty entry means "any file is executable".
    function checkPathExt (path, options) {
        var pathext = options.pathExt !== undefined ?
            options.pathExt : process.env.PATHEXT;

        if (!pathext) {
            return true
        }

        pathext = pathext.split(';');
        if (pathext.indexOf('') !== -1) {
            return true
        }
        for (var i = 0; i < pathext.length; i++) {
            var p = pathext[i].toLowerCase();
            // String#substr is deprecated (Annex B); slice(-n) takes the
            // same trailing n characters (p is non-empty here).
            if (p && path.slice(-p.length).toLowerCase() === p) {
                return true
            }
        }
        return false
    }

    // Only regular files and symlinks can be executable.
    function checkStat (stat, path, options) {
        if (!stat.isSymbolicLink() && !stat.isFile()) {
            return false
        }
        return checkPathExt(path, options)
    }

    // Async form: stat errors are passed through to the callback.
    function isexe (path, options, cb) {
        fs.stat(path, function (er, stat) {
            cb(er, er ? false : checkStat(stat, path, options));
        });
    }

    function sync (path, options) {
        return checkStat(fs.statSync(path), path, options)
    }
    return windows;
}
|
|||
|
|
|||
|
var mode;
var hasRequiredMode;

/**
 * Lazily initializes the POSIX variant of `isexe` (vendored from the
 * `isexe` package): executability is decided from the file's permission
 * bits relative to the current (or supplied) uid/gid.
 * Subsequent calls return the memoized module exports.
 */
function requireMode () {
    if (hasRequiredMode) return mode;
    hasRequiredMode = 1;
    mode = isexe;
    isexe.sync = sync;

    var fs = require$$0;

    // Async form: stat errors are passed through to the callback.
    function isexe (path, options, cb) {
        fs.stat(path, function (er, stat) {
            cb(er, er ? false : checkStat(stat, options));
        });
    }

    function sync (path, options) {
        return checkStat(fs.statSync(path), options)
    }

    // Evaluates the exec permission bits. Note: the result is truthy/falsy
    // (a bitmask number), not a strict boolean — callers rely only on
    // truthiness, so this quirk is preserved.
    function checkMode (stat, options) {
        var fileMode = stat.mode;
        var fileUid = stat.uid;
        var fileGid = stat.gid;

        // Caller-supplied ids win; otherwise use the current process's.
        var myUid = options.uid !== undefined ?
            options.uid : process.getuid && process.getuid();
        var myGid = options.gid !== undefined ?
            options.gid : process.getgid && process.getgid();

        var ownerExec = parseInt('100', 8);
        var groupExec = parseInt('010', 8);
        var otherExec = parseInt('001', 8);
        var ownerOrGroup = ownerExec | groupExec;

        var result = (fileMode & otherExec) ||
            (fileMode & groupExec) && fileGid === myGid ||
            (fileMode & ownerExec) && fileUid === myUid ||
            (fileMode & ownerOrGroup) && myUid === 0;

        return result
    }

    // Directories and special files are never executable commands.
    function checkStat (stat, options) {
        return stat.isFile() && checkMode(stat, options)
    }
    return mode;
}
|
|||
|
|
|||
|
var core$1;
// Select the platform-appropriate implementation once, at load time.
// TESTING_WINDOWS lets the test suite force the Windows code path.
if (process.platform === 'win32' || commonjsGlobal.TESTING_WINDOWS) {
    core$1 = requireWindows();
} else {
    core$1 = requireMode();
}

var isexe_1 = isexe$1;
isexe$1.sync = sync;

/**
 * Checks whether `path` is executable. Supports both callback style
 * (`isexe(path, opts, cb)` / `isexe(path, cb)`) and Promise style (no cb).
 * EACCES — and any error when `options.ignoreErrors` is set — is reported
 * as "not executable" rather than an error.
 */
function isexe$1 (path, options, cb) {
    // Allow (path, cb) with no options object.
    if (typeof options === 'function') {
        cb = options;
        options = {};
    }

    if (!cb) {
        if (typeof Promise !== 'function') {
            throw new TypeError('callback not provided')
        }

        // Promise mode: re-enter ourselves with an adapter callback.
        return new Promise(function (resolve, reject) {
            isexe$1(path, options || {}, function (er, is) {
                er ? reject(er) : resolve(is);
            });
        })
    }

    core$1(path, options || {}, function (er, is) {
        // Ignore EACCES because that just means we aren't allowed to run it.
        if (er && (er.code === 'EACCES' || options && options.ignoreErrors)) {
            er = null;
            is = false;
        }
        cb(er, is);
    });
}

/**
 * Synchronous variant: returns false on EACCES (or any error when
 * `options.ignoreErrors` is set), rethrows everything else.
 */
function sync (path, options) {
    try {
        return core$1.sync(path, options || {})
    } catch (er) {
        if (options && options.ignoreErrors || er.code === 'EACCES') {
            return false
        }
        throw er
    }
}
|
|||
|
|
|||
|
// Vendored `which`: locate a command on the PATH, honoring PATHEXT on
// Windows. Cygwin/MSYS shells on Windows are treated as Windows too.
const isWindows = process.platform === 'win32' ||
    process.env.OSTYPE === 'cygwin' ||
    process.env.OSTYPE === 'msys';

const path$3 = require$$0$1;
// Separator between PATH entries: ';' on Windows, ':' elsewhere.
const COLON = isWindows ? ';' : ':';
const isexe = isexe_1;

// ENOENT-flavored error thrown/rejected when the command is found nowhere.
const getNotFoundError = (cmd) =>
    Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' });
|
|||
|
|
|||
|
/**
 * Computes the search configuration for a command lookup:
 *  - pathEnv:    directories to search; just [''] when `cmd` contains a
 *                path separator (only the literal path is checked),
 *  - pathExt:    candidate extensions to append (PATHEXT on Windows,
 *                [''] elsewhere),
 *  - pathExtExe: the raw PATHEXT string handed down to `isexe`.
 */
const getPathInfo = (cmd, opt) => {
    const colon = opt.colon || COLON;

    // A slash (or backslash on Windows) means: skip the PATH search and
    // just check the file itself.
    const hasPathSep = cmd.match(/\//) || isWindows && cmd.match(/\\/);
    const pathEnv = hasPathSep ? ['']
        : (
            [
                // Windows always checks the cwd first.
                ...(isWindows ? [process.cwd()] : []),
                ...(opt.path || process.env.PATH ||
                    /* istanbul ignore next: very unusual */ '').split(colon),
            ]
        );
    const pathExtExe = isWindows
        ? opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM'
        : '';
    const pathExt = isWindows ? pathExtExe.split(colon) : [''];

    if (isWindows) {
        // A command that already carries an extension should also be tried
        // verbatim, before any PATHEXT suffix is appended.
        if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
            pathExt.unshift('');
    }

    return {
        pathEnv,
        pathExt,
        pathExtExe,
    }
};
|
|||
|
|
|||
|
/**
 * Asynchronously searches the PATH for `cmd`.
 * Resolves with the first executable match — or every match when
 * `opt.all` is set — and rejects with an ENOENT-style error when nothing
 * is found. Supports both Promise style and callback style (`cb`).
 */
const which$1 = (cmd, opt, cb) => {
    // Allow which(cmd, cb) with no options object.
    if (typeof opt === 'function') {
        cb = opt;
        opt = {};
    }
    if (!opt)
        opt = {};

    const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
    const found = [];

    // Checks PATH entry `i`, recursing into subStep for each extension.
    const step = i => new Promise((resolve, reject) => {
        // All entries exhausted: settle with the collected matches or ENOENT.
        if (i === pathEnv.length)
            return opt.all && found.length ? resolve(found)
                : reject(getNotFoundError(cmd))

        const ppRaw = pathEnv[i];
        // PATH entries may be wrapped in quotes (Windows); strip them.
        const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw;

        const pCmd = path$3.join(pathPart, cmd);
        // Re-attach an explicit "./" or ".\" prefix that path.join strips.
        const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd
            : pCmd;

        resolve(subStep(p, i, 0));
    });

    // Tries candidate `p` with extension index `ii`; moves to the next
    // extension on miss and to the next PATH entry when exhausted.
    const subStep = (p, i, ii) => new Promise((resolve, reject) => {
        if (ii === pathExt.length)
            return resolve(step(i + 1))
        const ext = pathExt[ii];
        isexe(p + ext, { pathExt: pathExtExe }, (er, is) => {
            if (!er && is) {
                if (opt.all)
                    found.push(p + ext);
                else
                    return resolve(p + ext)
            }
            return resolve(subStep(p, i, ii + 1))
        });
    });

    // Callback style when `cb` is given; otherwise return the Promise.
    return cb ? step(0).then(res => cb(null, res), cb) : step(0)
};
|
|||
|
|
|||
|
/**
 * Synchronous PATH search for `cmd`.
 * Returns the first executable match, every match when `opt.all` is set,
 * null when not found and `opt.nothrow` is set — otherwise throws an
 * ENOENT-style error.
 */
const whichSync = (cmd, opt) => {
    opt = opt || {};

    const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
    const matches = [];

    for (const rawEntry of pathEnv) {
        // PATH entries may be wrapped in quotes (Windows); strip them.
        const dir = /^".*"$/.test(rawEntry) ? rawEntry.slice(1, -1) : rawEntry;

        const joined = path$3.join(dir, cmd);
        // Re-attach an explicit "./" or ".\" prefix that path.join strips.
        const candidate = !dir && /^\.[\\\/]/.test(cmd)
            ? cmd.slice(0, 2) + joined
            : joined;

        for (const ext of pathExt) {
            const withExt = candidate + ext;
            try {
                if (isexe.sync(withExt, { pathExt: pathExtExe })) {
                    if (!opt.all)
                        return withExt
                    matches.push(withExt);
                }
            } catch (ex) {}
        }
    }

    if (opt.all && matches.length)
        return matches

    if (opt.nothrow)
        return null

    throw getNotFoundError(cmd)
};
|
|||
|
|
|||
|
var which_1 = which$1;
|
|||
|
which$1.sync = whichSync;
|
|||
|
|
|||
|
var pathKey$1 = {exports: {}};

/**
 * Returns the name of the PATH environment variable for the given
 * platform/environment (vendored `path-key`). Windows treats environment
 * variable names case-insensitively, so the actual spelling (e.g. `Path`)
 * is looked up; the last matching key wins, mirroring how later
 * assignments override earlier ones.
 */
const pathKey = (options = {}) => {
    const environment = options.env || process.env;
    const platform = options.platform || process.platform;

    // Every non-Windows platform uses the literal name.
    if (platform !== 'win32') {
        return 'PATH';
    }

    // Scan from the end so later duplicate spellings win; default to 'Path'.
    let actualKey;
    for (const key of Object.keys(environment).reverse()) {
        if (key.toUpperCase() === 'PATH') {
            actualKey = key;
            break;
        }
    }
    return actualKey || 'Path';
};

pathKey$1.exports = pathKey;
// TODO: Remove this for the next major release
pathKey$1.exports.default = pathKey;

var pathKeyExports = pathKey$1.exports;
|
|||
|
|
|||
|
const path$2 = require$$0$1;
const which = which_1;
const getPathKey = pathKeyExports;

/**
 * Resolves `parsed.command` to an executable path via `which.sync`,
 * searching the PATH from `parsed.options.env` (or process.env).
 * When `withoutPathExt` is set, Windows PATHEXT matching is effectively
 * disabled by passing the bare path delimiter as `pathExt`.
 * Returns undefined when resolution fails.
 */
function resolveCommandAttempt(parsed, withoutPathExt) {
    const env = parsed.options.env || process.env;
    const cwd = process.cwd();
    const hasCustomCwd = parsed.options.cwd != null;
    // Worker threads do not have process.chdir()
    const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled;

    // If a custom `cwd` was specified, we need to change the process cwd
    // because `which` will do stat calls but does not support a custom cwd
    if (shouldSwitchCwd) {
        try {
            process.chdir(parsed.options.cwd);
        } catch (err) {
            /* Empty */
        }
    }

    let resolved;

    try {
        resolved = which.sync(parsed.command, {
            path: env[getPathKey({ env })],
            pathExt: withoutPathExt ? path$2.delimiter : undefined,
        });
    } catch (e) {
        /* Empty */
    } finally {
        // Always restore the original cwd, even when resolution failed.
        if (shouldSwitchCwd) {
            process.chdir(cwd);
        }
    }

    // If we successfully resolved, ensure that an absolute path is returned
    // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
    if (resolved) {
        resolved = path$2.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
    }

    return resolved;
}

/**
 * Two-pass resolution: first honoring PATHEXT, then retrying without it.
 */
function resolveCommand$1(parsed) {
    return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
}

var resolveCommand_1 = resolveCommand$1;
|
|||
|
|
|||
|
var _escape = {};

// Characters cmd.exe treats specially.
// See http://www.robvanderwoude.com/escapechars.php
const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g;

/**
 * Escapes a command name for cmd.exe by prefixing every metacharacter
 * with a caret (`^`).
 */
function escapeCommand(arg) {
    // Escape meta chars
    return arg.replace(metaCharsRegExp, '^$1');
}

/**
 * Escapes a single argument for cmd.exe, following the algorithm at
 * https://qntm.org/cmd: backslash-doubling before quotes, wrapping in
 * double quotes, then caret-escaping metacharacters — twice when the
 * target is a cmd-shim, which re-interprets the escapes.
 */
function escapeArgument(arg, doubleEscapeMetaChars) {
    // Coerce to string first.
    let result = `${arg}`;

    // A run of backslashes followed by a double quote: double the
    // backslashes and escape the quote.
    result = result.replace(/(\\*)"/g, '$1$1\\"');

    // A run of backslashes at the end of the string (which will precede
    // the closing quote): double them.
    result = result.replace(/(\\*)$/, '$1$1');

    // All other backslashes occur literally.
    // Quote the whole thing, then escape cmd.exe metacharacters.
    result = `"${result}"`;
    result = result.replace(metaCharsRegExp, '^$1');

    // Double escape when cmd.exe will interpret the string twice.
    if (doubleEscapeMetaChars) {
        result = result.replace(metaCharsRegExp, '^$1');
    }

    return result;
}

_escape.command = escapeCommand;
_escape.argument = escapeArgument;
|
|||
|
|
|||
|
// Matches a shebang on the first line; `.` stops at the first newline.
var shebangRegex$1 = /^#!(.*)/;

const shebangRegex = shebangRegex$1;

/**
 * Extracts the command from a shebang line (vendored `shebang-command`):
 *   '#!/usr/bin/env node' -> 'node'
 *   '#!/bin/bash -e'      -> 'bash -e'
 * Returns null when the string carries no shebang.
 */
var shebangCommand$1 = (string = '') => {
    const match = string.match(shebangRegex);
    if (!match) {
        return null;
    }

    // Drop '#!' (plus one optional space), then split off a single argument.
    const [interpreterPath, arg] = match[0].replace(/#! ?/, '').split(' ');
    const binary = interpreterPath.split('/').pop();

    // '#!/usr/bin/env cmd' means the real command is the argument.
    if (binary === 'env') {
        return arg;
    }
    if (arg) {
        return `${binary} ${arg}`;
    }
    return binary;
};
|
|||
|
|
|||
|
const fs = require$$0;
|
|||
|
const shebangCommand = shebangCommand$1;
|
|||
|
|
|||
|
/**
 * Reads the first bytes of `command` and extracts its shebang command, if
 * any. Returns null when the file is unreadable or carries no shebang —
 * read errors are deliberately swallowed, since resolution falls back to
 * other strategies.
 */
function readShebang$1(command) {
    // Only the first 150 bytes can hold a meaningful shebang line.
    const HEADER_SIZE = 150;
    const header = Buffer.alloc(HEADER_SIZE);

    let fd;
    try {
        fd = fs.openSync(command, 'r');
        fs.readSync(fd, header, 0, HEADER_SIZE, 0);
        fs.closeSync(fd);
    } catch (e) { /* Empty */ }

    // null is returned when the header does not start with '#!'.
    return shebangCommand(header.toString());
}

var readShebang_1 = readShebang$1;
|
|||
|
|
|||
|
const path$1 = require$$0$1;
const resolveCommand = resolveCommand_1;
const escape = _escape;
const readShebang = readShebang_1;

const isWin$2 = process.platform === 'win32';
// Files Windows can execute directly, without going through a shell.
const isExecutableRegExp = /\.(?:com|exe)$/i;
// cmd-shims generated by npm in node_modules/.bin.
const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i;

/**
 * Resolves the command and, when the target file starts with a shebang,
 * rewrites the invocation to run the shebang interpreter with the original
 * file prepended as its first argument. Mutates `parsed` in place and
 * returns the resolved file path (the interpreter's, when a shebang was
 * found; may be undefined when resolution failed).
 */
function detectShebang(parsed) {
    parsed.file = resolveCommand(parsed);

    const shebang = parsed.file && readShebang(parsed.file);

    if (shebang) {
        // Run "<interpreter> <original file> <original args...>" instead.
        parsed.args.unshift(parsed.file);
        parsed.command = shebang;

        return resolveCommand(parsed);
    }

    return parsed.file;
}
|
|||
|
|
|||
|
/**
 * Windows-only preprocessing for non-shell spawns: resolves shebangs and,
 * unless the target is a directly-executable .com/.exe, rewrites the
 * invocation to go through cmd.exe with full metacharacter escaping.
 * On every other platform `parsed` is returned untouched.
 */
function parseNonShell(parsed) {
    if (!isWin$2) {
        return parsed;
    }

    // Detect & add support for shebangs
    const commandFile = detectShebang(parsed);

    // We don't need a shell if the command filename is an executable
    const needsShell = !isExecutableRegExp.test(commandFile);

    // If a shell is required, use cmd.exe and take care of escaping everything correctly
    // Note that `forceShell` is an hidden option used only in tests
    if (parsed.options.forceShell || needsShell) {
        // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/`
        // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument
        // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called,
        // we need to double escape them
        const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile);

        // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar)
        // This is necessary otherwise it will always fail with ENOENT in those cases
        parsed.command = path$1.normalize(parsed.command);

        // Escape command & arguments
        parsed.command = escape.command(parsed.command);
        parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars));

        const shellCommand = [parsed.command].concat(parsed.args).join(' ');

        // cmd.exe switches: /d skip AutoRun, /s preserve quoting, /c run then exit.
        parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`];
        parsed.command = process.env.comspec || 'cmd.exe';
        parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped
    }

    return parsed;
}
|
|||
|
|
|||
|
/**
 * Normalizes (command, args, options) into cross-spawn's parsed shape.
 * Mirrors Node's child_process argument handling: the second argument may
 * be the options object. Input arrays/objects are cloned so callers are
 * never mutated; `original` keeps the pre-processing command/args.
 */
function parse$1(command, args, options) {
    // Support parse(command, options) with no args array.
    if (args && !Array.isArray(args)) {
        options = args;
        args = null;
    }

    // Defensive copies — the caller's array/object must stay untouched.
    args = args ? args.slice(0) : [];
    options = Object.assign({}, options);

    const parsed = {
        command,
        args,
        options,
        file: undefined,
        original: {
            command,
            args,
        },
    };

    // Shell invocations are handed to Node untouched; everything else gets
    // the Windows shebang/escaping treatment.
    if (options.shell) {
        return parsed;
    }
    return parseNonShell(parsed);
}

var parse_1 = parse$1;
|
|||
|
|
|||
|
const isWin$1 = process.platform === 'win32';

/**
 * Builds the ENOENT error Node itself would have produced for a missing
 * command, so cross-spawn failures look native.
 */
function notFoundError(original, syscall) {
    return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), {
        code: 'ENOENT',
        errno: 'ENOENT',
        syscall: `${syscall} ${original.command}`,
        path: original.command,
        spawnargs: original.args,
    });
}

/**
 * Windows-only: wraps cp.emit so that an "exit" with status 1 caused by a
 * missing command is re-emitted as an ENOENT "error", matching POSIX.
 * See https://github.com/IndigoUnited/node-cross-spawn/issues/16
 */
function hookChildProcess(cp, parsed) {
    if (!isWin$1) {
        return;
    }

    const originalEmit = cp.emit;

    cp.emit = function (name, arg1) {
        if (name === 'exit') {
            const err = verifyENOENT(arg1, parsed);
            if (err) {
                return originalEmit.call(cp, 'error', err);
            }
        }
        return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params
    };
}

// Exit status 1 with no resolved file means the command was never found.
function verifyENOENT(status, parsed) {
    if (isWin$1 && status === 1 && !parsed.file) {
        return notFoundError(parsed.original, 'spawn');
    }
    return null;
}

// Same check for the synchronous spawn path.
function verifyENOENTSync(status, parsed) {
    if (isWin$1 && status === 1 && !parsed.file) {
        return notFoundError(parsed.original, 'spawnSync');
    }
    return null;
}

var enoent$1 = {
    hookChildProcess,
    verifyENOENT,
    verifyENOENTSync,
    notFoundError,
};
|
|||
|
|
|||
|
const cp = require$$0$2;
const parse = parse_1;
const enoent = enoent$1;

/**
 * Drop-in replacement for child_process.spawn (vendored cross-spawn):
 * parses the invocation for cross-platform quirks (shebangs, cmd.exe
 * escaping), spawns the child, and hooks its "exit" event so missing
 * commands surface as ENOENT errors on Windows too.
 */
function spawn(command, args, options) {
    // Parse the arguments
    const parsed = parse(command, args, options);

    // Spawn the child process
    const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);

    // Hook into child process "exit" event to emit an error if the command
    // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
    enoent.hookChildProcess(spawned, parsed);

    return spawned;
}

/**
 * Synchronous variant of spawn(); a missing-command result is translated
 * into `result.error` instead of being hooked via events.
 */
function spawnSync(command, args, options) {
    // Parse the arguments
    const parsed = parse(command, args, options);

    // Spawn the child process
    const result = cp.spawnSync(parsed.command, parsed.args, parsed.options);

    // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16
    result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);

    return result;
}

crossSpawn$1.exports = spawn;
crossSpawn$1.exports.spawn = spawn;
crossSpawn$1.exports.sync = spawnSync;

// Internals exposed for testing and for execa's reuse.
crossSpawn$1.exports._parse = parse;
crossSpawn$1.exports._enoent = enoent;

var crossSpawnExports = crossSpawn$1.exports;
|
|||
|
|
|||
|
/**
 * Removes a single trailing newline ('\n' or '\r\n') from a string or
 * Buffer, leaving earlier newlines intact (vendored strip-final-newline).
 */
var stripFinalNewline$1 = input => {
    const isString = typeof input === 'string';
    // Compare characters for strings, byte values for Buffers.
    const LF = isString ? '\n' : '\n'.charCodeAt();
    const CR = isString ? '\r' : '\r'.charCodeAt();

    let result = input;
    if (result[result.length - 1] === LF) {
        result = result.slice(0, result.length - 1);
    }
    // Handle the '\r' left behind by a CRLF (or a bare trailing '\r').
    if (result[result.length - 1] === CR) {
        result = result.slice(0, result.length - 1);
    }
    return result;
};
|
|||
|
|
|||
|
var npmRunPath$1 = {exports: {}};

// (bundler artifact: touches the exports object)
npmRunPath$1.exports;

(function (module) {
// npm-run-path (vendored): builds a PATH string that includes every
// ancestor node_modules/.bin directory plus the directory of the running
// Node binary, so locally-installed package binaries resolve by name.
const path = require$$0$1;
const pathKey = pathKeyExports;

// Returns the augmented PATH *string* for the given cwd/path/execPath.
const npmRunPath = options => {
    options = {
        cwd: process.cwd(),
        path: process.env[pathKey()],
        execPath: process.execPath,
        ...options
    };

    let previous;
    let cwdPath = path.resolve(options.cwd);
    const result = [];

    // Walk from cwd up to the filesystem root, collecting node_modules/.bin.
    while (previous !== cwdPath) {
        result.push(path.join(cwdPath, 'node_modules/.bin'));
        previous = cwdPath;
        cwdPath = path.resolve(cwdPath, '..');
    }

    // Ensure the running `node` binary is used
    const execPathDir = path.resolve(options.cwd, options.execPath, '..');
    result.push(execPathDir);

    return result.concat(options.path).join(path.delimiter);
};

module.exports = npmRunPath;
// TODO: Remove this for the next major release
module.exports.default = npmRunPath;

// Returns a *copy* of `options.env` whose PATH entry has been replaced by
// the augmented path; `options.path` receives the original PATH value.
module.exports.env = options => {
    options = {
        env: process.env,
        ...options
    };

    const env = {...options.env};
    const path = pathKey({env});

    options.path = env[path];
    env[path] = module.exports(options);

    return env;
};
} (npmRunPath$1));

var npmRunPathExports = npmRunPath$1.exports;
|
|||
|
|
|||
|
var onetime$2 = {exports: {}};
|
|||
|
|
|||
|
var mimicFn$2 = {exports: {}};

/**
 * Copies every own property — including symbols and non-enumerables such
 * as `name` and `length` — from `from` onto `to`, preserving each property
 * descriptor. Returns `to` for chaining (vendored mimic-fn).
 */
const mimicFn$1 = (to, from) => {
    for (const key of Reflect.ownKeys(from)) {
        const descriptor = Object.getOwnPropertyDescriptor(from, key);
        Object.defineProperty(to, key, descriptor);
    }
    return to;
};

mimicFn$2.exports = mimicFn$1;
// TODO: Remove this for the next major release
mimicFn$2.exports.default = mimicFn$1;

var mimicFnExports = mimicFn$2.exports;
|
|||
|
|
|||
|
const mimicFn = mimicFnExports;
|
|||
|
|
|||
|
// Tracks how many times each wrapped function has been invoked
// (consumed by the `callCount` export below in the original module).
const calledFunctions = new WeakMap();

/**
 * Wraps `function_` so it runs at most once: later calls return the first
 * call's result, or throw when `options.throw` is true. The wrapper
 * mirrors the original function's properties via `mimicFn` and records its
 * call count (vendored onetime).
 */
const onetime$1 = (function_, options = {}) => {
    if (typeof function_ !== 'function') {
        throw new TypeError('Expected a function');
    }

    let cachedResult;
    let invocations = 0;
    const functionName = function_.displayName || function_.name || '<anonymous>';

    const wrapped = function (...args) {
        calledFunctions.set(wrapped, ++invocations);

        if (invocations === 1) {
            cachedResult = function_.apply(this, args);
            // Drop the reference so the original can be garbage-collected.
            function_ = null;
        } else if (options.throw === true) {
            throw new Error(`Function \`${functionName}\` can only be called once`);
        }

        return cachedResult;
    };

    mimicFn(wrapped, function_);
    calledFunctions.set(wrapped, invocations);

    return wrapped;
};
|
|||
|
|
|||
|
onetime$2.exports = onetime$1;
// TODO: Remove this for the next major release
onetime$2.exports.default = onetime$1;

/**
 * Returns how many times a onetime-wrapped function has been invoked.
 * Throws when the given function was not produced by `onetime`.
 */
onetime$2.exports.callCount = function_ => {
    if (!calledFunctions.has(function_)) {
        throw new Error(`The given function \`${function_.name}\` is not wrapped by the \`onetime\` package`);
    }

    return calledFunctions.get(function_);
};

var onetimeExports = onetime$2.exports;
|
|||
|
|
|||
|
// human-signals (vendored): the module shells below are populated by the
// code that follows (signal definitions, then the public API).
var main = {};

var signals$2 = {};

var core = {};

// Mark `core` as a transpiled ES-module namespace and pre-declare its
// named export before the SIGNALS array is assigned.
Object.defineProperty(core,"__esModule",{value:true});core.SIGNALS=void 0;
|
|||
|
|
|||
|
const SIGNALS=[
|
|||
|
{
|
|||
|
name:"SIGHUP",
|
|||
|
number:1,
|
|||
|
action:"terminate",
|
|||
|
description:"Terminal closed",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGINT",
|
|||
|
number:2,
|
|||
|
action:"terminate",
|
|||
|
description:"User interruption with CTRL-C",
|
|||
|
standard:"ansi"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGQUIT",
|
|||
|
number:3,
|
|||
|
action:"core",
|
|||
|
description:"User interruption with CTRL-\\",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGILL",
|
|||
|
number:4,
|
|||
|
action:"core",
|
|||
|
description:"Invalid machine instruction",
|
|||
|
standard:"ansi"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGTRAP",
|
|||
|
number:5,
|
|||
|
action:"core",
|
|||
|
description:"Debugger breakpoint",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGABRT",
|
|||
|
number:6,
|
|||
|
action:"core",
|
|||
|
description:"Aborted",
|
|||
|
standard:"ansi"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGIOT",
|
|||
|
number:6,
|
|||
|
action:"core",
|
|||
|
description:"Aborted",
|
|||
|
standard:"bsd"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGBUS",
|
|||
|
number:7,
|
|||
|
action:"core",
|
|||
|
description:
|
|||
|
"Bus error due to misaligned, non-existing address or paging error",
|
|||
|
standard:"bsd"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGEMT",
|
|||
|
number:7,
|
|||
|
action:"terminate",
|
|||
|
description:"Command should be emulated but is not implemented",
|
|||
|
standard:"other"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGFPE",
|
|||
|
number:8,
|
|||
|
action:"core",
|
|||
|
description:"Floating point arithmetic error",
|
|||
|
standard:"ansi"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGKILL",
|
|||
|
number:9,
|
|||
|
action:"terminate",
|
|||
|
description:"Forced termination",
|
|||
|
standard:"posix",
|
|||
|
forced:true},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGUSR1",
|
|||
|
number:10,
|
|||
|
action:"terminate",
|
|||
|
description:"Application-specific signal",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGSEGV",
|
|||
|
number:11,
|
|||
|
action:"core",
|
|||
|
description:"Segmentation fault",
|
|||
|
standard:"ansi"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGUSR2",
|
|||
|
number:12,
|
|||
|
action:"terminate",
|
|||
|
description:"Application-specific signal",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGPIPE",
|
|||
|
number:13,
|
|||
|
action:"terminate",
|
|||
|
description:"Broken pipe or socket",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGALRM",
|
|||
|
number:14,
|
|||
|
action:"terminate",
|
|||
|
description:"Timeout or timer",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGTERM",
|
|||
|
number:15,
|
|||
|
action:"terminate",
|
|||
|
description:"Termination",
|
|||
|
standard:"ansi"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGSTKFLT",
|
|||
|
number:16,
|
|||
|
action:"terminate",
|
|||
|
description:"Stack is empty or overflowed",
|
|||
|
standard:"other"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGCHLD",
|
|||
|
number:17,
|
|||
|
action:"ignore",
|
|||
|
description:"Child process terminated, paused or unpaused",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGCLD",
|
|||
|
number:17,
|
|||
|
action:"ignore",
|
|||
|
description:"Child process terminated, paused or unpaused",
|
|||
|
standard:"other"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGCONT",
|
|||
|
number:18,
|
|||
|
action:"unpause",
|
|||
|
description:"Unpaused",
|
|||
|
standard:"posix",
|
|||
|
forced:true},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGSTOP",
|
|||
|
number:19,
|
|||
|
action:"pause",
|
|||
|
description:"Paused",
|
|||
|
standard:"posix",
|
|||
|
forced:true},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGTSTP",
|
|||
|
number:20,
|
|||
|
action:"pause",
|
|||
|
description:"Paused using CTRL-Z or \"suspend\"",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGTTIN",
|
|||
|
number:21,
|
|||
|
action:"pause",
|
|||
|
description:"Background process cannot read terminal input",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGBREAK",
|
|||
|
number:21,
|
|||
|
action:"terminate",
|
|||
|
description:"User interruption with CTRL-BREAK",
|
|||
|
standard:"other"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGTTOU",
|
|||
|
number:22,
|
|||
|
action:"pause",
|
|||
|
description:"Background process cannot write to terminal output",
|
|||
|
standard:"posix"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGURG",
|
|||
|
number:23,
|
|||
|
action:"ignore",
|
|||
|
description:"Socket received out-of-band data",
|
|||
|
standard:"bsd"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGXCPU",
|
|||
|
number:24,
|
|||
|
action:"core",
|
|||
|
description:"Process timed out",
|
|||
|
standard:"bsd"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGXFSZ",
|
|||
|
number:25,
|
|||
|
action:"core",
|
|||
|
description:"File too big",
|
|||
|
standard:"bsd"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGVTALRM",
|
|||
|
number:26,
|
|||
|
action:"terminate",
|
|||
|
description:"Timeout or timer",
|
|||
|
standard:"bsd"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGPROF",
|
|||
|
number:27,
|
|||
|
action:"terminate",
|
|||
|
description:"Timeout or timer",
|
|||
|
standard:"bsd"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGWINCH",
|
|||
|
number:28,
|
|||
|
action:"ignore",
|
|||
|
description:"Terminal window size changed",
|
|||
|
standard:"bsd"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGIO",
|
|||
|
number:29,
|
|||
|
action:"terminate",
|
|||
|
description:"I/O is available",
|
|||
|
standard:"other"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGPOLL",
|
|||
|
number:29,
|
|||
|
action:"terminate",
|
|||
|
description:"Watched event",
|
|||
|
standard:"other"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGINFO",
|
|||
|
number:29,
|
|||
|
action:"ignore",
|
|||
|
description:"Request for process information",
|
|||
|
standard:"other"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGPWR",
|
|||
|
number:30,
|
|||
|
action:"terminate",
|
|||
|
description:"Device running out of power",
|
|||
|
standard:"systemv"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGSYS",
|
|||
|
number:31,
|
|||
|
action:"core",
|
|||
|
description:"Invalid system call",
|
|||
|
standard:"other"},
|
|||
|
|
|||
|
{
|
|||
|
name:"SIGUNUSED",
|
|||
|
number:31,
|
|||
|
action:"terminate",
|
|||
|
description:"Invalid system call",
|
|||
|
standard:"other"}];core.SIGNALS=SIGNALS;
|
|||
|
|
|||
|
// human-signals realtime module: generates the POSIX realtime signal range
// (SIGRT1..SIGRT31, numbers SIGRTMIN..SIGRTMAX) which is not part of the
// static catalog above.
var realtime = {};

Object.defineProperty(realtime,"__esModule",{value:true});realtime.SIGRTMAX=realtime.getRealtimeSignals=void 0;

// Numeric bounds of the realtime range, matching glibc's usual values.
const SIGRTMIN = 34;
const SIGRTMAX = 64;realtime.SIGRTMAX = SIGRTMAX;

// Build one catalog entry for the realtime signal at `index` (0-based).
const getRealtimeSignal = (value, index) => ({
	name: `SIGRT${index + 1}`,
	number: SIGRTMIN + index,
	action: "terminate",
	description: "Application-specific signal (realtime)",
	standard: "posix"
});

// All realtime signals as catalog entries.
const getRealtimeSignals = () => {
	const length = SIGRTMAX - SIGRTMIN + 1;
	return Array.from({length}, getRealtimeSignal);
};realtime.getRealtimeSignals = getRealtimeSignals;
|
|||
|
|
|||
|
// human-signals: merges the static catalog with the realtime range and
// resolves every entry against the numbers of the current OS.
Object.defineProperty(signals$2,"__esModule",{value:true});signals$2.getSignals=void 0;var _os$1=require$$0$3;

var _core=core;
var _realtime$1=realtime;

// Resolve one catalog entry against `os.constants.signals`:
// `supported` is whether the current platform defines the signal, and
// `number` becomes the platform's number when available (the catalog's
// historical default otherwise).
const normalizeSignal = (signal) => {
	const {name, number: defaultNumber, description, action, forced = false, standard} = signal;
	const constantSignal = _os$1.constants.signals[name];
	const supported = constantSignal !== undefined;
	const number = supported ? constantSignal : defaultNumber;
	return {name, number, description, supported, action, forced, standard};
};

// Full list of known signals (static + realtime), normalized for this OS.
const getSignals = () => {
	const realtimeSignals = (0, _realtime$1.getRealtimeSignals)();
	return [..._core.SIGNALS, ...realtimeSignals].map(normalizeSignal);
};signals$2.getSignals = getSignals;
|
|||
|
|
|||
|
// human-signals entry point: exposes `signalsByName` and `signalsByNumber`
// lookup maps built from the normalized signal list.
Object.defineProperty(main,"__esModule",{value:true});main.signalsByNumber=main.signalsByName=void 0;var _os=require$$0$3;

var _signals=signals$2;
var _realtime=realtime;

// Build the { [signalName]: signalInfo } lookup map.
const getSignalsByName=function(){
const signals=(0, _signals.getSignals)();
return signals.reduce(getSignalByName,{});
};
|
|||
|
|
|||
|
// Reducer: fold one normalized signal into the name-keyed lookup map,
// returning a fresh object each step (no mutation of the memo).
const getSignalByName = (signalByNameMemo, signal) => {
	const {name, number, description, supported, action, forced, standard} = signal;
	return {
		...signalByNameMemo,
		[name]: {name, number, description, supported, action, forced, standard}
	};
};
|
|||
|
|
|||
|
// Computed once at load time; `main.signalsByName` is the public export.
const signalsByName$1=getSignalsByName();main.signalsByName=signalsByName$1;
|
|||
|
|
|||
|
|
|||
|
|
|||
|
|
|||
|
// Build the { [signalNumber]: signalInfo } lookup map for numbers
// 0..SIGRTMAX. Numbers with no matching signal are simply absent.
const getSignalsByNumber = () => {
	const signals = (0, _signals.getSignals)();
	const length = _realtime.SIGRTMAX + 1;
	const entries = Array.from({length}, (value, number) => getSignalByNumber(number, signals));
	return Object.assign({}, ...entries);
};

// One { [number]: info } fragment, or an empty object when no signal
// carries this number.
const getSignalByNumber = (number, signals) => {
	const signal = findSignalByNumber(number, signals);

	if (signal === undefined) {
		return {};
	}

	const {name, description, supported, action, forced, standard} = signal;
	return {
		[number]: {name, number, description, supported, action, forced, standard}
	};
};

// Prefer the OS's own name-to-number mapping; fall back to the catalog's
// default number when the OS does not define the signal.
const findSignalByNumber = (number, signals) => {
	const osMatch = signals.find(({name}) => _os.constants.signals[name] === number);

	if (osMatch !== undefined) {
		return osMatch;
	}

	return signals.find(signalA => signalA.number === number);
};

// Computed once at load time; `main.signalsByNumber` is the public export.
const signalsByNumber = getSignalsByNumber();main.signalsByNumber = signalsByNumber;
|
|||
|
|
|||
|
// execa's error builder below uses this to describe the killing signal.
const {signalsByName} = main;
|
|||
|
|
|||
|
// Pick the human-readable failure phrase for an execa error message.
// Precedence: timeout > cancellation > spawn error code > signal > exit code.
const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => {
	if (timedOut) return `timed out after ${timeout} milliseconds`;
	if (isCanceled) return 'was canceled';
	if (errorCode !== undefined) return `failed with ${errorCode}`;
	if (signal !== undefined) return `was killed with ${signal} (${signalDescription})`;
	if (exitCode !== undefined) return `failed with exit code ${exitCode}`;
	return 'failed';
};
|
|||
|
|
|||
|
/**
 * Build (or enrich) the Error describing a failed child process.
 * When `error` is a real Error it is mutated in place and returned;
 * otherwise a fresh Error is created. The result carries execa's
 * standard metadata (command, exitCode, signal, stdout/stderr, flags).
 */
const makeError$1 = ({
	stdout,
	stderr,
	all,
	error,
	signal,
	exitCode,
	command,
	escapedCommand,
	timedOut,
	isCanceled,
	killed,
	parsed: {options: {timeout}}
}) => {
	// `signal` and `exitCode` emitted on `spawned.on('exit')` can be `null`;
	// normalize both to `undefined`.
	exitCode = exitCode === null ? undefined : exitCode;
	signal = signal === null ? undefined : signal;
	const signalDescription = signal === undefined ? undefined : signalsByName[signal].description;

	const errorCode = error && error.code;

	const prefix = getErrorPrefix({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled});
	const execaMessage = `Command ${prefix}: ${command}`;
	// Robust Error detection that also works across realms.
	const isError = Object.prototype.toString.call(error) === '[object Error]';
	const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage;
	const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n');

	if (isError) {
		// Preserve the original message, then replace it with the enriched one.
		error.originalMessage = error.message;
		error.message = message;
	} else {
		error = new Error(message);
	}

	Object.assign(error, {
		shortMessage,
		command,
		escapedCommand,
		exitCode,
		signal,
		signalDescription,
		stdout,
		stderr
	});

	if (all !== undefined) {
		error.all = all;
	}

	// `bufferedData` is an internal get-stream artifact; never expose it.
	if ('bufferedData' in error) {
		delete error.bufferedData;
	}

	error.failed = true;
	error.timedOut = Boolean(timedOut);
	error.isCanceled = isCanceled;
	// A timeout kill is reported via `timedOut`, not `killed`.
	error.killed = killed && !timedOut;

	return error;
};

var error = makeError$1;
|
|||
|
|
|||
|
var stdio = {exports: {}};

// The three stdio streams execa accepts as individual shortcut options.
const aliases = ['stdin', 'stdout', 'stderr'];

// True when any of the shortcut options is set.
const hasAlias = options => aliases.some(alias => options[alias] !== undefined);

/**
 * Normalize execa's stdio-related options into the form accepted by
 * `child_process`: either a string preset or an array padded to at least
 * three entries. Returns `undefined` for missing options.
 *
 * @throws {Error} When `stdio` is combined with `stdin`/`stdout`/`stderr`.
 * @throws {TypeError} When `stdio` is neither a string nor an array.
 */
const normalizeStdio$1 = options => {
	if (!options) {
		return;
	}

	const {stdio} = options;

	// No `stdio` option: assemble the array from the shortcut options.
	if (stdio === undefined) {
		return aliases.map(alias => options[alias]);
	}

	if (hasAlias(options)) {
		throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`);
	}

	if (typeof stdio === 'string') {
		return stdio;
	}

	if (!Array.isArray(stdio)) {
		throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``);
	}

	// Pad shorter arrays with `undefined` up to the three standard streams.
	const length = Math.max(stdio.length, aliases.length);
	return Array.from({length}, (value, index) => stdio[index]);
};

stdio.exports = normalizeStdio$1;

// Variant for `execa.node()`: `ipc` is pushed unless it is already present.
stdio.exports.node = options => {
	const normalized = normalizeStdio$1(options);

	if (normalized === 'ipc') {
		return 'ipc';
	}

	if (normalized === undefined || typeof normalized === 'string') {
		return [normalized, normalized, normalized, 'ipc'];
	}

	if (normalized.includes('ipc')) {
		return normalized;
	}

	return [...normalized, 'ipc'];
};

var stdioExports = stdio.exports;
|
|||
|
|
|||
|
var signalExit = {exports: {}};

var signals$1 = {exports: {}};

var hasRequiredSignals;

// Lazily build the list of signals that trigger process exit on Linux/BSD.
// This is NOT every possible signal: SIGKILL is uncatchable so registering
// for it is pointless, and names unknown to the current platform simply
// fail to register later (signal-exit filters them out in `load()`).
// Memoized: repeat calls return the same array instance.
function requireSignals () {
	if (hasRequiredSignals) return signals$1.exports;
	hasRequiredSignals = 1;

	// Portable base set (valid everywhere, including Windows).
	const exitSignals = [
		'SIGABRT',
		'SIGALRM',
		'SIGHUP',
		'SIGINT',
		'SIGTERM'
	];

	// Unix-only additions. SIGPROF is deliberately omitted: profilers use
	// it, and registering would interfere with them (upstream issue #21).
	if (process.platform !== 'win32') {
		exitSignals.push(
			'SIGVTALRM',
			'SIGXCPU',
			'SIGXFSZ',
			'SIGUSR2',
			'SIGTRAP',
			'SIGSYS',
			'SIGQUIT',
			'SIGIOT'
		);
	}

	// Linux-specific additions.
	if (process.platform === 'linux') {
		exitSignals.push(
			'SIGIO',
			'SIGPOLL',
			'SIGPWR',
			'SIGSTKFLT',
			'SIGUNUSED'
		);
	}

	signals$1.exports = exitSignals;
	return signals$1.exports;
}
|
|||
|
|
|||
|
// Note: since nyc uses this module to output coverage, any lines
// that are in the direct sync flow of nyc's outputCoverage are
// ignored, since we can never get coverage for them.
// grab a reference to node's real process object right away
// (`commonjsGlobal` is the bundler's shim for the global object).
var process$1 = commonjsGlobal.process;
|
|||
|
|
|||
|
// Sanity-check that `process` is a real Node process object exposing all
// the machinery signal-exit relies on (listener management, `emit`,
// `reallyExit`, `kill`, a numeric `pid`). Falsy inputs are returned as-is
// (the && chain short-circuits); objects yield a boolean.
const processOk = function (process) {
  return process &&
    typeof process === 'object' &&
    typeof process.removeListener === 'function' &&
    typeof process.emit === 'function' &&
    typeof process.reallyExit === 'function' &&
    typeof process.listeners === 'function' &&
    typeof process.kill === 'function' &&
    typeof process.pid === 'number' &&
    typeof process.on === 'function'
};
|
|||
|
|
|||
|
// some kind of non-node environment, just no-op
/* istanbul ignore if */
if (!processOk(process$1)) {
  // Export a stub: registering an exit handler returns a no-op remover.
  signalExit.exports = function () {
    return function () {}
  };
} else {
  var assert = require$$0$4;
  var signals = requireSignals();
  var isWin = /^win/i.test(process$1.platform);

  var EE = require$$2;
  /* istanbul ignore if */
  // Some bundlings expose the events module as an object, not the class.
  if (typeof EE !== 'function') {
    EE = EE.EventEmitter;
  }

  // One shared emitter per process, stashed on `process` itself so that
  // multiple copies of signal-exit loaded in the same process cooperate.
  var emitter;
  if (process$1.__signal_exit_emitter__) {
    emitter = process$1.__signal_exit_emitter__;
  } else {
    emitter = process$1.__signal_exit_emitter__ = new EE();
    emitter.count = 0;     // number of signal-exit copies currently loaded
    emitter.emitted = {};  // which of 'exit'/'afterexit' already fired
  }

  // Because this emitter is a global, we have to check to see if a
  // previous version of this library failed to enable infinite listeners.
  // I know what you're about to say.  But literally everything about
  // signal-exit is a compromise with evil.  Get used to it.
  if (!emitter.infinite) {
    emitter.setMaxListeners(Infinity);
    emitter.infinite = true;
  }

  // Public API: register `cb` to run on process exit. Returns a function
  // that unregisters it. `opts.alwaysLast` queues it on 'afterexit'.
  signalExit.exports = function (cb, opts) {
    /* istanbul ignore if */
    if (!processOk(commonjsGlobal.process)) {
      return function () {}
    }
    assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler');

    // Install signal handlers / patches on first registration.
    if (loaded === false) {
      load();
    }

    var ev = 'exit';
    if (opts && opts.alwaysLast) {
      ev = 'afterexit';
    }

    var remove = function () {
      emitter.removeListener(ev, cb);
      // Last listener gone: undo all process patching.
      if (emitter.listeners('exit').length === 0 &&
          emitter.listeners('afterexit').length === 0) {
        unload();
      }
    };
    emitter.on(ev, cb);

    return remove
  };

  // Remove all signal listeners and restore the patched process methods.
  var unload = function unload () {
    if (!loaded || !processOk(commonjsGlobal.process)) {
      return
    }
    loaded = false;

    signals.forEach(function (sig) {
      try {
        process$1.removeListener(sig, sigListeners[sig]);
      } catch (er) {}
    });
    process$1.emit = originalProcessEmit;
    process$1.reallyExit = originalProcessReallyExit;
    emitter.count -= 1;
  };
  signalExit.exports.unload = unload;

  // Fire 'exit'/'afterexit' at most once each.
  var emit = function emit (event, code, signal) {
    /* istanbul ignore if */
    if (emitter.emitted[event]) {
      return
    }
    emitter.emitted[event] = true;
    emitter.emit(event, code, signal);
  };

  // { <signal>: <listener fn>, ... }
  var sigListeners = {};
  signals.forEach(function (sig) {
    sigListeners[sig] = function listener () {
      /* istanbul ignore if */
      if (!processOk(commonjsGlobal.process)) {
        return
      }
      // If there are no other listeners, an exit is coming!
      // Simplest way: remove us and then re-send the signal.
      // We know that this will kill the process, so we can
      // safely emit now.
      var listeners = process$1.listeners(sig);
      if (listeners.length === emitter.count) {
        unload();
        emit('exit', null, sig);
        /* istanbul ignore next */
        emit('afterexit', null, sig);
        /* istanbul ignore next */
        if (isWin && sig === 'SIGHUP') {
          // "SIGHUP" throws an `ENOSYS` error on Windows,
          // so use a supported signal instead
          sig = 'SIGINT';
        }
        /* istanbul ignore next */
        process$1.kill(process$1.pid, sig);
      }
    };
  });

  signalExit.exports.signals = function () {
    return signals
  };

  var loaded = false;

  // Install our signal listeners and monkey-patch `emit`/`reallyExit`.
  var load = function load () {
    if (loaded || !processOk(commonjsGlobal.process)) {
      return
    }
    loaded = true;

    // This is the number of onSignalExit's that are in play.
    // It's important so that we can count the correct number of
    // listeners on signals, and don't wait for the other one to
    // handle it instead of us.
    emitter.count += 1;

    // Keep only the signals this platform actually lets us register for.
    signals = signals.filter(function (sig) {
      try {
        process$1.on(sig, sigListeners[sig]);
        return true
      } catch (er) {
        return false
      }
    });

    process$1.emit = processEmit;
    process$1.reallyExit = processReallyExit;
  };
  signalExit.exports.load = load;

  // Patch `reallyExit` so our 'exit' events fire before the real exit.
  var originalProcessReallyExit = process$1.reallyExit;
  var processReallyExit = function processReallyExit (code) {
    /* istanbul ignore if */
    if (!processOk(commonjsGlobal.process)) {
      return
    }
    process$1.exitCode = code || /* istanbul ignore next */ 0;
    emit('exit', process$1.exitCode, null);
    /* istanbul ignore next */
    emit('afterexit', process$1.exitCode, null);
    /* istanbul ignore next */
    originalProcessReallyExit.call(process$1, process$1.exitCode);
  };

  // Patch `emit` so a normal 'exit' event also triggers our handlers.
  var originalProcessEmit = process$1.emit;
  var processEmit = function processEmit (ev, arg) {
    if (ev === 'exit' && processOk(commonjsGlobal.process)) {
      /* istanbul ignore else */
      if (arg !== undefined) {
        process$1.exitCode = arg;
      }
      var ret = originalProcessEmit.apply(this, arguments);
      /* istanbul ignore next */
      emit('exit', process$1.exitCode, null);
      /* istanbul ignore next */
      emit('afterexit', process$1.exitCode, null);
      /* istanbul ignore next */
      return ret
    } else {
      return originalProcessEmit.apply(this, arguments)
    }
  };
}
|
|||
|
|
|||
|
var signalExitExports = signalExit.exports;

// Aliases used by execa's kill/cleanup helpers below.
const os = require$$0$3;
const onExit = signalExitExports;
|||
|
|
|||
|
// Default grace period (ms) before the follow-up SIGKILL.
const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5;

// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout`
// behavior: after a successful SIGTERM, schedule a SIGKILL as a fallback.
const spawnedKill$1 = (kill, signal = 'SIGTERM', options = {}) => {
	const killResult = kill(signal);
	setKillTimeout(kill, signal, options, killResult);
	return killResult;
};

// Schedule the SIGKILL fallback when applicable.
const setKillTimeout = (kill, signal, options, killResult) => {
	if (!shouldForceKill(signal, options, killResult)) {
		return;
	}

	const timer = setTimeout(() => {
		kill('SIGKILL');
	}, getForceKillAfterTimeout(options));

	// Guarded because there's no `.unref()` when `execa` is used in the
	// renderer process in Electron. This cannot be tested since we don't
	// run tests in Electron.
	// istanbul ignore else
	if (timer.unref) {
		timer.unref();
	}
};

// Only force-kill after a successful SIGTERM, unless explicitly disabled.
const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) =>
	isSigterm(signal) && forceKillAfterTimeout !== false && killResult;

// Accepts either the numeric constant or any casing of the string name.
const isSigterm = signal =>
	signal === os.constants.signals.SIGTERM ||
	(typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM');

// Resolve the `forceKillAfterTimeout` option: `true` means the default
// 5s grace period; otherwise it must be a non-negative finite number.
const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => {
	if (forceKillAfterTimeout === true) {
		return DEFAULT_FORCE_KILL_TIMEOUT;
	}

	if (!Number.isFinite(forceKillAfterTimeout) || forceKillAfterTimeout < 0) {
		throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`);
	}

	return forceKillAfterTimeout;
};
|
|||
|
|
|||
|
// `childProcess.cancel()`: kill the process and, only when the kill
// actually succeeded, flag the shared context as canceled so the final
// error reports "was canceled" rather than a plain kill.
const spawnedCancel$1 = (spawned, context) => {
	if (spawned.kill()) {
		context.isCanceled = true;
	}
};
|
|||
|
|
|||
|
// Kill the process because the `timeout` option elapsed, and reject the
// result promise with an error flagged `timedOut` for `makeError`.
const timeoutKill = (spawned, signal, reject) => {
	spawned.kill(signal);
	const timeoutError = Object.assign(new Error('Timed out'), {timedOut: true, signal});
	reject(timeoutError);
};
|
|||
|
|
|||
|
// `timeout` option handling: race the spawned-process promise against a
// timer that kills the process with `killSignal` when it fires. With no
// timeout configured, the original promise is returned untouched.
const setupTimeout$1 = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => {
	if (timeout === 0 || timeout === undefined) {
		return spawnedPromise;
	}

	let timer;
	const killOnTimeout = new Promise((resolve, reject) => {
		timer = setTimeout(() => {
			timeoutKill(spawned, killSignal, reject);
		}, timeout);
	});

	// Whatever happens to the process, stop the timer so it cannot fire late.
	const guardedSpawnedPromise = spawnedPromise.finally(() => {
		clearTimeout(timer);
	});

	return Promise.race([killOnTimeout, guardedSpawnedPromise]);
};
|
|||
|
|
|||
|
// Validate the `timeout` option early: `undefined` is allowed, anything
// else must be a non-negative finite number.
const validateTimeout$1 = ({timeout}) => {
	const isValid = timeout === undefined || (Number.isFinite(timeout) && timeout >= 0);
	if (!isValid) {
		throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`);
	}
};
|
|||
|
|
|||
|
// `cleanup` option handling: while the child is running, register a
// signal-exit hook that kills it if *this* process dies first. The hook
// is removed once the child settles. Detached children are left alone.
const setExitHandler$1 = async (spawned, {cleanup, detached}, timedPromise) => {
	const shouldCleanup = cleanup && !detached;
	if (!shouldCleanup) {
		return timedPromise;
	}

	const unsubscribe = onExit(() => {
		spawned.kill();
	});

	return timedPromise.finally(() => {
		unsubscribe();
	});
};
|
|||
|
|
|||
|
// Public surface of the kill/timeout helpers consumed by execa's entry point.
var kill = {
	spawnedKill: spawnedKill$1,
	spawnedCancel: spawnedCancel$1,
	setupTimeout: setupTimeout$1,
	validateTimeout: validateTimeout$1,
	setExitHandler: setExitHandler$1
};
|
|||
|
|
|||
|
// is-stream: duck-type checks for Node streams.
// Base check: a non-null object exposing `.pipe()`.
const isStream$1 = (stream) => {
	const isObject = stream !== null && typeof stream === 'object';
	return isObject && typeof stream.pipe === 'function';
};

// Writable: pipe-able, not explicitly closed for writing, and carrying the
// internal writable machinery.
isStream$1.writable = (stream) =>
	isStream$1(stream) &&
	stream.writable !== false &&
	typeof stream._write === 'function' &&
	typeof stream._writableState === 'object';

// Readable: pipe-able, not explicitly closed for reading, and carrying the
// internal readable machinery.
isStream$1.readable = (stream) =>
	isStream$1(stream) &&
	stream.readable !== false &&
	typeof stream._read === 'function' &&
	typeof stream._readableState === 'object';

// Duplex: independently readable and writable.
isStream$1.duplex = (stream) =>
	isStream$1.writable(stream) && isStream$1.readable(stream);

// Transform: a duplex with the `_transform` hook.
isStream$1.transform = (stream) =>
	isStream$1.duplex(stream) && typeof stream._transform === 'function';

var isStream_1 = isStream$1;
|
|||
|
|
|||
|
var getStream$2 = {exports: {}};

const {PassThrough: PassThroughStream} = require$$0$5;

// get-stream internal: create a PassThrough that records everything flowing
// through it. `options.array` collects raw chunks, `options.encoding`
// selects string decoding, and `encoding === 'buffer'` keeps Buffers.
// The returned stream gains `getBufferedValue()` / `getBufferedLength()`.
var bufferStream$1 = (options) => {
	options = {...options};

	const {array} = options;
	let {encoding} = options;
	const isBuffer = encoding === 'buffer';
	let objectMode = false;

	if (array) {
		// Arrays of arbitrary values need object mode unless chunks are
		// strings/Buffers.
		objectMode = !(encoding || isBuffer);
	} else {
		encoding = encoding || 'utf8';
	}

	if (isBuffer) {
		encoding = null;
	}

	const stream = new PassThroughStream({objectMode});

	if (encoding) {
		stream.setEncoding(encoding);
	}

	let length = 0;
	const chunks = [];

	stream.on('data', (chunk) => {
		chunks.push(chunk);
		// In object mode "length" counts items; otherwise it counts bytes/chars.
		length = objectMode ? chunks.length : length + chunk.length;
	});

	// Materialize whatever has been buffered so far.
	stream.getBufferedValue = () => {
		if (array) {
			return chunks;
		}
		return isBuffer ? Buffer.concat(chunks, length) : chunks.join('');
	};

	stream.getBufferedLength = () => length;

	return stream;
};
|
|||
|
|
|||
|
// get-stream dependencies: buffer limits, promisified stream.pipeline, and
// the buffering PassThrough factory defined above.
const {constants: BufferConstants} = require$$0$6;
const stream$2 = require$$0$5;
const {promisify} = require$$2$1;
const bufferStream = bufferStream$1;

const streamPipelinePromisified = promisify(stream$2.pipeline);

// Thrown by `getStream` when the buffered input grows past `maxBuffer`;
// the partial data is attached to the error as `bufferedData`.
class MaxBufferError extends Error {
	constructor() {
		super('maxBuffer exceeded');
		this.name = 'MaxBufferError';
	}
}
|
|||
|
|
|||
|
/**
 * Read `inputStream` to completion and resolve with its buffered contents
 * (string, Buffer, or array depending on `options`). Rejects with
 * `MaxBufferError` when more than `options.maxBuffer` is buffered; failed
 * reads carry the partial data on `error.bufferedData` when it fits in a
 * Buffer.
 */
async function getStream$1(inputStream, options) {
	if (!inputStream) {
		throw new Error('Expected a stream');
	}

	options = {
		maxBuffer: Infinity,
		...options
	};

	const {maxBuffer} = options;
	const sink = bufferStream(options);

	await new Promise((resolve, reject) => {
		// Reject, attaching whatever was read so far (unless it is too large
		// to materialize as a Buffer).
		const failWith = (error) => {
			if (error && sink.getBufferedLength() <= BufferConstants.MAX_LENGTH) {
				error.bufferedData = sink.getBufferedValue();
			}
			reject(error);
		};

		streamPipelinePromisified(inputStream, sink).then(resolve, failWith);

		// Enforce the buffer cap as data arrives.
		sink.on('data', () => {
			if (sink.getBufferedLength() > maxBuffer) {
				failWith(new MaxBufferError());
			}
		});
	});

	return sink.getBufferedValue();
}
|
|||
|
|
|||
|
// get-stream public API: the main function plus `.buffer`/`.array`
// convenience variants and the error class.
getStream$2.exports = getStream$1;
getStream$2.exports.buffer = (stream, options) => getStream$1(stream, {...options, encoding: 'buffer'});
getStream$2.exports.array = (stream, options) => getStream$1(stream, {...options, array: true});
getStream$2.exports.MaxBufferError = MaxBufferError;

var getStreamExports = getStream$2.exports;
|
|||
|
|
|||
|
const { PassThrough } = require$$0$5;

// merge-stream: funnel any number of readable streams into one PassThrough.
// The output ends once every source has ended or unpiped; sources may be
// added later via `output.add(...)`. Errors from any source are re-emitted
// on the output.
var mergeStream$1 = function (/*streams...*/) {
	var inputs = [];
	var output = new PassThrough({objectMode: true});

	output.setMaxListeners(0);

	output.add = add;
	output.isEmpty = isEmpty;

	output.on('unpipe', remove);

	Array.prototype.slice.call(arguments).forEach(add);

	return output

	// Register one source (or, recursively, an array of sources).
	function add (source) {
		if (Array.isArray(source)) {
			source.forEach(add);
			return this
		}

		inputs.push(source);
		source.once('end', remove.bind(null, source));
		source.once('error', output.emit.bind(output, 'error'));
		// `end: false` so one finished source does not close the output.
		source.pipe(output, {end: false});
		return this
	}

	// True while no sources remain registered.
	function isEmpty () {
		return inputs.length == 0;
	}

	// Drop a finished/unpiped source; close the output when none remain.
	function remove (source) {
		inputs = inputs.filter(function (it) { return it !== source });
		if (!inputs.length && output.readable) { output.end(); }
	}
};
|
|||
|
|
|||
|
// Bundled dependencies for the execa stream helpers below.
const isStream = isStream_1;
const getStream = getStreamExports;
const mergeStream = mergeStream$1;
|
|||
|
|
|||
|
// `input` option: feed the given value (string/Buffer or stream) into stdin.
// Checking for stdin is workaround for https://github.com/nodejs/node/issues/26852
// @todo remove `|| spawned.stdin === undefined` once we drop support for Node.js <=12.2.0
const handleInput$1 = (spawned, input) => {
  if (input === undefined || spawned.stdin === undefined) {
    return;
  }

  if (!isStream(input)) {
    spawned.stdin.end(input);
    return;
  }

  input.pipe(spawned.stdin);
};
|
|||
|
|
|||
|
// `all` interleaves `stdout` and `stderr`
const makeAllStream$1 = (spawned, {all}) => {
  const hasAnyOutput = Boolean(spawned.stdout) || Boolean(spawned.stderr);
  if (!all || !hasAnyOutput) {
    return;
  }

  const combined = mergeStream();

  for (const source of [spawned.stdout, spawned.stderr]) {
    if (source) {
      combined.add(source);
    }
  }

  return combined;
};
|
|||
|
|
|||
|
// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
const getBufferedData = async (stream, streamPromise) => {
  if (!stream) {
    return;
  }

  // Tear the stream down so the pending read promise settles.
  stream.destroy();

  // A rejected read carries the partial output on `error.bufferedData`.
  return streamPromise.catch(error => error.bufferedData);
};
|
|||
|
|
|||
|
// Start buffering a child stream; returns undefined when buffering is off
// or the stream does not exist.
const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {
  if (!stream || !buffer) {
    return;
  }

  return encoding
    ? getStream(stream, {encoding, maxBuffer})
    : getStream.buffer(stream, {maxBuffer});
};
|
|||
|
|
|||
|
// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)
const getSpawnedResult$1 = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {
  const pendingStdout = getStreamPromise(stdout, {encoding, buffer, maxBuffer});
  const pendingStderr = getStreamPromise(stderr, {encoding, buffer, maxBuffer});
  // The interleaved stream may hold up to stdout + stderr combined.
  const pendingAll = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});

  try {
    return await Promise.all([processDone, pendingStdout, pendingStderr, pendingAll]);
  } catch (error) {
    // On failure, still collect whatever each stream had buffered so far.
    return Promise.all([
      {error, signal: error.signal, timedOut: error.timedOut},
      getBufferedData(stdout, pendingStdout),
      getBufferedData(stderr, pendingStderr),
      getBufferedData(all, pendingAll)
    ]);
  }
};
|
|||
|
|
|||
|
// `execa.sync()` cannot consume a stream, so reject stream inputs up front.
const validateInputSync$1 = ({input}) => {
  if (!isStream(input)) {
    return;
  }

  throw new TypeError('The `input` option cannot be a stream in sync mode');
};
|
|||
|
|
|||
|
// Internal module surface: stdin/stdout/stderr helpers used by execa below.
var stream$1 = {
  handleInput: handleInput$1,
  makeAllStream: makeAllStream$1,
  getSpawnedResult: getSpawnedResult$1,
  validateInputSync: validateInputSync$1
};
|
|||
|
|
|||
|
// Native Promise prototype: source of the then/catch/finally property
// descriptors copied onto the child-process object to make it awaitable.
const nativePromisePrototype = (async () => {})().constructor.prototype;
const descriptors = ['then', 'catch', 'finally'].map(property => [
  property,
  Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property)
]);
|
|||
|
|
|||
|
// The return value is a mixin of `childProcess` and `Promise`
const mergePromise$1 = (spawned, promise) => {
  for (const [property, descriptor] of descriptors) {
    // Starting the main `promise` is deferred to avoid consuming streams
    const isLazy = typeof promise === 'function';
    const value = isLazy
      ? (...args) => Reflect.apply(descriptor.value, promise(), args)
      : descriptor.value.bind(promise);

    Reflect.defineProperty(spawned, property, {...descriptor, value});
  }

  return spawned;
};
|
|||
|
|
|||
|
// Use promises instead of `child_process` events
const getSpawnedPromise$1 = spawned => new Promise((resolve, reject) => {
  spawned.on('exit', (exitCode, signal) => {
    resolve({exitCode, signal});
  });

  spawned.on('error', reject);

  // stdin write failures (e.g. EPIPE) also reject the process promise.
  if (spawned.stdin) {
    spawned.stdin.on('error', reject);
  }
});
|
|||
|
|
|||
|
// Internal module surface: promise/child-process mixin helpers.
var promise = {
  mergePromise: mergePromise$1,
  getSpawnedPromise: getSpawnedPromise$1
};
|
|||
|
|
|||
|
// Build a single argv array from (file, args); a non-array `args` is ignored.
const normalizeArgs = (file, args = []) => {
  if (Array.isArray(args)) {
    return [file, ...args];
  }

  return [file];
};
|
|||
|
|
|||
|
// Arguments made only of word characters, dots and dashes need no quoting.
const NO_ESCAPE_REGEXP = /^[\w.-]+$/;
const DOUBLE_QUOTES_REGEXP = /"/g;

// Wrap an argument in double quotes (escaping embedded quotes), unless it is
// a plain word or not a string at all.
const escapeArg = arg => {
  if (typeof arg !== 'string' || NO_ESCAPE_REGEXP.test(arg)) {
    return arg;
  }

  const escaped = arg.replace(DOUBLE_QUOTES_REGEXP, '\\"');
  return `"${escaped}"`;
};
|
|||
|
|
|||
|
// Human-readable command line: file and args joined by single spaces.
const joinCommand$1 = (file, args) => normalizeArgs(file, args).join(' ');
|
|||
|
|
|||
|
// Like joinCommand$1, but quotes arguments that need shell-style escaping.
const getEscapedCommand$1 = (file, args) => {
  const escaped = normalizeArgs(file, args).map(arg => escapeArg(arg));
  return escaped.join(' ');
};
|
|||
|
|
|||
|
const SPACES_REGEXP = / +/g;

// Handle `execa.command()`
// Split a command string into tokens; a backslash before a space makes the
// space part of the token instead of a delimiter.
const parseCommand$1 = command => {
  const tokens = [];

  for (const piece of command.trim().split(SPACES_REGEXP)) {
    const last = tokens[tokens.length - 1];

    if (last !== undefined && last.endsWith('\\')) {
      // The previous token ended in an escape: fold this piece into it.
      tokens[tokens.length - 1] = `${last.slice(0, -1)} ${piece}`;
    } else {
      tokens.push(piece);
    }
  }

  return tokens;
};
|
|||
|
|
|||
|
// Internal module surface: command-string formatting/parsing helpers.
var command = {
  joinCommand: joinCommand$1,
  getEscapedCommand: getEscapedCommand$1,
  parseCommand: parseCommand$1
};
|
|||
|
|
|||
|
// execa's bundled dependencies (names resolved to the rollup-inlined modules).
const path = require$$0$1;
const childProcess = require$$0$2;
const crossSpawn = crossSpawnExports;
const stripFinalNewline = stripFinalNewline$1;
const npmRunPath = npmRunPathExports;
const onetime = onetimeExports;
const makeError = error;
const normalizeStdio = stdioExports;
const {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} = kill;
const {handleInput, getSpawnedResult, makeAllStream, validateInputSync} = stream$1;
const {mergePromise, getSpawnedPromise} = promise;
const {joinCommand, parseCommand, getEscapedCommand} = command;

// Default `maxBuffer`: 100 MB of captured output per stream.
const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100;
|
|||
|
|
|||
|
// Compute the child environment: extend or replace `process.env`, and when
// `preferLocal` is set, prepend locally-installed binaries via npm-run-path.
const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) => {
  const env = extendEnv ? {...process.env, ...envOption} : envOption;

  if (!preferLocal) {
    return env;
  }

  return npmRunPath.env({env, cwd: localDir, execPath});
};
|
|||
|
|
|||
|
// Normalize (file, args, options) through cross-spawn, fill in execa's option
// defaults, compute the child env and stdio array, and apply Windows quirks.
const handleArguments = (file, args, options = {}) => {
  // cross-spawn rewrites the command/args/options for shebangs and Windows.
  const parsed = crossSpawn._parse(file, args, options);
  file = parsed.command;
  args = parsed.args;
  options = parsed.options;

  // Defaults first; user-supplied options win via the trailing spread.
  options = {
    maxBuffer: DEFAULT_MAX_BUFFER,
    buffer: true,
    stripFinalNewline: true,
    extendEnv: true,
    preferLocal: false,
    localDir: options.cwd || process.cwd(),
    execPath: process.execPath,
    encoding: 'utf8',
    reject: true,
    cleanup: true,
    all: false,
    windowsHide: true,
    ...options
  };

  options.env = getEnv(options);

  options.stdio = normalizeStdio(options);

  if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') {
    // #116
    args.unshift('/q');
  }

  return {file, args, options, parsed};
};
|
|||
|
|
|||
|
// Post-process a captured stdout/stderr value: optionally strip the trailing
// newline, and normalize non-string/non-Buffer values from the sync path.
const handleOutput = (options, value, error) => {
  const isText = typeof value === 'string' || Buffer.isBuffer(value);

  if (!isText) {
    // When `execa.sync()` errors, we normalize it to '' to mimic `execa()`
    return error === undefined ? undefined : '';
  }

  return options.stripFinalNewline ? stripFinalNewline(value) : value;
};
|
|||
|
|
|||
|
// Async execa(): spawn the child, wire up kill/cancel/timeout handling and
// output capture, and return a child-process object that is also a Promise.
const execa = (file, args, options) => {
  const parsed = handleArguments(file, args, options);
  const command = joinCommand(file, args);
  const escapedCommand = getEscapedCommand(file, args);

  validateTimeout(parsed.options);

  let spawned;
  try {
    spawned = childProcess.spawn(parsed.file, parsed.args, parsed.options);
  } catch (error) {
    // Ensure the returned error is always both a promise and a child process
    const dummySpawned = new childProcess.ChildProcess();
    const errorPromise = Promise.reject(makeError({
      error,
      stdout: '',
      stderr: '',
      all: '',
      command,
      escapedCommand,
      parsed,
      timedOut: false,
      isCanceled: false,
      killed: false
    }));
    return mergePromise(dummySpawned, errorPromise);
  }

  // Promise chain: raw exit event -> timeout wrapper -> cleanup/exit handler.
  const spawnedPromise = getSpawnedPromise(spawned);
  const timedPromise = setupTimeout(spawned, parsed.options, spawnedPromise);
  const processDone = setExitHandler(spawned, parsed.options, timedPromise);

  // Shared mutable flag set by `spawned.cancel()` and read when building the result.
  const context = {isCanceled: false};

  spawned.kill = spawnedKill.bind(null, spawned.kill.bind(spawned));
  spawned.cancel = spawnedCancel.bind(null, spawned, context);

  // Build the final result object (or error) once the process has finished
  // and all output streams have been fully read.
  const handlePromise = async () => {
    const [{error, exitCode, signal, timedOut}, stdoutResult, stderrResult, allResult] = await getSpawnedResult(spawned, parsed.options, processDone);
    const stdout = handleOutput(parsed.options, stdoutResult);
    const stderr = handleOutput(parsed.options, stderrResult);
    const all = handleOutput(parsed.options, allResult);

    if (error || exitCode !== 0 || signal !== null) {
      const returnedError = makeError({
        error,
        exitCode,
        signal,
        stdout,
        stderr,
        all,
        command,
        escapedCommand,
        parsed,
        timedOut,
        isCanceled: context.isCanceled,
        killed: spawned.killed
      });

      // With `reject: false`, failures are returned instead of thrown.
      if (!parsed.options.reject) {
        return returnedError;
      }

      throw returnedError;
    }

    return {
      command,
      escapedCommand,
      exitCode: 0,
      stdout,
      stderr,
      all,
      failed: false,
      timedOut: false,
      isCanceled: false,
      killed: false
    };
  };

  // `onetime` so repeated `.then()` calls do not re-consume the streams.
  const handlePromiseOnce = onetime(handlePromise);

  handleInput(spawned, parsed.options.input);

  spawned.all = makeAllStream(spawned, parsed.options);

  return mergePromise(spawned, handlePromiseOnce);
};
|
|||
|
|
|||
|
// CommonJS default export.
execa$2.exports = execa;
|
|||
|
|
|||
|
// Sync variant: uses `spawnSync` and mirrors the async result/error shape
// (no `all` stream — interleaving is not possible synchronously).
execa$2.exports.sync = (file, args, options) => {
  const parsed = handleArguments(file, args, options);
  const command = joinCommand(file, args);
  const escapedCommand = getEscapedCommand(file, args);

  // A stream `input` cannot be consumed synchronously.
  validateInputSync(parsed.options);

  let result;
  try {
    result = childProcess.spawnSync(parsed.file, parsed.args, parsed.options);
  } catch (error) {
    throw makeError({
      error,
      stdout: '',
      stderr: '',
      all: '',
      command,
      escapedCommand,
      parsed,
      timedOut: false,
      isCanceled: false,
      killed: false
    });
  }

  const stdout = handleOutput(parsed.options, result.stdout, result.error);
  const stderr = handleOutput(parsed.options, result.stderr, result.error);

  if (result.error || result.status !== 0 || result.signal !== null) {
    const error = makeError({
      stdout,
      stderr,
      error: result.error,
      signal: result.signal,
      exitCode: result.status,
      command,
      escapedCommand,
      parsed,
      timedOut: result.error && result.error.code === 'ETIMEDOUT',
      isCanceled: false,
      killed: result.signal !== null
    });

    // With `reject: false`, failures are returned instead of thrown.
    if (!parsed.options.reject) {
      return error;
    }

    throw error;
  }

  return {
    command,
    escapedCommand,
    exitCode: 0,
    stdout,
    stderr,
    failed: false,
    timedOut: false,
    isCanceled: false,
    killed: false
  };
};
|
|||
|
|
|||
|
// `execa.command('cmd arg1 arg2')`: parse a single string into file + args.
execa$2.exports.command = (command, options) => {
  const [file, ...commandArguments] = parseCommand(command);
  return execa(file, commandArguments, options);
};
|
|||
|
|
|||
|
// Sync counterpart of `execa.command()`.
execa$2.exports.commandSync = (command, options) => {
  const [file, ...commandArguments] = parseCommand(command);
  return execa.sync(file, commandArguments, options);
};
|
|||
|
|
|||
|
// `execa.node()`: run a script with the Node.js executable, with an IPC
// channel and without inheriting the parent's `--inspect` debugger flags.
execa$2.exports.node = (scriptPath, args, options = {}) => {
  // Allow omitting `args`: execa.node(path, options)
  if (args && !Array.isArray(args) && typeof args === 'object') {
    options = args;
    args = [];
  }

  const stdio = normalizeStdio.node(options);
  // Inheriting the debugger flag would make the child fight over the port.
  const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect'));

  const {nodePath = process.execPath, nodeOptions = defaultExecArgv} = options;

  const childArguments = [
    ...nodeOptions,
    scriptPath,
    ...(Array.isArray(args) ? args : [])
  ];

  return execa(nodePath, childArguments, {
    ...options,
    stdin: undefined,
    stdout: undefined,
    stderr: undefined,
    stdio,
    shell: false
  });
};
|
|||
|
|
|||
|
var execaExports = execa$2.exports;

// ES-module-style default export of the bundled execa.
var execa$1 = /*@__PURE__*/getDefaultExportFromCjs(execaExports);
|
|||
|
|
|||
|
// Build a RegExp matching ANSI escape sequences (CSI sequences and
// BEL-terminated OSC sequences).
function ansiRegex({onlyFirst = false} = {}) {
  const pattern = [
    '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
    '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
  ].join('|');

  const flags = onlyFirst ? undefined : 'g';
  return new RegExp(pattern, flags);
}

// Remove every ANSI escape sequence from `string`.
function stripAnsi(string) {
  if (typeof string === 'string') {
    return string.replace(ansiRegex(), '');
  }

  throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
}
|
|||
|
|
|||
|
// Determine the user's login shell, with per-platform fallbacks:
// COMSPEC/cmd.exe on Windows, os.userInfo().shell when available,
// then $SHELL, then /bin/zsh (macOS) or /bin/sh.
const detectDefaultShell = () => {
  const {env} = process$2;

  if (process$2.platform === 'win32') {
    return env.COMSPEC || 'cmd.exe';
  }

  try {
    const {shell} = node_os.userInfo();
    if (shell) {
      return shell;
    }
  } catch {}

  const fallback = process$2.platform === 'darwin' ? '/bin/zsh' : '/bin/sh';
  return env.SHELL || fallback;
};
|
|||
|
|
|||
|
// Stores default shell when imported.
const defaultShell = detectDefaultShell();

// Interactive-login-shell invocation (-i -l -c): print the environment
// between sentinel markers so it can be parsed out of any shell noise.
const args = [
  '-ilc',
  'echo -n "_SHELL_ENV_DELIMITER_"; env; echo -n "_SHELL_ENV_DELIMITER_"; exit',
];

const env = {
  // Disables Oh My Zsh auto-update thing that can block the process.
  DISABLE_AUTO_UPDATE: 'true',
};
|
|||
|
|
|||
|
// Parse the `env` output captured between the sentinel markers into an
// object; values containing '=' are preserved intact.
const parseEnv = env => {
  const [, between] = env.split('_SHELL_ENV_DELIMITER_');
  const returnValue = {};

  const lines = stripAnsi(between).split('\n').filter(line => Boolean(line));
  for (const line of lines) {
    const [key, ...values] = line.split('=');
    returnValue[key] = values.join('=');
  }

  return returnValue;
};
|
|||
|
|
|||
|
// Synchronously capture the environment of a login shell. On Windows, or on
// failure with no explicitly requested shell, fall back to the current env.
function shellEnvSync(shell) {
  if (process$2.platform === 'win32') {
    return process$2.env;
  }

  try {
    const {stdout} = execa$1.sync(shell || defaultShell, args, {env});
    return parseEnv(stdout);
  } catch (error) {
    // An explicitly requested shell failing is an error; the default shell
    // failing silently degrades to the inherited environment.
    if (!shell) {
      return process$2.env;
    }

    throw error;
  }
}
|
|||
|
|
|||
|
// Convenience: the PATH entry of the login-shell environment.
function shellPathSync() {
  const shellEnvironment = shellEnvSync();
  return shellEnvironment.PATH;
}
|
|||
|
|
|||
|
// fix-path: GUI apps on macOS/Linux inherit a minimal PATH; replace it with
// the login shell's PATH, or a fallback list when that cannot be determined.
function fixPath() {
  if (process$2.platform === 'win32') {
    return;
  }

  const fallbackPath = [
    './node_modules/.bin',
    '/.nodebrew/current/bin',
    '/usr/local/bin',
    process$2.env.PATH,
  ].join(':');

  process$2.env.PATH = shellPathSync() || fallbackPath;
}
|
|||
|
|
|||
|
// Vendored readable-stream: module containers for the lazy require graph below.
var lib = {};

var readable = {exports: {}};
|
|||
|
|
|||
|
// Lazy, memoized accessor for Node's `stream` module.
var stream;
var hasRequiredStream;

function requireStream () {
  if (!hasRequiredStream) {
    hasRequiredStream = 1;
    stream = require$$0$5;
  }

  return stream;
}
|
|||
|
|
|||
|
// Lazy, memoized module: readable-stream's internal BufferList — a singly
// linked list of Buffer/string chunks with byte-accurate consume helpers.
// Babel-compiled (class transpiled via _createClass); code left verbatim.
var buffer_list;
var hasRequiredBuffer_list;

function requireBuffer_list () {
  if (hasRequiredBuffer_list) return buffer_list;
  hasRequiredBuffer_list = 1;

  // --- Babel runtime helpers (generated; object spread + class emulation) ---
  function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
  function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
  function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
  function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
  function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } }
  function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
  function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); }
  function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); }
  // --- End Babel helpers ---

  var _require = require$$0$6,
    Buffer = _require.Buffer;
  var _require2 = require$$2$1,
    inspect = _require2.inspect;
  // util.inspect.custom symbol, falling back to the legacy 'inspect' key.
  var custom = inspect && inspect.custom || 'inspect';
  function copyBuffer(src, target, offset) {
    Buffer.prototype.copy.call(src, target, offset);
  }
  buffer_list = /*#__PURE__*/function () {
    // head/tail linked-list of {data, next}; `length` counts chunks, not bytes.
    function BufferList() {
      _classCallCheck(this, BufferList);
      this.head = null;
      this.tail = null;
      this.length = 0;
    }
    _createClass(BufferList, [{
      key: "push",
      // Append a chunk at the tail.
      value: function push(v) {
        var entry = {
          data: v,
          next: null
        };
        if (this.length > 0) this.tail.next = entry;else this.head = entry;
        this.tail = entry;
        ++this.length;
      }
    }, {
      key: "unshift",
      // Prepend a chunk at the head.
      value: function unshift(v) {
        var entry = {
          data: v,
          next: this.head
        };
        if (this.length === 0) this.tail = entry;
        this.head = entry;
        ++this.length;
      }
    }, {
      key: "shift",
      // Remove and return the head chunk (undefined when empty).
      value: function shift() {
        if (this.length === 0) return;
        var ret = this.head.data;
        if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
        --this.length;
        return ret;
      }
    }, {
      key: "clear",
      value: function clear() {
        this.head = this.tail = null;
        this.length = 0;
      }
    }, {
      key: "join",
      // String-concatenate all chunks with separator `s`.
      value: function join(s) {
        if (this.length === 0) return '';
        var p = this.head;
        var ret = '' + p.data;
        while (p = p.next) ret += s + p.data;
        return ret;
      }
    }, {
      key: "concat",
      // Copy all chunks into one Buffer of `n` bytes.
      value: function concat(n) {
        if (this.length === 0) return Buffer.alloc(0);
        var ret = Buffer.allocUnsafe(n >>> 0);
        var p = this.head;
        var i = 0;
        while (p) {
          copyBuffer(p.data, ret, i);
          i += p.data.length;
          p = p.next;
        }
        return ret;
      }

      // Consumes a specified amount of bytes or characters from the buffered data.
    }, {
      key: "consume",
      value: function consume(n, hasStrings) {
        var ret;
        if (n < this.head.data.length) {
          // `slice` is the same for buffers and strings.
          ret = this.head.data.slice(0, n);
          this.head.data = this.head.data.slice(n);
        } else if (n === this.head.data.length) {
          // First chunk is a perfect match.
          ret = this.shift();
        } else {
          // Result spans more than one buffer.
          ret = hasStrings ? this._getString(n) : this._getBuffer(n);
        }
        return ret;
      }
    }, {
      key: "first",
      value: function first() {
        return this.head.data;
      }

      // Consumes a specified amount of characters from the buffered data.
    }, {
      key: "_getString",
      value: function _getString(n) {
        var p = this.head;
        var c = 1;
        var ret = p.data;
        n -= ret.length;
        while (p = p.next) {
          var str = p.data;
          var nb = n > str.length ? str.length : n;
          if (nb === str.length) ret += str;else ret += str.slice(0, n);
          n -= nb;
          if (n === 0) {
            // Fully satisfied: drop consumed nodes, keeping any remainder.
            if (nb === str.length) {
              ++c;
              if (p.next) this.head = p.next;else this.head = this.tail = null;
            } else {
              this.head = p;
              p.data = str.slice(nb);
            }
            break;
          }
          ++c;
        }
        this.length -= c;
        return ret;
      }

      // Consumes a specified amount of bytes from the buffered data.
    }, {
      key: "_getBuffer",
      value: function _getBuffer(n) {
        var ret = Buffer.allocUnsafe(n);
        var p = this.head;
        var c = 1;
        p.data.copy(ret);
        n -= p.data.length;
        while (p = p.next) {
          var buf = p.data;
          var nb = n > buf.length ? buf.length : n;
          buf.copy(ret, ret.length - n, 0, nb);
          n -= nb;
          if (n === 0) {
            // Fully satisfied: drop consumed nodes, keeping any remainder.
            if (nb === buf.length) {
              ++c;
              if (p.next) this.head = p.next;else this.head = this.tail = null;
            } else {
              this.head = p;
              p.data = buf.slice(nb);
            }
            break;
          }
          ++c;
        }
        this.length -= c;
        return ret;
      }

      // Make sure the linked list only shows the minimal necessary information.
    }, {
      key: custom,
      value: function value(_, options) {
        return inspect(this, _objectSpread(_objectSpread({}, options), {}, {
          // Only inspect one level.
          depth: 0,
          // It should not recurse.
          customInspect: false
        }));
      }
    }]);
    return BufferList;
  }();
  return buffer_list;
}
|
|||
|
|
|||
|
// Lazy, memoized module: readable-stream's destroy/undestroy helpers.
// Error and 'close' emission ordering is deliberately done via
// process.nextTick; code left verbatim (order-sensitive).
var destroy_1;
var hasRequiredDestroy;

function requireDestroy () {
  if (hasRequiredDestroy) return destroy_1;
  hasRequiredDestroy = 1;

  // undocumented cb() API, needed for core, not for public API
  function destroy(err, cb) {
    var _this = this;
    var readableDestroyed = this._readableState && this._readableState.destroyed;
    var writableDestroyed = this._writableState && this._writableState.destroyed;
    // Already destroyed: only report the error (at most once per stream).
    if (readableDestroyed || writableDestroyed) {
      if (cb) {
        cb(err);
      } else if (err) {
        if (!this._writableState) {
          process.nextTick(emitErrorNT, this, err);
        } else if (!this._writableState.errorEmitted) {
          this._writableState.errorEmitted = true;
          process.nextTick(emitErrorNT, this, err);
        }
      }
      return this;
    }

    // we set destroyed to true before firing error callbacks in order
    // to make it re-entrance safe in case destroy() is called within callbacks

    if (this._readableState) {
      this._readableState.destroyed = true;
    }

    // if this is a duplex stream mark the writable part as destroyed as well
    if (this._writableState) {
      this._writableState.destroyed = true;
    }
    this._destroy(err || null, function (err) {
      // Without a callback, surface the error (once) then emit 'close';
      // with a callback, emit 'close' and delegate the error to it.
      if (!cb && err) {
        if (!_this._writableState) {
          process.nextTick(emitErrorAndCloseNT, _this, err);
        } else if (!_this._writableState.errorEmitted) {
          _this._writableState.errorEmitted = true;
          process.nextTick(emitErrorAndCloseNT, _this, err);
        } else {
          process.nextTick(emitCloseNT, _this);
        }
      } else if (cb) {
        process.nextTick(emitCloseNT, _this);
        cb(err);
      } else {
        process.nextTick(emitCloseNT, _this);
      }
    });
    return this;
  }
  function emitErrorAndCloseNT(self, err) {
    emitErrorNT(self, err);
    emitCloseNT(self);
  }
  // Emit 'close' unless either side opted out via emitClose: false.
  function emitCloseNT(self) {
    if (self._writableState && !self._writableState.emitClose) return;
    if (self._readableState && !self._readableState.emitClose) return;
    self.emit('close');
  }
  // Reset the destroyed/ended flags so a stream can be reused after destroy().
  function undestroy() {
    if (this._readableState) {
      this._readableState.destroyed = false;
      this._readableState.reading = false;
      this._readableState.ended = false;
      this._readableState.endEmitted = false;
    }
    if (this._writableState) {
      this._writableState.destroyed = false;
      this._writableState.ended = false;
      this._writableState.ending = false;
      this._writableState.finalCalled = false;
      this._writableState.prefinished = false;
      this._writableState.finished = false;
      this._writableState.errorEmitted = false;
    }
  }
  function emitErrorNT(self, err) {
    self.emit('error', err);
  }
  function errorOrDestroy(stream, err) {
    // We have tests that rely on errors being emitted
    // in the same tick, so changing this is semver major.
    // For now when you opt-in to autoDestroy we allow
    // the error to be emitted nextTick. In a future
    // semver major update we should change the default to this.

    var rState = stream._readableState;
    var wState = stream._writableState;
    if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err);
  }
  destroy_1 = {
    destroy: destroy,
    undestroy: undestroy,
    errorOrDestroy: errorOrDestroy
  };
  return destroy_1;
}
|
|||
|
|
|||
|
// Lazy, memoized module: readable-stream's ERR_* error-code classes
// (backport of Node core's internal/errors registry). Code left verbatim.
var errors = {};

var hasRequiredErrors;

function requireErrors () {
  if (hasRequiredErrors) return errors;
  hasRequiredErrors = 1;

  const codes = {};

  // Register an Error subclass under `code`; `message` is either a fixed
  // string or a formatter taking up to three constructor arguments.
  function createErrorType(code, message, Base) {
    if (!Base) {
      Base = Error;
    }

    function getMessage (arg1, arg2, arg3) {
      if (typeof message === 'string') {
        return message
      } else {
        return message(arg1, arg2, arg3)
      }
    }

    class NodeError extends Base {
      constructor (arg1, arg2, arg3) {
        super(getMessage(arg1, arg2, arg3));
      }
    }

    NodeError.prototype.name = Base.name;
    NodeError.prototype.code = code;

    codes[code] = NodeError;
  }

  // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
  // Render "one of type a, b, or c" style phrases for type-error messages.
  function oneOf(expected, thing) {
    if (Array.isArray(expected)) {
      const len = expected.length;
      expected = expected.map((i) => String(i));
      if (len > 2) {
        return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
          expected[len - 1];
      } else if (len === 2) {
        return `one of ${thing} ${expected[0]} or ${expected[1]}`;
      } else {
        return `of ${thing} ${expected[0]}`;
      }
    } else {
      return `of ${thing} ${String(expected)}`;
    }
  }

  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
  function startsWith(str, search, pos) {
    return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search;
  }

  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
  function endsWith(str, search, this_len) {
    if (this_len === undefined || this_len > str.length) {
      this_len = str.length;
    }
    return str.substring(this_len - search.length, this_len) === search;
  }

  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes
  function includes(str, search, start) {
    if (typeof start !== 'number') {
      start = 0;
    }

    if (start + search.length > str.length) {
      return false;
    } else {
      return str.indexOf(search, start) !== -1;
    }
  }

  createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) {
    return 'The value "' + value + '" is invalid for option "' + name + '"'
  }, TypeError);
  createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) {
    // determiner: 'must be' or 'must not be'
    let determiner;
    if (typeof expected === 'string' && startsWith(expected, 'not ')) {
      determiner = 'must not be';
      expected = expected.replace(/^not /, '');
    } else {
      determiner = 'must be';
    }

    let msg;
    if (endsWith(name, ' argument')) {
      // For cases like 'first argument'
      msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
    } else {
      const type = includes(name, '.') ? 'property' : 'argument';
      msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
    }

    msg += `. Received type ${typeof actual}`;
    return msg;
  }, TypeError);
  createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
  createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) {
    return 'The ' + name + ' method is not implemented'
  });
  createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close');
  createErrorType('ERR_STREAM_DESTROYED', function (name) {
    return 'Cannot call ' + name + ' after a stream was destroyed';
  });
  createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
  createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
  createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end');
  createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError);
  createErrorType('ERR_UNKNOWN_ENCODING', function (arg) {
    return 'Unknown encoding: ' + arg
  }, TypeError);
  createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');

  errors.codes = codes;
  return errors;
}
|
|||
|
|
|||
|
var state;
var hasRequiredState;

/**
 * Lazily builds the shared high-water-mark helper module (CJS-interop shim).
 * The module object is cached; subsequent calls return the same object.
 */
function requireState () {
  if (hasRequiredState) return state;
  hasRequiredState = 1;

  var ERR_INVALID_OPT_VALUE = requireErrors().codes.ERR_INVALID_OPT_VALUE;

  // Pick the configured mark: an explicit `highWaterMark` wins; otherwise the
  // side-specific duplex option (e.g. `writableHighWaterMark`); else null.
  function highWaterMarkFrom(opts, duplex, key) {
    if (opts.highWaterMark != null) return opts.highWaterMark;
    return duplex ? opts[key] : null;
  }

  /**
   * Validates and resolves the highWaterMark for a stream state object.
   * Throws ERR_INVALID_OPT_VALUE for negative or non-integer values; when no
   * mark is configured, falls back to 16 (object mode) or 16 KiB.
   */
  function getHighWaterMark(streamState, opts, key, duplex) {
    var mark = highWaterMarkFrom(opts, duplex, key);
    if (mark == null) {
      // Default value
      return streamState.objectMode ? 16 : 16 * 1024;
    }
    // NOTE: the coercing global isFinite is intentional, matching upstream
    // readable-stream behavior for numeric-string options.
    var isValid = isFinite(mark) && Math.floor(mark) === mark && mark >= 0;
    if (!isValid) {
      throw new ERR_INVALID_OPT_VALUE(duplex ? key : 'highWaterMark', mark);
    }
    return Math.floor(mark);
  }

  state = { getHighWaterMark: getHighWaterMark };
  return state;
}
|
|||
|
|
|||
|
var inherits = {exports: {}};

var inherits_browser = {exports: {}};

var hasRequiredInherits_browser;

/**
 * Lazily provides a browser-safe `inherits(ctor, superCtor)` implementation.
 * Sets `ctor.super_` and re-parents `ctor.prototype` onto
 * `superCtor.prototype`, preserving a non-enumerable `constructor`.
 */
function requireInherits_browser () {
  if (hasRequiredInherits_browser) return inherits_browser.exports;
  hasRequiredInherits_browser = 1;

  var hasObjectCreate = typeof Object.create === 'function';

  if (hasObjectCreate) {
    // implementation from standard node.js 'util' module
    inherits_browser.exports = function inherits(child, parent) {
      if (!parent) return;
      child.super_ = parent;
      child.prototype = Object.create(parent.prototype, {
        constructor: {
          value: child,
          enumerable: false,
          writable: true,
          configurable: true
        }
      });
    };
  } else {
    // old school shim for old browsers
    inherits_browser.exports = function inherits(child, parent) {
      if (!parent) return;
      child.super_ = parent;
      var Surrogate = function () {};
      Surrogate.prototype = parent.prototype;
      child.prototype = new Surrogate();
      child.prototype.constructor = child;
    };
  }
  return inherits_browser.exports;
}
|
|||
|
|
|||
|
var hasRequiredInherits;

/**
 * Lazily resolves the `inherits` helper: prefers Node's native
 * `util.inherits`, falling back to the browser shim when `util` is missing
 * or does not expose a function.
 */
function requireInherits () {
  if (hasRequiredInherits) return inherits.exports;
  hasRequiredInherits = 1;

  var impl;
  try {
    var util = require('util');
    /* istanbul ignore next */
    if (typeof util.inherits !== 'function') throw '';
    impl = util.inherits;
  } catch (e) {
    /* istanbul ignore next */
    impl = requireInherits_browser();
  }
  inherits.exports = impl;
  return inherits.exports;
}
|
|||
|
|
|||
|
var node;
var hasRequiredNode;

/**
 * For Node.js, simply re-export the core `util.deprecate` function.
 * Cached after the first call (CJS-interop lazy require).
 */
function requireNode () {
  if (!hasRequiredNode) {
    hasRequiredNode = 1;
    node = require$$2$1.deprecate;
  }
  return node;
}
|
|||
|
|
|||
|
var _stream_writable;
var hasRequired_stream_writable;

// Lazily builds the bundled readable-stream Writable implementation
// (CJS-interop shim). NOTE(review): this is vendored third-party code; the
// exact statement ordering (sync/async onwrite, corked-request recycling)
// is load-bearing — comments only, no logic changes.
function require_stream_writable () {
  if (hasRequired_stream_writable) return _stream_writable;
  hasRequired_stream_writable = 1;

  _stream_writable = Writable;

  // It seems a linked list but it is not
  // there will be only 2 of these for each stream
  // Holder for callbacks of writes batched by _writev(); recycled via
  // state.corkedRequestsFree so at most two ever exist per stream.
  function CorkedRequest(state) {
    var _this = this;
    this.next = null;
    this.entry = null;
    this.finish = function () {
      onCorkedFinish(_this, state);
    };
  }
  /* </replacement> */

  /*<replacement>*/
  var Duplex;
  /*</replacement>*/

  Writable.WritableState = WritableState;

  /*<replacement>*/
  var internalUtil = {
    deprecate: requireNode()
  };
  /*</replacement>*/

  /*<replacement>*/
  var Stream = requireStream();
  /*</replacement>*/

  var Buffer = require$$0$6.Buffer;
  // Uint8Array from whichever global object exists (node/browser/worker).
  var OurUint8Array = (typeof commonjsGlobal !== 'undefined' ? commonjsGlobal : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
  function _uint8ArrayToBuffer(chunk) {
    return Buffer.from(chunk);
  }
  function _isUint8Array(obj) {
    return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
  }
  var destroyImpl = requireDestroy();
  var _require = requireState(),
    getHighWaterMark = _require.getHighWaterMark;
  var _require$codes = requireErrors().codes,
    ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
    ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
    ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
    ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
    ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
    ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
    ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
    ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
  var errorOrDestroy = destroyImpl.errorOrDestroy;
  requireInherits()(Writable, Stream);
  // Default no-op write callback.
  function nop() {}

  // Per-stream mutable bookkeeping for the writable side.
  function WritableState(options, stream, isDuplex) {
    Duplex = Duplex || require_stream_duplex();
    options = options || {};

    // Duplex streams are both readable and writable, but share
    // the same options object.
    // However, some cases require setting options to different
    // values for the readable and the writable sides of the duplex stream,
    // e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
    if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;

    // object stream flag to indicate whether or not this stream
    // contains buffers or objects.
    this.objectMode = !!options.objectMode;
    if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;

    // the point at which write() starts returning false
    // Note: 0 is a valid value, means that we always return false if
    // the entire buffer is not flushed immediately on write()
    this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex);

    // if _final has been called
    this.finalCalled = false;

    // drain event flag.
    this.needDrain = false;
    // at the start of calling end()
    this.ending = false;
    // when end() has been called, and returned
    this.ended = false;
    // when 'finish' is emitted
    this.finished = false;

    // has it been destroyed
    this.destroyed = false;

    // should we decode strings into buffers before passing to _write?
    // this is here so that some node-core streams can optimize string
    // handling at a lower level.
    var noDecode = options.decodeStrings === false;
    this.decodeStrings = !noDecode;

    // Crypto is kind of old and crusty. Historically, its default string
    // encoding is 'binary' so we have to make this configurable.
    // Everything else in the universe uses 'utf8', though.
    this.defaultEncoding = options.defaultEncoding || 'utf8';

    // not an actual buffer we keep track of, but a measurement
    // of how much we're waiting to get pushed to some underlying
    // socket or file.
    this.length = 0;

    // a flag to see when we're in the middle of a write.
    this.writing = false;

    // when true all writes will be buffered until .uncork() call
    this.corked = 0;

    // a flag to be able to tell if the onwrite cb is called immediately,
    // or on a later tick. We set this to true at first, because any
    // actions that shouldn't happen until "later" should generally also
    // not happen before the first write call.
    this.sync = true;

    // a flag to know if we're processing previously buffered items, which
    // may call the _write() callback in the same tick, so that we don't
    // end up in an overlapped onwrite situation.
    this.bufferProcessing = false;

    // the callback that's passed to _write(chunk,cb)
    this.onwrite = function (er) {
      onwrite(stream, er);
    };

    // the callback that the user supplies to write(chunk,encoding,cb)
    this.writecb = null;

    // the amount that is being written when _write is called.
    this.writelen = 0;
    // head/tail of the singly-linked list of pending buffered writes.
    this.bufferedRequest = null;
    this.lastBufferedRequest = null;

    // number of pending user-supplied write callbacks
    // this must be 0 before 'finish' can be emitted
    this.pendingcb = 0;

    // emit prefinish if the only thing we're waiting for is _write cbs
    // This is relevant for synchronous Transform streams
    this.prefinished = false;

    // True if the error was already emitted and should not be thrown again
    this.errorEmitted = false;

    // Should close be emitted on destroy. Defaults to true.
    this.emitClose = options.emitClose !== false;

    // Should .destroy() be called after 'finish' (and potentially 'end')
    this.autoDestroy = !!options.autoDestroy;

    // count buffered requests
    this.bufferedRequestCount = 0;

    // allocate the first CorkedRequest, there is always
    // one allocated and free to use, and we maintain at most two
    this.corkedRequestsFree = new CorkedRequest(this);
  }

  // Snapshot of the buffered-write linked list as an array (debug helper).
  WritableState.prototype.getBuffer = function getBuffer() {
    var current = this.bufferedRequest;
    var out = [];
    while (current) {
      out.push(current);
      current = current.next;
    }
    return out;
  };

  // Deprecated `_writableState.buffer` accessor; defineProperty may throw
  // on exotic environments, hence the try/catch.
  (function () {
    try {
      Object.defineProperty(WritableState.prototype, 'buffer', {
        get: internalUtil.deprecate(function writableStateBufferGetter() {
          return this.getBuffer();
        }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
      });
    } catch (_) {}
  })();

  // Test _writableState for inheritance to account for Duplex streams,
  // whose prototype chain only points to Readable.
  var realHasInstance;
  if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
    realHasInstance = Function.prototype[Symbol.hasInstance];
    Object.defineProperty(Writable, Symbol.hasInstance, {
      value: function value(object) {
        if (realHasInstance.call(this, object)) return true;
        if (this !== Writable) return false;
        return object && object._writableState instanceof WritableState;
      }
    });
  } else {
    realHasInstance = function realHasInstance(object) {
      return object instanceof this;
    };
  }

  // Writable stream constructor; callable without `new`.
  function Writable(options) {
    Duplex = Duplex || require_stream_duplex();

    // Writable ctor is applied to Duplexes, too.
    // `realHasInstance` is necessary because using plain `instanceof`
    // would return false, as no `_writableState` property is attached.

    // Trying to use the custom `instanceof` for Writable here will also break the
    // Node.js LazyTransform implementation, which has a non-trivial getter for
    // `_writableState` that would lead to infinite recursion.

    // Checking for a Stream.Duplex instance is faster here instead of inside
    // the WritableState constructor, at least with V8 6.5
    var isDuplex = this instanceof Duplex;
    if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
    this._writableState = new WritableState(options, this, isDuplex);

    // legacy.
    this.writable = true;
    if (options) {
      if (typeof options.write === 'function') this._write = options.write;
      if (typeof options.writev === 'function') this._writev = options.writev;
      if (typeof options.destroy === 'function') this._destroy = options.destroy;
      if (typeof options.final === 'function') this._final = options.final;
    }
    Stream.call(this);
  }

  // Otherwise people can pipe Writable streams, which is just wrong.
  Writable.prototype.pipe = function () {
    errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
  };

  // Report a write-after-end error both on the stream and via the callback.
  function writeAfterEnd(stream, cb) {
    var er = new ERR_STREAM_WRITE_AFTER_END();
    // TODO: defer error events consistently everywhere, not just the cb
    errorOrDestroy(stream, er);
    process.nextTick(cb, er);
  }

  // Checks that a user-supplied chunk is valid, especially for the particular
  // mode the stream is in. Currently this means that `null` is never accepted
  // and undefined/non-string values are only allowed in object mode.
  function validChunk(stream, state, chunk, cb) {
    var er;
    if (chunk === null) {
      er = new ERR_STREAM_NULL_VALUES();
    } else if (typeof chunk !== 'string' && !state.objectMode) {
      er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
    }
    if (er) {
      errorOrDestroy(stream, er);
      process.nextTick(cb, er);
      return false;
    }
    return true;
  }

  // Public write(): normalizes (chunk, encoding, cb) overloads, then either
  // rejects (after end), buffers, or dispatches the chunk. Returns false
  // when the internal buffer is at/over the high-water mark.
  Writable.prototype.write = function (chunk, encoding, cb) {
    var state = this._writableState;
    var ret = false;
    var isBuf = !state.objectMode && _isUint8Array(chunk);
    if (isBuf && !Buffer.isBuffer(chunk)) {
      chunk = _uint8ArrayToBuffer(chunk);
    }
    if (typeof encoding === 'function') {
      cb = encoding;
      encoding = null;
    }
    if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
    if (typeof cb !== 'function') cb = nop;
    if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
      state.pendingcb++;
      ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
    }
    return ret;
  };

  // cork() increments a counter so nested cork/uncork pairs balance.
  Writable.prototype.cork = function () {
    this._writableState.corked++;
  };
  Writable.prototype.uncork = function () {
    var state = this._writableState;
    if (state.corked) {
      state.corked--;
      // Flush only once fully uncorked and not already writing/processing.
      if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
    }
  };

  // Validates and stores the default string encoding for future writes.
  Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
    // node::ParseEncoding() requires lower case.
    if (typeof encoding === 'string') encoding = encoding.toLowerCase();
    if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
    this._writableState.defaultEncoding = encoding;
    return this;
  };
  Object.defineProperty(Writable.prototype, 'writableBuffer', {
    // making it explicit this property is not enumerable
    // because otherwise some prototype manipulation in
    // userland will fail
    enumerable: false,
    get: function get() {
      return this._writableState && this._writableState.getBuffer();
    }
  });

  // Converts string chunks to Buffers unless decoding is disabled or the
  // stream is in object mode.
  function decodeChunk(state, chunk, encoding) {
    if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
      chunk = Buffer.from(chunk, encoding);
    }
    return chunk;
  }
  Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
    // making it explicit this property is not enumerable
    // because otherwise some prototype manipulation in
    // userland will fail
    enumerable: false,
    get: function get() {
      return this._writableState.highWaterMark;
    }
  });

  // if we're already writing something, then just put this
  // in the queue, and wait our turn. Otherwise, call _write
  // If we return false, then we need a drain event, so set that flag.
  function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
    if (!isBuf) {
      var newChunk = decodeChunk(state, chunk, encoding);
      if (chunk !== newChunk) {
        isBuf = true;
        encoding = 'buffer';
        chunk = newChunk;
      }
    }
    var len = state.objectMode ? 1 : chunk.length;
    state.length += len;
    var ret = state.length < state.highWaterMark;
    // we must ensure that previous needDrain will not be reset to false.
    if (!ret) state.needDrain = true;
    if (state.writing || state.corked) {
      // Append to the linked list of buffered requests.
      var last = state.lastBufferedRequest;
      state.lastBufferedRequest = {
        chunk: chunk,
        encoding: encoding,
        isBuf: isBuf,
        callback: cb,
        next: null
      };
      if (last) {
        last.next = state.lastBufferedRequest;
      } else {
        state.bufferedRequest = state.lastBufferedRequest;
      }
      state.bufferedRequestCount += 1;
    } else {
      doWrite(stream, state, false, len, chunk, encoding, cb);
    }
    return ret;
  }

  // Dispatches one write (or one batched writev) to the underlying _write/
  // _writev implementation, flipping the sync flag around the call so
  // onwrite() can tell whether it ran re-entrantly.
  function doWrite(stream, state, writev, len, chunk, encoding, cb) {
    state.writelen = len;
    state.writecb = cb;
    state.writing = true;
    state.sync = true;
    if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
    state.sync = false;
  }

  // Error path of onwrite(): invoke the user callback (deferred when
  // synchronous to avoid stack pile-up), then surface the error.
  function onwriteError(stream, state, sync, er, cb) {
    --state.pendingcb;
    if (sync) {
      // defer the callback if we are being called synchronously
      // to avoid piling up things on the stack
      process.nextTick(cb, er);
      // this can emit finish, and it will always happen
      // after error
      process.nextTick(finishMaybe, stream, state);
      stream._writableState.errorEmitted = true;
      errorOrDestroy(stream, er);
    } else {
      // the caller expects this to happen before if
      // it is async
      cb(er);
      stream._writableState.errorEmitted = true;
      errorOrDestroy(stream, er);
      // this can emit finish, but finish must
      // always follow error
      finishMaybe(stream, state);
    }
  }

  // Resets per-write bookkeeping once a write completes.
  function onwriteStateUpdate(state) {
    state.writing = false;
    state.writecb = null;
    state.length -= state.writelen;
    state.writelen = 0;
  }

  // Callback given to _write(); routes to the error path or continues
  // flushing buffered writes / finishing the stream.
  function onwrite(stream, er) {
    var state = stream._writableState;
    var sync = state.sync;
    var cb = state.writecb;
    // A null writecb means onwrite was invoked twice for one write.
    if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
    onwriteStateUpdate(state);
    if (er) onwriteError(stream, state, sync, er, cb);else {
      // Check if we're actually ready to finish, but don't emit yet
      var finished = needFinish(state) || stream.destroyed;
      if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
        clearBuffer(stream, state);
      }
      if (sync) {
        process.nextTick(afterWrite, stream, state, finished, cb);
      } else {
        afterWrite(stream, state, finished, cb);
      }
    }
  }

  // Runs the user write callback and re-checks drain/finish conditions.
  function afterWrite(stream, state, finished, cb) {
    if (!finished) onwriteDrain(stream, state);
    state.pendingcb--;
    cb();
    finishMaybe(stream, state);
  }

  // Must force callback to be called on nextTick, so that we don't
  // emit 'drain' before the write() consumer gets the 'false' return
  // value, and has a chance to attach a 'drain' listener.
  function onwriteDrain(stream, state) {
    if (state.length === 0 && state.needDrain) {
      state.needDrain = false;
      stream.emit('drain');
    }
  }

  // if there's something in the buffer waiting, then process it
  function clearBuffer(stream, state) {
    state.bufferProcessing = true;
    var entry = state.bufferedRequest;
    if (stream._writev && entry && entry.next) {
      // Fast case, write everything using _writev()
      var l = state.bufferedRequestCount;
      var buffer = new Array(l);
      var holder = state.corkedRequestsFree;
      holder.entry = entry;
      var count = 0;
      var allBuffers = true;
      while (entry) {
        buffer[count] = entry;
        if (!entry.isBuf) allBuffers = false;
        entry = entry.next;
        count += 1;
      }
      buffer.allBuffers = allBuffers;
      doWrite(stream, state, true, state.length, buffer, '', holder.finish);

      // doWrite is almost always async, defer these to save a bit of time
      // as the hot path ends with doWrite
      state.pendingcb++;
      state.lastBufferedRequest = null;
      if (holder.next) {
        state.corkedRequestsFree = holder.next;
        holder.next = null;
      } else {
        state.corkedRequestsFree = new CorkedRequest(state);
      }
      state.bufferedRequestCount = 0;
    } else {
      // Slow case, write chunks one-by-one
      while (entry) {
        var chunk = entry.chunk;
        var encoding = entry.encoding;
        var cb = entry.callback;
        var len = state.objectMode ? 1 : chunk.length;
        doWrite(stream, state, false, len, chunk, encoding, cb);
        entry = entry.next;
        state.bufferedRequestCount--;
        // if we didn't call the onwrite immediately, then
        // it means that we need to wait until it does.
        // also, that means that the chunk and cb are currently
        // being processed, so move the buffer counter past them.
        if (state.writing) {
          break;
        }
      }
      if (entry === null) state.lastBufferedRequest = null;
    }
    state.bufferedRequest = entry;
    state.bufferProcessing = false;
  }

  // Default _write: subclasses must override (or pass options.write).
  Writable.prototype._write = function (chunk, encoding, cb) {
    cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
  };
  Writable.prototype._writev = null;

  // Public end(): optional final chunk, full uncork, then mark ending.
  Writable.prototype.end = function (chunk, encoding, cb) {
    var state = this._writableState;
    if (typeof chunk === 'function') {
      cb = chunk;
      chunk = null;
      encoding = null;
    } else if (typeof encoding === 'function') {
      cb = encoding;
      encoding = null;
    }
    if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);

    // .end() fully uncorks
    if (state.corked) {
      state.corked = 1;
      this.uncork();
    }

    // ignore unnecessary end() calls.
    if (!state.ending) endWritable(this, state, cb);
    return this;
  };
  Object.defineProperty(Writable.prototype, 'writableLength', {
    // making it explicit this property is not enumerable
    // because otherwise some prototype manipulation in
    // userland will fail
    enumerable: false,
    get: function get() {
      return this._writableState.length;
    }
  });

  // True when everything has been written, nothing is buffered or in
  // flight, and 'finish' has not yet been emitted.
  function needFinish(state) {
    return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
  }

  // Invokes the user-provided _final() hook before 'finish'.
  function callFinal(stream, state) {
    stream._final(function (err) {
      state.pendingcb--;
      if (err) {
        errorOrDestroy(stream, err);
      }
      state.prefinished = true;
      stream.emit('prefinish');
      finishMaybe(stream, state);
    });
  }

  // Emits 'prefinish' exactly once, routing through _final() when present.
  function prefinish(stream, state) {
    if (!state.prefinished && !state.finalCalled) {
      if (typeof stream._final === 'function' && !state.destroyed) {
        state.pendingcb++;
        state.finalCalled = true;
        process.nextTick(callFinal, stream, state);
      } else {
        state.prefinished = true;
        stream.emit('prefinish');
      }
    }
  }

  // Emits 'finish' (and possibly auto-destroys) once all callbacks drain.
  function finishMaybe(stream, state) {
    var need = needFinish(state);
    if (need) {
      prefinish(stream, state);
      if (state.pendingcb === 0) {
        state.finished = true;
        stream.emit('finish');
        if (state.autoDestroy) {
          // In case of duplex streams we need a way to detect
          // if the readable side is ready for autoDestroy as well
          var rState = stream._readableState;
          if (!rState || rState.autoDestroy && rState.endEmitted) {
            stream.destroy();
          }
        }
      }
    }
    return need;
  }

  // Transitions the stream into its ending/ended state and wires the
  // user-supplied end() callback to 'finish'.
  function endWritable(stream, state, cb) {
    state.ending = true;
    finishMaybe(stream, state);
    if (cb) {
      if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
    }
    state.ended = true;
    stream.writable = false;
  }

  // Completion handler for a batched writev: fires every buffered callback
  // in order, then recycles the CorkedRequest holder.
  function onCorkedFinish(corkReq, state, err) {
    var entry = corkReq.entry;
    corkReq.entry = null;
    while (entry) {
      var cb = entry.callback;
      state.pendingcb--;
      cb(err);
      entry = entry.next;
    }

    // reuse the free corkReq.
    state.corkedRequestsFree.next = corkReq;
  }
  Object.defineProperty(Writable.prototype, 'destroyed', {
    // making it explicit this property is not enumerable
    // because otherwise some prototype manipulation in
    // userland will fail
    enumerable: false,
    get: function get() {
      if (this._writableState === undefined) {
        return false;
      }
      return this._writableState.destroyed;
    },
    set: function set(value) {
      // we ignore the value if the stream
      // has not been initialized yet
      if (!this._writableState) {
        return;
      }

      // backward compatibility, the user is explicitly
      // managing destroyed
      this._writableState.destroyed = value;
    }
  });
  Writable.prototype.destroy = destroyImpl.destroy;
  Writable.prototype._undestroy = destroyImpl.undestroy;
  Writable.prototype._destroy = function (err, cb) {
    cb(err);
  };
  return _stream_writable;
}
|
|||
|
|
|||
|
var _stream_duplex;
var hasRequired_stream_duplex;

// Lazily builds the bundled readable-stream Duplex implementation
// (CJS-interop shim): a stream that is both Readable and Writable.
// NOTE(review): vendored third-party code — comments only, no logic changes;
// the Readable-inherit + Writable-mixin ordering is load-bearing.
function require_stream_duplex () {
  if (hasRequired_stream_duplex) return _stream_duplex;
  hasRequired_stream_duplex = 1;

  /*<replacement>*/
  // Object.keys fallback for very old environments.
  var objectKeys = Object.keys || function (obj) {
    var keys = [];
    for (var key in obj) keys.push(key);
    return keys;
  };
  /*</replacement>*/

  _stream_duplex = Duplex;
  var Readable = require_stream_readable();
  var Writable = require_stream_writable();
  // Prototype chain points at Readable; Writable methods are mixed in below.
  requireInherits()(Duplex, Readable);
  {
    // Allow the keys array to be GC'ed.
    var keys = objectKeys(Writable.prototype);
    for (var v = 0; v < keys.length; v++) {
      var method = keys[v];
      if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
    }
  }

  // Duplex constructor; callable without `new`. Initializes both the
  // readable and writable sides from a single options object.
  function Duplex(options) {
    if (!(this instanceof Duplex)) return new Duplex(options);
    Readable.call(this, options);
    Writable.call(this, options);
    this.allowHalfOpen = true;
    if (options) {
      if (options.readable === false) this.readable = false;
      if (options.writable === false) this.writable = false;
      if (options.allowHalfOpen === false) {
        this.allowHalfOpen = false;
        // Auto-end the writable side when the readable side ends.
        this.once('end', onend);
      }
    }
  }
  Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
    // making it explicit this property is not enumerable
    // because otherwise some prototype manipulation in
    // userland will fail
    enumerable: false,
    get: function get() {
      return this._writableState.highWaterMark;
    }
  });
  Object.defineProperty(Duplex.prototype, 'writableBuffer', {
    // making it explicit this property is not enumerable
    // because otherwise some prototype manipulation in
    // userland will fail
    enumerable: false,
    get: function get() {
      return this._writableState && this._writableState.getBuffer();
    }
  });
  Object.defineProperty(Duplex.prototype, 'writableLength', {
    // making it explicit this property is not enumerable
    // because otherwise some prototype manipulation in
    // userland will fail
    enumerable: false,
    get: function get() {
      return this._writableState.length;
    }
  });

  // the no-half-open enforcer
  function onend() {
    // If the writable side ended, then we're ok.
    if (this._writableState.ended) return;

    // no more data can be written.
    // But allow more writes to happen in this tick.
    process.nextTick(onEndNT, this);
  }
  // Deferred end() of the writable side (runs on the next tick).
  function onEndNT(self) {
    self.end();
  }
  Object.defineProperty(Duplex.prototype, 'destroyed', {
    // making it explicit this property is not enumerable
    // because otherwise some prototype manipulation in
    // userland will fail
    enumerable: false,
    get: function get() {
      if (this._readableState === undefined || this._writableState === undefined) {
        return false;
      }
      // Destroyed only when BOTH sides are destroyed.
      return this._readableState.destroyed && this._writableState.destroyed;
    },
    set: function set(value) {
      // we ignore the value if the stream
      // has not been initialized yet
      if (this._readableState === undefined || this._writableState === undefined) {
        return;
      }

      // backward compatibility, the user is explicitly
      // managing destroyed
      this._readableState.destroyed = value;
      this._writableState.destroyed = value;
    }
  });
  return _stream_duplex;
}
|
|||
|
|
|||
|
var string_decoder = {};
|
|||
|
|
|||
|
var safeBuffer = {exports: {}};
|
|||
|
|
|||
|
/*! safe-buffer. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
|
|||
|
|
|||
|
var hasRequiredSafeBuffer;
|
|||
|
|
|||
|
// Lazily evaluates the bundled `safe-buffer` shim (CommonJS interop emitted
// by the bundler) and returns its exports. On modern Node the shim simply
// re-exports the core 'buffer' module; on runtimes lacking the safe
// allocation APIs it exports the SafeBuffer wrapper below.
function requireSafeBuffer () {
  if (hasRequiredSafeBuffer) return safeBuffer.exports;
  hasRequiredSafeBuffer = 1;
  (function (module, exports) {
    /* eslint-disable node/no-deprecated-api */
    var buffer = require$$0$6; // bundler alias for Node's core 'buffer' module
    var Buffer = buffer.Buffer;

    // alternative to using Object.keys for old browsers
    function copyProps (src, dst) {
      for (var key in src) {
        dst[key] = src[key];
      }
    }
    // Modern runtimes already expose the safe allocation APIs: re-export core.
    if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
      module.exports = buffer;
    } else {
      // Copy properties from require('buffer')
      copyProps(buffer, exports);
      exports.Buffer = SafeBuffer;
    }

    // Thin wrapper around the deprecated Buffer() constructor; kept
    // intentionally so the shim works on very old Node versions.
    function SafeBuffer (arg, encodingOrOffset, length) {
      return Buffer(arg, encodingOrOffset, length)
    }

    SafeBuffer.prototype = Object.create(Buffer.prototype);

    // Copy static methods from Buffer
    copyProps(Buffer, SafeBuffer);

    // Like Buffer.from: rejects numeric args to avoid the unsafe
    // uninitialized-allocation behavior of Buffer(number).
    SafeBuffer.from = function (arg, encodingOrOffset, length) {
      if (typeof arg === 'number') {
        throw new TypeError('Argument must not be a number')
      }
      return Buffer(arg, encodingOrOffset, length)
    };

    // Like Buffer.alloc: zero-fills unless an explicit fill is given.
    SafeBuffer.alloc = function (size, fill, encoding) {
      if (typeof size !== 'number') {
        throw new TypeError('Argument must be a number')
      }
      var buf = Buffer(size);
      if (fill !== undefined) {
        if (typeof encoding === 'string') {
          buf.fill(fill, encoding);
        } else {
          buf.fill(fill);
        }
      } else {
        buf.fill(0);
      }
      return buf
    };

    // Like Buffer.allocUnsafe: uninitialized memory, numeric size required.
    SafeBuffer.allocUnsafe = function (size) {
      if (typeof size !== 'number') {
        throw new TypeError('Argument must be a number')
      }
      return Buffer(size)
    };

    // Like Buffer.allocUnsafeSlow: non-pooled uninitialized allocation.
    SafeBuffer.allocUnsafeSlow = function (size) {
      if (typeof size !== 'number') {
        throw new TypeError('Argument must be a number')
      }
      return buffer.SlowBuffer(size)
    };
  } (safeBuffer, safeBuffer.exports));
  return safeBuffer.exports;
}
|
|||
|
|
|||
|
var hasRequiredString_decoder;
|
|||
|
|
|||
|
function requireString_decoder () {
|
|||
|
if (hasRequiredString_decoder) return string_decoder;
|
|||
|
hasRequiredString_decoder = 1;
|
|||
|
|
|||
|
/*<replacement>*/
|
|||
|
|
|||
|
var Buffer = requireSafeBuffer().Buffer;
|
|||
|
/*</replacement>*/
|
|||
|
|
|||
|
// Prefer Buffer.isEncoding; fall back to a static whitelist of the
// encodings this decoder understands for very old runtimes.
var isEncoding = Buffer.isEncoding || function (encoding) {
  encoding = '' + encoding;
  switch (encoding && encoding.toLowerCase()) {
    case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
      return true;
    default:
      return false;
  }
};
|
|||
|
|
|||
|
// Maps an encoding name to its canonical form ('utf8', 'utf16le', 'latin1',
// 'base64', 'ascii', 'hex'), or undefined for anything unrecognized. The
// first pass compares the value verbatim; if nothing matches, the value is
// stringified/lowercased once and checked a second time.
function _normalizeEncoding(enc) {
  if (!enc) return 'utf8';
  var candidate = enc;
  for (var attempt = 0; attempt < 2; attempt++) {
    if (candidate === 'utf8' || candidate === 'utf-8') return 'utf8';
    if (candidate === 'ucs2' || candidate === 'ucs-2' || candidate === 'utf16le' || candidate === 'utf-16le') return 'utf16le';
    if (candidate === 'latin1' || candidate === 'binary') return 'latin1';
    if (candidate === 'base64' || candidate === 'ascii' || candidate === 'hex') return candidate;
    candidate = ('' + candidate).toLowerCase();
  }
  return undefined; // unknown even after lowercasing
}
|
|||
|
// Do not cache `Buffer.isEncoding` when checking encoding names as some
// modules monkey-patch it to support additional encodings
function normalizeEncoding(enc) {
  var nenc = _normalizeEncoding(enc);
  // Unknown name: only throw when Buffer itself would also reject it (a
  // monkey-patched isEncoding may legitimately accept extra encodings).
  if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
  // For patched-in encodings nenc is undefined, so pass the name through.
  return nenc || enc;
}
|
|||
|
|
|||
|
// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters.
string_decoder.StringDecoder = StringDecoder;
function StringDecoder(encoding) {
  this.encoding = normalizeEncoding(encoding);
  // nb = capacity of the lastChar scratch buffer: the max number of bytes a
  // single incomplete unit can occupy for the chosen encoding.
  var nb;
  switch (this.encoding) {
    case 'utf16le':
      this.text = utf16Text;
      this.end = utf16End;
      nb = 4; // a surrogate pair spans 4 bytes
      break;
    case 'utf8':
      this.fillLast = utf8FillLast;
      nb = 4; // longest UTF-8 sequence is 4 bytes
      break;
    case 'base64':
      this.text = base64Text;
      this.end = base64End;
      nb = 3; // base64 encodes 3-byte groups
      break;
    default:
      // Single-byte encodings never split characters: install pass-through
      // implementations and skip the partial-character state entirely.
      this.write = simpleWrite;
      this.end = simpleEnd;
      return;
  }
  // Partial-character state: bytes still missing, total bytes of the pending
  // unit, and the bytes collected so far.
  this.lastNeed = 0;
  this.lastTotal = 0;
  this.lastChar = Buffer.allocUnsafe(nb);
}
|
|||
|
|
|||
|
// Decodes `buf`, returning every complete character and buffering the
// trailing bytes of any incomplete character for the next call.
StringDecoder.prototype.write = function (buf) {
  if (buf.length === 0) return '';
  var r;
  var i;
  if (this.lastNeed) {
    // Finish the character left over from the previous write first.
    r = this.fillLast(buf);
    if (r === undefined) return ''; // still incomplete; bytes were buffered
    i = this.lastNeed; // skip the bytes fillLast consumed from buf
    this.lastNeed = 0;
  } else {
    i = 0;
  }
  // Decode the remainder (if any) and prepend the completed character.
  if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
  return r || '';
};
|
|||
|
|
|||
|
// Default (UTF-8) implementations; the constructor overrides these for
// utf16le/base64/single-byte encodings.
StringDecoder.prototype.end = utf8End;

// Returns only complete characters in a Buffer
StringDecoder.prototype.text = utf8Text;

// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
StringDecoder.prototype.fillLast = function (buf) {
  if (this.lastNeed <= buf.length) {
    // Enough bytes arrived: complete the pending unit and decode it.
    buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
  }
  // Still short: stash what arrived and keep waiting (returns undefined).
  buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
  this.lastNeed -= buf.length;
};
|
|||
|
|
|||
|
// Classifies a UTF-8 byte: 0 for ASCII, 2-4 for a lead byte of a sequence
// of that length, -1 for a continuation byte, and -2 for an invalid byte.
function utf8CheckByte(byte) {
  if (byte <= 0x7F) return 0;
  if (byte >> 5 === 0x06) return 2; // 110xxxxx
  if (byte >> 4 === 0x0E) return 3; // 1110xxxx
  if (byte >> 3 === 0x1E) return 4; // 11110xxx
  return byte >> 6 === 0x02 ? -1 : -2; // 10xxxxxx is a continuation byte
}
|
|||
|
|
|||
|
// Checks at most 3 bytes at the end of a Buffer in order to detect an
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
// needed to complete the UTF-8 character (if applicable) are returned.
function utf8CheckIncomplete(self, buf, i) {
  var j = buf.length - 1;
  if (j < i) return 0;
  // Walk backwards over up to three trailing bytes looking for a lead byte.
  // Lead byte is the very last byte: all continuation bytes are missing.
  var nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) self.lastNeed = nb - 1;
    return nb;
  }
  if (--j < i || nb === -2) return 0;
  // Lead byte second-to-last: one continuation byte already present.
  nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) self.lastNeed = nb - 2;
    return nb;
  }
  if (--j < i || nb === -2) return 0;
  // Lead byte third-to-last: two continuation bytes already present.
  nb = utf8CheckByte(buf[j]);
  if (nb >= 0) {
    if (nb > 0) {
      // A 2-byte lead followed by 2 continuation bytes is already complete
      // (or invalid); either way nothing further is needed.
      if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
    }
    return nb;
  }
  return 0;
}
|
|||
|
|
|||
|
// Validates the continuation bytes at the start of `buf` for the pending
// multi-byte UTF-8 character. On the first non-continuation byte, records in
// self.lastNeed how many continuation bytes were actually valid and returns
// a single U+FFFD replacement character, matching V8's decoding behavior.
// Returns undefined when every available byte checks out. The checks are
// unrolled (at most three bytes) rather than looped for speed.
function utf8CheckExtraBytes(self, buf, p) {
  if ((buf[0] & 0xC0) !== 0x80) {
    self.lastNeed = 0;
    return '\ufffd';
  }
  if (self.lastNeed > 1 && buf.length > 1) {
    if ((buf[1] & 0xC0) !== 0x80) {
      self.lastNeed = 1;
      return '\ufffd';
    }
    if (self.lastNeed > 2 && buf.length > 2 && (buf[2] & 0xC0) !== 0x80) {
      self.lastNeed = 2;
      return '\ufffd';
    }
  }
}
|
|||
|
|
|||
|
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
function utf8FillLast(buf) {
  var p = this.lastTotal - this.lastNeed; // write offset into lastChar
  // Validate the incoming continuation bytes before committing them.
  var r = utf8CheckExtraBytes(this, buf);
  if (r !== undefined) return r; // invalid sequence: replacement character
  if (this.lastNeed <= buf.length) {
    // The character completes within this buffer: assemble and decode it.
    buf.copy(this.lastChar, p, 0, this.lastNeed);
    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
  }
  // Still incomplete: buffer what arrived and wait for more input.
  buf.copy(this.lastChar, p, 0, buf.length);
  this.lastNeed -= buf.length;
}
|
|||
|
|
|||
|
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
// partial character, the character's bytes are buffered until the required
// number of bytes are available.
function utf8Text(buf, i) {
  var total = utf8CheckIncomplete(this, buf, i);
  if (!this.lastNeed) return buf.toString('utf8', i);
  this.lastTotal = total;
  // Copy the trailing partial character into lastChar and decode the rest.
  var end = buf.length - (total - this.lastNeed);
  buf.copy(this.lastChar, 0, end);
  return buf.toString('utf8', i, end);
}
|
|||
|
|
|||
|
// UTF-8 flush: decode any final chunk, then append U+FFFD when the stream
// ended in the middle of a multi-byte character.
function utf8End(buf) {
  var out = buf && buf.length ? this.write(buf) : '';
  return this.lastNeed ? out + '\ufffd' : out;
}
|
|||
|
|
|||
|
// UTF-16LE typically needs two bytes per character, but even if we have an even
// number of bytes available, we need to check if we end on a leading/high
// surrogate. In that case, we need to wait for the next two bytes in order to
// decode the last character properly.
function utf16Text(buf, i) {
  if ((buf.length - i) % 2 === 0) {
    var r = buf.toString('utf16le', i);
    if (r) {
      var c = r.charCodeAt(r.length - 1);
      if (c >= 0xD800 && c <= 0xDBFF) {
        // Ends on a high surrogate: hold back its two bytes and wait for the
        // low surrogate before emitting the pair.
        this.lastNeed = 2;
        this.lastTotal = 4;
        this.lastChar[0] = buf[buf.length - 2];
        this.lastChar[1] = buf[buf.length - 1];
        return r.slice(0, -1);
      }
    }
    return r;
  }
  // Odd byte count: stash the dangling byte of the final code unit.
  this.lastNeed = 1;
  this.lastTotal = 2;
  this.lastChar[0] = buf[buf.length - 1];
  return buf.toString('utf16le', i, buf.length - 1);
}
|
|||
|
|
|||
|
// UTF-16LE flush: decode any final chunk, then emit whatever complete code
// units remain buffered. No explicit replacement character is appended for a
// dangling partial unit — V8 handles that during decoding.
function utf16End(buf) {
  var out = buf && buf.length ? this.write(buf) : '';
  if (!this.lastNeed) return out;
  var complete = this.lastTotal - this.lastNeed;
  return out + this.lastChar.toString('utf16le', 0, complete);
}
|
|||
|
|
|||
|
// Emits all complete 3-byte base64 groups from `buf`, stashing the 1-2
// trailing leftover bytes (plus how many more are needed) so the final group
// can be encoded at end().
function base64Text(buf, i) {
  var leftover = (buf.length - i) % 3;
  if (leftover === 0) return buf.toString('base64', i);
  this.lastNeed = 3 - leftover;
  this.lastTotal = 3;
  this.lastChar[0] = buf[buf.length - (leftover === 1 ? 1 : 2)];
  if (leftover === 2) this.lastChar[1] = buf[buf.length - 1];
  return buf.toString('base64', i, buf.length - leftover);
}
|
|||
|
|
|||
|
// base64 flush: encode any final chunk, then the stashed partial group
// (3 - lastNeed bytes of lastChar hold real data).
function base64End(buf) {
  var out = buf && buf.length ? this.write(buf) : '';
  if (!this.lastNeed) return out;
  return out + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
}
|
|||
|
|
|||
|
// Pass-through decoding for single-byte encodings (e.g. ascii, latin1, hex):
// every buffer decodes independently, so no partial-character state exists.
function simpleWrite(buf) {
  var enc = this.encoding;
  return buf.toString(enc);
}

// Flush for single-byte encodings: just decode the final chunk, if any.
function simpleEnd(buf) {
  return buf && buf.length ? this.write(buf) : '';
}
|
|||
|
return string_decoder;
|
|||
|
}
|
|||
|
|
|||
|
var endOfStream;
|
|||
|
var hasRequiredEndOfStream;
|
|||
|
|
|||
|
function requireEndOfStream () {
|
|||
|
if (hasRequiredEndOfStream) return endOfStream;
|
|||
|
hasRequiredEndOfStream = 1;
|
|||
|
|
|||
|
var ERR_STREAM_PREMATURE_CLOSE = requireErrors().codes.ERR_STREAM_PREMATURE_CLOSE;
|
|||
|
// Wraps `callback` so only the first invocation is forwarded; subsequent
// calls are silently ignored. `this` and all arguments pass through.
function once(callback) {
  var fired = false;
  return function () {
    if (fired) return;
    fired = true;
    var forwarded = new Array(arguments.length);
    for (var idx = 0; idx < arguments.length; idx++) {
      forwarded[idx] = arguments[idx];
    }
    callback.apply(this, forwarded);
  };
}

// Intentional no-op used as the default completion callback.
function noop() {}

// Duck-types an http.ClientRequest: has setHeader plus an abort() method.
function isRequest(stream) {
  var abortable = typeof stream.abort === 'function';
  return stream.setHeader && abortable;
}
|
|||
|
// Invokes `callback` exactly once when `stream` has fully finished (both the
// readable and writable sides, as applicable) or errored/closed prematurely.
// Returns an unsubscribe function that removes every listener attached here.
function eos(stream, opts, callback) {
  if (typeof opts === 'function') return eos(stream, null, opts); // (stream, cb) form
  if (!opts) opts = {};
  callback = once(callback || noop);
  // Which sides to wait for: explicit opts override the stream's flags.
  var readable = opts.readable || opts.readable !== false && stream.readable;
  var writable = opts.writable || opts.writable !== false && stream.writable;
  var onlegacyfinish = function onlegacyfinish() {
    if (!stream.writable) onfinish();
  };
  var writableEnded = stream._writableState && stream._writableState.finished;
  var onfinish = function onfinish() {
    writable = false;
    writableEnded = true;
    // Fire only once the readable side is also done (or not awaited).
    if (!readable) callback.call(stream);
  };
  var readableEnded = stream._readableState && stream._readableState.endEmitted;
  var onend = function onend() {
    readable = false;
    readableEnded = true;
    if (!writable) callback.call(stream);
  };
  var onerror = function onerror(err) {
    callback.call(stream, err);
  };
  var onclose = function onclose() {
    // 'close' before the expected 'end'/'finish' means a premature close.
    var err;
    if (readable && !readableEnded) {
      if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
      return callback.call(stream, err);
    }
    if (writable && !writableEnded) {
      if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE();
      return callback.call(stream, err);
    }
  };
  var onrequest = function onrequest() {
    stream.req.on('finish', onfinish);
  };
  if (isRequest(stream)) {
    // http.ClientRequest: completion is signalled via 'complete'/'abort'
    // and the underlying req's 'finish'.
    stream.on('complete', onfinish);
    stream.on('abort', onclose);
    if (stream.req) onrequest();else stream.on('request', onrequest);
  } else if (writable && !stream._writableState) {
    // legacy streams
    stream.on('end', onlegacyfinish);
    stream.on('close', onlegacyfinish);
  }
  stream.on('end', onend);
  stream.on('finish', onfinish);
  if (opts.error !== false) stream.on('error', onerror);
  stream.on('close', onclose);
  // Teardown: detach everything this call may have attached.
  return function () {
    stream.removeListener('complete', onfinish);
    stream.removeListener('abort', onclose);
    stream.removeListener('request', onrequest);
    if (stream.req) stream.req.removeListener('finish', onfinish);
    stream.removeListener('end', onlegacyfinish);
    stream.removeListener('close', onlegacyfinish);
    stream.removeListener('finish', onfinish);
    stream.removeListener('end', onend);
    stream.removeListener('error', onerror);
    stream.removeListener('close', onclose);
  };
}
|
|||
|
endOfStream = eos;
|
|||
|
return endOfStream;
|
|||
|
}
|
|||
|
|
|||
|
var async_iterator;
|
|||
|
var hasRequiredAsync_iterator;
|
|||
|
|
|||
|
function requireAsync_iterator () {
|
|||
|
if (hasRequiredAsync_iterator) return async_iterator;
|
|||
|
hasRequiredAsync_iterator = 1;
|
|||
|
|
|||
|
var _Object$setPrototypeO;
|
|||
|
// Babel helper: define (or plainly assign) a possibly-computed property key.
function _defineProperty(obj, key, value) {
  key = _toPropertyKey(key);
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}

// Babel helper: coerce a candidate property key to a string or symbol.
function _toPropertyKey(arg) {
  var key = _toPrimitive(arg, "string");
  return typeof key === "symbol" ? key : String(key);
}

// Babel helper: ES ToPrimitive with an optional hint, honoring a
// Symbol.toPrimitive method when the input provides one.
function _toPrimitive(input, hint) {
  if (typeof input !== "object" || input === null) return input;
  var prim = input[Symbol.toPrimitive];
  if (prim !== undefined) {
    var res = prim.call(input, hint || "default");
    if (typeof res !== "object") return res;
    throw new TypeError("@@toPrimitive must return a primitive value.");
  }
  return (hint === "string" ? String : Number)(input);
}
|
|||
|
var finished = requireEndOfStream();
// Private per-iterator state keys (symbols avoid userland collisions).
var kLastResolve = Symbol('lastResolve'); // resolver of the pending next() promise
var kLastReject = Symbol('lastReject'); // rejecter of the pending next() promise
var kError = Symbol('error'); // sticky stream error, if any
var kEnded = Symbol('ended'); // whether stream completion has been observed
var kLastPromise = Symbol('lastPromise'); // promise returned by the previous next()
var kHandlePromise = Symbol('handlePromise'); // shared next()-fulfillment executor
var kStream = Symbol('stream'); // the underlying Readable
|
|||
|
// Builds the { value, done } envelope required by the iterator protocol.
function createIterResult(value, done) {
  var result = { value: value, done: done };
  return result;
}
|
|||
|
// If a next() promise is pending, try to satisfy it with freshly readable
// data; clears all pending-promise state before resolving.
function readAndResolve(iter) {
  var resolve = iter[kLastResolve];
  if (resolve !== null) {
    var data = iter[kStream].read();
    // we defer if data is null
    // we can be expecting either 'end' or
    // 'error'
    if (data !== null) {
      iter[kLastPromise] = null;
      iter[kLastResolve] = null;
      iter[kLastReject] = null;
      resolve(createIterResult(data, false));
    }
  }
}

// 'readable' handler for the async iterator.
function onReadable(iter) {
  // we wait for the next tick, because it might
  // emit an error with process.nextTick
  process.nextTick(readAndResolve, iter);
}
|
|||
|
// Promise executor factory: chains a new next() promise after the previous
// one so results are delivered strictly in call order even when next() is
// invoked multiple times concurrently.
function wrapForNext(lastPromise, iter) {
  return function (resolve, reject) {
    lastPromise.then(function () {
      if (iter[kEnded]) {
        // The stream ended while this call was queued: report completion.
        resolve(createIterResult(undefined, true));
        return;
      }
      iter[kHandlePromise](resolve, reject);
    }, reject);
  };
}
|
|||
|
var AsyncIteratorPrototype = Object.getPrototypeOf(function () {});

// Prototype of the object returned by createReadableStreamAsyncIterator.
// Built via _defineProperty so the Symbol.asyncIterator and 'return' members
// can be added, then chained onto the native async-iterator prototype.
var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = {
  get stream() {
    return this[kStream];
  },
  next: function next() {
    var _this = this;
    // if we have detected an error in the meanwhile
    // reject straight away
    var error = this[kError];
    if (error !== null) {
      return Promise.reject(error);
    }
    if (this[kEnded]) {
      return Promise.resolve(createIterResult(undefined, true));
    }
    if (this[kStream].destroyed) {
      // We need to defer via nextTick because if .destroy(err) is
      // called, the error will be emitted via nextTick, and
      // we cannot guarantee that there is no error lingering around
      // waiting to be emitted.
      return new Promise(function (resolve, reject) {
        process.nextTick(function () {
          if (_this[kError]) {
            reject(_this[kError]);
          } else {
            resolve(createIterResult(undefined, true));
          }
        });
      });
    }

    // if we have multiple next() calls
    // we will wait for the previous Promise to finish
    // this logic is optimized to support for await loops,
    // where next() is only called once at a time
    var lastPromise = this[kLastPromise];
    var promise;
    if (lastPromise) {
      promise = new Promise(wrapForNext(lastPromise, this));
    } else {
      // fast path needed to support multiple this.push()
      // without triggering the next() queue
      var data = this[kStream].read();
      if (data !== null) {
        return Promise.resolve(createIterResult(data, false));
      }
      promise = new Promise(this[kHandlePromise]);
    }
    this[kLastPromise] = promise;
    return promise;
  }
}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () {
  return this;
}), _defineProperty(_Object$setPrototypeO, "return", function _return() {
  var _this2 = this;
  // destroy(err, cb) is a private API
  // we can guarantee we have that here, because we control the
  // Readable class this is attached to
  return new Promise(function (resolve, reject) {
    _this2[kStream].destroy(null, function (err) {
      if (err) {
        reject(err);
        return;
      }
      resolve(createIterResult(undefined, true));
    });
  });
}), _Object$setPrototypeO), AsyncIteratorPrototype);
|
|||
|
// Builds an async iterator over `stream`, storing per-iterator state in
// symbol-keyed own properties and settling pending next() promises from
// stream lifecycle events.
var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) {
  var _Object$create;
  var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, {
    value: stream,
    writable: true
  }), _defineProperty(_Object$create, kLastResolve, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kLastReject, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kError, {
    value: null,
    writable: true
  }), _defineProperty(_Object$create, kEnded, {
    value: stream._readableState.endEmitted,
    writable: true
  }), _defineProperty(_Object$create, kHandlePromise, {
    // Shared executor for next() promises: resolve immediately with any
    // available data, otherwise park the resolver/rejecter until
    // 'readable'/'end'/'error' settles them.
    value: function value(resolve, reject) {
      var data = iterator[kStream].read();
      if (data) {
        iterator[kLastPromise] = null;
        iterator[kLastResolve] = null;
        iterator[kLastReject] = null;
        resolve(createIterResult(data, false));
      } else {
        iterator[kLastResolve] = resolve;
        iterator[kLastReject] = reject;
      }
    },
    writable: true
  }), _Object$create));
  iterator[kLastPromise] = null;
  // Settle the iterator when the stream finishes or fails.
  finished(stream, function (err) {
    if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
      var reject = iterator[kLastReject];
      // reject if we are waiting for data in the Promise
      // returned by next() and store the error
      if (reject !== null) {
        iterator[kLastPromise] = null;
        iterator[kLastResolve] = null;
        iterator[kLastReject] = null;
        reject(err);
      }
      iterator[kError] = err;
      return;
    }
    var resolve = iterator[kLastResolve];
    if (resolve !== null) {
      iterator[kLastPromise] = null;
      iterator[kLastResolve] = null;
      iterator[kLastReject] = null;
      resolve(createIterResult(undefined, true));
    }
    iterator[kEnded] = true;
  });
  stream.on('readable', onReadable.bind(null, iterator));
  return iterator;
};
|
|||
|
async_iterator = createReadableStreamAsyncIterator;
|
|||
|
return async_iterator;
|
|||
|
}
|
|||
|
|
|||
|
var from_1;
|
|||
|
var hasRequiredFrom;
|
|||
|
|
|||
|
function requireFrom () {
|
|||
|
if (hasRequiredFrom) return from_1;
|
|||
|
hasRequiredFrom = 1;
|
|||
|
|
|||
|
// Babel helper: advance an async-to-generator state machine by one step,
// resolving/rejecting the outer promise or chaining the next step.
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
  try {
    var info = gen[key](arg);
    var value = info.value;
  } catch (error) {
    reject(error);
    return;
  }
  if (info.done) {
    resolve(value);
  } else {
    Promise.resolve(value).then(_next, _throw);
  }
}

// Babel helper: wrap a generator function so it behaves like an async
// function (each `yield` awaits its value).
function _asyncToGenerator(fn) {
  return function () {
    var self = this,
      args = arguments;
    return new Promise(function (resolve, reject) {
      var gen = fn.apply(self, args);
      function _next(value) {
        asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
      }
      function _throw(err) {
        asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
      }
      _next(undefined);
    });
  };
}

// Babel helper: own keys (strings plus optionally-enumerable symbols).
function ownKeys(object, enumerableOnly) {
  var keys = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    var symbols = Object.getOwnPropertySymbols(object);
    enumerableOnly && (symbols = symbols.filter(function (sym) {
      return Object.getOwnPropertyDescriptor(object, sym).enumerable;
    })), keys.push.apply(keys, symbols);
  }
  return keys;
}

// Babel helper: object-spread ({ ...a, ...b }) onto `target`.
function _objectSpread(target) {
  for (var i = 1; i < arguments.length; i++) {
    var source = null != arguments[i] ? arguments[i] : {};
    i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
      _defineProperty(target, key, source[key]);
    }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
      Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
    });
  }
  return target;
}

// Babel helper: define (or plainly assign) a possibly-computed property key.
function _defineProperty(obj, key, value) {
  key = _toPropertyKey(key);
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}

// Babel helper: coerce a candidate property key to a string or symbol.
function _toPropertyKey(arg) {
  var key = _toPrimitive(arg, "string");
  return typeof key === "symbol" ? key : String(key);
}

// Babel helper: ES ToPrimitive with an optional hint, honoring
// Symbol.toPrimitive when present.
function _toPrimitive(input, hint) {
  if (typeof input !== "object" || input === null) return input;
  var prim = input[Symbol.toPrimitive];
  if (prim !== undefined) {
    var res = prim.call(input, hint || "default");
    if (typeof res !== "object") return res;
    throw new TypeError("@@toPrimitive must return a primitive value.");
  }
  return (hint === "string" ? String : Number)(input);
}
|
|||
|
var ERR_INVALID_ARG_TYPE = requireErrors().codes.ERR_INVALID_ARG_TYPE;
|
|||
|
// Implements Readable.from(iterable): adapts a sync/async iterable (or a raw
// iterator) into an object-mode Readable, honoring consumer backpressure.
function from(Readable, iterable, opts) {
  var iterator;
  if (iterable && typeof iterable.next === 'function') {
    // Already an iterator (sync or async) — use it directly.
    iterator = iterable;
  } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable);
  var readable = new Readable(_objectSpread({
    objectMode: true
  }, opts));
  // Reading boolean to protect against _read
  // being called before last iteration completion.
  var reading = false;
  readable._read = function () {
    if (!reading) {
      reading = true;
      next();
    }
  };
  // Trampoline into the transpiled async pump below.
  function next() {
    return _next2.apply(this, arguments);
  }
  function _next2() {
    _next2 = _asyncToGenerator(function* () {
      try {
        var _yield$iterator$next = yield iterator.next(),
          value = _yield$iterator$next.value,
          done = _yield$iterator$next.done;
        if (done) {
          readable.push(null);
        } else if (readable.push(yield value)) {
          // Consumer still has buffer space: keep pulling eagerly.
          next();
        } else {
          // Backpressure: pause until _read() is invoked again.
          reading = false;
        }
      } catch (err) {
        // Iterator failure destroys the stream with that error.
        readable.destroy(err);
      }
    });
    return _next2.apply(this, arguments);
  }
  return readable;
}
|
|||
|
from_1 = from;
|
|||
|
return from_1;
|
|||
|
}
|
|||
|
|
|||
|
var _stream_readable;
|
|||
|
var hasRequired_stream_readable;
|
|||
|
|
|||
|
function require_stream_readable () {
|
|||
|
if (hasRequired_stream_readable) return _stream_readable;
|
|||
|
hasRequired_stream_readable = 1;
|
|||
|
|
|||
|
_stream_readable = Readable;
|
|||
|
|
|||
|
/*<replacement>*/
|
|||
|
var Duplex;
|
|||
|
/*</replacement>*/
|
|||
|
|
|||
|
Readable.ReadableState = ReadableState;
|
|||
|
|
|||
|
/*<replacement>*/
|
|||
|
require$$2.EventEmitter;
|
|||
|
var EElistenerCount = function EElistenerCount(emitter, type) {
|
|||
|
return emitter.listeners(type).length;
|
|||
|
};
|
|||
|
/*</replacement>*/
|
|||
|
|
|||
|
/*<replacement>*/
|
|||
|
var Stream = requireStream();
|
|||
|
/*</replacement>*/
|
|||
|
|
|||
|
var Buffer = require$$0$6.Buffer;
|
|||
|
var OurUint8Array = (typeof commonjsGlobal !== 'undefined' ? commonjsGlobal : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
|
|||
|
function _uint8ArrayToBuffer(chunk) {
|
|||
|
return Buffer.from(chunk);
|
|||
|
}
|
|||
|
function _isUint8Array(obj) {
|
|||
|
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
|||
|
}
|
|||
|
|
|||
|
/*<replacement>*/
|
|||
|
var debugUtil = require$$2$1;
|
|||
|
var debug;
|
|||
|
if (debugUtil && debugUtil.debuglog) {
|
|||
|
debug = debugUtil.debuglog('stream');
|
|||
|
} else {
|
|||
|
debug = function debug() {};
|
|||
|
}
|
|||
|
/*</replacement>*/
|
|||
|
|
|||
|
var BufferList = requireBuffer_list();
|
|||
|
var destroyImpl = requireDestroy();
|
|||
|
var _require = requireState(),
|
|||
|
getHighWaterMark = _require.getHighWaterMark;
|
|||
|
var _require$codes = requireErrors().codes,
|
|||
|
ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
|
|||
|
ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF,
|
|||
|
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
|
|||
|
ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT;
|
|||
|
|
|||
|
// Lazy loaded to improve the startup performance.
|
|||
|
var StringDecoder;
|
|||
|
var createReadableStreamAsyncIterator;
|
|||
|
var from;
|
|||
|
requireInherits()(Readable, Stream);
|
|||
|
var errorOrDestroy = destroyImpl.errorOrDestroy;
|
|||
|
var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
|
|||
|
function prependListener(emitter, event, fn) {
|
|||
|
// Sadly this is not cacheable as some libraries bundle their own
|
|||
|
// event emitter implementation with them.
|
|||
|
if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
|
|||
|
|
|||
|
// This is a hack to make sure that our error handler is attached before any
|
|||
|
// userland ones. NEVER DO THIS. This is here only because this code needs
|
|||
|
// to continue to work with older versions of Node.js that do not include
|
|||
|
// the prependListener() method. The goal is to eventually remove this hack.
|
|||
|
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
|
|||
|
}
|
|||
|
// Per-stream mutable state backing a Readable. Every Readable owns one
// instance as `_readableState`; it tracks buffering, flow mode, encoding
// and lifecycle flags.
function ReadableState(options, stream, isDuplex) {
  Duplex = Duplex || require_stream_duplex();
  options = options || {};

  // Duplex streams are both readable and writable, but share
  // the same options object.
  // However, some cases require setting options to different
  // values for the readable and the writable sides of the duplex stream.
  // These options can be provided separately as readableXXX and writableXXX.
  if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex;

  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;
  if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;

  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex);

  // A linked list is used to store data chunks instead of an array because the
  // linked list can remove elements from the beginning faster than
  // array.shift()
  this.buffer = new BufferList();
  this.length = 0;
  // pipes is null, a single destination, or an array of destinations,
  // depending on pipesCount (0, 1, >1) — see pipe()/unpipe().
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = null;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;

  // a flag to be able to tell if the event 'readable'/'data' is emitted
  // immediately, or on a later tick. We set this to true at first, because
  // any actions that shouldn't happen until "later" should generally also
  // not happen before the first read call.
  this.sync = true;

  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;
  this.resumeScheduled = false;
  this.paused = true;

  // Should close be emitted on destroy. Defaults to true.
  this.emitClose = options.emitClose !== false;

  // Should .destroy() be called after 'end' (and potentially 'finish')
  this.autoDestroy = !!options.autoDestroy;

  // has it been destroyed
  this.destroyed = false;

  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;

  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;
  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    // StringDecoder is lazy-loaded on first use (startup performance).
    if (!StringDecoder) StringDecoder = requireString_decoder().StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
|||
|
// Readable stream constructor. Callable with or without `new`; installs
// per-instance _read/_destroy overrides passed via options and mixes in
// the Stream base class.
function Readable(options) {
  Duplex = Duplex || require_stream_duplex();
  // Support calling without `new`.
  if (!(this instanceof Readable)) return new Readable(options);

  // Checking for a Stream.Duplex instance is faster here instead of inside
  // the ReadableState constructor, at least with V8 6.5
  var isDuplex = this instanceof Duplex;
  this._readableState = new ReadableState(options, this, isDuplex);

  // legacy
  this.readable = true;
  if (options) {
    if (typeof options.read === 'function') this._read = options.read;
    if (typeof options.destroy === 'function') this._destroy = options.destroy;
  }
  Stream.call(this);
}
|||
|
// `destroyed` accessor: proxies the flag on _readableState so userland
// can read it and (for backwards compatibility) assign it directly.
Object.defineProperty(Readable.prototype, 'destroyed', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function get() {
    if (this._readableState === undefined) {
      return false;
    }
    return this._readableState.destroyed;
  },
  set: function set(value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (!this._readableState) {
      return;
    }

    // backward compatibility, the user is explicitly
    // managing destroyed
    this._readableState.destroyed = value;
  }
});
// Lifecycle methods are shared with Writable via destroyImpl.
Readable.prototype.destroy = destroyImpl.destroy;
Readable.prototype._undestroy = destroyImpl.undestroy;
// Default _destroy: nothing to tear down, just signal completion.
Readable.prototype._destroy = function (err, cb) {
  cb(err);
};
|||
|
|
|||
|
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function (chunk, encoding) {
  var state = this._readableState;
  var skipChunkCheck;
  if (!state.objectMode) {
    if (typeof chunk === 'string') {
      encoding = encoding || state.defaultEncoding;
      // Convert to Buffer unless the chunk is already in the stream's
      // configured encoding (then the decoder can consume it as-is).
      if (encoding !== state.encoding) {
        chunk = Buffer.from(chunk, encoding);
        encoding = '';
      }
      skipChunkCheck = true;
    }
  } else {
    // objectMode accepts any value; no validation needed.
    skipChunkCheck = true;
  }
  return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
};

// Unshift should *always* be something directly out of read()
Readable.prototype.unshift = function (chunk) {
  return readableAddChunk(this, chunk, null, true, false);
};
|||
|
// Common implementation behind push() and unshift(). Validates the chunk,
// routes null to EOF handling, and returns whether the caller may keep
// pushing (buffered length still under highWaterMark).
function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
  debug('readableAddChunk', chunk);
  var state = stream._readableState;
  if (chunk === null) {
    // push(null) signals EOF.
    state.reading = false;
    onEofChunk(stream, state);
  } else {
    var er;
    if (!skipChunkCheck) er = chunkInvalid(state, chunk);
    if (er) {
      errorOrDestroy(stream, er);
    } else if (state.objectMode || chunk && chunk.length > 0) {
      // Normalize Uint8Array views to Buffer in byte mode.
      if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
        chunk = _uint8ArrayToBuffer(chunk);
      }
      if (addToFront) {
        // unshift() after 'end' has been emitted is a hard error.
        if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true);
      } else if (state.ended) {
        errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF());
      } else if (state.destroyed) {
        return false;
      } else {
        state.reading = false;
        if (state.decoder && !encoding) {
          chunk = state.decoder.write(chunk);
          // The decoder may buffer a partial multi-byte sequence and
          // return '' — then there is nothing to add yet.
          if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
        } else {
          addChunk(stream, state, chunk, false);
        }
      }
    } else if (!addToFront) {
      // Empty non-object chunk: nothing to buffer, but keep the read
      // loop alive.
      state.reading = false;
      maybeReadMore(stream, state);
    }
  }

  // We can push more data if we are below the highWaterMark.
  // Also, if we have no data yet, we can stand some more bytes.
  // This is to work around cases where hwm=0, such as the repl.
  return !state.ended && (state.length < state.highWaterMark || state.length === 0);
}
|||
|
// Deliver a validated chunk: emit 'data' synchronously when a flowing
// consumer is waiting on an empty buffer, otherwise queue it.
function addChunk(stream, state, chunk, addToFront) {
  if (state.flowing && state.length === 0 && !state.sync) {
    // Fast path: hand the chunk straight to the 'data' listener.
    state.awaitDrain = 0;
    stream.emit('data', chunk);
  } else {
    // update the buffer info.
    state.length += state.objectMode ? 1 : chunk.length;
    if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
    if (state.needReadable) emitReadable(stream);
  }
  maybeReadMore(stream, state);
}
|||
|
function chunkInvalid(state, chunk) {
  // A chunk is acceptable when it is a Uint8Array/Buffer, a string,
  // undefined, or when the stream runs in objectMode (anything goes).
  // Returns undefined for valid chunks, an ERR_INVALID_ARG_TYPE otherwise.
  var acceptable =
    _isUint8Array(chunk) ||
    typeof chunk === 'string' ||
    chunk === undefined ||
    state.objectMode;
  if (acceptable) return undefined;
  return new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk);
}
|||
|
Readable.prototype.isPaused = function () {
  // Only an explicit pause sets flowing to false; null means the stream
  // has simply never started flowing.
  var state = this._readableState;
  return state.flowing === false;
};
|||
|
|
|||
|
// backwards compatibility.
// Attach a StringDecoder so read() returns strings in `enc`, and
// re-encode anything already buffered. Returns `this` for chaining.
Readable.prototype.setEncoding = function (enc) {
  if (!StringDecoder) StringDecoder = requireString_decoder().StringDecoder;
  var decoder = new StringDecoder(enc);
  this._readableState.decoder = decoder;
  // If setEncoding(null), decoder.encoding equals utf8
  this._readableState.encoding = this._readableState.decoder.encoding;

  // Iterate over current buffer to convert already stored Buffers:
  var p = this._readableState.buffer.head;
  var content = '';
  while (p !== null) {
    content += decoder.write(p.data);
    p = p.next;
  }
  this._readableState.buffer.clear();
  // The converted data collapses into a single string chunk.
  if (content !== '') this._readableState.buffer.push(content);
  this._readableState.length = content.length;
  return this;
};
|||
|
|
|||
|
// Don't raise the hwm > 1GB
var MAX_HWM = 0x40000000;

// Round a requested read size up to the next power of two (capped at
// MAX_HWM) so the high-water mark grows geometrically rather than in
// tiny increments.
function computeNewHighWaterMark(n) {
  if (n >= MAX_HWM) {
    // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE.
    return MAX_HWM;
  }
  // Classic bit-smearing trick: fill every bit below the highest set bit
  // of (n - 1), then add one to land on the next power of 2.
  var v = n - 1;
  v |= v >>> 1;
  v |= v >>> 2;
  v |= v >>> 4;
  v |= v >>> 8;
  v |= v >>> 16;
  return v + 1;
}
|||
|
|
|||
|
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
// Decide how many bytes (or objects) read(n) may hand out given the
// current buffered state. May bump state.highWaterMark or set
// state.needReadable as a side effect.
function howMuchToRead(n, state) {
  if (n <= 0 || (state.length === 0 && state.ended)) return 0;
  // objectMode hands out exactly one object per read.
  if (state.objectMode) return 1;
  if (n !== n) {
    // NaN request (read() without argument): only flow one buffered chunk
    // at a time in flowing mode, otherwise return everything buffered.
    return state.flowing && state.length ? state.buffer.head.data.length : state.length;
  }
  // If we're asking for more than the current hwm, then raise the hwm.
  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
  if (n <= state.length) return n;
  // Don't have enough buffered: after EOF hand out what remains,
  // otherwise ask for a 'readable' notification and return nothing.
  if (state.ended) return state.length;
  state.needReadable = true;
  return 0;
}
|||
|
|
|||
|
// you can override either this method, or the async _read(n) below.
// Pull up to `n` bytes (or one object) out of the buffer, triggering
// _read() as needed to refill. Returns null when nothing is available.
Readable.prototype.read = function (n) {
  debug('read', n);
  // read() with no argument yields NaN here, which howMuchToRead treats
  // as "give me whatever is available".
  n = parseInt(n, 10);
  var state = this._readableState;
  var nOrig = n;
  if (n !== 0) state.emittedReadable = false;

  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) {
    debug('read: emitReadable', state.length, state.ended);
    if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
    return null;
  }
  n = howMuchToRead(n, state);

  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0) endReadable(this);
    return null;
  }

  // All the actual chunk generation logic needs to be
  // *below* the call to _read. The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous. Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;
  debug('need readable', doRead);

  // if we currently have less than the highWaterMark, then also read some
  if (state.length === 0 || state.length - n < state.highWaterMark) {
    doRead = true;
    debug('length less than watermark', doRead);
  }

  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading) {
    doRead = false;
    debug('reading or ended', doRead);
  } else if (doRead) {
    debug('do read');
    state.reading = true;
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0) state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
    // If _read pushed data synchronously, then `reading` will be false,
    // and we need to re-evaluate how much data we can return to the user.
    if (!state.reading) n = howMuchToRead(nOrig, state);
  }
  var ret;
  if (n > 0) ret = fromList(n, state);else ret = null;
  if (ret === null) {
    state.needReadable = state.length <= state.highWaterMark;
    n = 0;
  } else {
    state.length -= n;
    state.awaitDrain = 0;
  }
  if (state.length === 0) {
    // If we have nothing in the buffer, then we want to know
    // as soon as we *do* get something into the buffer.
    if (!state.ended) state.needReadable = true;

    // If we tried to read() past the EOF, then emit end on the next tick.
    if (nOrig !== n && state.ended) endReadable(this);
  }
  if (ret !== null) this.emit('data', ret);
  return ret;
};
|||
|
// Handle push(null): flush any decoder remainder into the buffer, mark
// the stream ended, and arrange for 'readable' to fire.
function onEofChunk(stream, state) {
  debug('onEofChunk');
  if (state.ended) return;
  if (state.decoder) {
    // The decoder may be holding an incomplete multi-byte character.
    var chunk = state.decoder.end();
    if (chunk && chunk.length) {
      state.buffer.push(chunk);
      state.length += state.objectMode ? 1 : chunk.length;
    }
  }
  state.ended = true;
  if (state.sync) {
    // if we are sync, wait until next tick to emit the data.
    // Otherwise we risk emitting data in the flow()
    // the readable code triggers during a read() call
    emitReadable(stream);
  } else {
    // emit 'readable' now to make sure it gets picked up.
    state.needReadable = false;
    if (!state.emittedReadable) {
      state.emittedReadable = true;
      emitReadable_(stream);
    }
  }
}
|||
|
|
|||
|
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
  var state = stream._readableState;
  debug('emitReadable', state.needReadable, state.emittedReadable);
  state.needReadable = false;
  // emittedReadable dedupes: only one 'readable' is scheduled per tick.
  if (!state.emittedReadable) {
    debug('emitReadable', state.flowing);
    state.emittedReadable = true;
    process.nextTick(emitReadable_, stream);
  }
}

// Deferred half of emitReadable(): actually fires 'readable' and then
// restarts the flow loop.
function emitReadable_(stream) {
  var state = stream._readableState;
  debug('emitReadable_', state.destroyed, state.length, state.ended);
  if (!state.destroyed && (state.length || state.ended)) {
    stream.emit('readable');
    state.emittedReadable = false;
  }

  // The stream needs another readable event if
  // 1. It is not flowing, as the flow mechanism will take
  // care of it.
  // 2. It is not ended.
  // 3. It is below the highWaterMark, so we can schedule
  // another readable later.
  state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
  flow(stream);
}
|||
|
|
|||
|
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
  // readingMore acts as a re-entrancy guard: at most one refill loop is
  // scheduled per tick.
  if (!state.readingMore) {
    state.readingMore = true;
    process.nextTick(maybeReadMore_, stream, state);
  }
}
// Deferred refill loop scheduled by maybeReadMore().
function maybeReadMore_(stream, state) {
  // Attempt to read more data if we should.
  //
  // The conditions for reading more data are (one of):
  // - Not enough data buffered (state.length < state.highWaterMark). The loop
  // is responsible for filling the buffer with enough data if such data
  // is available. If highWaterMark is 0 and we are not in the flowing mode
  // we should _not_ attempt to buffer any extra data. We'll get more data
  // when the stream consumer calls read() instead.
  // - No data in the buffer, and the stream is in flowing mode. In this mode
  // the loop below is responsible for ensuring read() is called. Failing to
  // call read here would abort the flow and there's no other mechanism for
  // continuing the flow if the stream consumer has just subscribed to the
  // 'data' event.
  //
  // In addition to the above conditions to keep reading data, the following
  // conditions prevent the data from being read:
  // - The stream has ended (state.ended).
  // - There is already a pending 'read' operation (state.reading). This is a
  // case where the the stream has called the implementation defined _read()
  // method, but they are processing the call asynchronously and have _not_
  // called push() with new data. In this case we skip performing more
  // read()s. The execution ends in this method again after the _read() ends
  // up calling push() with more data.
  while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {
    var len = state.length;
    debug('maybeReadMore read 0');
    stream.read(0);
    if (len === state.length)
      // didn't get any data, stop spinning.
      break;
  }
  state.readingMore = false;
}
|||
|
|
|||
|
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function (n) {
  // Reaching this default implementation is a programmer error: the
  // subclass (or options.read) must provide a real _read.
  errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));
};
|||
|
// Connect this stream to a Writable `dest`. Handles backpressure via
// 'drain'/awaitDrain accounting, forwards 'end' (unless {end:false} or
// dest is stdout/stderr), and tears everything down on 'unpipe'.
// Returns `dest` so pipes can be chained.
Readable.prototype.pipe = function (dest, pipeOpts) {
  var src = this;
  var state = this._readableState;
  // state.pipes is null / a single dest / an array, keyed by pipesCount.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
  var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
  var endFn = doEnd ? onend : unpipe;
  if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn);
  dest.on('unpipe', onunpipe);
  function onunpipe(readable, unpipeInfo) {
    debug('onunpipe');
    if (readable === src) {
      // hasUnpiped guards against running cleanup twice for one pipe.
      if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
        unpipeInfo.hasUnpiped = true;
        cleanup();
      }
    }
  }
  function onend() {
    debug('onend');
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source. This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);
  var cleanedUp = false;
  function cleanup() {
    debug('cleanup');
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', unpipe);
    src.removeListener('data', ondata);
    cleanedUp = true;

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
  }
  src.on('data', ondata);
  function ondata(chunk) {
    debug('ondata');
    var ret = dest.write(chunk);
    debug('dest.write', ret);
    if (ret === false) {
      // If the user unpiped during `dest.write()`, it is possible
      // to get stuck in a permanently paused state if that write
      // also returned false.
      // => Check whether `dest` is still a piping destination.
      if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
        debug('false write response, pause', state.awaitDrain);
        state.awaitDrain++;
      }
      src.pause();
    }
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    debug('onerror', er);
    unpipe();
    dest.removeListener('error', onerror);
    // Only re-raise when nobody else handles 'error' on dest.
    if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er);
  }

  // Make sure our error handler is attached before userland ones.
  prependListener(dest, 'error', onerror);

  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    debug('onfinish');
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);
  function unpipe() {
    debug('unpipe');
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    debug('pipe resume');
    src.resume();
  }
  return dest;
};
|||
|
// Build the 'drain' handler installed on each pipe destination:
// decrement the source's awaitDrain counter and restart flow once every
// blocked destination has drained.
function pipeOnDrain(src) {
  return function pipeOnDrainFunctionResult() {
    var state = src._readableState;
    debug('pipeOnDrain', state.awaitDrain);
    if (state.awaitDrain) state.awaitDrain--;
    // Only resume once no destination is blocked and someone still
    // listens for 'data'.
    if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
      state.flowing = true;
      flow(src);
    }
  };
}
|||
|
// Detach `dest` (or all destinations when omitted) from this stream,
// emitting 'unpipe' on each removed destination. Returns `this`.
Readable.prototype.unpipe = function (dest) {
  var state = this._readableState;
  var unpipeInfo = {
    hasUnpiped: false
  };

  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0) return this;

  // just one destination. most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes) return this;
    if (!dest) dest = state.pipes;

    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    if (dest) dest.emit('unpipe', this, unpipeInfo);
    return this;
  }

  // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    // Each destination gets its own unpipeInfo object so cleanup runs
    // once per pipe.
    for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, {
      hasUnpiped: false
    });
    return this;
  }

  // try to find the right one.
  var index = indexOf(state.pipes, dest);
  if (index === -1) return this;
  state.pipes.splice(index, 1);
  state.pipesCount -= 1;
  // Collapse back to the single-destination representation.
  if (state.pipesCount === 1) state.pipes = state.pipes[0];
  dest.emit('unpipe', this, unpipeInfo);
  return this;
};
|||
|
|
|||
|
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function (ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);
  var state = this._readableState;
  if (ev === 'data') {
    // update readableListening so that resume() may be a no-op
    // a few lines down. This is needed to support once('readable').
    state.readableListening = this.listenerCount('readable') > 0;

    // Try start flowing on next tick if stream isn't explicitly paused
    if (state.flowing !== false) this.resume();
  } else if (ev === 'readable') {
    // The first 'readable' listener switches the stream into paused mode.
    if (!state.endEmitted && !state.readableListening) {
      state.readableListening = state.needReadable = true;
      state.flowing = false;
      state.emittedReadable = false;
      debug('on readable', state.length, state.reading);
      if (state.length) {
        emitReadable(this);
      } else if (!state.reading) {
        // Nothing buffered and no read in flight: prime the stream.
        process.nextTick(nReadingNextTick, this);
      }
    }
  }
  return res;
};
Readable.prototype.addListener = Readable.prototype.on;
|||
|
// Removing a 'readable' listener may need to restart flowing; the
// decision is deferred to the next tick (see comment inside).
Readable.prototype.removeListener = function (ev, fn) {
  var res = Stream.prototype.removeListener.call(this, ev, fn);
  if (ev === 'readable') {
    // We need to check if there is someone still listening to
    // readable and reset the state. However this needs to happen
    // after readable has been emitted but before I/O (nextTick) to
    // support once('readable', fn) cycles. This means that calling
    // resume within the same tick will have no
    // effect.
    process.nextTick(updateReadableListening, this);
  }
  return res;
};
// Same deferred re-check when all listeners (or all 'readable'
// listeners) are dropped at once.
Readable.prototype.removeAllListeners = function (ev) {
  var res = Stream.prototype.removeAllListeners.apply(this, arguments);
  if (ev === 'readable' || ev === undefined) {
    // We need to check if there is someone still listening to
    // readable and reset the state. However this needs to happen
    // after readable has been emitted but before I/O (nextTick) to
    // support once('readable', fn) cycles. This means that calling
    // resume within the same tick will have no
    // effect.
    process.nextTick(updateReadableListening, this);
  }
  return res;
};
// Recompute readableListening after listener removal and resume flow if
// only 'data' consumers remain.
function updateReadableListening(self) {
  var state = self._readableState;
  state.readableListening = self.listenerCount('readable') > 0;
  if (state.resumeScheduled && !state.paused) {
    // flowing needs to be set to true now, otherwise
    // the upcoming resume will not flow.
    state.flowing = true;

    // crude way to check if we should resume
  } else if (self.listenerCount('data') > 0) {
    self.resume();
  }
}
|||
|
// nextTick callback scheduled by on('readable'): kick off an initial
// zero-length read to prime the stream.
function nReadingNextTick(self) {
  debug('readable nexttick read 0');
  self.read(0);
}
|||
|
|
|||
|
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function () {
  var state = this._readableState;
  if (!state.flowing) {
    debug('resume');
    // we flow only if there is no one listening
    // for readable, but we still have to call
    // resume()
    state.flowing = !state.readableListening;
    resume(this, state);
  }
  state.paused = false;
  return this;
};
// Schedule the actual resume work for the next tick (at most once).
function resume(stream, state) {
  if (!state.resumeScheduled) {
    state.resumeScheduled = true;
    process.nextTick(resume_, stream, state);
  }
}
// Deferred half of resume(): emits 'resume' and restarts the flow loop.
function resume_(stream, state) {
  debug('resume', state.reading);
  if (!state.reading) {
    stream.read(0);
  }
  state.resumeScheduled = false;
  stream.emit('resume');
  flow(stream);
  // flow() may have consumed everything; prime another _read if needed.
  if (state.flowing && !state.reading) stream.read(0);
}
|||
|
// Switch out of flowing mode and emit 'pause'. Returns `this`.
Readable.prototype.pause = function () {
  debug('call pause flowing=%j', this._readableState.flowing);
  // Only transition (and emit) when not already explicitly paused.
  if (this._readableState.flowing !== false) {
    debug('pause');
    this._readableState.flowing = false;
    this.emit('pause');
  }
  this._readableState.paused = true;
  return this;
};
// Drain the buffer through 'data' events while the stream stays in
// flowing mode; read() returning null ends the loop.
function flow(stream) {
  var state = stream._readableState;
  debug('flow', state.flowing);
  while (state.flowing && stream.read() !== null);
}
|||
|
|
|||
|
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function (stream) {
  var _this = this;
  var state = this._readableState;
  var paused = false;
  stream.on('end', function () {
    debug('wrapped end');
    if (state.decoder && !state.ended) {
      // Flush any partial character still held by the decoder.
      var chunk = state.decoder.end();
      if (chunk && chunk.length) _this.push(chunk);
    }
    _this.push(null);
  });
  stream.on('data', function (chunk) {
    debug('wrapped data');
    if (state.decoder) chunk = state.decoder.write(chunk);

    // don't skip over falsy values in objectMode
    if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
    var ret = _this.push(chunk);
    if (!ret) {
      // Backpressure: halt the wrapped source until _read is called.
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (this[i] === undefined && typeof stream[i] === 'function') {
      // IIFE captures `i` per iteration since `var` has no block scope.
      this[i] = function methodWrap(method) {
        return function methodWrapReturnFunction() {
          return stream[method].apply(stream, arguments);
        };
      }(i);
    }
  }

  // proxy certain important events.
  for (var n = 0; n < kProxyEvents.length; n++) {
    stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
  }

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  this._read = function (n) {
    debug('wrapped _read', n);
    if (paused) {
      paused = false;
      stream.resume();
    }
  };
  return this;
};
|||
|
if (typeof Symbol === 'function') {
|
|||
|
Readable.prototype[Symbol.asyncIterator] = function () {
|
|||
|
if (createReadableStreamAsyncIterator === undefined) {
|
|||
|
createReadableStreamAsyncIterator = requireAsync_iterator();
|
|||
|
}
|
|||
|
return createReadableStreamAsyncIterator(this);
|
|||
|
};
|
|||
|
}
|
|||
|
Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
|
|||
|
// making it explicit this property is not enumerable
|
|||
|
// because otherwise some prototype manipulation in
|
|||
|
// userland will fail
|
|||
|
enumerable: false,
|
|||
|
get: function get() {
|
|||
|
return this._readableState.highWaterMark;
|
|||
|
}
|
|||
|
});
|
|||
|
Object.defineProperty(Readable.prototype, 'readableBuffer', {
|
|||
|
// making it explicit this property is not enumerable
|
|||
|
// because otherwise some prototype manipulation in
|
|||
|
// userland will fail
|
|||
|
enumerable: false,
|
|||
|
get: function get() {
|
|||
|
return this._readableState && this._readableState.buffer;
|
|||
|
}
|
|||
|
});
|
|||
|
Object.defineProperty(Readable.prototype, 'readableFlowing', {
|
|||
|
// making it explicit this property is not enumerable
|
|||
|
// because otherwise some prototype manipulation in
|
|||
|
// userland will fail
|
|||
|
enumerable: false,
|
|||
|
get: function get() {
|
|||
|
return this._readableState.flowing;
|
|||
|
},
|
|||
|
set: function set(state) {
|
|||
|
if (this._readableState) {
|
|||
|
this._readableState.flowing = state;
|
|||
|
}
|
|||
|
}
|
|||
|
});
|
|||
|
|
|||
|
// exposed for testing purposes only.
|
|||
|
Readable._fromList = fromList;
|
|||
|
Object.defineProperty(Readable.prototype, 'readableLength', {
|
|||
|
// making it explicit this property is not enumerable
|
|||
|
// because otherwise some prototype manipulation in
|
|||
|
// userland will fail
|
|||
|
enumerable: false,
|
|||
|
get: function get() {
|
|||
|
return this._readableState.length;
|
|||
|
}
|
|||
|
});
|
|||
|
|
|||
|
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function fromList(n, state) {
  // nothing buffered
  if (state.length === 0) return null;
  var ret;
  // objectMode: one object per read, regardless of `n`.
  if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
    // read it all, truncate the list
    if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length);
    state.buffer.clear();
  } else {
    // read part of list
    ret = state.buffer.consume(n, state.decoder);
  }
  return ret;
}
|
|||
|
// Called once the source has ended and all buffered data is consumed.
// Defers the actual 'end' emission to the next tick so reads issued in the
// current tick still observe a readable stream.
function endReadable(stream) {
  var state = stream._readableState;
  debug('endReadable', state.endEmitted);
  if (!state.endEmitted) {
    state.ended = true;
    process.nextTick(endReadableNT, state, stream);
  }
}
|
|||
|
// Runs on the tick scheduled by endReadable(): emits 'end' exactly once and,
// when autoDestroy is enabled, destroys the stream once the writable side
// (if there is one) has finished too.
function endReadableNT(state, stream) {
  debug('endReadableNT', state.endEmitted, state.length);

  // Check that we didn't get one last unshift.
  if (!state.endEmitted && state.length === 0) {
    state.endEmitted = true;
    stream.readable = false;
    stream.emit('end');
    if (state.autoDestroy) {
      // In case of duplex streams we need a way to detect
      // if the writable side is ready for autoDestroy as well
      var wState = stream._writableState;
      if (!wState || wState.autoDestroy && wState.finished) {
        stream.destroy();
      }
    }
  }
}
|
|||
|
// Static factory: build a Readable from an (async) iterable.
// The helper module is loaded lazily on first use.
if (typeof Symbol === 'function') {
  Readable.from = function (iterable, opts) {
    if (from === undefined) {
      from = requireFrom();
    }
    return from(Readable, iterable, opts);
  };
}
|
|||
|
/**
 * Linear scan for the first strict-equality match of `x` within the
 * array-like `xs`. Returns the matching index, or -1 when absent.
 */
function indexOf(xs, x) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx += 1) {
    if (xs[idx] === x) {
      return idx;
    }
  }
  return -1;
}
|
|||
|
return _stream_readable;
|
|||
|
}
|
|||
|
|
|||
|
var _stream_transform;
var hasRequired_stream_transform;

// Lazy CommonJS-style loader (bundler output) for the readable-stream
// Transform class. Memoizes the constructor so repeated calls return the
// same class object.
function require_stream_transform () {
  if (hasRequired_stream_transform) return _stream_transform;
  hasRequired_stream_transform = 1;

  _stream_transform = Transform;
  var _require$codes = requireErrors().codes,
    ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
    ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
    ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING,
    ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0;
  var Duplex = require_stream_duplex();
  requireInherits()(Transform, Duplex);
  // Bound per-stream in the constructor; invoked when the user's
  // _transform() calls its callback. Pushes produced data, completes the
  // pending write, and schedules another read cycle if downstream wants more.
  function afterTransform(er, data) {
    var ts = this._transformState;
    ts.transforming = false;
    var cb = ts.writecb;
    // writecb is nulled below; seeing null here means the callback ran twice.
    if (cb === null) {
      return this.emit('error', new ERR_MULTIPLE_CALLBACK());
    }
    ts.writechunk = null;
    ts.writecb = null;
    if (data != null)
      // single equals check for both `null` and `undefined`
      this.push(data);
    cb(er);
    var rs = this._readableState;
    rs.reading = false;
    if (rs.needReadable || rs.length < rs.highWaterMark) {
      this._read(rs.highWaterMark);
    }
  }
  function Transform(options) {
    // Support calling without `new`.
    if (!(this instanceof Transform)) return new Transform(options);
    Duplex.call(this, options);
    this._transformState = {
      afterTransform: afterTransform.bind(this),
      needTransform: false,
      transforming: false,
      writecb: null,
      writechunk: null,
      writeencoding: null
    };

    // start out asking for a readable event once data is transformed.
    this._readableState.needReadable = true;

    // we have implemented the _read method, and done the other things
    // that Readable wants before the first _read call, so unset the
    // sync guard flag.
    this._readableState.sync = false;
    if (options) {
      if (typeof options.transform === 'function') this._transform = options.transform;
      if (typeof options.flush === 'function') this._flush = options.flush;
    }

    // When the writable side finishes, then flush out anything remaining.
    this.on('prefinish', prefinish);
  }
  // Runs the user's _flush() (if any) before 'finish', then finalizes via done().
  function prefinish() {
    var _this = this;
    if (typeof this._flush === 'function' && !this._readableState.destroyed) {
      this._flush(function (er, data) {
        done(_this, er, data);
      });
    } else {
      done(this, null, null);
    }
  }
  Transform.prototype.push = function (chunk, encoding) {
    this._transformState.needTransform = false;
    return Duplex.prototype.push.call(this, chunk, encoding);
  };

  // This is the part where you do stuff!
  // override this function in implementation classes.
  // 'chunk' is an input chunk.
  //
  // Call `push(newChunk)` to pass along transformed output
  // to the readable side. You may call 'push' zero or more times.
  //
  // Call `cb(err)` when you are done with this chunk. If you pass
  // an error, then that'll put the hurt on the whole operation. If you
  // never call cb(), then you'll never get another chunk.
  Transform.prototype._transform = function (chunk, encoding, cb) {
    cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()'));
  };
  // Buffer the incoming write; the actual transform is driven from _read()
  // so that output-side backpressure throttles the input side.
  Transform.prototype._write = function (chunk, encoding, cb) {
    var ts = this._transformState;
    ts.writecb = cb;
    ts.writechunk = chunk;
    ts.writeencoding = encoding;
    if (!ts.transforming) {
      var rs = this._readableState;
      if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
    }
  };

  // Doesn't matter what the args are here.
  // _transform does all the work.
  // That we got here means that the readable side wants more data.
  Transform.prototype._read = function (n) {
    var ts = this._transformState;
    if (ts.writechunk !== null && !ts.transforming) {
      ts.transforming = true;
      this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
    } else {
      // mark that we need a transform, so that any data that comes in
      // will get processed, now that we've asked for it.
      ts.needTransform = true;
    }
  };
  Transform.prototype._destroy = function (err, cb) {
    Duplex.prototype._destroy.call(this, err, function (err2) {
      cb(err2);
    });
  };
  // Final teardown after flush: emit trailing data, sanity-check that no
  // write or transform is still pending, then end the readable side.
  function done(stream, er, data) {
    if (er) return stream.emit('error', er);
    if (data != null)
      // single equals check for both `null` and `undefined`
      stream.push(data);

    // TODO(BridgeAR): Write a test for these two error cases
    // if there's nothing in the write buffer, then that means
    // that nothing more will ever be provided
    if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0();
    if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING();
    return stream.push(null);
  }
  return _stream_transform;
}
|
|||
|
|
|||
|
var _stream_passthrough;
var hasRequired_stream_passthrough;

/**
 * Lazy CommonJS-style loader (bundler output) for the readable-stream
 * PassThrough class. The first invocation builds the class; subsequent
 * calls return the memoized copy.
 */
function require_stream_passthrough () {
  if (hasRequired_stream_passthrough) return _stream_passthrough;
  hasRequired_stream_passthrough = 1;

  _stream_passthrough = PassThrough;
  var TransformClass = require_stream_transform();
  requireInherits()(PassThrough, TransformClass);

  /**
   * A trivial Transform whose output is byte-for-byte its input. Useful as
   * a pipeline placeholder or for observing data in transit.
   */
  function PassThrough(options) {
    // Support calling without `new`.
    if (!(this instanceof PassThrough)) {
      return new PassThrough(options);
    }
    TransformClass.call(this, options);
  }

  // Identity transform: hand every chunk straight back to the callback.
  PassThrough.prototype._transform = function (chunk, encoding, cb) {
    cb(null, chunk);
  };
  return _stream_passthrough;
}
|
|||
|
|
|||
|
var pipeline_1;
var hasRequiredPipeline;

// Lazy CommonJS-style loader (bundler output) for readable-stream's
// pipeline() implementation.
function requirePipeline () {
  if (hasRequiredPipeline) return pipeline_1;
  hasRequiredPipeline = 1;

  // end-of-stream helper, loaded lazily on first destroyer() call.
  var eos;
  // Wrap `callback` so it can only ever run once.
  function once(callback) {
    var called = false;
    return function () {
      if (called) return;
      called = true;
      callback.apply(void 0, arguments);
    };
  }
  var _require$codes = requireErrors().codes,
    ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS,
    ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED;
  function noop(err) {
    // Rethrow the error if it exists to avoid swallowing it
    if (err) throw err;
  }
  // Heuristic: an http.ClientRequest-like object (has setHeader and abort).
  function isRequest(stream) {
    return stream.setHeader && typeof stream.abort === 'function';
  }
  // Watch `stream` for completion and return a function that tears it down.
  // `callback` fires once the stream finishes or errors.
  function destroyer(stream, reading, writing, callback) {
    callback = once(callback);
    var closed = false;
    stream.on('close', function () {
      closed = true;
    });
    if (eos === undefined) eos = requireEndOfStream();
    eos(stream, {
      readable: reading,
      writable: writing
    }, function (err) {
      if (err) return callback(err);
      closed = true;
      callback();
    });
    var destroyed = false;
    return function (err) {
      // Already finished or already torn down: nothing to do.
      if (closed) return;
      if (destroyed) return;
      destroyed = true;

      // request.destroy just do .end - .abort is what we want
      if (isRequest(stream)) return stream.abort();
      if (typeof stream.destroy === 'function') return stream.destroy();
      callback(err || new ERR_STREAM_DESTROYED('pipe'));
    };
  }
  function call(fn) {
    fn();
  }
  function pipe(from, to) {
    return from.pipe(to);
  }
  // If the last argument is a function, treat it as the completion callback.
  function popCallback(streams) {
    if (!streams.length) return noop;
    if (typeof streams[streams.length - 1] !== 'function') return noop;
    return streams.pop();
  }
  // pipeline(...streams[, callback]): pipe the streams together and tear all
  // of them down if any one errors; `callback` fires on completion or error.
  function pipeline() {
    for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) {
      streams[_key] = arguments[_key];
    }
    var callback = popCallback(streams);
    // Also accept pipeline([a, b, c], cb).
    if (Array.isArray(streams[0])) streams = streams[0];
    if (streams.length < 2) {
      throw new ERR_MISSING_ARGS('streams');
    }
    var error;
    var destroys = streams.map(function (stream, i) {
      var reading = i < streams.length - 1;
      var writing = i > 0;
      return destroyer(stream, reading, writing, function (err) {
        // Remember only the first error; on any error destroy everything.
        if (!error) error = err;
        if (err) destroys.forEach(call);
        // The callback fires when the final (non-reading) stream finishes.
        if (reading) return;
        destroys.forEach(call);
        callback(error);
      });
    });
    return streams.reduce(pipe);
  }
  pipeline_1 = pipeline;
  return pipeline_1;
}
|
|||
|
|
|||
|
// Module wrapper for the bundled readable-stream package.
(function (module, exports) {
  var Stream = require$$0$5;
  // Escape hatch: with READABLE_STREAM=disable in the environment, re-export
  // Node's native stream implementation instead of the userland copy.
  if (process.env.READABLE_STREAM === 'disable' && Stream) {
    module.exports = Stream.Readable;
    Object.assign(module.exports, Stream);
    module.exports.Stream = Stream;
  } else {
    // Export the userland classes; for historical compatibility the module
    // object itself *is* the Readable constructor.
    exports = module.exports = require_stream_readable();
    exports.Stream = Stream || exports;
    exports.Readable = exports;
    exports.Writable = require_stream_writable();
    exports.Duplex = require_stream_duplex();
    exports.Transform = require_stream_transform();
    exports.PassThrough = require_stream_passthrough();
    exports.finished = requireEndOfStream();
    exports.pipeline = requirePipeline();
  }
} (readable, readable.exports));
|
|||
|
|
|||
|
var readableExports = readable.exports;

// CommonJS interop shim for the bundled `readable-web-to-node-stream`
// module (`lib`): mark it as an ES-module transpilation product and alias
// the bundled readable-stream under the name the transpiled code expects.
Object.defineProperty(lib, "__esModule", { value: true });
lib.ReadableWebToNodeStream = void 0;
const readable_stream_1 = readableExports;
|
|||
|
/**
 * Converts a Web-API stream into Node stream.Readable class
 * Node stream readable: https://nodejs.org/api/stream.html#stream_readable_streams
 * Web API readable-stream: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
 * Node readable stream: https://nodejs.org/api/stream.html#stream_readable_streams
 */
class ReadableWebToNodeStream extends readable_stream_1.Readable {
    /**
     *
     * @param stream ReadableStream: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream
     */
    constructor(stream) {
        super();
        this.bytesRead = 0;    // total number of bytes pushed downstream so far
        this.released = false; // set once close()/syncAndRelease() has been called
        this.reader = stream.getReader();
    }
    /**
     * Implementation of readable._read(size).
     * When readable._read() is called, if data is available from the resource,
     * the implementation should begin pushing that data into the read queue
     * https://nodejs.org/api/stream.html#stream_readable_read_size_1
     */
    async _read() {
        // Should start pushing data into the queue
        // Read data from the underlying Web-API-readable-stream
        if (this.released) {
            this.push(null); // Signal EOF
            return;
        }
        this.pendingRead = this.reader.read();
        let data;
        try {
            data = await this.pendingRead;
        }
        catch (err) {
            // Fix: a rejected reader.read() previously escaped _read() as an
            // unhandled promise rejection and left `pendingRead` set forever.
            // Surface the failure through the stream's error path instead.
            delete this.pendingRead;
            this.destroy(err instanceof Error ? err : new Error(String(err)));
            return;
        }
        // clear the promise before pushing new data to the queue and allow sequential calls to _read()
        delete this.pendingRead;
        if (data.done || this.released) {
            this.push(null); // Signal EOF
        }
        else {
            this.bytesRead += data.value.length;
            this.push(data.value); // Push new data to the queue
        }
    }
    /**
     * If there is no unresolved read call to Web-API ReadableStream immediately returns;
     * otherwise will wait until the read is resolved.
     */
    async waitForReadToComplete() {
        if (this.pendingRead) {
            await this.pendingRead;
        }
    }
    /**
     * Close wrapper
     */
    async close() {
        await this.syncAndRelease();
    }
    // Mark the stream released, wait for any in-flight read, then release the
    // lock held on the underlying Web-API reader.
    async syncAndRelease() {
        this.released = true;
        await this.waitForReadToComplete();
        await this.reader.releaseLock();
    }
}
lib.ReadableWebToNodeStream = ReadableWebToNodeStream;
|
|||
|
|
|||
|
// Primitive types
|
|||
|
/**
 * Wrap a Uint8Array in a DataView anchored at the array's own byte offset
 * within its backing ArrayBuffer, so token offsets are view-relative.
 */
function dv(array) {
  const { buffer, byteOffset } = array;
  return new DataView(buffer, byteOffset);
}
|
|||
|
/**
 * 8-bit unsigned integer
 */
const UINT8 = {
  len: 1,
  get: (array, offset) => dv(array).getUint8(offset),
  put: (array, offset, value) => {
    dv(array).setUint8(offset, value);
    // Return the offset just past the written byte.
    return offset + 1;
  }
};
|
|||
|
/**
 * 16-bit unsigned integer, Little Endian byte order
 */
const UINT16_LE = {
  len: 2,
  get: (array, offset) => dv(array).getUint16(offset, true),
  put: (array, offset, value) => {
    dv(array).setUint16(offset, value, true);
    // Return the offset just past the written word.
    return offset + 2;
  }
};
|
|||
|
/**
 * 16-bit unsigned integer, Big Endian byte order
 */
const UINT16_BE = {
  len: 2,
  get: (array, offset) => dv(array).getUint16(offset),
  put: (array, offset, value) => {
    dv(array).setUint16(offset, value);
    // Return the offset just past the written word.
    return offset + 2;
  }
};
|
|||
|
/**
 * 32-bit unsigned integer, Little Endian byte order
 */
const UINT32_LE = {
  len: 4,
  get: (array, offset) => dv(array).getUint32(offset, true),
  put: (array, offset, value) => {
    dv(array).setUint32(offset, value, true);
    // Return the offset just past the written dword.
    return offset + 4;
  }
};
|
|||
|
/**
 * 32-bit unsigned integer, Big Endian byte order
 */
const UINT32_BE = {
  len: 4,
  get: (array, offset) => dv(array).getUint32(offset),
  put: (array, offset, value) => {
    dv(array).setUint32(offset, value);
    // Return the offset just past the written dword.
    return offset + 4;
  }
};
|
|||
|
/**
 * 32-bit signed integer, Big Endian byte order
 */
const INT32_BE = {
  len: 4,
  get: (array, offset) => dv(array).getInt32(offset),
  put: (array, offset, value) => {
    dv(array).setInt32(offset, value);
    // Return the offset just past the written dword.
    return offset + 4;
  }
};
|
|||
|
/**
 * 64-bit unsigned integer, Little Endian byte order
 * Values are represented as BigInt.
 */
const UINT64_LE = {
  len: 8,
  get: (array, offset) => dv(array).getBigUint64(offset, true),
  put: (array, offset, value) => {
    dv(array).setBigUint64(offset, value, true);
    // Return the offset just past the written qword.
    return offset + 8;
  }
};
|
|||
|
/**
 * Consume a fixed number of bytes from the stream and return a string with a specified encoding.
 */
class StringType {
  /**
   * @param len - Number of bytes this token occupies
   * @param encoding - Buffer encoding used to decode the bytes
   */
  constructor(len, encoding) {
    this.len = len;
    this.encoding = encoding;
  }
  /**
   * Decode `len` bytes starting at `offset` from the given Uint8Array.
   */
  get(uint8Array, offset) {
    const buf = node_buffer.Buffer.from(uint8Array);
    return buf.toString(this.encoding, offset, offset + this.len);
  }
}
|
|||
|
|
|||
|
// Message shared by every EndOfStreamError instance.
const defaultMessages = 'End-Of-Stream';
/**
 * Thrown on read operation of the end of file or stream has been reached
 */
class EndOfStreamError extends Error {
  constructor() {
    super(defaultMessages);
  }
}
|
|||
|
|
|||
|
/**
 * Core tokenizer
 */
class AbstractTokenizer {
  constructor(fileInfo) {
    /**
     * Tokenizer-stream position
     */
    this.position = 0;
    // Scratch buffer reused by readNumber()/peekNumber(); 8 bytes covers the
    // largest numeric token handled here (64-bit).
    this.numBuffer = new Uint8Array(8);
    this.fileInfo = fileInfo ? fileInfo : {};
  }
  /**
   * Read a token from the tokenizer-stream
   * @param token - The token to read
   * @param position - If provided, the desired position in the tokenizer-stream
   * @returns Promise with token data
   */
  async readToken(token, position = this.position) {
    const uint8Array = node_buffer.Buffer.alloc(token.len);
    const len = await this.readBuffer(uint8Array, { position });
    if (len < token.len)
      throw new EndOfStreamError();
    return token.get(uint8Array, 0);
  }
  /**
   * Peek a token from the tokenizer-stream.
   * @param token - Token to peek from the tokenizer-stream.
   * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
   * @returns Promise with token data
   */
  async peekToken(token, position = this.position) {
    const uint8Array = node_buffer.Buffer.alloc(token.len);
    const len = await this.peekBuffer(uint8Array, { position });
    if (len < token.len)
      throw new EndOfStreamError();
    return token.get(uint8Array, 0);
  }
  /**
   * Read a numeric token from the stream
   * @param token - Numeric token
   * @returns Promise with number
   */
  async readNumber(token) {
    const len = await this.readBuffer(this.numBuffer, { length: token.len });
    if (len < token.len)
      throw new EndOfStreamError();
    return token.get(this.numBuffer, 0);
  }
  /**
   * Peek a numeric token from the stream without advancing the position
   * @param token - Numeric token
   * @returns Promise with number
   */
  async peekNumber(token) {
    const len = await this.peekBuffer(this.numBuffer, { length: token.len });
    if (len < token.len)
      throw new EndOfStreamError();
    return token.get(this.numBuffer, 0);
  }
  /**
   * Ignore number of bytes, advances the pointer in under tokenizer-stream.
   * @param length - Number of bytes to ignore
   * @return resolves the number of bytes ignored, equals length if this available, otherwise the number of bytes available
   */
  async ignore(length) {
    if (this.fileInfo.size !== undefined) {
      const bytesLeft = this.fileInfo.size - this.position;
      // Clamp the skip to the known end of the file.
      if (length > bytesLeft) {
        this.position += bytesLeft;
        return bytesLeft;
      }
    }
    this.position += length;
    return length;
  }
  // No underlying resource to release in the base class; subclasses override.
  async close() {
    // empty
  }
  // Fill in defaults for read/peek options and reject any attempt to seek
  // backwards. NOTE(review): truthiness checks mean an explicit 0 for
  // offset/length/position falls back to the default — for position this is
  // safe because a smaller-than-current position throws above.
  normalizeOptions(uint8Array, options) {
    if (options && options.position !== undefined && options.position < this.position) {
      throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
    }
    if (options) {
      return {
        mayBeLess: options.mayBeLess === true,
        offset: options.offset ? options.offset : 0,
        length: options.length ? options.length : (uint8Array.length - (options.offset ? options.offset : 0)),
        position: options.position ? options.position : this.position
      };
    }
    return {
      mayBeLess: false,
      offset: 0,
      length: uint8Array.length,
      position: this.position
    };
  }
}
|
|||
|
|
|||
|
// Tokenizer over an in-memory Uint8Array (no I/O involved).
class BufferTokenizer extends AbstractTokenizer {
  /**
   * Construct BufferTokenizer
   * @param uint8Array - Uint8Array to tokenize
   * @param fileInfo - Pass additional file information to the tokenizer
   */
  constructor(uint8Array, fileInfo) {
    super(fileInfo);
    this.uint8Array = uint8Array;
    // Default the reported size to the length of the in-memory buffer.
    this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : uint8Array.length;
  }
  /**
   * Read buffer from tokenizer
   * @param uint8Array - Uint8Array to tokenize
   * @param options - Read behaviour options
   * @returns {Promise<number>}
   */
  async readBuffer(uint8Array, options) {
    // An explicit position acts as a forward-only seek.
    if (options && options.position) {
      if (options.position < this.position) {
        throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
      }
      this.position = options.position;
    }
    const bytesRead = await this.peekBuffer(uint8Array, options);
    this.position += bytesRead;
    return bytesRead;
  }
  /**
   * Peek (read ahead) buffer from tokenizer
   * @param uint8Array
   * @param options - Read behaviour options
   * @returns {Promise<number>}
   */
  async peekBuffer(uint8Array, options) {
    const normOptions = this.normalizeOptions(uint8Array, options);
    // Never read past the end of the underlying buffer.
    const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
    if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
      throw new EndOfStreamError();
    }
    else {
      uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
      return bytes2read;
    }
  }
  // Nothing to release for an in-memory buffer.
  async close() {
    // empty
  }
}
|
|||
|
|
|||
|
/**
 * Construct ReadStreamTokenizer from given Buffer.
 * @param uint8Array - Uint8Array to tokenize
 * @param fileInfo - Pass additional file information to the tokenizer
 * @returns BufferTokenizer
 */
function fromBuffer(uint8Array, fileInfo) {
  return new BufferTokenizer(uint8Array, fileInfo);
}
|
|||
|
|
|||
|
/**
 * Map a string to an array of its UTF-16 code-unit values, one entry per
 * code point (astral characters therefore yield their high surrogate only).
 */
function stringToBytes(string) {
  return Array.from(string, character => character.charCodeAt(0)); // eslint-disable-line unicorn/prefer-code-point
}
|
|||
|
|
|||
|
/**
Checks whether the TAR checksum is valid.

@param {Buffer} buffer - The TAR header `[offset ... offset + 512]`.
@param {number} offset - TAR header offset.
@returns {boolean} `true` if the TAR checksum is valid, otherwise `false`.
*/
function tarHeaderChecksumMatches(buffer, offset = 0) {
	// Read the stored checksum (octal ASCII) from the header's checksum field
	// at bytes 148-153 *relative to the header*. Fix: this previously used
	// absolute offsets 148/154 and ignored `offset`, so validation was wrong
	// for any header not located at the start of the buffer.
	const readSum = Number.parseInt(buffer.toString('utf8', offset + 148, offset + 154).replace(/\0.*$/, '').trim(), 8); // Read sum in header
	if (Number.isNaN(readSum)) {
		return false;
	}

	// Per the tar spec, the checksum field itself is summed as 8 ASCII spaces.
	let sum = 8 * 0x20; // Initialize signed bit sum

	// Sum every header byte before the checksum field...
	for (let index = offset; index < offset + 148; index++) {
		sum += buffer[index];
	}

	// ...and every header byte after it (bytes 148-155 are skipped, having
	// been accounted for as spaces above).
	for (let index = offset + 156; index < offset + 512; index++) {
		sum += buffer[index];
	}

	return readSum === sum;
}
|
|||
|
|
|||
|
/**
ID3 UINT32 sync-safe tokenizer token.
28 bits (representing up to 256MB) integer, the msb is 0 to avoid "false syncsignals".
*/
const uint32SyncSafeToken = {
	len: 4,
	get(buffer, offset) {
		// Each byte contributes 7 bits, most-significant byte first; only the
		// final byte is masked, matching the original bit layout.
		let value = buffer[offset] << 21;
		value |= buffer[offset + 1] << 14;
		value |= buffer[offset + 2] << 7;
		value |= buffer[offset + 3] & 0x7F;
		return value;
	},
};
|
|||
|
|
|||
|
// File extensions the file-type signature matcher can report, in its
// detection-priority order. NOTE(review): appears paired with the
// `mimeTypes` list that follows — keep the two in sync when editing.
const extensions = [
  'jpg', 'png', 'apng', 'gif', 'webp', 'flif', 'xcf', 'cr2', 'cr3', 'orf',
  'arw', 'dng', 'nef', 'rw2', 'raf', 'tif', 'bmp', 'icns', 'jxr', 'psd',
  'indd', 'zip', 'tar', 'rar', 'gz', 'bz2', '7z', 'dmg', 'mp4', 'mid',
  'mkv', 'webm', 'mov', 'avi', 'mpg', 'mp2', 'mp3', 'm4a', 'oga', 'ogg',
  'ogv', 'opus', 'flac', 'wav', 'spx', 'amr', 'pdf', 'epub', 'elf', 'exe',
  'swf', 'rtf', 'wasm', 'woff', 'woff2', 'eot', 'ttf', 'otf', 'ico', 'flv',
  'ps', 'xz', 'sqlite', 'nes', 'crx', 'xpi', 'cab', 'deb', 'ar', 'rpm',
  'Z', 'lz', 'cfb', 'mxf', 'mts', 'blend', 'bpg', 'docx', 'pptx', 'xlsx',
  '3gp', '3g2', 'j2c', 'jp2', 'jpm', 'jpx', 'mj2', 'aif', 'qcp', 'odt',
  'ods', 'odp', 'xml', 'mobi', 'heic', 'cur', 'ktx', 'ape', 'wv', 'dcm',
  'ics', 'glb', 'pcap', 'dsf', 'lnk', 'alias', 'voc', 'ac3', 'm4v', 'm4p',
  'm4b', 'f4v', 'f4p', 'f4b', 'f4a', 'mie', 'asf', 'ogm', 'ogx', 'mpc',
  'arrow', 'shp', 'aac', 'mp1', 'it', 's3m', 'xm', 'ai', 'skp', 'avif',
  'eps', 'lzh', 'pgp', 'asar', 'stl', 'chm', '3mf', 'zst', 'jxl', 'vcf',
  'jls', 'pst', 'dwg', 'parquet', 'class', 'arj', 'cpio', 'ace', 'avro',
];
|
|||
|
|
|||
|
// All MIME types the detector below can report, paired 1:1 conceptually with the
// `extensions` list above. Order is not significant; this array exists so callers
// can enumerate the supported types.
const mimeTypes = [
	'image/jpeg',
	'image/png',
	'image/gif',
	'image/webp',
	'image/flif',
	'image/x-xcf',
	'image/x-canon-cr2',
	'image/x-canon-cr3',
	'image/tiff',
	'image/bmp',
	'image/vnd.ms-photo',
	'image/vnd.adobe.photoshop',
	'application/x-indesign',
	'application/epub+zip',
	'application/x-xpinstall',
	'application/vnd.oasis.opendocument.text',
	'application/vnd.oasis.opendocument.spreadsheet',
	'application/vnd.oasis.opendocument.presentation',
	'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
	'application/vnd.openxmlformats-officedocument.presentationml.presentation',
	'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
	'application/zip',
	'application/x-tar',
	'application/x-rar-compressed',
	'application/gzip',
	'application/x-bzip2',
	'application/x-7z-compressed',
	'application/x-apple-diskimage',
	'application/x-apache-arrow',
	'video/mp4',
	'audio/midi',
	'video/x-matroska',
	'video/webm',
	'video/quicktime',
	'video/vnd.avi',
	'audio/vnd.wave',
	'audio/qcelp',
	'audio/x-ms-asf',
	'video/x-ms-asf',
	'application/vnd.ms-asf',
	'video/mpeg',
	'video/3gpp',
	'audio/mpeg',
	'audio/mp4', // RFC 4337
	'audio/opus',
	'video/ogg',
	'audio/ogg',
	'application/ogg',
	'audio/x-flac',
	'audio/ape',
	'audio/wavpack',
	'audio/amr',
	'application/pdf',
	'application/x-elf',
	'application/x-msdownload',
	'application/x-shockwave-flash',
	'application/rtf',
	'application/wasm',
	'font/woff',
	'font/woff2',
	'application/vnd.ms-fontobject',
	'font/ttf',
	'font/otf',
	'image/x-icon',
	'video/x-flv',
	'application/postscript',
	'application/eps',
	'application/x-xz',
	'application/x-sqlite3',
	'application/x-nintendo-nes-rom',
	'application/x-google-chrome-extension',
	'application/vnd.ms-cab-compressed',
	'application/x-deb',
	'application/x-unix-archive',
	'application/x-rpm',
	'application/x-compress',
	'application/x-lzip',
	'application/x-cfb',
	'application/x-mie',
	'application/mxf',
	'video/mp2t',
	'application/x-blender',
	'image/bpg',
	'image/j2c',
	'image/jp2',
	'image/jpx',
	'image/jpm',
	'image/mj2',
	'audio/aiff',
	'application/xml',
	'application/x-mobipocket-ebook',
	'image/heif',
	'image/heif-sequence',
	'image/heic',
	'image/heic-sequence',
	'image/icns',
	'image/ktx',
	'application/dicom',
	'audio/x-musepack',
	'text/calendar',
	'text/vcard',
	'model/gltf-binary',
	'application/vnd.tcpdump.pcap',
	'audio/x-dsf', // Non-standard
	'application/x.ms.shortcut', // Invented by us
	'application/x.apple.alias', // Invented by us
	'audio/x-voc',
	'audio/vnd.dolby.dd-raw',
	'audio/x-m4a',
	'image/apng',
	'image/x-olympus-orf',
	'image/x-sony-arw',
	'image/x-adobe-dng',
	'image/x-nikon-nef',
	'image/x-panasonic-rw2',
	'image/x-fujifilm-raf',
	'video/x-m4v',
	'video/3gpp2',
	'application/x-esri-shape',
	'audio/aac',
	'audio/x-it',
	'audio/x-s3m',
	'audio/x-xm',
	'video/MP1S',
	'video/MP2P',
	'application/vnd.sketchup.skp',
	'image/avif',
	'application/x-lzh-compressed',
	'application/pgp-encrypted',
	'application/x-asar',
	'model/stl',
	'application/vnd.ms-htmlhelp',
	'model/3mf',
	'image/jxl',
	'application/zstd',
	'image/jls',
	'application/vnd.ms-outlook',
	'image/vnd.dwg',
	'application/x-parquet',
	'application/java-vm',
	'application/x-arj',
	'application/x-cpio',
	'application/x-ace-compressed',
	'application/avro',
];
|
|||
|
|
|||
|
// Number of bytes sampled from the start of the input for signature matching
// (see FileTypeParser.parse, which allocates its peek buffer with this size).
const minimumBytes = 4100; // A fair amount of file-types are detectable within this range.
|
|||
|
|
|||
|
/**
 * Detect the file type of a byte sequence.
 *
 * @param {Uint8Array|ArrayBuffer} input - Bytes to inspect (a Node `Buffer` is a `Uint8Array`).
 * @returns {Promise<{ext: string, mime: string}|undefined>} Detected type, or `undefined`
 *   when the input is too short or not recognized.
 * @throws {TypeError} When `input` is neither a `Uint8Array` nor an `ArrayBuffer`.
 */
async function fileTypeFromBuffer(input) {
	const isSupportedInput = input instanceof Uint8Array || input instanceof ArrayBuffer;
	if (!isSupportedInput) {
		throw new TypeError(`Expected the \`input\` argument to be of type \`Uint8Array\` or \`Buffer\` or \`ArrayBuffer\`, got \`${typeof input}\``);
	}

	const buffer = input instanceof Uint8Array ? input : new Uint8Array(input);

	// Even the shortest signatures need at least two bytes.
	if (buffer?.length > 1) {
		return fileTypeFromTokenizer(fromBuffer(buffer));
	}

	return undefined;
}
|
|||
|
|
|||
|
/**
 * Compare a byte signature against `buffer`.
 *
 * @param {Uint8Array|Buffer} buffer - Bytes to inspect.
 * @param {number[]} headers - Expected signature bytes.
 * @param {{offset?: number, mask?: number[]}} [options] - `offset` shifts where the
 *   comparison starts (default 0); `mask`, when present, is AND-ed with each buffer
 *   byte before comparing.
 * @returns {boolean} True when every signature byte matches.
 */
function _check(buffer, headers, options) {
	options = {
		offset: 0,
		...options,
	};

	// Every signature byte must match; `every` short-circuits on the first mismatch.
	return headers.every((headerByte, index) => {
		const sourceByte = buffer[index + options.offset];
		return options.mask
			? headerByte === (options.mask[index] & sourceByte)
			: headerByte === sourceByte;
	});
}
|
|||
|
|
|||
|
/**
 * Detect the file type from a tokenizer (strtok3-style) source.
 *
 * @param {object} tokenizer - Tokenizer providing `peekBuffer`/`readBuffer`/`ignore`/`readToken`.
 * @returns {Promise<{ext: string, mime: string}|undefined>} Detected type, or `undefined`
 *   when the type is unknown or the stream ends before enough bytes are available.
 */
async function fileTypeFromTokenizer(tokenizer) {
	try {
		// `await` is required here: without it the promise returned by `parse()` is
		// handed straight back to the caller, so a rejection (EndOfStreamError on a
		// truncated input) would bypass this try/catch instead of resolving to `undefined`.
		return await new FileTypeParser().parse(tokenizer);
	} catch (error) {
		if (!(error instanceof EndOfStreamError)) {
			throw error;
		}
	}
}
|
|||
|
|
|||
|
class FileTypeParser {
|
|||
|
check(header, options) {
|
|||
|
return _check(this.buffer, header, options);
|
|||
|
}
|
|||
|
|
|||
|
checkString(header, options) {
|
|||
|
return this.check(stringToBytes(header), options);
|
|||
|
}
|
|||
|
|
|||
|
async parse(tokenizer) {
|
|||
|
this.buffer = node_buffer.Buffer.alloc(minimumBytes);
|
|||
|
|
|||
|
// Keep reading until EOF if the file size is unknown.
|
|||
|
if (tokenizer.fileInfo.size === undefined) {
|
|||
|
tokenizer.fileInfo.size = Number.MAX_SAFE_INTEGER;
|
|||
|
}
|
|||
|
|
|||
|
this.tokenizer = tokenizer;
|
|||
|
|
|||
|
await tokenizer.peekBuffer(this.buffer, {length: 12, mayBeLess: true});
|
|||
|
|
|||
|
// -- 2-byte signatures --
|
|||
|
|
|||
|
if (this.check([0x42, 0x4D])) {
|
|||
|
return {
|
|||
|
ext: 'bmp',
|
|||
|
mime: 'image/bmp',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x0B, 0x77])) {
|
|||
|
return {
|
|||
|
ext: 'ac3',
|
|||
|
mime: 'audio/vnd.dolby.dd-raw',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x78, 0x01])) {
|
|||
|
return {
|
|||
|
ext: 'dmg',
|
|||
|
mime: 'application/x-apple-diskimage',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x4D, 0x5A])) {
|
|||
|
return {
|
|||
|
ext: 'exe',
|
|||
|
mime: 'application/x-msdownload',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x25, 0x21])) {
|
|||
|
await tokenizer.peekBuffer(this.buffer, {length: 24, mayBeLess: true});
|
|||
|
|
|||
|
if (
|
|||
|
this.checkString('PS-Adobe-', {offset: 2})
|
|||
|
&& this.checkString(' EPSF-', {offset: 14})
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'eps',
|
|||
|
mime: 'application/eps',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
return {
|
|||
|
ext: 'ps',
|
|||
|
mime: 'application/postscript',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.check([0x1F, 0xA0])
|
|||
|
|| this.check([0x1F, 0x9D])
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'Z',
|
|||
|
mime: 'application/x-compress',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xC7, 0x71])) {
|
|||
|
return {
|
|||
|
ext: 'cpio',
|
|||
|
mime: 'application/x-cpio',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x60, 0xEA])) {
|
|||
|
return {
|
|||
|
ext: 'arj',
|
|||
|
mime: 'application/x-arj',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// -- 3-byte signatures --
|
|||
|
|
|||
|
if (this.check([0xEF, 0xBB, 0xBF])) { // UTF-8-BOM
|
|||
|
// Strip off UTF-8-BOM
|
|||
|
this.tokenizer.ignore(3);
|
|||
|
return this.parse(tokenizer);
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x47, 0x49, 0x46])) {
|
|||
|
return {
|
|||
|
ext: 'gif',
|
|||
|
mime: 'image/gif',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x49, 0x49, 0xBC])) {
|
|||
|
return {
|
|||
|
ext: 'jxr',
|
|||
|
mime: 'image/vnd.ms-photo',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x1F, 0x8B, 0x8])) {
|
|||
|
return {
|
|||
|
ext: 'gz',
|
|||
|
mime: 'application/gzip',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x42, 0x5A, 0x68])) {
|
|||
|
return {
|
|||
|
ext: 'bz2',
|
|||
|
mime: 'application/x-bzip2',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('ID3')) {
|
|||
|
await tokenizer.ignore(6); // Skip ID3 header until the header size
|
|||
|
const id3HeaderLength = await tokenizer.readToken(uint32SyncSafeToken);
|
|||
|
if (tokenizer.position + id3HeaderLength > tokenizer.fileInfo.size) {
|
|||
|
// Guess file type based on ID3 header for backward compatibility
|
|||
|
return {
|
|||
|
ext: 'mp3',
|
|||
|
mime: 'audio/mpeg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
await tokenizer.ignore(id3HeaderLength);
|
|||
|
return fileTypeFromTokenizer(tokenizer); // Skip ID3 header, recursion
|
|||
|
}
|
|||
|
|
|||
|
// Musepack, SV7
|
|||
|
if (this.checkString('MP+')) {
|
|||
|
return {
|
|||
|
ext: 'mpc',
|
|||
|
mime: 'audio/x-musepack',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
(this.buffer[0] === 0x43 || this.buffer[0] === 0x46)
|
|||
|
&& this.check([0x57, 0x53], {offset: 1})
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'swf',
|
|||
|
mime: 'application/x-shockwave-flash',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// -- 4-byte signatures --
|
|||
|
|
|||
|
// Requires a sample size of 4 bytes
|
|||
|
if (this.check([0xFF, 0xD8, 0xFF])) {
|
|||
|
if (this.check([0xF7], {offset: 3})) { // JPG7/SOF55, indicating a ISO/IEC 14495 / JPEG-LS file
|
|||
|
return {
|
|||
|
ext: 'jls',
|
|||
|
mime: 'image/jls',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
return {
|
|||
|
ext: 'jpg',
|
|||
|
mime: 'image/jpeg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x4F, 0x62, 0x6A, 0x01])) {
|
|||
|
return {
|
|||
|
ext: 'avro',
|
|||
|
mime: 'application/avro',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('FLIF')) {
|
|||
|
return {
|
|||
|
ext: 'flif',
|
|||
|
mime: 'image/flif',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('8BPS')) {
|
|||
|
return {
|
|||
|
ext: 'psd',
|
|||
|
mime: 'image/vnd.adobe.photoshop',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('WEBP', {offset: 8})) {
|
|||
|
return {
|
|||
|
ext: 'webp',
|
|||
|
mime: 'image/webp',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Musepack, SV8
|
|||
|
if (this.checkString('MPCK')) {
|
|||
|
return {
|
|||
|
ext: 'mpc',
|
|||
|
mime: 'audio/x-musepack',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('FORM')) {
|
|||
|
return {
|
|||
|
ext: 'aif',
|
|||
|
mime: 'audio/aiff',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('icns', {offset: 0})) {
|
|||
|
return {
|
|||
|
ext: 'icns',
|
|||
|
mime: 'image/icns',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Zip-based file formats
|
|||
|
// Need to be before the `zip` check
|
|||
|
if (this.check([0x50, 0x4B, 0x3, 0x4])) { // Local file header signature
|
|||
|
try {
|
|||
|
while (tokenizer.position + 30 < tokenizer.fileInfo.size) {
|
|||
|
await tokenizer.readBuffer(this.buffer, {length: 30});
|
|||
|
|
|||
|
// https://en.wikipedia.org/wiki/Zip_(file_format)#File_headers
|
|||
|
const zipHeader = {
|
|||
|
compressedSize: this.buffer.readUInt32LE(18),
|
|||
|
uncompressedSize: this.buffer.readUInt32LE(22),
|
|||
|
filenameLength: this.buffer.readUInt16LE(26),
|
|||
|
extraFieldLength: this.buffer.readUInt16LE(28),
|
|||
|
};
|
|||
|
|
|||
|
zipHeader.filename = await tokenizer.readToken(new StringType(zipHeader.filenameLength, 'utf-8'));
|
|||
|
await tokenizer.ignore(zipHeader.extraFieldLength);
|
|||
|
|
|||
|
// Assumes signed `.xpi` from addons.mozilla.org
|
|||
|
if (zipHeader.filename === 'META-INF/mozilla.rsa') {
|
|||
|
return {
|
|||
|
ext: 'xpi',
|
|||
|
mime: 'application/x-xpinstall',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (zipHeader.filename.endsWith('.rels') || zipHeader.filename.endsWith('.xml')) {
|
|||
|
const type = zipHeader.filename.split('/')[0];
|
|||
|
switch (type) {
|
|||
|
case '_rels':
|
|||
|
break;
|
|||
|
case 'word':
|
|||
|
return {
|
|||
|
ext: 'docx',
|
|||
|
mime: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
|
|||
|
};
|
|||
|
case 'ppt':
|
|||
|
return {
|
|||
|
ext: 'pptx',
|
|||
|
mime: 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
|
|||
|
};
|
|||
|
case 'xl':
|
|||
|
return {
|
|||
|
ext: 'xlsx',
|
|||
|
mime: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
|||
|
};
|
|||
|
default:
|
|||
|
break;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (zipHeader.filename.startsWith('xl/')) {
|
|||
|
return {
|
|||
|
ext: 'xlsx',
|
|||
|
mime: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (zipHeader.filename.startsWith('3D/') && zipHeader.filename.endsWith('.model')) {
|
|||
|
return {
|
|||
|
ext: '3mf',
|
|||
|
mime: 'model/3mf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// The docx, xlsx and pptx file types extend the Office Open XML file format:
|
|||
|
// https://en.wikipedia.org/wiki/Office_Open_XML_file_formats
|
|||
|
// We look for:
|
|||
|
// - one entry named '[Content_Types].xml' or '_rels/.rels',
|
|||
|
// - one entry indicating specific type of file.
|
|||
|
// MS Office, OpenOffice and LibreOffice may put the parts in different order, so the check should not rely on it.
|
|||
|
if (zipHeader.filename === 'mimetype' && zipHeader.compressedSize === zipHeader.uncompressedSize) {
|
|||
|
let mimeType = await tokenizer.readToken(new StringType(zipHeader.compressedSize, 'utf-8'));
|
|||
|
mimeType = mimeType.trim();
|
|||
|
|
|||
|
switch (mimeType) {
|
|||
|
case 'application/epub+zip':
|
|||
|
return {
|
|||
|
ext: 'epub',
|
|||
|
mime: 'application/epub+zip',
|
|||
|
};
|
|||
|
case 'application/vnd.oasis.opendocument.text':
|
|||
|
return {
|
|||
|
ext: 'odt',
|
|||
|
mime: 'application/vnd.oasis.opendocument.text',
|
|||
|
};
|
|||
|
case 'application/vnd.oasis.opendocument.spreadsheet':
|
|||
|
return {
|
|||
|
ext: 'ods',
|
|||
|
mime: 'application/vnd.oasis.opendocument.spreadsheet',
|
|||
|
};
|
|||
|
case 'application/vnd.oasis.opendocument.presentation':
|
|||
|
return {
|
|||
|
ext: 'odp',
|
|||
|
mime: 'application/vnd.oasis.opendocument.presentation',
|
|||
|
};
|
|||
|
default:
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
// Try to find next header manually when current one is corrupted
|
|||
|
if (zipHeader.compressedSize === 0) {
|
|||
|
let nextHeaderIndex = -1;
|
|||
|
|
|||
|
while (nextHeaderIndex < 0 && (tokenizer.position < tokenizer.fileInfo.size)) {
|
|||
|
await tokenizer.peekBuffer(this.buffer, {mayBeLess: true});
|
|||
|
|
|||
|
nextHeaderIndex = this.buffer.indexOf('504B0304', 0, 'hex');
|
|||
|
// Move position to the next header if found, skip the whole buffer otherwise
|
|||
|
await tokenizer.ignore(nextHeaderIndex >= 0 ? nextHeaderIndex : this.buffer.length);
|
|||
|
}
|
|||
|
} else {
|
|||
|
await tokenizer.ignore(zipHeader.compressedSize);
|
|||
|
}
|
|||
|
}
|
|||
|
} catch (error) {
|
|||
|
if (!(error instanceof EndOfStreamError)) {
|
|||
|
throw error;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
return {
|
|||
|
ext: 'zip',
|
|||
|
mime: 'application/zip',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('OggS')) {
|
|||
|
// This is an OGG container
|
|||
|
await tokenizer.ignore(28);
|
|||
|
const type = node_buffer.Buffer.alloc(8);
|
|||
|
await tokenizer.readBuffer(type);
|
|||
|
|
|||
|
// Needs to be before `ogg` check
|
|||
|
if (_check(type, [0x4F, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64])) {
|
|||
|
return {
|
|||
|
ext: 'opus',
|
|||
|
mime: 'audio/opus',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// If ' theora' in header.
|
|||
|
if (_check(type, [0x80, 0x74, 0x68, 0x65, 0x6F, 0x72, 0x61])) {
|
|||
|
return {
|
|||
|
ext: 'ogv',
|
|||
|
mime: 'video/ogg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// If '\x01video' in header.
|
|||
|
if (_check(type, [0x01, 0x76, 0x69, 0x64, 0x65, 0x6F, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'ogm',
|
|||
|
mime: 'video/ogg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// If ' FLAC' in header https://xiph.org/flac/faq.html
|
|||
|
if (_check(type, [0x7F, 0x46, 0x4C, 0x41, 0x43])) {
|
|||
|
return {
|
|||
|
ext: 'oga',
|
|||
|
mime: 'audio/ogg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// 'Speex ' in header https://en.wikipedia.org/wiki/Speex
|
|||
|
if (_check(type, [0x53, 0x70, 0x65, 0x65, 0x78, 0x20, 0x20])) {
|
|||
|
return {
|
|||
|
ext: 'spx',
|
|||
|
mime: 'audio/ogg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// If '\x01vorbis' in header
|
|||
|
if (_check(type, [0x01, 0x76, 0x6F, 0x72, 0x62, 0x69, 0x73])) {
|
|||
|
return {
|
|||
|
ext: 'ogg',
|
|||
|
mime: 'audio/ogg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Default OGG container https://www.iana.org/assignments/media-types/application/ogg
|
|||
|
return {
|
|||
|
ext: 'ogx',
|
|||
|
mime: 'application/ogg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.check([0x50, 0x4B])
|
|||
|
&& (this.buffer[2] === 0x3 || this.buffer[2] === 0x5 || this.buffer[2] === 0x7)
|
|||
|
&& (this.buffer[3] === 0x4 || this.buffer[3] === 0x6 || this.buffer[3] === 0x8)
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'zip',
|
|||
|
mime: 'application/zip',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
//
|
|||
|
|
|||
|
// File Type Box (https://en.wikipedia.org/wiki/ISO_base_media_file_format)
|
|||
|
// It's not required to be first, but it's recommended to be. Almost all ISO base media files start with `ftyp` box.
|
|||
|
// `ftyp` box must contain a brand major identifier, which must consist of ISO 8859-1 printable characters.
|
|||
|
// Here we check for 8859-1 printable characters (for simplicity, it's a mask which also catches one non-printable character).
|
|||
|
if (
|
|||
|
this.checkString('ftyp', {offset: 4})
|
|||
|
&& (this.buffer[8] & 0x60) !== 0x00 // Brand major, first character ASCII?
|
|||
|
) {
|
|||
|
// They all can have MIME `video/mp4` except `application/mp4` special-case which is hard to detect.
|
|||
|
// For some cases, we're specific, everything else falls to `video/mp4` with `mp4` extension.
|
|||
|
const brandMajor = this.buffer.toString('binary', 8, 12).replace('\0', ' ').trim();
|
|||
|
switch (brandMajor) {
|
|||
|
case 'avif':
|
|||
|
case 'avis':
|
|||
|
return {ext: 'avif', mime: 'image/avif'};
|
|||
|
case 'mif1':
|
|||
|
return {ext: 'heic', mime: 'image/heif'};
|
|||
|
case 'msf1':
|
|||
|
return {ext: 'heic', mime: 'image/heif-sequence'};
|
|||
|
case 'heic':
|
|||
|
case 'heix':
|
|||
|
return {ext: 'heic', mime: 'image/heic'};
|
|||
|
case 'hevc':
|
|||
|
case 'hevx':
|
|||
|
return {ext: 'heic', mime: 'image/heic-sequence'};
|
|||
|
case 'qt':
|
|||
|
return {ext: 'mov', mime: 'video/quicktime'};
|
|||
|
case 'M4V':
|
|||
|
case 'M4VH':
|
|||
|
case 'M4VP':
|
|||
|
return {ext: 'm4v', mime: 'video/x-m4v'};
|
|||
|
case 'M4P':
|
|||
|
return {ext: 'm4p', mime: 'video/mp4'};
|
|||
|
case 'M4B':
|
|||
|
return {ext: 'm4b', mime: 'audio/mp4'};
|
|||
|
case 'M4A':
|
|||
|
return {ext: 'm4a', mime: 'audio/x-m4a'};
|
|||
|
case 'F4V':
|
|||
|
return {ext: 'f4v', mime: 'video/mp4'};
|
|||
|
case 'F4P':
|
|||
|
return {ext: 'f4p', mime: 'video/mp4'};
|
|||
|
case 'F4A':
|
|||
|
return {ext: 'f4a', mime: 'audio/mp4'};
|
|||
|
case 'F4B':
|
|||
|
return {ext: 'f4b', mime: 'audio/mp4'};
|
|||
|
case 'crx':
|
|||
|
return {ext: 'cr3', mime: 'image/x-canon-cr3'};
|
|||
|
default:
|
|||
|
if (brandMajor.startsWith('3g')) {
|
|||
|
if (brandMajor.startsWith('3g2')) {
|
|||
|
return {ext: '3g2', mime: 'video/3gpp2'};
|
|||
|
}
|
|||
|
|
|||
|
return {ext: '3gp', mime: 'video/3gpp'};
|
|||
|
}
|
|||
|
|
|||
|
return {ext: 'mp4', mime: 'video/mp4'};
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('MThd')) {
|
|||
|
return {
|
|||
|
ext: 'mid',
|
|||
|
mime: 'audio/midi',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.checkString('wOFF')
|
|||
|
&& (
|
|||
|
this.check([0x00, 0x01, 0x00, 0x00], {offset: 4})
|
|||
|
|| this.checkString('OTTO', {offset: 4})
|
|||
|
)
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'woff',
|
|||
|
mime: 'font/woff',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.checkString('wOF2')
|
|||
|
&& (
|
|||
|
this.check([0x00, 0x01, 0x00, 0x00], {offset: 4})
|
|||
|
|| this.checkString('OTTO', {offset: 4})
|
|||
|
)
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'woff2',
|
|||
|
mime: 'font/woff2',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xD4, 0xC3, 0xB2, 0xA1]) || this.check([0xA1, 0xB2, 0xC3, 0xD4])) {
|
|||
|
return {
|
|||
|
ext: 'pcap',
|
|||
|
mime: 'application/vnd.tcpdump.pcap',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Sony DSD Stream File (DSF)
|
|||
|
if (this.checkString('DSD ')) {
|
|||
|
return {
|
|||
|
ext: 'dsf',
|
|||
|
mime: 'audio/x-dsf', // Non-standard
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('LZIP')) {
|
|||
|
return {
|
|||
|
ext: 'lz',
|
|||
|
mime: 'application/x-lzip',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('fLaC')) {
|
|||
|
return {
|
|||
|
ext: 'flac',
|
|||
|
mime: 'audio/x-flac',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x42, 0x50, 0x47, 0xFB])) {
|
|||
|
return {
|
|||
|
ext: 'bpg',
|
|||
|
mime: 'image/bpg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('wvpk')) {
|
|||
|
return {
|
|||
|
ext: 'wv',
|
|||
|
mime: 'audio/wavpack',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('%PDF')) {
|
|||
|
try {
|
|||
|
await tokenizer.ignore(1350);
|
|||
|
const maxBufferSize = 10 * 1024 * 1024;
|
|||
|
const buffer = node_buffer.Buffer.alloc(Math.min(maxBufferSize, tokenizer.fileInfo.size));
|
|||
|
await tokenizer.readBuffer(buffer, {mayBeLess: true});
|
|||
|
|
|||
|
// Check if this is an Adobe Illustrator file
|
|||
|
if (buffer.includes(node_buffer.Buffer.from('AIPrivateData'))) {
|
|||
|
return {
|
|||
|
ext: 'ai',
|
|||
|
mime: 'application/postscript',
|
|||
|
};
|
|||
|
}
|
|||
|
} catch (error) {
|
|||
|
// Swallow end of stream error if file is too small for the Adobe AI check
|
|||
|
if (!(error instanceof EndOfStreamError)) {
|
|||
|
throw error;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
// Assume this is just a normal PDF
|
|||
|
return {
|
|||
|
ext: 'pdf',
|
|||
|
mime: 'application/pdf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x00, 0x61, 0x73, 0x6D])) {
|
|||
|
return {
|
|||
|
ext: 'wasm',
|
|||
|
mime: 'application/wasm',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// TIFF, little-endian type
|
|||
|
if (this.check([0x49, 0x49])) {
|
|||
|
const fileType = await this.readTiffHeader(false);
|
|||
|
if (fileType) {
|
|||
|
return fileType;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
// TIFF, big-endian type
|
|||
|
if (this.check([0x4D, 0x4D])) {
|
|||
|
const fileType = await this.readTiffHeader(true);
|
|||
|
if (fileType) {
|
|||
|
return fileType;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('MAC ')) {
|
|||
|
return {
|
|||
|
ext: 'ape',
|
|||
|
mime: 'audio/ape',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// https://github.com/threatstack/libmagic/blob/master/magic/Magdir/matroska
|
|||
|
if (this.check([0x1A, 0x45, 0xDF, 0xA3])) { // Root element: EBML
|
|||
|
async function readField() {
|
|||
|
const msb = await tokenizer.peekNumber(UINT8);
|
|||
|
let mask = 0x80;
|
|||
|
let ic = 0; // 0 = A, 1 = B, 2 = C, 3
|
|||
|
// = D
|
|||
|
|
|||
|
while ((msb & mask) === 0 && mask !== 0) {
|
|||
|
++ic;
|
|||
|
mask >>= 1;
|
|||
|
}
|
|||
|
|
|||
|
const id = node_buffer.Buffer.alloc(ic + 1);
|
|||
|
await tokenizer.readBuffer(id);
|
|||
|
return id;
|
|||
|
}
|
|||
|
|
|||
|
async function readElement() {
|
|||
|
const id = await readField();
|
|||
|
const lengthField = await readField();
|
|||
|
lengthField[0] ^= 0x80 >> (lengthField.length - 1);
|
|||
|
const nrLength = Math.min(6, lengthField.length); // JavaScript can max read 6 bytes integer
|
|||
|
return {
|
|||
|
id: id.readUIntBE(0, id.length),
|
|||
|
len: lengthField.readUIntBE(lengthField.length - nrLength, nrLength),
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
async function readChildren(children) {
|
|||
|
while (children > 0) {
|
|||
|
const element = await readElement();
|
|||
|
if (element.id === 0x42_82) {
|
|||
|
const rawValue = await tokenizer.readToken(new StringType(element.len, 'utf-8'));
|
|||
|
return rawValue.replace(/\00.*$/g, ''); // Return DocType
|
|||
|
}
|
|||
|
|
|||
|
await tokenizer.ignore(element.len); // ignore payload
|
|||
|
--children;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
const re = await readElement();
|
|||
|
const docType = await readChildren(re.len);
|
|||
|
|
|||
|
switch (docType) {
|
|||
|
case 'webm':
|
|||
|
return {
|
|||
|
ext: 'webm',
|
|||
|
mime: 'video/webm',
|
|||
|
};
|
|||
|
|
|||
|
case 'matroska':
|
|||
|
return {
|
|||
|
ext: 'mkv',
|
|||
|
mime: 'video/x-matroska',
|
|||
|
};
|
|||
|
|
|||
|
default:
|
|||
|
return;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
// RIFF file format which might be AVI, WAV, QCP, etc
|
|||
|
if (this.check([0x52, 0x49, 0x46, 0x46])) {
|
|||
|
if (this.check([0x41, 0x56, 0x49], {offset: 8})) {
|
|||
|
return {
|
|||
|
ext: 'avi',
|
|||
|
mime: 'video/vnd.avi',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x57, 0x41, 0x56, 0x45], {offset: 8})) {
|
|||
|
return {
|
|||
|
ext: 'wav',
|
|||
|
mime: 'audio/vnd.wave',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// QLCM, QCP file
|
|||
|
if (this.check([0x51, 0x4C, 0x43, 0x4D], {offset: 8})) {
|
|||
|
return {
|
|||
|
ext: 'qcp',
|
|||
|
mime: 'audio/qcelp',
|
|||
|
};
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('SQLi')) {
|
|||
|
return {
|
|||
|
ext: 'sqlite',
|
|||
|
mime: 'application/x-sqlite3',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x4E, 0x45, 0x53, 0x1A])) {
|
|||
|
return {
|
|||
|
ext: 'nes',
|
|||
|
mime: 'application/x-nintendo-nes-rom',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('Cr24')) {
|
|||
|
return {
|
|||
|
ext: 'crx',
|
|||
|
mime: 'application/x-google-chrome-extension',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.checkString('MSCF')
|
|||
|
|| this.checkString('ISc(')
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'cab',
|
|||
|
mime: 'application/vnd.ms-cab-compressed',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xED, 0xAB, 0xEE, 0xDB])) {
|
|||
|
return {
|
|||
|
ext: 'rpm',
|
|||
|
mime: 'application/x-rpm',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xC5, 0xD0, 0xD3, 0xC6])) {
|
|||
|
return {
|
|||
|
ext: 'eps',
|
|||
|
mime: 'application/eps',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x28, 0xB5, 0x2F, 0xFD])) {
|
|||
|
return {
|
|||
|
ext: 'zst',
|
|||
|
mime: 'application/zstd',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x7F, 0x45, 0x4C, 0x46])) {
|
|||
|
return {
|
|||
|
ext: 'elf',
|
|||
|
mime: 'application/x-elf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x21, 0x42, 0x44, 0x4E])) {
|
|||
|
return {
|
|||
|
ext: 'pst',
|
|||
|
mime: 'application/vnd.ms-outlook',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('PAR1')) {
|
|||
|
return {
|
|||
|
ext: 'parquet',
|
|||
|
mime: 'application/x-parquet',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// -- 5-byte signatures --
|
|||
|
|
|||
|
if (this.check([0x4F, 0x54, 0x54, 0x4F, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'otf',
|
|||
|
mime: 'font/otf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('#!AMR')) {
|
|||
|
return {
|
|||
|
ext: 'amr',
|
|||
|
mime: 'audio/amr',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('{\\rtf')) {
|
|||
|
return {
|
|||
|
ext: 'rtf',
|
|||
|
mime: 'application/rtf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x46, 0x4C, 0x56, 0x01])) {
|
|||
|
return {
|
|||
|
ext: 'flv',
|
|||
|
mime: 'video/x-flv',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('IMPM')) {
|
|||
|
return {
|
|||
|
ext: 'it',
|
|||
|
mime: 'audio/x-it',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.checkString('-lh0-', {offset: 2})
|
|||
|
|| this.checkString('-lh1-', {offset: 2})
|
|||
|
|| this.checkString('-lh2-', {offset: 2})
|
|||
|
|| this.checkString('-lh3-', {offset: 2})
|
|||
|
|| this.checkString('-lh4-', {offset: 2})
|
|||
|
|| this.checkString('-lh5-', {offset: 2})
|
|||
|
|| this.checkString('-lh6-', {offset: 2})
|
|||
|
|| this.checkString('-lh7-', {offset: 2})
|
|||
|
|| this.checkString('-lzs-', {offset: 2})
|
|||
|
|| this.checkString('-lz4-', {offset: 2})
|
|||
|
|| this.checkString('-lz5-', {offset: 2})
|
|||
|
|| this.checkString('-lhd-', {offset: 2})
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'lzh',
|
|||
|
mime: 'application/x-lzh-compressed',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// MPEG program stream (PS or MPEG-PS)
|
|||
|
if (this.check([0x00, 0x00, 0x01, 0xBA])) {
|
|||
|
// MPEG-PS, MPEG-1 Part 1
|
|||
|
if (this.check([0x21], {offset: 4, mask: [0xF1]})) {
|
|||
|
return {
|
|||
|
ext: 'mpg', // May also be .ps, .mpeg
|
|||
|
mime: 'video/MP1S',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// MPEG-PS, MPEG-2 Part 1
|
|||
|
if (this.check([0x44], {offset: 4, mask: [0xC4]})) {
|
|||
|
return {
|
|||
|
ext: 'mpg', // May also be .mpg, .m2p, .vob or .sub
|
|||
|
mime: 'video/MP2P',
|
|||
|
};
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('ITSF')) {
|
|||
|
return {
|
|||
|
ext: 'chm',
|
|||
|
mime: 'application/vnd.ms-htmlhelp',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xCA, 0xFE, 0xBA, 0xBE])) {
|
|||
|
return {
|
|||
|
ext: 'class',
|
|||
|
mime: 'application/java-vm',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// -- 6-byte signatures --
|
|||
|
|
|||
|
if (this.check([0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'xz',
|
|||
|
mime: 'application/x-xz',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('<?xml ')) {
|
|||
|
return {
|
|||
|
ext: 'xml',
|
|||
|
mime: 'application/xml',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])) {
|
|||
|
return {
|
|||
|
ext: '7z',
|
|||
|
mime: 'application/x-7z-compressed',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.check([0x52, 0x61, 0x72, 0x21, 0x1A, 0x7])
|
|||
|
&& (this.buffer[6] === 0x0 || this.buffer[6] === 0x1)
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'rar',
|
|||
|
mime: 'application/x-rar-compressed',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('solid ')) {
|
|||
|
return {
|
|||
|
ext: 'stl',
|
|||
|
mime: 'model/stl',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('AC')) {
|
|||
|
const version = this.buffer.toString('binary', 2, 6);
|
|||
|
if (version.match('^d*') && version >= 1000 && version <= 1050) {
|
|||
|
return {
|
|||
|
ext: 'dwg',
|
|||
|
mime: 'image/vnd.dwg',
|
|||
|
};
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('070707')) {
|
|||
|
return {
|
|||
|
ext: 'cpio',
|
|||
|
mime: 'application/x-cpio',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// -- 7-byte signatures --
|
|||
|
|
|||
|
if (this.checkString('BLENDER')) {
|
|||
|
return {
|
|||
|
ext: 'blend',
|
|||
|
mime: 'application/x-blender',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('!<arch>')) {
|
|||
|
await tokenizer.ignore(8);
|
|||
|
const string = await tokenizer.readToken(new StringType(13, 'ascii'));
|
|||
|
if (string === 'debian-binary') {
|
|||
|
return {
|
|||
|
ext: 'deb',
|
|||
|
mime: 'application/x-deb',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
return {
|
|||
|
ext: 'ar',
|
|||
|
mime: 'application/x-unix-archive',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('**ACE', {offset: 7})) {
|
|||
|
await tokenizer.peekBuffer(this.buffer, {length: 14, mayBeLess: true});
|
|||
|
if (this.checkString('**', {offset: 12})) {
|
|||
|
return {
|
|||
|
ext: 'ace',
|
|||
|
mime: 'application/x-ace-compressed',
|
|||
|
};
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
// -- 8-byte signatures --
|
|||
|
|
|||
|
if (this.check([0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A])) {
|
|||
|
// APNG format (https://wiki.mozilla.org/APNG_Specification)
|
|||
|
// 1. Find the first IDAT (image data) chunk (49 44 41 54)
|
|||
|
// 2. Check if there is an "acTL" chunk before the IDAT one (61 63 54 4C)
|
|||
|
|
|||
|
// Offset calculated as follows:
|
|||
|
// - 8 bytes: PNG signature
|
|||
|
// - 4 (length) + 4 (chunk type) + 13 (chunk data) + 4 (CRC): IHDR chunk
|
|||
|
|
|||
|
await tokenizer.ignore(8); // ignore PNG signature
|
|||
|
|
|||
|
async function readChunkHeader() {
|
|||
|
return {
|
|||
|
length: await tokenizer.readToken(INT32_BE),
|
|||
|
type: await tokenizer.readToken(new StringType(4, 'binary')),
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
do {
|
|||
|
const chunk = await readChunkHeader();
|
|||
|
if (chunk.length < 0) {
|
|||
|
return; // Invalid chunk length
|
|||
|
}
|
|||
|
|
|||
|
switch (chunk.type) {
|
|||
|
case 'IDAT':
|
|||
|
return {
|
|||
|
ext: 'png',
|
|||
|
mime: 'image/png',
|
|||
|
};
|
|||
|
case 'acTL':
|
|||
|
return {
|
|||
|
ext: 'apng',
|
|||
|
mime: 'image/apng',
|
|||
|
};
|
|||
|
default:
|
|||
|
await tokenizer.ignore(chunk.length + 4); // Ignore chunk-data + CRC
|
|||
|
}
|
|||
|
} while (tokenizer.position + 8 < tokenizer.fileInfo.size);
|
|||
|
|
|||
|
return {
|
|||
|
ext: 'png',
|
|||
|
mime: 'image/png',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x41, 0x52, 0x52, 0x4F, 0x57, 0x31, 0x00, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'arrow',
|
|||
|
mime: 'application/x-apache-arrow',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x67, 0x6C, 0x54, 0x46, 0x02, 0x00, 0x00, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'glb',
|
|||
|
mime: 'model/gltf-binary',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// `mov` format variants
|
|||
|
if (
|
|||
|
this.check([0x66, 0x72, 0x65, 0x65], {offset: 4}) // `free`
|
|||
|
|| this.check([0x6D, 0x64, 0x61, 0x74], {offset: 4}) // `mdat` MJPEG
|
|||
|
|| this.check([0x6D, 0x6F, 0x6F, 0x76], {offset: 4}) // `moov`
|
|||
|
|| this.check([0x77, 0x69, 0x64, 0x65], {offset: 4}) // `wide`
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'mov',
|
|||
|
mime: 'video/quicktime',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// -- 9-byte signatures --
|
|||
|
|
|||
|
if (this.check([0x49, 0x49, 0x52, 0x4F, 0x08, 0x00, 0x00, 0x00, 0x18])) {
|
|||
|
return {
|
|||
|
ext: 'orf',
|
|||
|
mime: 'image/x-olympus-orf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('gimp xcf ')) {
|
|||
|
return {
|
|||
|
ext: 'xcf',
|
|||
|
mime: 'image/x-xcf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// -- 12-byte signatures --
|
|||
|
|
|||
|
if (this.check([0x49, 0x49, 0x55, 0x00, 0x18, 0x00, 0x00, 0x00, 0x88, 0xE7, 0x74, 0xD8])) {
|
|||
|
return {
|
|||
|
ext: 'rw2',
|
|||
|
mime: 'image/x-panasonic-rw2',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// ASF_Header_Object first 80 bytes
|
|||
|
if (this.check([0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9])) {
|
|||
|
async function readHeader() {
|
|||
|
const guid = node_buffer.Buffer.alloc(16);
|
|||
|
await tokenizer.readBuffer(guid);
|
|||
|
return {
|
|||
|
id: guid,
|
|||
|
size: Number(await tokenizer.readToken(UINT64_LE)),
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
await tokenizer.ignore(30);
|
|||
|
// Search for header should be in first 1KB of file.
|
|||
|
while (tokenizer.position + 24 < tokenizer.fileInfo.size) {
|
|||
|
const header = await readHeader();
|
|||
|
let payload = header.size - 24;
|
|||
|
if (_check(header.id, [0x91, 0x07, 0xDC, 0xB7, 0xB7, 0xA9, 0xCF, 0x11, 0x8E, 0xE6, 0x00, 0xC0, 0x0C, 0x20, 0x53, 0x65])) {
|
|||
|
// Sync on Stream-Properties-Object (B7DC0791-A9B7-11CF-8EE6-00C00C205365)
|
|||
|
const typeId = node_buffer.Buffer.alloc(16);
|
|||
|
payload -= await tokenizer.readBuffer(typeId);
|
|||
|
|
|||
|
if (_check(typeId, [0x40, 0x9E, 0x69, 0xF8, 0x4D, 0x5B, 0xCF, 0x11, 0xA8, 0xFD, 0x00, 0x80, 0x5F, 0x5C, 0x44, 0x2B])) {
|
|||
|
// Found audio:
|
|||
|
return {
|
|||
|
ext: 'asf',
|
|||
|
mime: 'audio/x-ms-asf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (_check(typeId, [0xC0, 0xEF, 0x19, 0xBC, 0x4D, 0x5B, 0xCF, 0x11, 0xA8, 0xFD, 0x00, 0x80, 0x5F, 0x5C, 0x44, 0x2B])) {
|
|||
|
// Found video:
|
|||
|
return {
|
|||
|
ext: 'asf',
|
|||
|
mime: 'video/x-ms-asf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
break;
|
|||
|
}
|
|||
|
|
|||
|
await tokenizer.ignore(payload);
|
|||
|
}
|
|||
|
|
|||
|
// Default to ASF generic extension
|
|||
|
return {
|
|||
|
ext: 'asf',
|
|||
|
mime: 'application/vnd.ms-asf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xAB, 0x4B, 0x54, 0x58, 0x20, 0x31, 0x31, 0xBB, 0x0D, 0x0A, 0x1A, 0x0A])) {
|
|||
|
return {
|
|||
|
ext: 'ktx',
|
|||
|
mime: 'image/ktx',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if ((this.check([0x7E, 0x10, 0x04]) || this.check([0x7E, 0x18, 0x04])) && this.check([0x30, 0x4D, 0x49, 0x45], {offset: 4})) {
|
|||
|
return {
|
|||
|
ext: 'mie',
|
|||
|
mime: 'application/x-mie',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x27, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], {offset: 2})) {
|
|||
|
return {
|
|||
|
ext: 'shp',
|
|||
|
mime: 'application/x-esri-shape',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xFF, 0x4F, 0xFF, 0x51])) {
|
|||
|
return {
|
|||
|
ext: 'j2c',
|
|||
|
mime: 'image/j2c',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x00, 0x00, 0x00, 0x0C, 0x6A, 0x50, 0x20, 0x20, 0x0D, 0x0A, 0x87, 0x0A])) {
|
|||
|
// JPEG-2000 family
|
|||
|
|
|||
|
await tokenizer.ignore(20);
|
|||
|
const type = await tokenizer.readToken(new StringType(4, 'ascii'));
|
|||
|
switch (type) {
|
|||
|
case 'jp2 ':
|
|||
|
return {
|
|||
|
ext: 'jp2',
|
|||
|
mime: 'image/jp2',
|
|||
|
};
|
|||
|
case 'jpx ':
|
|||
|
return {
|
|||
|
ext: 'jpx',
|
|||
|
mime: 'image/jpx',
|
|||
|
};
|
|||
|
case 'jpm ':
|
|||
|
return {
|
|||
|
ext: 'jpm',
|
|||
|
mime: 'image/jpm',
|
|||
|
};
|
|||
|
case 'mjp2':
|
|||
|
return {
|
|||
|
ext: 'mj2',
|
|||
|
mime: 'image/mj2',
|
|||
|
};
|
|||
|
default:
|
|||
|
return;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.check([0xFF, 0x0A])
|
|||
|
|| this.check([0x00, 0x00, 0x00, 0x0C, 0x4A, 0x58, 0x4C, 0x20, 0x0D, 0x0A, 0x87, 0x0A])
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'jxl',
|
|||
|
mime: 'image/jxl',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xFE, 0xFF])) { // UTF-16-BOM-LE
|
|||
|
if (this.check([0, 60, 0, 63, 0, 120, 0, 109, 0, 108], {offset: 2})) {
|
|||
|
return {
|
|||
|
ext: 'xml',
|
|||
|
mime: 'application/xml',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
return undefined; // Some unknown text based format
|
|||
|
}
|
|||
|
|
|||
|
// -- Unsafe signatures --
|
|||
|
|
|||
|
if (
|
|||
|
this.check([0x0, 0x0, 0x1, 0xBA])
|
|||
|
|| this.check([0x0, 0x0, 0x1, 0xB3])
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'mpg',
|
|||
|
mime: 'video/mpeg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x00, 0x01, 0x00, 0x00, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'ttf',
|
|||
|
mime: 'font/ttf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x00, 0x00, 0x01, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'ico',
|
|||
|
mime: 'image/x-icon',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x00, 0x00, 0x02, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'cur',
|
|||
|
mime: 'image/x-icon',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1])) {
|
|||
|
// Detected Microsoft Compound File Binary File (MS-CFB) Format.
|
|||
|
return {
|
|||
|
ext: 'cfb',
|
|||
|
mime: 'application/x-cfb',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Increase sample size from 12 to 256.
|
|||
|
await tokenizer.peekBuffer(this.buffer, {length: Math.min(256, tokenizer.fileInfo.size), mayBeLess: true});
|
|||
|
|
|||
|
// -- 15-byte signatures --
|
|||
|
|
|||
|
if (this.checkString('BEGIN:')) {
|
|||
|
if (this.checkString('VCARD', {offset: 6})) {
|
|||
|
return {
|
|||
|
ext: 'vcf',
|
|||
|
mime: 'text/vcard',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('VCALENDAR', {offset: 6})) {
|
|||
|
return {
|
|||
|
ext: 'ics',
|
|||
|
mime: 'text/calendar',
|
|||
|
};
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
// `raf` is here just to keep all the raw image detectors together.
|
|||
|
if (this.checkString('FUJIFILMCCD-RAW')) {
|
|||
|
return {
|
|||
|
ext: 'raf',
|
|||
|
mime: 'image/x-fujifilm-raf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('Extended Module:')) {
|
|||
|
return {
|
|||
|
ext: 'xm',
|
|||
|
mime: 'audio/x-xm',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('Creative Voice File')) {
|
|||
|
return {
|
|||
|
ext: 'voc',
|
|||
|
mime: 'audio/x-voc',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x04, 0x00, 0x00, 0x00]) && this.buffer.length >= 16) { // Rough & quick check Pickle/ASAR
|
|||
|
const jsonSize = this.buffer.readUInt32LE(12);
|
|||
|
if (jsonSize > 12 && this.buffer.length >= jsonSize + 16) {
|
|||
|
try {
|
|||
|
const header = this.buffer.slice(16, jsonSize + 16).toString();
|
|||
|
const json = JSON.parse(header);
|
|||
|
// Check if Pickle is ASAR
|
|||
|
if (json.files) { // Final check, assuring Pickle/ASAR format
|
|||
|
return {
|
|||
|
ext: 'asar',
|
|||
|
mime: 'application/x-asar',
|
|||
|
};
|
|||
|
}
|
|||
|
} catch {}
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x06, 0x0E, 0x2B, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0D, 0x01, 0x02, 0x01, 0x01, 0x02])) {
|
|||
|
return {
|
|||
|
ext: 'mxf',
|
|||
|
mime: 'application/mxf',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('SCRM', {offset: 44})) {
|
|||
|
return {
|
|||
|
ext: 's3m',
|
|||
|
mime: 'audio/x-s3m',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Raw MPEG-2 transport stream (188-byte packets)
|
|||
|
if (this.check([0x47]) && this.check([0x47], {offset: 188})) {
|
|||
|
return {
|
|||
|
ext: 'mts',
|
|||
|
mime: 'video/mp2t',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Blu-ray Disc Audio-Video (BDAV) MPEG-2 transport stream has 4-byte TP_extra_header before each 188-byte packet
|
|||
|
if (this.check([0x47], {offset: 4}) && this.check([0x47], {offset: 196})) {
|
|||
|
return {
|
|||
|
ext: 'mts',
|
|||
|
mime: 'video/mp2t',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x42, 0x4F, 0x4F, 0x4B, 0x4D, 0x4F, 0x42, 0x49], {offset: 60})) {
|
|||
|
return {
|
|||
|
ext: 'mobi',
|
|||
|
mime: 'application/x-mobipocket-ebook',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x44, 0x49, 0x43, 0x4D], {offset: 128})) {
|
|||
|
return {
|
|||
|
ext: 'dcm',
|
|||
|
mime: 'application/dicom',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x4C, 0x00, 0x00, 0x00, 0x01, 0x14, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x46])) {
|
|||
|
return {
|
|||
|
ext: 'lnk',
|
|||
|
mime: 'application/x.ms.shortcut', // Invented by us
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x62, 0x6F, 0x6F, 0x6B, 0x00, 0x00, 0x00, 0x00, 0x6D, 0x61, 0x72, 0x6B, 0x00, 0x00, 0x00, 0x00])) {
|
|||
|
return {
|
|||
|
ext: 'alias',
|
|||
|
mime: 'application/x.apple.alias', // Invented by us
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (
|
|||
|
this.check([0x4C, 0x50], {offset: 34})
|
|||
|
&& (
|
|||
|
this.check([0x00, 0x00, 0x01], {offset: 8})
|
|||
|
|| this.check([0x01, 0x00, 0x02], {offset: 8})
|
|||
|
|| this.check([0x02, 0x00, 0x02], {offset: 8})
|
|||
|
)
|
|||
|
) {
|
|||
|
return {
|
|||
|
ext: 'eot',
|
|||
|
mime: 'application/vnd.ms-fontobject',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0x06, 0x06, 0xED, 0xF5, 0xD8, 0x1D, 0x46, 0xE5, 0xBD, 0x31, 0xEF, 0xE7, 0xFE, 0x74, 0xB7, 0x1D])) {
|
|||
|
return {
|
|||
|
ext: 'indd',
|
|||
|
mime: 'application/x-indesign',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Increase sample size from 256 to 512
|
|||
|
await tokenizer.peekBuffer(this.buffer, {length: Math.min(512, tokenizer.fileInfo.size), mayBeLess: true});
|
|||
|
|
|||
|
// Requires a buffer size of 512 bytes
|
|||
|
if (tarHeaderChecksumMatches(this.buffer)) {
|
|||
|
return {
|
|||
|
ext: 'tar',
|
|||
|
mime: 'application/x-tar',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xFF, 0xFE])) { // UTF-16-BOM-BE
|
|||
|
if (this.check([60, 0, 63, 0, 120, 0, 109, 0, 108, 0], {offset: 2})) {
|
|||
|
return {
|
|||
|
ext: 'xml',
|
|||
|
mime: 'application/xml',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
if (this.check([0xFF, 0x0E, 0x53, 0x00, 0x6B, 0x00, 0x65, 0x00, 0x74, 0x00, 0x63, 0x00, 0x68, 0x00, 0x55, 0x00, 0x70, 0x00, 0x20, 0x00, 0x4D, 0x00, 0x6F, 0x00, 0x64, 0x00, 0x65, 0x00, 0x6C, 0x00], {offset: 2})) {
|
|||
|
return {
|
|||
|
ext: 'skp',
|
|||
|
mime: 'application/vnd.sketchup.skp',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
return undefined; // Some text based format
|
|||
|
}
|
|||
|
|
|||
|
if (this.checkString('-----BEGIN PGP MESSAGE-----')) {
|
|||
|
return {
|
|||
|
ext: 'pgp',
|
|||
|
mime: 'application/pgp-encrypted',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Check MPEG 1 or 2 Layer 3 header, or 'layer 0' for ADTS (MPEG sync-word 0xFFE)
|
|||
|
if (this.buffer.length >= 2 && this.check([0xFF, 0xE0], {offset: 0, mask: [0xFF, 0xE0]})) {
|
|||
|
if (this.check([0x10], {offset: 1, mask: [0x16]})) {
|
|||
|
// Check for (ADTS) MPEG-2
|
|||
|
if (this.check([0x08], {offset: 1, mask: [0x08]})) {
|
|||
|
return {
|
|||
|
ext: 'aac',
|
|||
|
mime: 'audio/aac',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Must be (ADTS) MPEG-4
|
|||
|
return {
|
|||
|
ext: 'aac',
|
|||
|
mime: 'audio/aac',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// MPEG 1 or 2 Layer 3 header
|
|||
|
// Check for MPEG layer 3
|
|||
|
if (this.check([0x02], {offset: 1, mask: [0x06]})) {
|
|||
|
return {
|
|||
|
ext: 'mp3',
|
|||
|
mime: 'audio/mpeg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Check for MPEG layer 2
|
|||
|
if (this.check([0x04], {offset: 1, mask: [0x06]})) {
|
|||
|
return {
|
|||
|
ext: 'mp2',
|
|||
|
mime: 'audio/mpeg',
|
|||
|
};
|
|||
|
}
|
|||
|
|
|||
|
// Check for MPEG layer 1
|
|||
|
if (this.check([0x06], {offset: 1, mask: [0x06]})) {
|
|||
|
return {
|
|||
|
ext: 'mp1',
|
|||
|
mime: 'audio/mpeg',
|
|||
|
};
|
|||
|
}
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
async readTiffTag(bigEndian) {
|
|||
|
const tagId = await this.tokenizer.readToken(bigEndian ? UINT16_BE : UINT16_LE);
|
|||
|
this.tokenizer.ignore(10);
|
|||
|
switch (tagId) {
|
|||
|
case 50_341:
|
|||
|
return {
|
|||
|
ext: 'arw',
|
|||
|
mime: 'image/x-sony-arw',
|
|||
|
};
|
|||
|
case 50_706:
|
|||
|
return {
|
|||
|
ext: 'dng',
|
|||
|
mime: 'image/x-adobe-dng',
|
|||
|
};
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
async readTiffIFD(bigEndian) {
|
|||
|
const numberOfTags = await this.tokenizer.readToken(bigEndian ? UINT16_BE : UINT16_LE);
|
|||
|
for (let n = 0; n < numberOfTags; ++n) {
|
|||
|
const fileType = await this.readTiffTag(bigEndian);
|
|||
|
if (fileType) {
|
|||
|
return fileType;
|
|||
|
}
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
async readTiffHeader(bigEndian) {
    // TIFF header layout: bytes 2-3 hold the magic/version word
    // (42 = classic TIFF, 43 = BigTIFF); bytes 4-7 hold the offset
    // of the first IFD (image file directory).
    const version = (bigEndian ? UINT16_BE : UINT16_LE).get(this.buffer, 2);
    const ifdOffset = (bigEndian ? UINT32_BE : UINT32_LE).get(this.buffer, 4);

    if (version === 42) {
        // TIFF file header
        if (ifdOffset >= 6) {
            // Canon CR2: literal "CR" marker immediately after the 8-byte header.
            if (this.checkString('CR', {offset: 8})) {
                return {
                    ext: 'cr2',
                    mime: 'image/x-canon-cr2',
                };
            }

            // Nikon NEF: characteristic first-IFD entry bytes at offset 8
            // (two known byte patterns).
            if (ifdOffset >= 8 && (this.check([0x1C, 0x00, 0xFE, 0x00], {offset: 8}) || this.check([0x1F, 0x00, 0x0B, 0x00], {offset: 8}))) {
                return {
                    ext: 'nef',
                    mime: 'image/x-nikon-nef',
                };
            }
        }

        // Otherwise seek to the first IFD and scan its tags to distinguish
        // ARW/DNG from a plain TIFF; fall back to image/tiff.
        await this.tokenizer.ignore(ifdOffset);
        const fileType = await this.readTiffIFD(bigEndian);
        return fileType ?? {
            ext: 'tif',
            mime: 'image/tiff',
        };
    }

    if (version === 43) { // Big TIFF file header
        return {
            ext: 'tif',
            mime: 'image/tiff',
        };
    }
    // Unknown version word: implicitly resolves to undefined.
}
|
|||
|
}
|
|||
|
|
|||
|
// NOTE(review): bundler residue — both Set constructions discard their
// result, so these two statements have no observable effect. Presumably the
// original module exported these sets before tree-shaking; confirm before removing.
new Set(extensions);

new Set(mimeTypes);
|
|||
|
|
|||
|
// Extensions (as reported by the binary file-type detector above) that
// imageType() accepts as images.
const imageExtensions = new Set([
    'jpg',
    'png',
    'gif',
    'webp',
    'flif',
    'cr2',
    'tif',
    'bmp',
    'jxr',
    'psd',
    'ico',
    'bpg',
    'jp2',
    'jpm',
    'jpx',
    'heic',
    'cur',
    'dcm',
    'avif',
]);
|
|||
|
|
|||
|
/**
 * Detect the file type of a binary buffer and report it only when the
 * detected extension is a known image extension.
 *
 * @param input - Buffer/Uint8Array to sniff.
 * @returns The file-type result ({ext, mime}) for images, otherwise false.
 */
async function imageType(input) {
    const detected = await fileTypeFromBuffer(input);
    if (detected && imageExtensions.has(detected.ext)) {
        return detected;
    }
    return false;
}
|
|||
|
|
|||
|
// Lower-case filename extensions (with leading dot) that the plugin treats
// as images when checking asset paths; consumed by isAnImage().
var IMAGE_EXT_LIST = [
    ".png",
    ".jpg",
    ".jpeg",
    ".bmp",
    ".gif",
    ".svg",
    ".tiff",
    ".webp",
    ".avif",
];
|
|||
|
/**
 * Case-insensitive membership test: is `ext` (a dotted extension such as
 * ".PNG") one of the supported image extensions?
 */
function isAnImage(ext) {
    var normalized = ext.toLowerCase();
    return IMAGE_EXT_LIST.indexOf(normalized) !== -1;
}
|
|||
|
/**
 * True when the file at `path` has an image extension (by name only — the
 * file contents are not inspected).
 */
function isAssetTypeAnImage(path) {
    var ext = require$$0$1.extname(path);
    return isAnImage(ext);
}
|
|||
|
/**
 * Best-effort platform detection from the browser's `navigator.appVersion`
 * string. Returns "Windows", "MacOS", "Linux" or "Unknown OS".
 */
function getOS() {
    var appVersion = navigator.appVersion;
    if (appVersion.includes("Win")) {
        return "Windows";
    }
    if (appVersion.includes("Mac")) {
        return "MacOS";
    }
    if (appVersion.includes("X11")) {
        return "Linux";
    }
    return "Unknown OS";
}
|
|||
|
/**
 * Drain an async-iterable stream, collecting every chunk into a Buffer, and
 * resolve with the whole payload decoded as UTF-8.
 *
 * NOTE: this is TypeScript-compiled output of a `for await (const chunk of
 * stream)` loop — the numbered cases are the state machine; do not hand-edit
 * the case labels or `trys` bookkeeping.
 */
function streamToString(stream) {
    var _a, stream_1, stream_1_1;
    var _b, e_1, _c, _d;
    return __awaiter(this, void 0, void 0, function () {
        var chunks, chunk, e_1_1;
        return __generator(this, function (_e) {
            switch (_e.label) {
                case 0:
                    chunks = [];
                    _e.label = 1;
                case 1:
                    // try/catch/finally frame for the async iteration
                    _e.trys.push([1, 6, 7, 12]);
                    _a = true, stream_1 = __asyncValues(stream);
                    _e.label = 2;
                case 2: return [4 /*yield*/, stream_1.next()];
                case 3:
                    if (!(stream_1_1 = _e.sent(), _b = stream_1_1.done, !_b)) return [3 /*break*/, 5];
                    _d = stream_1_1.value;
                    _a = false;
                    try {
                        chunk = _d;
                        // Normalize each chunk (string or buffer) to a Buffer.
                        chunks.push(Buffer.from(chunk));
                    }
                    finally {
                        _a = true;
                    }
                    _e.label = 4;
                case 4: return [3 /*break*/, 2];
                case 5: return [3 /*break*/, 12];
                case 6:
                    e_1_1 = _e.sent();
                    e_1 = { error: e_1_1 };
                    return [3 /*break*/, 12];
                case 7:
                    // finally: close the async iterator if the loop exited early
                    _e.trys.push([7, , 10, 11]);
                    if (!(!_a && !_b && (_c = stream_1.return))) return [3 /*break*/, 9];
                    return [4 /*yield*/, _c.call(stream_1)];
                case 8:
                    _e.sent();
                    _e.label = 9;
                case 9: return [3 /*break*/, 11];
                case 10:
                    // Re-throw any error captured during iteration.
                    if (e_1) throw e_1.error;
                    return [7 /*endfinally*/];
                case 11: return [7 /*endfinally*/];
                case 12: return [2 /*return*/, Buffer.concat(chunks).toString("utf-8")];
            }
        });
    });
}
|
|||
|
/**
 * Extract the asset (file) name from a URL: everything after the final "/"
 * (the whole string when there is no slash), with any query string and
 * fragment stripped.
 */
function getUrlAsset(url) {
    var tail = url.substr(1 + url.lastIndexOf("/"));
    var withoutQuery = tail.split("?")[0];
    return withoutQuery.split("#")[0];
}
|
|||
|
/**
 * Return the LAST entry in `list` that starts with "http", or undefined when
 * none does. Used to pick the uploaded image URL out of PicGo-Core's CLI
 * output lines.
 *
 * Fixes two defects in the previous version:
 *  - `return` inside `forEach` does not break the loop, so the final
 *    assignment won and the function actually returned the FIRST http item;
 *  - `list.reverse()` mutated the caller's array in place.
 */
function getLastImage(list) {
    // Scan from the end so the last matching item is found without copying
    // or mutating the input.
    for (var i = list.length - 1; i >= 0; i--) {
        var item = list[i];
        if (item && item.startsWith("http")) {
            return item;
        }
    }
    return undefined;
}
|
|||
|
/**
 * Index an array of objects by the value of property `key` on each element.
 * Later elements overwrite earlier ones that share the same key value.
 */
function arrayToObject(arr, key) {
    return arr.reduce(function (acc, element) {
        acc[element[key]] = element;
        return acc;
    }, {});
}
|
|||
|
|
|||
|
/**
 * Uploader backed by a running PicGo (or PicList) HTTP server: images are
 * POSTed to `settings.uploadServer` and the server performs the upload.
 *
 * NOTE: method bodies are TypeScript-compiled async state machines
 * (__awaiter/__generator); the numbered cases correspond to `await` points.
 */
var PicGoUploader = /** @class */ (function () {
    function PicGoUploader(settings, plugin) {
        this.settings = settings;
        this.plugin = plugin;
    }
    /**
     * POST a JSON body `{list: fileList}` of local file paths to the upload
     * server and return the parsed JSON response.
     */
    PicGoUploader.prototype.uploadFiles = function (fileList) {
        return __awaiter(this, void 0, void 0, function () {
            var response, data, uploadUrlFullResultList;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, obsidian.requestUrl({
                            url: this.settings.uploadServer,
                            method: "POST",
                            headers: { "Content-Type": "application/json" },
                            body: JSON.stringify({ list: fileList }),
                        })];
                    case 1:
                        response = _a.sent();
                        return [4 /*yield*/, response.json];
                    case 2:
                        data = _a.sent();
                        // piclist: servers that return `fullResult` (PicList) get their
                        // per-image results remembered in plugin settings.
                        if (data.fullResult) {
                            uploadUrlFullResultList = data.fullResult || [];
                            this.settings.uploadedImages = __spreadArray(__spreadArray([], __read((this.settings.uploadedImages || [])), false), __read(uploadUrlFullResultList), false);
                        }
                        return [2 /*return*/, data];
                }
            });
        });
    };
    /**
     * Ask the upload server to upload whatever image is on the clipboard
     * (empty POST body). Returns {code, msg, data} where `data` is the first
     * resulting URL on success, or "" with code -1 on failure.
     */
    PicGoUploader.prototype.uploadFileByClipboard = function () {
        return __awaiter(this, void 0, void 0, function () {
            var res, data, uploadUrlFullResultList;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, obsidian.requestUrl({
                            url: this.settings.uploadServer,
                            method: "POST",
                        })];
                    case 1:
                        res = _a.sent();
                        return [4 /*yield*/, res.json];
                    case 2:
                        data = _a.sent();
                        // piclist: persist `fullResult` entries, as in uploadFiles.
                        if (data.fullResult) {
                            uploadUrlFullResultList = data.fullResult || [];
                            this.settings.uploadedImages = __spreadArray(__spreadArray([], __read((this.settings.uploadedImages || [])), false), __read(uploadUrlFullResultList), false);
                            this.plugin.saveSettings();
                        }
                        if (res.status !== 200) {
                            // NOTE(review): this expression is a no-op — its object literal
                            // is built and immediately discarded.
                            ({ response: data, body: data.msg });
                            return [2 /*return*/, {
                                    code: -1,
                                    msg: data.msg,
                                    data: "",
                                }];
                        }
                        else {
                            return [2 /*return*/, {
                                    code: 0,
                                    msg: "success",
                                    // `result` may be a single URL string or an array of URLs.
                                    data: typeof data.result == "string" ? data.result : data.result[0],
                                }];
                        }
                }
            });
        });
    };
    return PicGoUploader;
}());
|
|||
|
/**
 * Uploader backed by the PicGo-Core command line tool: images are uploaded
 * by shelling out to `picgo upload` (or `settings.picgoCorePath`).
 *
 * NOTE: method bodies are TypeScript-compiled async state machines
 * (__awaiter/__generator); the numbered cases correspond to `await` points.
 */
var PicGoCoreUploader = /** @class */ (function () {
    function PicGoCoreUploader(settings, plugin) {
        this.settings = settings;
        this.plugin = plugin;
    }
    /**
     * Upload local files via `picgo upload "<f1>" "<f2>" ...` and return
     * {success, result?: string[]} — `result` holds the last `length` output
     * lines, which PicGo-Core prints as the uploaded URLs.
     */
    PicGoCoreUploader.prototype.uploadFiles = function (fileList) {
        return __awaiter(this, void 0, void 0, function () {
            var length, cli, command, res, splitList, splitListLength, data;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0:
                        length = fileList.length;
                        cli = this.settings.picgoCorePath || "picgo";
                        // Quote each path; paths containing `"` are not escaped here.
                        command = "".concat(cli, " upload ").concat(fileList
                            .map(function (item) { return "\"".concat(item, "\""); })
                            .join(" "));
                        return [4 /*yield*/, this.exec(command)];
                    case 1:
                        res = _a.sent();
                        splitList = res.split("\n");
                        splitListLength = splitList.length;
                        // Take the `length` lines just before the trailing line.
                        data = splitList.splice(splitListLength - 1 - length, length);
                        if (res.includes("PicGo ERROR")) {
                            console.log(command, res);
                            return [2 /*return*/, {
                                    success: false,
                                    msg: "失败",
                                }];
                        }
                        else {
                            return [2 /*return*/, {
                                    success: true,
                                    result: data,
                                }];
                        }
                }
            });
        });
    };
    // PicGo-Core upload handling (clipboard path)
    /**
     * Upload the clipboard image via PicGo-Core and return {code, msg, data}
     * where `data` is the last http(s) URL found in the CLI output.
     */
    PicGoCoreUploader.prototype.uploadFileByClipboard = function () {
        return __awaiter(this, void 0, void 0, function () {
            var res, splitList, lastImage;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, this.uploadByClip()];
                    case 1:
                        res = _a.sent();
                        splitList = res.split("\n");
                        lastImage = getLastImage(splitList);
                        if (lastImage) {
                            return [2 /*return*/, {
                                    code: 0,
                                    msg: "success",
                                    data: lastImage,
                                }];
                        }
                        else {
                            console.log(splitList);
                            // new Notice(`"Please check PicGo-Core config"\n${res}`);
                            return [2 /*return*/, {
                                    code: -1,
                                    msg: "\"Please check PicGo-Core config\"\n".concat(res),
                                    data: "",
                                }];
                        }
                }
            });
        });
    };
    // Raw clipboard-upload output from PicGo-Core
    /**
     * Run `<picgoCorePath|picgo> upload` with no file arguments (PicGo-Core
     * then uploads the clipboard) and return the raw stdout text.
     */
    PicGoCoreUploader.prototype.uploadByClip = function () {
        return __awaiter(this, void 0, void 0, function () {
            var command, res;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0:
                        if (this.settings.picgoCorePath) {
                            command = "".concat(this.settings.picgoCorePath, " upload");
                        }
                        else {
                            command = "picgo upload";
                        }
                        return [4 /*yield*/, this.exec(command)];
                    case 1:
                        res = _a.sent();
                        // const res = await this.spawnChild();
                        return [2 /*return*/, res];
                }
            });
        });
    };
    /**
     * Run a shell command via child_process.exec and resolve with its stdout
     * decoded as UTF-8 (stdout is consumed as a stream via streamToString).
     */
    PicGoCoreUploader.prototype.exec = function (command) {
        return __awaiter(this, void 0, void 0, function () {
            var stdout, res;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, require$$0$2.exec(command)];
                    case 1:
                        stdout = (_a.sent()).stdout;
                        return [4 /*yield*/, streamToString(stdout)];
                    case 2:
                        res = _a.sent();
                        return [2 /*return*/, res];
                }
            });
        });
    };
    /**
     * Alternative runner using child_process.spawn: collects stdout/stderr
     * via async iteration, waits for the "close" event, and throws when the
     * exit code is non-zero. Currently unused (see commented call above).
     *
     * NOTE: this is a compiled pair of `for await` loops (stdout then
     * stderr); the `trys` bookkeeping mirrors streamToString above.
     */
    PicGoCoreUploader.prototype.spawnChild = function () {
        var _a, e_1, _b, _c, _d, e_2, _e, _f;
        return __awaiter(this, void 0, void 0, function () {
            var spawn, child, data, _g, _h, _j, chunk, e_1_1, error, _k, _l, _m, chunk, e_2_1, exitCode;
            return __generator(this, function (_o) {
                switch (_o.label) {
                    case 0:
                        spawn = require("child_process").spawn;
                        child = spawn("picgo", ["upload"], {
                            shell: true,
                        });
                        data = "";
                        _o.label = 1;
                    case 1:
                        // for await (chunk of child.stdout) { data += chunk; }
                        _o.trys.push([1, 6, 7, 12]);
                        _g = true, _h = __asyncValues(child.stdout);
                        _o.label = 2;
                    case 2: return [4 /*yield*/, _h.next()];
                    case 3:
                        if (!(_j = _o.sent(), _a = _j.done, !_a)) return [3 /*break*/, 5];
                        _c = _j.value;
                        _g = false;
                        try {
                            chunk = _c;
                            data += chunk;
                        }
                        finally {
                            _g = true;
                        }
                        _o.label = 4;
                    case 4: return [3 /*break*/, 2];
                    case 5: return [3 /*break*/, 12];
                    case 6:
                        e_1_1 = _o.sent();
                        e_1 = { error: e_1_1 };
                        return [3 /*break*/, 12];
                    case 7:
                        _o.trys.push([7, , 10, 11]);
                        if (!(!_g && !_a && (_b = _h.return))) return [3 /*break*/, 9];
                        return [4 /*yield*/, _b.call(_h)];
                    case 8:
                        _o.sent();
                        _o.label = 9;
                    case 9: return [3 /*break*/, 11];
                    case 10:
                        if (e_1) throw e_1.error;
                        return [7 /*endfinally*/];
                    case 11: return [7 /*endfinally*/];
                    case 12:
                        // for await (chunk of child.stderr) { error += chunk; }
                        error = "";
                        _o.label = 13;
                    case 13:
                        _o.trys.push([13, 18, 19, 24]);
                        _k = true, _l = __asyncValues(child.stderr);
                        _o.label = 14;
                    case 14: return [4 /*yield*/, _l.next()];
                    case 15:
                        if (!(_m = _o.sent(), _d = _m.done, !_d)) return [3 /*break*/, 17];
                        _f = _m.value;
                        _k = false;
                        try {
                            chunk = _f;
                            error += chunk;
                        }
                        finally {
                            _k = true;
                        }
                        _o.label = 16;
                    case 16: return [3 /*break*/, 14];
                    case 17: return [3 /*break*/, 24];
                    case 18:
                        e_2_1 = _o.sent();
                        e_2 = { error: e_2_1 };
                        return [3 /*break*/, 24];
                    case 19:
                        _o.trys.push([19, , 22, 23]);
                        if (!(!_k && !_d && (_e = _l.return))) return [3 /*break*/, 21];
                        return [4 /*yield*/, _e.call(_l)];
                    case 20:
                        _o.sent();
                        _o.label = 21;
                    case 21: return [3 /*break*/, 23];
                    case 22:
                        if (e_2) throw e_2.error;
                        return [7 /*endfinally*/];
                    case 23: return [7 /*endfinally*/];
                    case 24: return [4 /*yield*/, new Promise(function (resolve, reject) {
                            child.on("close", resolve);
                        })];
                    case 25:
                        exitCode = _o.sent();
                        if (exitCode) {
                            throw new Error("subprocess error exit ".concat(exitCode, ", ").concat(error));
                        }
                        return [2 /*return*/, data];
                }
            });
        });
    };
    return PicGoCoreUploader;
}());
|
|||
|
|
|||
|
/**
 * Deletes previously-uploaded images by POSTing their records to the
 * configured PicGo/PicList delete endpoint (`settings.deleteServer`).
 */
var PicGoDeleter = /** @class */ (function () {
    function PicGoDeleter(plugin) {
        this.plugin = plugin;
    }
    /**
     * POST `{list: configMap}` to the delete server and return the parsed
     * JSON response.
     */
    PicGoDeleter.prototype.deleteImage = function (configMap) {
        return __awaiter(this, void 0, void 0, function () {
            var response, data;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, obsidian.requestUrl({
                            url: this.plugin.settings.deleteServer,
                            method: "POST",
                            headers: { "Content-Type": "application/json" },
                            body: JSON.stringify({
                                list: configMap,
                            }),
                        })];
                    case 1:
                        response = _a.sent();
                        data = response.json;
                        return [2 /*return*/, data];
                }
            });
        });
    };
    return PicGoDeleter;
}());
|
|||
|
|
|||
|
// Markdown image links:
//   ![](./dsa/aa.png)     — a local image link must have a file extension
//   ![](https://dasdasda) — an internet image link need not have one
// Capture groups: [1]=alt text, [2]=local path (with extension),
//                 [3]=alt text, [4]=http(s) URL.
var REGEX_FILE = /\!\[(.*?)\]\((\S+\.\w+)\)|\!\[(.*?)\]\((https?:\/\/.*?)\)/g;
// Wiki-style embeds ![[path|suffix]]: [1]=path, [2]=optional "|..." suffix.
var REGEX_WIKI_FILE = /\!\[\[(.*?)(\s*?\|.*?)?\]\]/g;
|
|||
|
var Helper = /** @class */ (function () {
|
|||
|
function Helper(app) {
|
|||
|
this.app = app;
|
|||
|
}
|
|||
|
Helper.prototype.getFrontmatterValue = function (key, defaultValue) {
|
|||
|
if (defaultValue === void 0) { defaultValue = undefined; }
|
|||
|
var file = this.app.workspace.getActiveFile();
|
|||
|
if (!file) {
|
|||
|
return undefined;
|
|||
|
}
|
|||
|
var path = file.path;
|
|||
|
var cache = this.app.metadataCache.getCache(path);
|
|||
|
var value = defaultValue;
|
|||
|
if ((cache === null || cache === void 0 ? void 0 : cache.frontmatter) && cache.frontmatter.hasOwnProperty(key)) {
|
|||
|
value = cache.frontmatter[key];
|
|||
|
}
|
|||
|
return value;
|
|||
|
};
|
|||
|
Helper.prototype.getEditor = function () {
|
|||
|
var mdView = this.app.workspace.getActiveViewOfType(obsidian.MarkdownView);
|
|||
|
if (mdView) {
|
|||
|
return mdView.editor;
|
|||
|
}
|
|||
|
else {
|
|||
|
return null;
|
|||
|
}
|
|||
|
};
|
|||
|
Helper.prototype.getValue = function () {
|
|||
|
var editor = this.getEditor();
|
|||
|
return editor.getValue();
|
|||
|
};
|
|||
|
Helper.prototype.setValue = function (value) {
|
|||
|
var editor = this.getEditor();
|
|||
|
var _a = editor.getScrollInfo(), left = _a.left, top = _a.top;
|
|||
|
var position = editor.getCursor();
|
|||
|
editor.setValue(value);
|
|||
|
editor.scrollTo(left, top);
|
|||
|
editor.setCursor(position);
|
|||
|
};
|
|||
|
// get all file urls, include local and internet
|
|||
|
Helper.prototype.getAllFiles = function () {
|
|||
|
var editor = this.getEditor();
|
|||
|
var value = editor.getValue();
|
|||
|
return this.getImageLink(value);
|
|||
|
};
|
|||
|
/**
 * Parse markdown text and return every image/file link it contains.
 * Handles both standard markdown links (REGEX_FILE) and wiki-style
 * links (REGEX_WIKI_FILE).
 *
 * Fix: removed two leftover `console.log` debug statements that spammed
 * the developer console on every parse; replaced the compiled `__values`
 * iterator boilerplate with plain loops (same iteration order/behavior).
 *
 * @param {string} value - markdown source text
 * @returns {{path: string, name: string, source: string}[]} one entry
 *          per link; `source` is the full matched text for later
 *          search-and-replace in the document.
 */
Helper.prototype.getImageLink = function (value) {
    var fileArray = [];
    var matches = Array.from(value.matchAll(REGEX_FILE));
    for (var i = 0; i < matches.length; i++) {
        var match = matches[i];
        // REGEX_FILE has two alternatives, so name/path land either in
        // capture groups 1/2 or 3/4.
        var linkName = match[1];
        var linkPath = match[2];
        if (linkName === undefined) {
            linkName = match[3];
        }
        if (linkPath === undefined) {
            linkPath = match[4];
        }
        fileArray.push({
            path: linkPath,
            name: linkName,
            source: match[0],
        });
    }
    var wikiMatches = Array.from(value.matchAll(REGEX_WIKI_FILE));
    for (var j = 0; j < wikiMatches.length; j++) {
        var wikiMatch = wikiMatches[j];
        var wikiName = require$$0$1.parse(wikiMatch[1]).name;
        if (wikiMatch[2]) {
            // Keep the size suffix (e.g. "|300") as part of the display name.
            wikiName = "".concat(wikiName).concat(wikiMatch[2]);
        }
        fileArray.push({
            path: wikiMatch[1],
            name: wikiName,
            source: wikiMatch[0],
        });
    }
    return fileArray;
};
|
|||
|
// True when the hostname of `src` matches any entry of the
// comma-separated `blackDomains` list (substring match per entry).
Helper.prototype.hasBlackDomain = function (src, blackDomains) {
    if (blackDomains.trim() === "") {
        return false;
    }
    var hostname = new URL(src).hostname;
    var entries = blackDomains
        .split(",")
        .filter(function (entry) { return entry !== ""; });
    return entries.some(function (entry) { return hostname.includes(entry); });
};
|
|||
|
return Helper;
|
|||
|
}());
|
|||
|
|
|||
|
// Empty locale tables: t() falls back to the English strings for any
// key missing here.

// العربية (Arabic)
var ar = {};

// čeština (Czech)
var cz = {};

// Dansk (Danish)
var da = {};

// Deutsch (German)
var de = {};
|
|||
|
|
|||
|
// English — the reference locale. t() falls back to these strings for
// every key missing from the active locale's table, so this map must
// contain every translatable key.
var en = {
    // setting.ts
    "Plugin Settings": "Plugin Settings",
    "Auto pasted upload": "Auto pasted upload",
    "If you set this value true, when you paste image, it will be auto uploaded(you should set the picGo server rightly)": "If you set this value true, when you paste image, it will be auto uploaded(you should set the picGo server rightly)",
    "Default uploader": "Default uploader",
    "PicGo server": "PicGo server",
    "Please input PicGo server": "Please input PicGo server",
    "PicGo delete server": "PicGo server delete route(you need to use PicList app)",
    "PicList desc": "Search PicList on Github to download and install",
    "Please input PicGo delete server": "Please input PicGo delete server",
    "Delete image using PicList": "Delete image using PicList",
    "PicGo-Core path": "PicGo-Core path",
    "Delete successfully": "Delete successfully",
    "Delete failed": "Delete failed",
    "Image size suffix": "Image size suffix",
    "Image size suffix Description": "like |300 for resize image in ob.",
    "Please input image size suffix": "Please input image size suffix",
    "Error, could not delete": "Error, could not delete",
    "Please input PicGo-Core path, default using environment variables": "Please input PicGo-Core path, default using environment variables",
    "Work on network": "Work on network",
    "Work on network Description": "Allow upload network image by 'Upload all' command.\n Or when you paste, md standard image link in your clipboard will be auto upload.",
    fixPath: "fixPath",
    fixPathWarning: "This option is used to fix PicGo-core upload failures on Linux and Mac. It modifies the PATH variable within Obsidian. If Obsidian encounters any bugs, turn off the option, try again! ",
    "Upload when clipboard has image and text together": "Upload when clipboard has image and text together",
    "When you copy, some application like Excel will image and text to clipboard, you can upload or not.": "When you copy, some application like Excel will image and text to clipboard, you can upload or not.",
    "Network Domain Black List": "Network Domain Black List",
    "Network Domain Black List Description": "Image in the domain list will not be upload,use comma separated",
    "Delete source file after you upload file": "Delete source file after you upload file",
    "Delete source file in ob assets after you upload file.": "Delete source file in ob assets after you upload file.",
    "Image desc": "Image desc",
    reserve: "default",
    "remove all": "none",
    "remove default": "remove image.png",
};
|
|||
|
|
|||
|
// More empty locale tables — all fall back to English via t().

// British English
var enGB = {};

// Español (Spanish)
var es = {};

// français (French)
var fr = {};

// हिन्दी (Hindi)
var hi = {};

// Bahasa Indonesia (Indonesian)
var id = {};

// Italiano (Italian)
var it = {};

// 日本語 (Japanese)
var ja = {};

// 한국어 (Korean)
var ko = {};

// Nederlands (Dutch)
var nl = {};

// Norsk (Norwegian)
var no = {};

// język polski (Polish)
var pl = {};

// Português (Portuguese)
var pt = {};

// Português do Brasil
// Brazilian Portuguese
var ptBR = {};

// Română (Romanian)
var ro = {};

// русский (Russian)
var ru = {};

// Türkçe (Turkish)
var tr = {};
|
|||
|
|
|||
|
// 简体中文 (Simplified Chinese) — the only fully translated locale
// besides English. Keys must match the English table; missing keys fall
// back to English via t().
var zhCN = {
    // setting.ts
    "Plugin Settings": "插件设置",
    "Auto pasted upload": "剪切板自动上传",
    "If you set this value true, when you paste image, it will be auto uploaded(you should set the picGo server rightly)": "启用该选项后,黏贴图片时会自动上传(你需要正确配置picgo)",
    "Default uploader": "默认上传器",
    "PicGo server": "PicGo server",
    "Please input PicGo server": "请输入 PicGo server",
    "PicGo delete server": "PicGo server 删除接口(请使用PicList来启用此功能)",
    "PicList desc": "PicList是PicGo二次开发版,请Github搜索PicList下载",
    "Please input PicGo delete server": "请输入 PicGo server 删除接口",
    "Delete image using PicList": "使用 PicList 删除图片",
    "PicGo-Core path": "PicGo-Core 路径",
    "Delete successfully": "删除成功",
    "Delete failed": "删除失败",
    "Error, could not delete": "错误,无法删除",
    "Image size suffix": "图片大小后缀",
    "Image size suffix Description": "比如:|300 用于调整图片大小",
    "Please input image size suffix": "请输入图片大小后缀",
    "Please input PicGo-Core path, default using environment variables": "请输入 PicGo-Core path,默认使用环境变量",
    "Work on network": "应用网络图片",
    "Work on network Description": "当你上传所有图片时,也会上传网络图片。以及当你进行黏贴时,剪切板中的标准 md 图片会被上传",
    fixPath: "修正PATH变量",
    fixPathWarning: "此选项用于修复Linux和Mac上 PicGo-Core 上传失败的问题。它会修改 Obsidian 内的 PATH 变量,如果 Obsidian 遇到任何BUG,先关闭这个选项试试!",
    "Upload when clipboard has image and text together": "当剪切板同时拥有文本和图片剪切板数据时是否上传图片",
    "When you copy, some application like Excel will image and text to clipboard, you can upload or not.": "当你复制时,某些应用例如 Excel 会在剪切板同时文本和图像数据,确认是否上传。",
    "Network Domain Black List": "网络图片域名黑名单",
    "Network Domain Black List Description": "黑名单域名中的图片将不会被上传,用英文逗号分割",
    "Delete source file after you upload file": "上传文件后移除源文件",
    "Delete source file in ob assets after you upload file.": "上传文件后移除在ob附件文件夹中的文件",
    "Image desc": "图片描述",
    reserve: "默认",
    "remove all": "无",
    "remove default": "移除image.png",
};

// 繁體中文 (Traditional Chinese) — empty: falls back to English.
var zhTW = {};
|
|||
|
|
|||
|
// moment.js locale code → translation table.
// Note the naming asymmetries: moment's "cs" (Czech) maps to the table
// named `cz`, and "nn" (Norwegian Nynorsk) maps to `no`.
var localeMap = {
    ar: ar,
    cs: cz,
    da: da,
    de: de,
    en: en,
    'en-gb': enGB,
    es: es,
    fr: fr,
    hi: hi,
    id: id,
    it: it,
    ja: ja,
    ko: ko,
    nl: nl,
    nn: no,
    pl: pl,
    pt: pt,
    'pt-br': ptBR,
    ro: ro,
    ru: ru,
    tr: tr,
    'zh-cn': zhCN,
    'zh-tw': zhTW,
};
|
|||
|
// Active translation table, resolved once from Obsidian's moment locale.
// Unknown locale codes leave this undefined, which makes t() use English.
var locale = localeMap[obsidian.moment.locale()];
/**
 * Translate a UI string key. Falls back to the English table when the
 * active locale is missing or lacks the key.
 * @param {string} str - translation key
 * @returns {string} translated string (or undefined for unknown keys)
 */
function t(str) {
    if (locale) {
        var translated = locale[str];
        if (translated) {
            return translated;
        }
    }
    return en[str];
}
|
|||
|
|
|||
|
// Factory defaults; merged with persisted plugin data in loadSettings().
var DEFAULT_SETTINGS = {
    uploadByClipSwitch: true, // auto-upload images pasted from the clipboard
    uploader: "PicGo", // "PicGo" (desktop app) or "PicGo-Core" (CLI)
    uploadServer: "http://127.0.0.1:36677/upload", // PicGo HTTP upload endpoint
    deleteServer: "http://127.0.0.1:36677/delete", // PicList delete endpoint
    imageSizeSuffix: "", // e.g. "|300" to size images in Obsidian
    picgoCorePath: "", // empty: resolve picgo from environment PATH
    workOnNetWork: false, // also upload http(s)-hosted images
    fixPath: false, // patch PATH inside Obsidian (PicGo-Core on Linux/Mac)
    applyImage: true, // upload when clipboard holds image + text together
    newWorkBlackDomains: "", // comma-separated domains excluded from network upload
    deleteSource: false, // delete the local source file after upload
    imageDesc: "origin", // "origin" | "none" | "removeDefault" (see Image desc setting)
};
|
|||
|
// Settings pane for the plugin. All controls persist immediately via
// plugin.saveSettings(); controls whose choice changes which other
// controls are visible also call this.display() to rebuild the pane.
var SettingTab = /** @class */ (function (_super) {
    __extends(SettingTab, _super);
    function SettingTab(app, plugin) {
        var _this = _super.call(this, app, plugin) || this;
        _this.plugin = plugin;
        return _this;
    }
    // Rebuild the whole settings UI from scratch.
    SettingTab.prototype.display = function () {
        var _this = this;
        var containerEl = this.containerEl;
        var os = getOS();
        containerEl.empty();
        containerEl.createEl("h2", { text: t("Plugin Settings") });
        // Toggle: auto-upload images on paste.
        new obsidian.Setting(containerEl)
            .setName(t("Auto pasted upload"))
            .setDesc(t("If you set this value true, when you paste image, it will be auto uploaded(you should set the picGo server rightly)"))
            .addToggle(function (toggle) {
            return toggle
                .setValue(_this.plugin.settings.uploadByClipSwitch)
                .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            this.plugin.settings.uploadByClipSwitch = value;
                            return [4 /*yield*/, this.plugin.saveSettings()];
                        case 1:
                            _a.sent();
                            return [2 /*return*/];
                    }
                });
            }); });
        });
        // Dropdown: which uploader backend to use.
        new obsidian.Setting(containerEl)
            .setName(t("Default uploader"))
            .setDesc(t("Default uploader"))
            .addDropdown(function (cb) {
            return cb
                .addOption("PicGo", "PicGo(app)")
                .addOption("PicGo-Core", "PicGo-Core")
                .setValue(_this.plugin.settings.uploader)
                .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            this.plugin.settings.uploader = value;
                            // Rebuild: the uploader-specific fields below differ.
                            this.display();
                            return [4 /*yield*/, this.plugin.saveSettings()];
                        case 1:
                            _a.sent();
                            return [2 /*return*/];
                    }
                });
            }); });
        });
        // PicGo(app)-only fields: upload endpoint and PicList delete endpoint.
        if (this.plugin.settings.uploader === "PicGo") {
            new obsidian.Setting(containerEl)
                .setName(t("PicGo server"))
                .setDesc(t("PicGo server"))
                .addText(function (text) {
                return text
                    .setPlaceholder(t("Please input PicGo server"))
                    .setValue(_this.plugin.settings.uploadServer)
                    .onChange(function (key) { return __awaiter(_this, void 0, void 0, function () {
                    return __generator(this, function (_a) {
                        switch (_a.label) {
                            case 0:
                                this.plugin.settings.uploadServer = key;
                                return [4 /*yield*/, this.plugin.saveSettings()];
                            case 1:
                                _a.sent();
                                return [2 /*return*/];
                        }
                    });
                }); });
            });
            new obsidian.Setting(containerEl)
                .setName(t("PicGo delete server"))
                .setDesc(t("PicList desc"))
                .addText(function (text) {
                return text
                    .setPlaceholder(t("Please input PicGo delete server"))
                    .setValue(_this.plugin.settings.deleteServer)
                    .onChange(function (key) { return __awaiter(_this, void 0, void 0, function () {
                    return __generator(this, function (_a) {
                        switch (_a.label) {
                            case 0:
                                this.plugin.settings.deleteServer = key;
                                return [4 /*yield*/, this.plugin.saveSettings()];
                            case 1:
                                _a.sent();
                                return [2 /*return*/];
                        }
                    });
                }); });
            });
        }
        // PicGo-Core-only fields: binary path plus the PATH fix (non-Windows).
        if (this.plugin.settings.uploader === "PicGo-Core") {
            new obsidian.Setting(containerEl)
                .setName(t("PicGo-Core path"))
                .setDesc(t("Please input PicGo-Core path, default using environment variables"))
                .addText(function (text) {
                return text
                    .setPlaceholder("")
                    .setValue(_this.plugin.settings.picgoCorePath)
                    .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                    return __generator(this, function (_a) {
                        switch (_a.label) {
                            case 0:
                                this.plugin.settings.picgoCorePath = value;
                                return [4 /*yield*/, this.plugin.saveSettings()];
                            case 1:
                                _a.sent();
                                return [2 /*return*/];
                        }
                    });
                }); });
            });
            if (os !== "Windows") {
                new obsidian.Setting(containerEl)
                    .setName(t("fixPath"))
                    .setDesc(t("fixPathWarning"))
                    .addToggle(function (toggle) {
                    return toggle
                        .setValue(_this.plugin.settings.fixPath)
                        .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                        return __generator(this, function (_a) {
                            switch (_a.label) {
                                case 0:
                                    this.plugin.settings.fixPath = value;
                                    return [4 /*yield*/, this.plugin.saveSettings()];
                                case 1:
                                    _a.sent();
                                    return [2 /*return*/];
                            }
                        });
                    }); });
                });
            }
        }
        // image desc setting
        new obsidian.Setting(containerEl)
            .setName(t("Image desc"))
            .setDesc(t("Image desc"))
            .addDropdown(function (cb) {
            return cb
                .addOption("origin", t("reserve")) // keep the description as-is
                .addOption("none", t("remove all")) // strip every description
                .addOption("removeDefault", t("remove default")) // strip only the default "image.png"
                .setValue(_this.plugin.settings.imageDesc)
                .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            this.plugin.settings.imageDesc = value;
                            this.display();
                            return [4 /*yield*/, this.plugin.saveSettings()];
                        case 1:
                            _a.sent();
                            return [2 /*return*/];
                    }
                });
            }); });
        });
        // Text: suffix (e.g. "|300") appended to inserted image links.
        new obsidian.Setting(containerEl)
            .setName(t("Image size suffix"))
            .setDesc(t("Image size suffix Description"))
            .addText(function (text) {
            return text
                .setPlaceholder(t("Please input image size suffix"))
                .setValue(_this.plugin.settings.imageSizeSuffix)
                .onChange(function (key) { return __awaiter(_this, void 0, void 0, function () {
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            this.plugin.settings.imageSizeSuffix = key;
                            return [4 /*yield*/, this.plugin.saveSettings()];
                        case 1:
                            _a.sent();
                            return [2 /*return*/];
                    }
                });
            }); });
        });
        // Toggle: also upload network-hosted images.
        new obsidian.Setting(containerEl)
            .setName(t("Work on network"))
            .setDesc(t("Work on network Description"))
            .addToggle(function (toggle) {
            return toggle
                .setValue(_this.plugin.settings.workOnNetWork)
                .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            this.plugin.settings.workOnNetWork = value;
                            this.display();
                            return [4 /*yield*/, this.plugin.saveSettings()];
                        case 1:
                            _a.sent();
                            return [2 /*return*/];
                    }
                });
            }); });
        });
        // Text area: comma-separated domains excluded from network upload.
        new obsidian.Setting(containerEl)
            .setName(t("Network Domain Black List"))
            .setDesc(t("Network Domain Black List Description"))
            .addTextArea(function (textArea) {
            return textArea
                .setValue(_this.plugin.settings.newWorkBlackDomains)
                .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            this.plugin.settings.newWorkBlackDomains = value;
                            return [4 /*yield*/, this.plugin.saveSettings()];
                        case 1:
                            _a.sent();
                            return [2 /*return*/];
                    }
                });
            }); });
        });
        // Toggle: upload when clipboard carries both image and text data.
        new obsidian.Setting(containerEl)
            .setName(t("Upload when clipboard has image and text together"))
            .setDesc(t("When you copy, some application like Excel will image and text to clipboard, you can upload or not."))
            .addToggle(function (toggle) {
            return toggle
                .setValue(_this.plugin.settings.applyImage)
                .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            this.plugin.settings.applyImage = value;
                            this.display();
                            return [4 /*yield*/, this.plugin.saveSettings()];
                        case 1:
                            _a.sent();
                            return [2 /*return*/];
                    }
                });
            }); });
        });
        // Toggle: delete the vault-local source file after a successful upload.
        new obsidian.Setting(containerEl)
            .setName(t("Delete source file after you upload file"))
            .setDesc(t("Delete source file in ob assets after you upload file."))
            .addToggle(function (toggle) {
            return toggle
                .setValue(_this.plugin.settings.deleteSource)
                .onChange(function (value) { return __awaiter(_this, void 0, void 0, function () {
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0:
                            this.plugin.settings.deleteSource = value;
                            this.display();
                            return [4 /*yield*/, this.plugin.saveSettings()];
                        case 1:
                            _a.sent();
                            return [2 /*return*/];
                    }
                });
            }); });
        });
    };
    return SettingTab;
}(obsidian.PluginSettingTab));
|
|||
|
|
|||
|
var imageAutoUploadPlugin = /** @class */ (function (_super) {
|
|||
|
__extends(imageAutoUploadPlugin, _super);
|
|||
|
    function imageAutoUploadPlugin() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        // Append a "Delete image using PicList" entry to a context menu for
        // the image at `imgPath`. On success: clears the selected link text,
        // drops the entry from settings.uploadedImages, and persists.
        // (Compiled async arrow function — states below implement
        // try { ... } catch { Notice(error) }.)
        _this.addMenu = function (menu, imgPath, editor) {
            menu.addItem(function (item) {
                return item
                    .setIcon("trash-2")
                    .setTitle(t("Delete image using PicList"))
                    .onClick(function () { return __awaiter(_this, void 0, void 0, function () {
                    var selectedItem, res, selection;
                    return __generator(this, function (_b) {
                        switch (_b.label) {
                            case 0:
                                // try-region covering the lookup + remote delete.
                                _b.trys.push([0, 3, , 4]);
                                selectedItem = this.settings.uploadedImages.find(function (item) { return item.imgUrl === imgPath; });
                                if (!selectedItem) return [3 /*break*/, 2];
                                return [4 /*yield*/, this.picGoDeleter.deleteImage([selectedItem])];
                            case 1:
                                res = _b.sent();
                                if (res.success) {
                                    new obsidian.Notice(t("Delete successfully"));
                                    // Remove the selected markdown link from the document.
                                    selection = editor.getSelection();
                                    if (selection) {
                                        editor.replaceSelection("");
                                    }
                                    // Forget the image locally as well.
                                    this.settings.uploadedImages =
                                        this.settings.uploadedImages.filter(function (item) { return item.imgUrl !== imgPath; });
                                    this.saveSettings();
                                }
                                else {
                                    new obsidian.Notice(t("Delete failed"));
                                }
                                _b.label = 2;
                            case 2: return [3 /*break*/, 4];
                            case 3:
                                // catch: any failure surfaces as a generic notice.
                                _b.sent();
                                new obsidian.Notice(t("Error, could not delete"));
                                return [3 /*break*/, 4];
                            case 4: return [2 /*return*/];
                        }
                    });
                }); });
            });
        };
        return _this;
    }
|
|||
|
    // Load persisted plugin data and merge it over the defaults.
    // Compiled from: this.settings = Object.assign(DEFAULT_SETTINGS, await this.loadData())
    // NOTE(review): the assign target is DEFAULT_SETTINGS itself, so the
    // shared defaults object is mutated by the loaded data — probably
    // intended to be Object.assign({}, DEFAULT_SETTINGS, ...); confirm.
    imageAutoUploadPlugin.prototype.loadSettings = function () {
        return __awaiter(this, void 0, void 0, function () {
            var _a, _b, _c, _d;
            return __generator(this, function (_e) {
                switch (_e.label) {
                    case 0:
                        _a = this;
                        _c = (_b = Object).assign;
                        _d = [DEFAULT_SETTINGS];
                        return [4 /*yield*/, this.loadData()];
                    case 1:
                        _a.settings = _c.apply(_b, _d.concat([_e.sent()]));
                        return [2 /*return*/];
                }
            });
        });
    };
|
|||
|
    // Persist the current settings through Obsidian's plugin data store.
    imageAutoUploadPlugin.prototype.saveSettings = function () {
        return __awaiter(this, void 0, void 0, function () {
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, this.saveData(this.settings)];
                    case 1:
                        _a.sent();
                        return [2 /*return*/];
                }
            });
        });
    };
|
|||
|
imageAutoUploadPlugin.prototype.onunload = function () { };
|
|||
|
    // Plugin entry point: load settings, wire the chosen uploader backend,
    // register the settings tab, the two commands, and the paste/menu hooks.
    imageAutoUploadPlugin.prototype.onload = function () {
        return __awaiter(this, void 0, void 0, function () {
            var _this = this;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, this.loadSettings()];
                    case 1:
                        _a.sent();
                        this.helper = new Helper(this.app);
                        this.picGoUploader = new PicGoUploader(this.settings, this);
                        this.picGoDeleter = new PicGoDeleter(this);
                        this.picGoCoreUploader = new PicGoCoreUploader(this.settings, this);
                        // Select the active backend from settings.
                        if (this.settings.uploader === "PicGo") {
                            this.uploader = this.picGoUploader;
                        }
                        else if (this.settings.uploader === "PicGo-Core") {
                            this.uploader = this.picGoCoreUploader;
                            // Optionally repair PATH so the picgo binary resolves
                            // inside Obsidian (Linux/Mac).
                            if (this.settings.fixPath) {
                                fixPath();
                            }
                        }
                        else {
                            new obsidian.Notice("unknown uploader");
                        }
                        // Custom icon used by the file-menu "Upload" item.
                        obsidian.addIcon("upload", "<svg t=\"1636630783429\" class=\"icon\" viewBox=\"0 0 100 100\" version=\"1.1\" p-id=\"4649\" xmlns=\"http://www.w3.org/2000/svg\">\n <path d=\"M 71.638 35.336 L 79.408 35.336 C 83.7 35.336 87.178 38.662 87.178 42.765 L 87.178 84.864 C 87.178 88.969 83.7 92.295 79.408 92.295 L 17.249 92.295 C 12.957 92.295 9.479 88.969 9.479 84.864 L 9.479 42.765 C 9.479 38.662 12.957 35.336 17.249 35.336 L 25.019 35.336 L 25.019 42.765 L 17.249 42.765 L 17.249 84.864 L 79.408 84.864 L 79.408 42.765 L 71.638 42.765 L 71.638 35.336 Z M 49.014 10.179 L 67.326 27.688 L 61.835 32.942 L 52.849 24.352 L 52.849 59.731 L 45.078 59.731 L 45.078 24.455 L 36.194 32.947 L 30.702 27.692 L 49.012 10.181 Z\" p-id=\"4650\" fill=\"#8a8a8a\"></path>\n </svg>");
                        this.addSettingTab(new SettingTab(this.app, this));
                        // Command: upload every image referenced by the active note.
                        this.addCommand({
                            id: "Upload all images",
                            name: "Upload all images",
                            checkCallback: function (checking) {
                                var leaf = _this.app.workspace.activeLeaf;
                                if (leaf) {
                                    if (!checking) {
                                        _this.uploadAllFile();
                                    }
                                    return true;
                                }
                                return false;
                            },
                        });
                        // Command: download every remote image into the vault.
                        this.addCommand({
                            id: "Download all images",
                            name: "Download all images",
                            checkCallback: function (checking) {
                                var leaf = _this.app.workspace.activeLeaf;
                                if (leaf) {
                                    if (!checking) {
                                        _this.downloadAllImageFiles();
                                    }
                                    return true;
                                }
                                return false;
                            },
                        });
                        this.setupPasteHandler();
                        this.registerFileMenu();
                        this.registerSelection();
                        return [2 /*return*/];
                }
            });
        });
    };
|
|||
|
    // Hook the editor context menu: when the current selection is a
    // markdown image whose URL was previously uploaded by this plugin,
    // offer the PicList delete entry (via addMenu).
    imageAutoUploadPlugin.prototype.registerSelection = function () {
        var _this = this;
        this.registerEvent(this.app.workspace.on("editor-menu", function (menu, editor, info) {
            if (_this.app.workspace.getLeavesOfType("markdown").length === 0) {
                return;
            }
            var selection = editor.getSelection();
            if (selection) {
                // Extract the URL from a "![desc](url)" selection.
                var markdownRegex = /!\[.*\]\((.*)\)/g;
                var markdownMatch = markdownRegex.exec(selection);
                if (markdownMatch && markdownMatch.length > 1) {
                    var markdownUrl_1 = markdownMatch[1];
                    // NOTE(review): uploadedImages is not part of
                    // DEFAULT_SETTINGS — confirm it is always initialized
                    // before this .find() runs.
                    if (_this.settings.uploadedImages.find(function (item) { return item.imgUrl === markdownUrl_1; })) {
                        _this.addMenu(menu, markdownUrl_1, editor);
                    }
                }
            }
        }));
    };
|
|||
|
    // "Download all images" command: save every remote (http*) image
    // referenced by the active document into the attachment folder, then
    // rewrite the document links to point at the local copies and report
    // success/failure counts in a Notice.
    imageAutoUploadPlugin.prototype.downloadAllImageFiles = function () {
        return __awaiter(this, void 0, void 0, function () {
            var folderPath, fileArray, imageArray, nameSet, fileArray_1, fileArray_1_1, file, url, asset, name_1, response, activeFolder, abstractActiveFolder, e_1_1, value;
            var e_1, _a;
            var _this = this;
            return __generator(this, function (_b) {
                switch (_b.label) {
                    case 0:
                        folderPath = this.getFileAssetPath();
                        fileArray = this.helper.getAllFiles();
                        // Ensure the attachment folder exists.
                        if (!require$$0.existsSync(folderPath)) {
                            require$$0.mkdirSync(folderPath);
                        }
                        imageArray = [];
                        nameSet = new Set();
                        _b.label = 1;
                    case 1:
                        // States 1-8 are a compiled for...of over fileArray,
                        // including the iterator-cleanup finally block.
                        _b.trys.push([1, 6, 7, 8]);
                        fileArray_1 = __values(fileArray), fileArray_1_1 = fileArray_1.next();
                        _b.label = 2;
                    case 2:
                        if (!!fileArray_1_1.done) return [3 /*break*/, 5];
                        file = fileArray_1_1.value;
                        // Only remote images are downloaded; skip local ones.
                        if (!file.path.startsWith("http")) {
                            return [3 /*break*/, 4];
                        }
                        url = file.path;
                        asset = getUrlAsset(url);
                        // Sanitize characters that are illegal in file names.
                        name_1 = decodeURI(require$$0$1.parse(asset).name).replaceAll(/[\\\\/:*?\"<>|]/g, "-");
                        // If the file name already exists, replace it with a random
                        // value (the file extension is not considered).
                        // NOTE(review): join(folderPath) omits the file name, so this
                        // existsSync() checks the folder itself (created above) and is
                        // effectively always true — every download gets a random name.
                        // Likely meant join(folderPath, asset); confirm.
                        if (require$$0.existsSync(require$$0$1.join(folderPath))) {
                            name_1 = (Math.random() + 1).toString(36).substr(2, 5);
                        }
                        // Avoid collisions within this run.
                        if (nameSet.has(name_1)) {
                            name_1 = "".concat(name_1, "-").concat((Math.random() + 1).toString(36).substr(2, 5));
                        }
                        nameSet.add(name_1);
                        return [4 /*yield*/, this.download(url, folderPath, name_1)];
                    case 3:
                        response = _b.sent();
                        if (response.ok) {
                            // Record the link rewrite: source text -> vault-relative path.
                            activeFolder = obsidian.normalizePath(this.app.workspace.getActiveFile().parent.path);
                            abstractActiveFolder = this.app.vault.adapter.getFullPath(activeFolder);
                            imageArray.push({
                                source: file.source,
                                name: name_1,
                                path: obsidian.normalizePath(require$$0$1.relative(abstractActiveFolder, response.path)),
                            });
                        }
                        _b.label = 4;
                    case 4:
                        fileArray_1_1 = fileArray_1.next();
                        return [3 /*break*/, 2];
                    case 5: return [3 /*break*/, 8];
                    case 6:
                        e_1_1 = _b.sent();
                        e_1 = { error: e_1_1 };
                        return [3 /*break*/, 8];
                    case 7:
                        try {
                            if (fileArray_1_1 && !fileArray_1_1.done && (_a = fileArray_1.return)) _a.call(fileArray_1);
                        }
                        finally { if (e_1) throw e_1.error; }
                        return [7 /*endfinally*/];
                    case 8:
                        // Rewrite the document and report the outcome.
                        value = this.helper.getValue();
                        imageArray.map(function (image) {
                            var name = _this.handleName(image.name);
                            value = value.replace(image.source, "![".concat(name, "](").concat(encodeURI(image.path), ")"));
                        });
                        this.helper.setValue(value);
                        new obsidian.Notice("all: ".concat(fileArray.length, "\nsuccess: ").concat(imageArray.length, "\nfailed: ").concat(fileArray.length - imageArray.length));
                        return [2 /*return*/];
                }
            });
        });
    };
|
|||
|
// 获取当前文件所属的附件文件夹
|
|||
|
imageAutoUploadPlugin.prototype.getFileAssetPath = function () {
|
|||
|
var basePath = this.app.vault.adapter.getBasePath();
|
|||
|
// @ts-ignore
|
|||
|
var assetFolder = this.app.vault.config.attachmentFolderPath;
|
|||
|
var activeFile = this.app.vault.getAbstractFileByPath(this.app.workspace.getActiveFile().path);
|
|||
|
// 当前文件夹下的子文件夹
|
|||
|
if (assetFolder.startsWith("./")) {
|
|||
|
var activeFolder = decodeURI(require$$0$1.resolve(basePath, activeFile.parent.path));
|
|||
|
return require$$0$1.join(activeFolder, assetFolder);
|
|||
|
}
|
|||
|
else {
|
|||
|
// 根文件夹
|
|||
|
return require$$0$1.join(basePath, assetFolder);
|
|||
|
}
|
|||
|
};
|
|||
|
    // Fetch `url` and write it as "<name>.<detected ext>" inside
    // `folderPath`. Returns {ok, msg} and, on success, the written path
    // plus the detected image type.
    imageAutoUploadPlugin.prototype.download = function (url, folderPath, name) {
        return __awaiter(this, void 0, void 0, function () {
            var response, type, buffer, path;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, obsidian.requestUrl({ url: url })];
                    case 1:
                        response = _a.sent();
                        // Sniff the real image type from the bytes (drives the
                        // file extension). Note: done before the status check.
                        return [4 /*yield*/, imageType(new Uint8Array(response.arrayBuffer))];
                    case 2:
                        type = _a.sent();
                        // Non-200 responses and non-image payloads both fail.
                        if (response.status !== 200) {
                            return [2 /*return*/, {
                                    ok: false,
                                    msg: "error",
                                }];
                        }
                        if (!type) {
                            return [2 /*return*/, {
                                    ok: false,
                                    msg: "error",
                                }];
                        }
                        buffer = Buffer.from(response.arrayBuffer);
                        try {
                            path = require$$0$1.join(folderPath, "".concat(name, ".").concat(type.ext));
                            require$$0.writeFileSync(path, buffer);
                            return [2 /*return*/, {
                                    ok: true,
                                    msg: "ok",
                                    path: path,
                                    type: type,
                                }];
                        }
                        catch (err) {
                            // Filesystem failure (permissions, bad path, ...).
                            return [2 /*return*/, {
                                    ok: false,
                                    msg: err,
                                }];
                        }
                        return [2 /*return*/];
                }
            });
        });
    };
|
|||
|
imageAutoUploadPlugin.prototype.registerFileMenu = function () {
|
|||
|
var _this = this;
|
|||
|
this.registerEvent(this.app.workspace.on("file-menu", function (menu, file, source, leaf) {
|
|||
|
if (source === "canvas-menu")
|
|||
|
return false;
|
|||
|
if (!isAssetTypeAnImage(file.path))
|
|||
|
return false;
|
|||
|
menu.addItem(function (item) {
|
|||
|
item
|
|||
|
.setTitle("Upload")
|
|||
|
.setIcon("upload")
|
|||
|
.onClick(function () {
|
|||
|
if (!(file instanceof obsidian.TFile)) {
|
|||
|
return false;
|
|||
|
}
|
|||
|
_this.fileMenuUpload(file);
|
|||
|
});
|
|||
|
});
|
|||
|
}));
|
|||
|
};
|
|||
|
    // Upload a single vault image picked from the file menu: find every
    // link in the active document that references `file`, upload it, and
    // rewrite those links to the returned URL. Optionally deletes the
    // local source afterwards.
    imageAutoUploadPlugin.prototype.fileMenuUpload = function (file) {
        var e_2, _a;
        var _this = this;
        var content = this.helper.getValue();
        var basePath = this.app.vault.adapter.getBasePath();
        var imageList = [];
        var fileArray = this.helper.getAllFiles();
        try {
            // Compiled for...of: collect document links whose file name
            // matches the clicked file.
            for (var fileArray_2 = __values(fileArray), fileArray_2_1 = fileArray_2.next(); !fileArray_2_1.done; fileArray_2_1 = fileArray_2.next()) {
                var match = fileArray_2_1.value;
                var imageName = match.name;
                var encodedUri = match.path;
                var fileName = require$$0$1.basename(decodeURI(encodedUri));
                if (file && file.name === fileName) {
                    var abstractImageFile = require$$0$1.join(basePath, file.path);
                    if (isAssetTypeAnImage(abstractImageFile)) {
                        imageList.push({
                            path: abstractImageFile,
                            name: imageName,
                            source: match.source,
                        });
                    }
                }
            }
        }
        catch (e_2_1) { e_2 = { error: e_2_1 }; }
        finally {
            try {
                if (fileArray_2_1 && !fileArray_2_1.done && (_a = fileArray_2.return)) _a.call(fileArray_2);
            }
            finally { if (e_2) throw e_2.error; }
        }
        if (imageList.length === 0) {
            new obsidian.Notice("没有解析到图像文件");
            return;
        }
        this.uploader.uploadFiles(imageList.map(function (item) { return item.path; })).then(function (res) {
            if (res.success) {
                // Results come back in upload order; consume them one per link.
                var uploadUrlList_1 = res.result;
                imageList.map(function (item) {
                    var uploadImage = uploadUrlList_1.shift();
                    var name = _this.handleName(item.name);
                    content = content.replaceAll(item.source, "![".concat(name, "](").concat(uploadImage, ")"));
                });
                _this.helper.setValue(content);
                // Optionally delete the now-uploaded local files
                // (fire-and-forget; unlink errors are ignored).
                if (_this.settings.deleteSource) {
                    imageList.map(function (image) {
                        if (!image.path.startsWith("http")) {
                            require$$0.unlink(image.path, function () { });
                        }
                    });
                }
            }
            else {
                new obsidian.Notice("Upload error");
            }
        });
    };
|
|||
|
imageAutoUploadPlugin.prototype.filterFile = function (fileArray) {
    // Keep only the links that are eligible for upload:
    //  - every local link is kept;
    //  - remote ("http…") links are kept only when `workOnNetWork` is on
    //    and the host is not on the user's black-domain list.
    var eligible = [];
    for (var match of fileArray) {
        if (match.path.startsWith("http")) {
            if (this.settings.workOnNetWork &&
                !this.helper.hasBlackDomain(match.path, this.settings.newWorkBlackDomains)) {
                eligible.push({
                    path: match.path,
                    name: match.name,
                    source: match.source,
                });
            }
            continue;
        }
        eligible.push({
            path: match.path,
            name: match.name,
            source: match.source,
        });
    }
    return eligible;
};
|
|||
|
imageAutoUploadPlugin.prototype.getFile = function (fileName, fileMap) {
    // Resolve a file by name. The caller may pass a prebuilt
    // name → file lookup to amortize the cost across many calls;
    // otherwise one is built on demand from the vault's file list.
    var lookup = fileMap || arrayToObject(this.app.vault.getFiles(), "name");
    return lookup[fileName];
};
|
|||
|
// upload all files
|
|||
|
// Upload every image referenced by the active note.
//
// A link is resolved to a local file in three steps, in priority order:
//   1. the link is an absolute vault path              (filePathMap hit)
//   2. the link is relative to the active note          ("./" or "../")
//   3. fall back to a basename lookup across the vault
// Remote ("http…") links pass through unchanged so the uploader can
// re-host them. After a successful upload each markdown link is replaced
// with the returned URL and, when `deleteSource` is enabled, the local
// originals are removed.
imageAutoUploadPlugin.prototype.uploadAllFile = function () {
    var _this = this;
    var content = this.helper.getValue();
    var basePath = this.app.vault.adapter.getBasePath();
    var activeFile = this.app.workspace.getActiveFile();
    var fileMap = arrayToObject(this.app.vault.getFiles(), "name");
    var filePathMap = arrayToObject(this.app.vault.getFiles(), "path");
    var imageList = [];
    var fileArray = this.filterFile(this.helper.getAllFiles());
    for (var match of fileArray) {
        var imageName = match.name;
        var encodedUri = match.path;
        if (encodedUri.startsWith("http")) {
            imageList.push({
                path: encodedUri,
                name: imageName,
                source: match.source,
            });
            continue;
        }
        // Hoisted: the original decoded the URI up to five times per entry.
        var decodedUri = decodeURI(encodedUri);
        var fileName = require$$0$1.basename(decodedUri);
        var file = void 0;
        // 1. Absolute vault path.
        if (filePathMap[decodedUri]) {
            file = filePathMap[decodedUri];
        }
        // 2. Path relative to the active note.
        // BUGFIX: the original condition was
        //   (!file && uri.startsWith("./")) || uri.startsWith("../")
        // — operator precedence let the "../" branch run even when `file`
        // was already resolved in step 1, clobbering it (possibly with
        // undefined when the relative lookup missed).
        if (!file && (decodedUri.startsWith("./") || decodedUri.startsWith("../"))) {
            var absoluteCandidate = require$$0$1.resolve(require$$0$1.join(basePath, require$$0$1.dirname(activeFile.path)), decodedUri);
            if (require$$0.existsSync(absoluteCandidate)) {
                var vaultPath = obsidian.normalizePath(require$$0$1.relative(basePath, absoluteCandidate));
                file = filePathMap[vaultPath];
            }
        }
        // 3. Shortest-possible link: bare basename lookup across the vault.
        if (!file) {
            file = this.getFile(fileName, fileMap);
        }
        if (file) {
            var abstractImageFile = require$$0$1.join(basePath, file.path);
            if (isAssetTypeAnImage(abstractImageFile)) {
                imageList.push({
                    path: abstractImageFile,
                    name: imageName,
                    source: match.source,
                });
            }
        }
    }
    if (imageList.length === 0) {
        new obsidian.Notice("没有解析到图像文件");
        return;
    }
    new obsidian.Notice("\u5171\u627E\u5230".concat(imageList.length, "\u4E2A\u56FE\u50CF\u6587\u4EF6\uFF0C\u5F00\u59CB\u4E0A\u4F20"));
    this.uploader.uploadFiles(imageList.map(function (item) { return item.path; })).then(function (res) {
        if (!res.success) {
            new obsidian.Notice("Upload error");
            return;
        }
        var uploadUrlList = res.result;
        // Replace each original link with its uploaded URL, consuming the
        // result list in order (one URL per uploaded image).
        imageList.forEach(function (item) {
            var uploadImage = uploadUrlList.shift();
            var name = _this.handleName(item.name);
            content = content.replaceAll(item.source, "![".concat(name, "](").concat(uploadImage, ")"));
        });
        _this.helper.setValue(content);
        if (_this.settings.deleteSource) {
            // Best-effort removal of the local originals; remote links are
            // never deleted. Errors from unlink are ignored.
            imageList.forEach(function (image) {
                if (!image.path.startsWith("http")) {
                    require$$0.unlink(image.path, function () { });
                }
            });
        }
    });
};
|
|||
|
// Register the editor-paste and editor-drop hooks that intercept pasted
// or dropped images and route them through the configured uploader.
// Upload can be disabled per-note via the `image-auto-upload`
// frontmatter key (falling back to the global `uploadByClipSwitch`).
imageAutoUploadPlugin.prototype.setupPasteHandler = function () {
    var _this = this;
    this.registerEvent(this.app.workspace.on("editor-paste", function (evt, editor, markdownView) {
        var allowUpload = _this.helper.getFrontmatterValue("image-auto-upload", _this.settings.uploadByClipSwitch);
        // NOTE(review): no-op property read — residue left by the TS
        // compiler after an unused variable was stripped.
        evt.clipboardData.files;
        if (!allowUpload) {
            return;
        }
        // When the pasted clipboard text contains markdown-style image
        // links: re-host eligible remote images through the uploader.
        if (_this.settings.workOnNetWork) {
            var clipboardValue = evt.clipboardData.getData("text/plain");
            // Only remote ("http…") links whose host is not on the user's
            // black-domain list are uploaded.
            var imageList_1 = _this.helper
                .getImageLink(clipboardValue)
                .filter(function (image) { return image.path.startsWith("http"); })
                .filter(function (image) {
                return !_this.helper.hasBlackDomain(image.path, _this.settings.newWorkBlackDomains);
            });
            if (imageList_1.length !== 0) {
                _this.uploader
                    .uploadFiles(imageList_1.map(function (item) { return item.path; }))
                    .then(function (res) {
                    var value = _this.helper.getValue();
                    if (res.success) {
                        var uploadUrlList_3 = res.result;
                        // Rewrite each link with its uploaded URL, consuming
                        // the result list in order.
                        imageList_1.map(function (item) {
                            var uploadImage = uploadUrlList_3.shift();
                            var name = _this.handleName(item.name);
                            value = value.replaceAll(item.source, "![".concat(name, "](").concat(uploadImage, ")"));
                        });
                        _this.helper.setValue(value);
                    }
                    else {
                        new obsidian.Notice("Upload error");
                    }
                });
            }
        }
        // When the clipboard itself holds an image file, upload it.
        // (Compiled async state machine: __generator labels 0/1 are the
        // pre/post-await halves of `await uploader.uploadFileByClipboard()`.)
        if (_this.canUpload(evt.clipboardData)) {
            _this.uploadFileAndEmbedImgurImage(editor, function (editor, pasteId) { return __awaiter(_this, void 0, void 0, function () {
                var res, url;
                return __generator(this, function (_a) {
                    switch (_a.label) {
                        case 0: return [4 /*yield*/, this.uploader.uploadFileByClipboard()];
                        case 1:
                            res = _a.sent();
                            // Non-zero code means the uploader reported failure:
                            // swap the placeholder for an error marker and stop.
                            if (res.code !== 0) {
                                this.handleFailedUpload(editor, pasteId, res.msg);
                                return [2 /*return*/];
                            }
                            url = res.data;
                            return [2 /*return*/, url];
                    }
                });
            }); }, evt.clipboardData).catch();
            // Suppress Obsidian's default paste handling (which would embed
            // the image as a local attachment).
            evt.preventDefault();
        }
    }));
    // Drag-and-drop of image files: upload each dropped file and insert a
    // placeholder per result that is then replaced with the final link.
    this.registerEvent(this.app.workspace.on("editor-drop", function (evt, editor, markdownView) { return __awaiter(_this, void 0, void 0, function () {
        var allowUpload, files, sendFiles_1, files_1, data;
        var _this = this;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    allowUpload = this.helper.getFrontmatterValue("image-auto-upload", this.settings.uploadByClipSwitch);
                    files = evt.dataTransfer.files;
                    if (!allowUpload) {
                        return [2 /*return*/];
                    }
                    // Only handle drops whose first entry is an image;
                    // otherwise fall through to the default drop behavior.
                    if (!(files.length !== 0 && files[0].type.startsWith("image"))) return [3 /*break*/, 2];
                    sendFiles_1 = [];
                    files_1 = evt.dataTransfer.files;
                    // `item.path` is the Electron-provided absolute path of
                    // the dropped file on disk.
                    Array.from(files_1).forEach(function (item, index) {
                        sendFiles_1.push(item.path);
                    });
                    evt.preventDefault();
                    return [4 /*yield*/, this.uploader.uploadFiles(sendFiles_1)];
                case 1:
                    data = _a.sent();
                    if (data.success) {
                        data.result.map(function (value) {
                            // Random 5-char id ties each placeholder to its
                            // eventual replacement.
                            var pasteId = (Math.random() + 1).toString(36).substr(2, 5);
                            _this.insertTemporaryText(editor, pasteId);
                            // NOTE(review): always passes files_1[0].name even for
                            // multi-file drops — looks like a latent naming quirk;
                            // preserved as-is in this documentation pass.
                            _this.embedMarkDownImage(editor, pasteId, value, files_1[0].name);
                        });
                    }
                    else {
                        new obsidian.Notice("Upload error");
                    }
                    _a.label = 2;
                case 2: return [2 /*return*/];
            }
        });
    }); }));
};
|
|||
|
// Decide whether a paste event should be intercepted for upload.
// Returns true only when the clipboard holds an image file; when the
// clipboard also carries text (e.g. content copied from a web page),
// the `applyImage` setting decides.
// FIX: removed the no-op expression statement `this.settings.applyImage;`
// (residue left by the TS compiler after an unused variable was stripped).
imageAutoUploadPlugin.prototype.canUpload = function (clipboardData) {
    var files = clipboardData.files;
    var hasImageFile = files.length !== 0 && files[0].type.startsWith("image");
    if (!hasImageFile) {
        return false;
    }
    var text = clipboardData.getData("text");
    // Image plus accompanying text is only uploaded when the user opted
    // in via `applyImage`; a bare image is always uploaded.
    return text ? this.settings.applyImage : true;
};
|
|||
|
// Insert a per-paste placeholder, run `callback` to perform the actual
// upload, then replace the placeholder with the resulting markdown link
// (or an error marker if the callback throws / rejects).
// `clipboardData` supplies the original file name used as alt text.
// (Compiled async state machine: __generator labels 1/2/3 correspond to
// try { url = await callback(...) } catch (e) { handleFailedUpload }.)
imageAutoUploadPlugin.prototype.uploadFileAndEmbedImgurImage = function (editor, callback, clipboardData) {
    return __awaiter(this, void 0, void 0, function () {
        var pasteId, name, url, e_5;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    // Random 5-char id ties the placeholder to its eventual
                    // replacement, even with several uploads in flight.
                    pasteId = (Math.random() + 1).toString(36).substr(2, 5);
                    this.insertTemporaryText(editor, pasteId);
                    name = clipboardData.files[0].name;
                    _a.label = 1;
                case 1:
                    // try-block entry: [1, 3, , 4] = [start, catch, _, end].
                    _a.trys.push([1, 3, , 4]);
                    return [4 /*yield*/, callback(editor, pasteId)];
                case 2:
                    url = _a.sent();
                    this.embedMarkDownImage(editor, pasteId, url, name);
                    return [3 /*break*/, 4];
                case 3:
                    // catch: swap the placeholder for a visible error marker.
                    e_5 = _a.sent();
                    this.handleFailedUpload(editor, pasteId, e_5);
                    return [3 /*break*/, 4];
                case 4: return [2 /*return*/];
            }
        });
    });
};
|
|||
|
imageAutoUploadPlugin.prototype.insertTemporaryText = function (editor, pasteId) {
    // Drop a per-paste placeholder at the cursor; it is swapped for the
    // real markdown link (or an error marker) once the upload settles.
    editor.replaceSelection(imageAutoUploadPlugin.progressTextFor(pasteId) + "\n");
};
|
|||
|
imageAutoUploadPlugin.progressTextFor = function (id) {
    // Placeholder markdown image whose alt text embeds the paste id, so
    // replaceFirstOccurrence can find exactly this upload's marker.
    return "![Uploading file..." + id + "]()";
};
|
|||
|
imageAutoUploadPlugin.prototype.embedMarkDownImage = function (editor, pasteId, imageUrl, name) {
    if (name === void 0) { name = ""; }
    // Swap this paste's placeholder for the final markdown image link,
    // shaping the alt text according to the user's `imageDesc` setting.
    var placeholder = imageAutoUploadPlugin.progressTextFor(pasteId);
    var markDownImage = "![" + this.handleName(name) + "](" + imageUrl + ")";
    imageAutoUploadPlugin.replaceFirstOccurrence(editor, placeholder, markDownImage);
};
|
|||
|
imageAutoUploadPlugin.prototype.handleFailedUpload = function (editor, pasteId, reason) {
    // Surface the failure to the user and the dev console, then turn
    // this paste's placeholder into a visible warning marker.
    new obsidian.Notice(reason);
    console.error("Failed request: ", reason);
    var placeholder = imageAutoUploadPlugin.progressTextFor(pasteId);
    imageAutoUploadPlugin.replaceFirstOccurrence(editor, placeholder, "⚠️upload failed, check dev console");
};
|
|||
|
imageAutoUploadPlugin.prototype.handleName = function (name) {
    // Shape the alt text of an uploaded image according to the
    // `imageDesc` setting, appending the optional size suffix
    // (e.g. "|400") whenever a description is kept.
    var suffix = this.settings.imageSizeSuffix || "";
    switch (this.settings.imageDesc) {
        case "none":
            return "";
        case "removeDefault":
            // "image.png" is the stock name for pasted screenshots;
            // treat it as "no description".
            return name === "image.png" ? "" : name + suffix;
        case "origin":
        default:
            return name + suffix;
    }
};
|
|||
|
imageAutoUploadPlugin.replaceFirstOccurrence = function (editor, target, replacement) {
    // Find the first line containing `target` and replace exactly that
    // span through the editor API (keeps undo history intact). Does
    // nothing if `target` is not present.
    var lines = editor.getValue().split("\n");
    for (var lineNo = 0; lineNo < lines.length; lineNo++) {
        var col = lines[lineNo].indexOf(target);
        if (col === -1) {
            continue;
        }
        editor.replaceRange(replacement, { line: lineNo, ch: col }, { line: lineNo, ch: col + target.length });
        return;
    }
};
|
|||
|
return imageAutoUploadPlugin;
|
|||
|
}(obsidian.Plugin));
|
|||
|
|
|||
|
module.exports = imageAutoUploadPlugin;
|
|||
|
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcyIsInNvdXJjZXMiOlsibm9kZV9tb2R1bGVzL3RzbGliL3RzbGliLmVzNi5qcyIsIm5vZGVfbW9kdWxlcy9pc2V4ZS93aW5kb3dzLmpzIiwibm9kZV9tb2R1bGVzL2lzZXhlL21vZGUuanMiLCJub2RlX21vZHVsZXMvaXNleGUvaW5kZXguanMiLCJub2RlX21vZHVsZXMvd2hpY2gvd2hpY2guanMiLCJub2RlX21vZHVsZXMvcGF0aC1rZXkvaW5kZXguanMiLCJub2RlX21vZHVsZXMvY3Jvc3Mtc3Bhd24vbGliL3V0aWwvcmVzb2x2ZUNvbW1hbmQuanMiLCJub2RlX21vZHVsZXMvY3Jvc3Mtc3Bhd24vbGliL3V0aWwvZXNjYXBlLmpzIiwibm9kZV9tb2R1bGVzL3NoZWJhbmctcmVnZXgvaW5kZXguanMiLCJub2RlX21vZHVsZXMvc2hlYmFuZy1jb21tYW5kL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL2Nyb3NzLXNwYXduL2xpYi91dGlsL3JlYWRTaGViYW5nLmpzIiwibm9kZV9tb2R1bGVzL2Nyb3NzLXNwYXduL2xpYi9wYXJzZS5qcyIsIm5vZGVfbW9kdWxlcy9jcm9zcy1zcGF3bi9saWIvZW5vZW50LmpzIiwibm9kZV9tb2R1bGVzL2Nyb3NzLXNwYXduL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL3N0cmlwLWZpbmFsLW5ld2xpbmUvaW5kZXguanMiLCJub2RlX21vZHVsZXMvbnBtLXJ1bi1wYXRoL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL21pbWljLWZuL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL29uZXRpbWUvaW5kZXguanMiLCJub2RlX21vZHVsZXMvaHVtYW4tc2lnbmFscy9idWlsZC9zcmMvY29yZS5qcyIsIm5vZGVfbW9kdWxlcy9odW1hbi1zaWduYWxzL2J1aWxkL3NyYy9yZWFsdGltZS5qcyIsIm5vZGVfbW9kdWxlcy9odW1hbi1zaWduYWxzL2J1aWxkL3NyYy9zaWduYWxzLmpzIiwibm9kZV9tb2R1bGVzL2h1bWFuLXNpZ25hbHMvYnVpbGQvc3JjL21haW4uanMiLCJub2RlX21vZHVsZXMvZXhlY2EvbGliL2Vycm9yLmpzIiwibm9kZV9tb2R1bGVzL2V4ZWNhL2xpYi9zdGRpby5qcyIsIm5vZGVfbW9kdWxlcy9zaWduYWwtZXhpdC9zaWduYWxzLmpzIiwibm9kZV9tb2R1bGVzL3NpZ25hbC1leGl0L2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL2V4ZWNhL2xpYi9raWxsLmpzIiwibm9kZV9tb2R1bGVzL2lzLXN0cmVhbS9pbmRleC5qcyIsIm5vZGVfbW9kdWxlcy9leGVjYS9ub2RlX21vZHVsZXMvZ2V0LXN0cmVhbS9idWZmZXItc3RyZWFtLmpzIiwibm9kZV9tb2R1bGVzL2V4ZWNhL25vZGVfbW9kdWxlcy9nZXQtc3RyZWFtL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL21lcmdlLXN0cmVhbS9pbmRleC5qcyIsIm5vZGVfbW9kdWxlcy9leGVjYS9saWIvc3RyZWFtLmpzIiwibm9kZV9tb2R1bGVzL2V4ZWNhL2xpYi9wcm9taXNlLmpzIiwibm9kZV9tb2R1bGVzL2V4ZWNhL2xpYi9jb21tYW5kLmpzIiwibm9kZV9tb2R1bGVzL2V4ZWNhL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL2Fuc2ktcmVnZXgvaW5kZXguanMiLCJu
b2RlX21vZHVsZXMvc3RyaXAtYW5zaS9pbmRleC5qcyIsIm5vZGVfbW9kdWxlcy9kZWZhdWx0LXNoZWxsL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL3NoZWxsLWVudi9pbmRleC5qcyIsIm5vZGVfbW9kdWxlcy9zaGVsbC1wYXRoL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL2ZpeC1wYXRoL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL3JlYWRhYmxlLXN0cmVhbS9saWIvaW50ZXJuYWwvc3RyZWFtcy9zdHJlYW0uanMiLCJub2RlX21vZHVsZXMvcmVhZGFibGUtc3RyZWFtL2xpYi9pbnRlcm5hbC9zdHJlYW1zL2J1ZmZlcl9saXN0LmpzIiwibm9kZV9tb2R1bGVzL3JlYWRhYmxlLXN0cmVhbS9saWIvaW50ZXJuYWwvc3RyZWFtcy9kZXN0cm95LmpzIiwibm9kZV9tb2R1bGVzL3JlYWRhYmxlLXN0cmVhbS9lcnJvcnMuanMiLCJub2RlX21vZHVsZXMvcmVhZGFibGUtc3RyZWFtL2xpYi9pbnRlcm5hbC9zdHJlYW1zL3N0YXRlLmpzIiwibm9kZV9tb2R1bGVzL2luaGVyaXRzL2luaGVyaXRzX2Jyb3dzZXIuanMiLCJub2RlX21vZHVsZXMvaW5oZXJpdHMvaW5oZXJpdHMuanMiLCJub2RlX21vZHVsZXMvdXRpbC1kZXByZWNhdGUvbm9kZS5qcyIsIm5vZGVfbW9kdWxlcy9yZWFkYWJsZS1zdHJlYW0vbGliL19zdHJlYW1fd3JpdGFibGUuanMiLCJub2RlX21vZHVsZXMvcmVhZGFibGUtc3RyZWFtL2xpYi9fc3RyZWFtX2R1cGxleC5qcyIsIm5vZGVfbW9kdWxlcy9zYWZlLWJ1ZmZlci9pbmRleC5qcyIsIm5vZGVfbW9kdWxlcy9zdHJpbmdfZGVjb2Rlci9saWIvc3RyaW5nX2RlY29kZXIuanMiLCJub2RlX21vZHVsZXMvcmVhZGFibGUtc3RyZWFtL2xpYi9pbnRlcm5hbC9zdHJlYW1zL2VuZC1vZi1zdHJlYW0uanMiLCJub2RlX21vZHVsZXMvcmVhZGFibGUtc3RyZWFtL2xpYi9pbnRlcm5hbC9zdHJlYW1zL2FzeW5jX2l0ZXJhdG9yLmpzIiwibm9kZV9tb2R1bGVzL3JlYWRhYmxlLXN0cmVhbS9saWIvaW50ZXJuYWwvc3RyZWFtcy9mcm9tLmpzIiwibm9kZV9tb2R1bGVzL3JlYWRhYmxlLXN0cmVhbS9saWIvX3N0cmVhbV9yZWFkYWJsZS5qcyIsIm5vZGVfbW9kdWxlcy9yZWFkYWJsZS1zdHJlYW0vbGliL19zdHJlYW1fdHJhbnNmb3JtLmpzIiwibm9kZV9tb2R1bGVzL3JlYWRhYmxlLXN0cmVhbS9saWIvX3N0cmVhbV9wYXNzdGhyb3VnaC5qcyIsIm5vZGVfbW9kdWxlcy9yZWFkYWJsZS1zdHJlYW0vbGliL2ludGVybmFsL3N0cmVhbXMvcGlwZWxpbmUuanMiLCJub2RlX21vZHVsZXMvcmVhZGFibGUtc3RyZWFtL3JlYWRhYmxlLmpzIiwibm9kZV9tb2R1bGVzL3JlYWRhYmxlLXdlYi10by1ub2RlLXN0cmVhbS9saWIvaW5kZXguanMiLCJub2RlX21vZHVsZXMvdG9rZW4tdHlwZXMvbGliL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL3BlZWstcmVhZGFibGUvbGliL0VuZE9mRmlsZVN0cmVhbS5qcyIsIm5vZGVfbW9kdWxlcy9zdHJ0b2szL2xpYi9BYnN0cmFjdFRva2VuaXplci5qcyIsIm5vZGVfbW9kdWxlcy9zdHJ0b2szL2xpYi9CdWZmZXJUb2tlbml6ZXIuanMiLCJu
b2RlX21vZHVsZXMvc3RydG9rMy9saWIvY29yZS5qcyIsIm5vZGVfbW9kdWxlcy9maWxlLXR5cGUvdXRpbC5qcyIsIm5vZGV
|