/*
generated from DNDs/dnd-template

THIS IS A GENERATED/BUNDLED FILE BY ROLLUP
if you want to view the source visit the plugin's github repository
*/
'use strict';
|
||
|
|
||
|
var fs = require('fs');
|
||
|
var path$1 = require('path');
|
||
|
var obsidian = require('obsidian');
|
||
|
var child_process = require('child_process');
|
||
|
var cnst = require('constants');
|
||
|
var os = require('os');
|
||
|
var assert = require('assert');
|
||
|
var require$$0 = require('util');
|
||
|
var require$$0$1 = require('events');
|
||
|
|
||
|
/**
 * Wrap a CommonJS export so it can always be consumed through `.default`.
 * A value that already looks like an ES-module namespace (a non-null object
 * carrying a `default` key) passes through untouched; anything else is boxed.
 */
function _interopDefaultLegacy(e) {
    const looksLikeNamespace = e && typeof e === 'object' && 'default' in e;
    if (looksLikeNamespace) {
        return e;
    }
    return { 'default': e };
}
/**
 * Build a frozen ES-module-style namespace object from a CommonJS export.
 * Real ES modules (`__esModule` set) pass through unchanged. Otherwise every
 * own key except `default` is re-exposed (preserving accessor descriptors),
 * and the module object itself becomes the `default` export.
 */
function _interopNamespace(e) {
    if (e && e.__esModule) {
        return e;
    }
    const n = Object.create(null);
    if (e) {
        for (const k of Object.keys(e)) {
            if (k === 'default') {
                continue;
            }
            const d = Object.getOwnPropertyDescriptor(e, k);
            // Keep live getters live; plain data props get a forwarding getter.
            Object.defineProperty(n, k, d.get ? d : {
                enumerable: true,
                get: () => e[k],
            });
        }
    }
    n['default'] = e;
    return Object.freeze(n);
}
// Default- and namespace-wrapped views of the CommonJS requires above,
// matching how the original ES-module sources imported each package.
var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
var fs__namespace = /*#__PURE__*/_interopNamespace(fs);
var path__default = /*#__PURE__*/_interopDefaultLegacy(path$1);
var path__namespace = /*#__PURE__*/_interopNamespace(path$1);
var cnst__default = /*#__PURE__*/_interopDefaultLegacy(cnst);
var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
var assert__default = /*#__PURE__*/_interopDefaultLegacy(assert);
var require$$0__default = /*#__PURE__*/_interopDefaultLegacy(require$$0);
var require$$0__default$1 = /*#__PURE__*/_interopDefaultLegacy(require$$0$1);
/*! *****************************************************************************
|
||
|
Copyright (c) Microsoft Corporation.
|
||
|
|
||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||
|
purpose with or without fee is hereby granted.
|
||
|
|
||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||
|
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||
|
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||
|
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||
|
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||
|
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||
|
PERFORMANCE OF THIS SOFTWARE.
|
||
|
***************************************************************************** */
|
||
|
|
||
|
/**
 * tslib-style async/await downlevel helper: drives a generator function,
 * adopting each yielded value into a promise of constructor `P` (defaults to
 * the global Promise) and settling the returned promise with the generator's
 * final value, or rejecting on the first thrown error.
 */
function __awaiter(thisArg, _arguments, P, generator) {
    const Ctor = P || (P = Promise);
    const adopt = (value) => (value instanceof P ? value : new P((resolve) => { resolve(value); }));
    return new Ctor((resolve, reject) => {
        const step = (result) => {
            if (result.done) {
                resolve(result.value);
            } else {
                adopt(result.value).then(fulfilled, rejected);
            }
        };
        function fulfilled(value) {
            try { step(generator.next(value)); } catch (e) { reject(e); }
        }
        function rejected(value) {
            try { step(generator["throw"](value)); } catch (e) { reject(e); }
        }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
}
// Best-available global object (Node, browser window, worker self), used by
// the bundled CommonJS shims below.
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};

/**
 * Evaluate a CommonJS module factory and return its `module.exports`.
 * @param fn - factory invoked as fn(module, module.exports)
 * @param basedir - exposed to the factory as `module.path`
 * @param module - always freshly reassigned; the parameter only exists so the
 *                 bundler can keep its original call shape
 */
function createCommonjsModule(fn, basedir, module) {
    module = {
        path: basedir,
        exports: {},
        require(path, base) {
            const resolvedBase = (base === undefined || base === null) ? module.path : base;
            return commonjsRequire(path, resolvedBase);
        },
    };
    fn(module, module.exports);
    return module.exports;
}

/** Stub for dynamic `require` calls, which the bundler cannot rewrite. */
function commonjsRequire() {
    throw new Error('Dynamic requires are not currently supported by @rollup/plugin-commonjs');
}
// Bundled copy of the `lookpath` npm package (CommonJS build): resolves a
// command name to an absolute executable path by searching the PATH env,
// without shelling out. The `__*` helpers at the top are standard tslib emit
// for downlevelled async/await; their generated bodies are left exactly as-is.
var lib = createCommonjsModule(function (module, exports) {
var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (commonjsGlobal && commonjsGlobal.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// tslib __generator: drives the compiled generator state machine.
// Generated code — do not edit by hand.
var __generator = (commonjsGlobal && commonjsGlobal.__generator) || function (thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    function verb(n) { return function (v) { return step([n, v]); }; }
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while (_) try {
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.lookpath = void 0;
var fs = __importStar(fs__default['default']);
var path = __importStar(path__default['default']);
// Any platform id starting with "win" is treated as Windows.
var isWindows = /^win/i.test(process.platform);
/**
 * Sometimes, people want to look for local executable files
 * which are specified with either relative or absolute file path.
 * @private
 * @param cmd
 * @return {string} An absolute path of given command, or undefined.
 */
var isFilepath = function (cmd) {
    return cmd.includes(path.sep) ? path.resolve(cmd) : undefined;
};
/**
 * Just promisifies "fs.access"
 * @private
 * @param {string} fpath An absolute file path with an applicable extension appended.
 * @return {Promise<string>} Resolves absolute path, or undefined when not executable.
 */
var access = function (fpath) {
    return new Promise(function (resolve) { return fs.access(fpath, fs.constants.X_OK, function (err) { return resolve(err ? undefined : fpath); }); });
};
/**
 * Resolves if the given file is executable or not, regarding "PATHEXT" to be applied.
 * @private
 * @param {string} abspath A file path to be checked.
 * @return {Promise<string>} Resolves the absolute file path just checked, or undefined.
 */
var isExecutable = function (abspath) { return __awaiter(void 0, void 0, void 0, function () {
    var exts, bins;
    return __generator(this, function (_a) {
        switch (_a.label) {
            case 0:
                // '' is concatenated so the bare name (no extension) is also tried.
                exts = (process.env.PATHEXT || '').split(path.delimiter).concat('');
                return [4 /*yield*/, Promise.all(exts.map(function (ext) { return access(abspath + ext); }))];
            case 1:
                bins = _a.sent();
                // First candidate that proved executable wins.
                return [2 /*return*/, bins.find(function (bin) { return !!bin; })];
        }
    });
}); };
/**
 * Returns a list of directories on which the target command should be looked for.
 * @private
 * @param {string[]} opt.include Will be added to "PATH" env.
 * @param {string[]} opt.exclude Will be filtered from "PATH" env.
 * @return {string[]} Directories to dig into.
 */
var getDirsToWalkThrough = function (opt) {
    var envname = isWindows ? 'Path' : 'PATH';
    return (process.env[envname] || '').split(path.delimiter).concat(opt.include || []).filter(function (p) { return !(opt.exclude || []).includes(p); });
};
/**
 * Returns async promise with absolute file path of given command,
 * and resolves with undefined if the command not found.
 * @param {string} command Command name to look for.
 * @param {LookPathOption} opt Options for lookpath.
 * @return {Promise<string|undefined>} Resolves absolute file path, or undefined if not found.
 */
function lookpath(command, opt) {
    if (opt === void 0) { opt = {}; }
    return __awaiter(this, void 0, void 0, function () {
        var directpath, dirs, bins;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    // A command containing a path separator is resolved directly,
                    // bypassing the PATH search.
                    directpath = isFilepath(command);
                    if (directpath)
                        return [2 /*return*/, isExecutable(directpath)];
                    dirs = getDirsToWalkThrough(opt);
                    return [4 /*yield*/, Promise.all(dirs.map(function (dir) { return isExecutable(path.join(dir, command)); }))];
                case 1:
                    bins = _a.sent();
                    return [2 /*return*/, bins.find(function (bin) { return !!bin; })];
            }
        });
    });
}
exports.lookpath = lookpath;
});
/*
 * pandoc.ts
 *
 * This module handles spawning Pandoc, passing it arguments, and streaming
 * to/from STDIN/STDOUT buffers if desired.
 *
 * Loosely based on https://github.com/eshinn/node-pandoc (MIT licensed)
 *
 */
// File extensions the plugin accepts as conversion input.
const inputExtensions = ['md', 'docx', 'csv', 'html', 'tex', 'odt'];
// List of [pretty name, pandoc format name, file extension, shortened pretty name]
const outputFormats = [
    ['AsciiDoc (adoc)', 'asciidoc', 'adoc', 'AsciiDoc'],
    ['Word Document (docx)', 'docx', 'docx', 'Word'],
    ['Pandoc Markdown', 'markdown', 'pandoc.md', 'markdown'],
    ['HTML (without Pandoc)', 'html', 'html', 'HTML'],
    ['LaTeX', 'latex', 'tex', 'LaTeX'],
    ['OpenDocument (odt)', 'odt', 'odt', 'OpenDocument'],
    ['PowerPoint (pptx)', 'pptx', 'pptx', 'PowerPoint'],
    ['ePub', 'epub', 'epub', 'ePub'],
    ['PDF (via LaTeX)', 'pdf', 'pdf', 'PDF'],
    ['Reveal.js Slides', 'revealjs', 'reveal.html', 'Reveal.js'],
    ['Beamer Slides', 'beamer', 'beamer.tex', 'Beamer'],
    ['reStructured Text (RST)', 'rst', 'rst', 'RST'],
    ['DokuWiki', 'dokuwiki', 'txt', 'DokuWiki'],
    ['MediaWiki', 'mediawiki', 'mediawiki', 'MediaWiki'],
];
/** True when the chosen export format requires a LaTeX toolchain (PDF only). */
function needsLaTeX(format) {
    switch (format) {
        case 'pdf':
            return true;
        default:
            return false;
    }
}
/** True for every format except plain HTML, which the plugin renders itself. */
function needsPandoc(format) {
    return !(format === 'html');
}
/**
 * Whether `-s` (standalone) must be passed to Pandoc: any output file ending
 * in "html", or a format that produces a complete document on its own
 * (html, revealjs, latex, beamer).
 */
function needsStandaloneFlag(output) {
    if (output.file.endsWith('html')) {
        return true;
    }
    const standaloneFormats = ['html', 'revealjs', 'latex', 'beamer'];
    return standaloneFormats.includes(output.format);
}
// Spawns Pandoc with the assembled argument list and resolves with
// { result, error, command } once output is complete.
// Note: extraParams is a list of strings like ['-o', 'file.md'].
// This rejects if the output file doesn't get created (or, for STDOUT,
// if Pandoc produced no output).
const pandoc = (input, output, extraParams) => __awaiter(void 0, void 0, void 0, function* () {
    return new Promise((resolve, reject) => __awaiter(void 0, void 0, void 0, function* () {
        const stdin = input.file === 'STDIN';
        const stdout = output.file === 'STDOUT';
        let pandoc;
        let result = '';
        let error = '';
        // Construct the Pandoc arguments list
        let args = [];
        if (input.format) {
            args.push('--from');
            args.push(input.format);
        }
        if (output.format) {
            args.push('--to');
            args.push(output.format);
        }
        if (needsStandaloneFlag(output))
            args.push('-s');
        if (!stdout) {
            args.push('-o');
            args.push(output.file);
        }
        else {
            args.push('-o');
            args.push('-');
        }
        // Support Unicode in the PDF output if XeLaTeX is installed
        if (output.format === 'pdf' && (yield lib.lookpath('xelatex')))
            args.push('--pdf-engine=xelatex');
        if (!stdin) {
            args.push(input.file);
        }
        // The metadata title is needed for ePub and standalone HTML formats
        // We use a metadata file to avoid being vulnerable to command injection
        if (input.metadataFile)
            args.push('--metadata-file', input.metadataFile);
        // Extra parameters: split on spaces and drop empties so a single
        // string like '-V geometry:margin=1in' becomes separate arguments.
        if (extraParams) {
            extraParams = extraParams.flatMap(x => x.split(' ')).filter(x => x.length);
            args.push(...extraParams);
        }
        function start() {
            // Spawn a Pandoc child process
            // Assumes Pandoc is installed and that the arguments are valid
            // The arguments aren't sanitised, so be careful!
            // Bug fix: copy process.env instead of aliasing it
            // (Object.assign with a single argument returns that same object),
            // so the PATH tweak below no longer permanently mutates this
            // process's environment on every export.
            const env = Object.assign({}, process.env);
            if (input.pdflatex) {
                // Workaround for Windows having different PATH delimiters
                // to *every other operating system in existence*
                // *sigh*
                if (process.platform === 'win32')
                    env.PATH += ";";
                else
                    env.PATH += ":";
                env.PATH += path__namespace.dirname(input.pdflatex);
            }
            // Bug fix: pass the prepared env copy (with the pdflatex PATH
            // addition) to the child, instead of the untouched process.env.
            pandoc = child_process.spawn(input.pandoc || 'pandoc', args, { env });
            if (stdin) {
                // TODO: strip some unicode characters but not others
                // Currently we're stripping footnote back arrows but no
                // other characters to avoid localisation issues
                const contents = input.contents.replace(/[\u21a9\ufe0e]/g, '');
                pandoc.stdin.write(contents);
                pandoc.stdin.end();
            }
            // Handlers: accumulate stdout/stderr as they stream in
            pandoc.stdout.on('data', (data) => {
                result += data;
            });
            pandoc.stderr.on('data', (err) => {
                error += err;
            });
            pandoc.stdout.on('end', () => {
                const value = {
                    result, error,
                    command: 'pandoc ' + args.join(' ')
                };
                if (output.file !== 'STDOUT') {
                    fs__namespace.stat(output.file, (err, stats) => {
                        // Call resolve if the file exists, reject otherwise
                        if (stats && stats.isFile()) {
                            resolve(value);
                        }
                        else {
                            reject(error);
                        }
                    });
                }
                else {
                    // Call resolve iff there is a nonempty result.
                    // Bug fix: the original settled the promise twice here
                    // (once via `(result.length ? resolve : reject)(value)`
                    // and again in a duplicated if/else with a different
                    // rejection value); settle exactly once.
                    if (result.length) {
                        resolve(value);
                    }
                    else {
                        reject(error);
                    }
                }
            });
        }
        if (input.file === 'STDIN') {
            start();
        }
        else {
            // Check if the input file exists, and then start.
            // Bug fix: guard against `err` before touching `stats`, which is
            // undefined when stat fails (previously a TypeError escaped here).
            fs.stat(input.file, (err, stats) => {
                if (!err && stats.isFile())
                    start();
                else
                    reject(new Error('Input file does not exist'));
            });
        }
    }));
});
// Internal yaml node-type markers. Each node instance stores one of these
// under the NODE_TYPE symbol, and the predicates below key off that marker.
const ALIAS = Symbol.for('yaml.alias');
const DOC = Symbol.for('yaml.document');
const MAP = Symbol.for('yaml.map');
const PAIR = Symbol.for('yaml.pair');
const SCALAR$1 = Symbol.for('yaml.scalar');
const SEQ = Symbol.for('yaml.seq');
const NODE_TYPE = Symbol.for('yaml.node.type');
// A value qualifies when it is a non-null object whose NODE_TYPE matches.
const hasType = (node, type) => !!node && typeof node === 'object' && node[NODE_TYPE] === type;
const isAlias = (node) => hasType(node, ALIAS);
const isDocument = (node) => hasType(node, DOC);
const isMap = (node) => hasType(node, MAP);
const isPair = (node) => hasType(node, PAIR);
const isScalar = (node) => hasType(node, SCALAR$1);
const isSeq = (node) => hasType(node, SEQ);
/** True for collection nodes: maps and sequences. */
function isCollection(node) {
    return hasType(node, MAP) || hasType(node, SEQ);
}
/** True for any concrete yaml node: alias, map, scalar or sequence. */
function isNode(node) {
    if (!node || typeof node !== 'object')
        return false;
    const t = node[NODE_TYPE];
    return t === ALIAS || t === MAP || t === SCALAR$1 || t === SEQ;
}
/** Common base class: tags each instance with its NODE_TYPE marker. */
class NodeBase {
    constructor(type) {
        // Non-enumerable, so the marker stays out of enumeration and JSON.
        Object.defineProperty(this, NODE_TYPE, { value: type });
    }
}
// Control values a visitor callback may return (see visit() below).
const BREAK = Symbol('break visit');
const SKIP = Symbol('skip children');
const REMOVE = Symbol('remove node');
/**
 * Apply a visitor to an AST node or document.
 *
 * Walks through the tree (depth-first) starting from `node`, calling a
 * `visitor` function with three arguments:
 * - `key`: For sequence values and map `Pair`, the node's index in the
 *   collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
 *   `null` for the root node.
 * - `node`: The current node.
 * - `path`: The ancestry of the current node.
 *
 * The return value of the visitor may be used to control the traversal:
 * - `undefined` (default): Do nothing and continue
 * - `visit.SKIP`: Do not visit the children of this node, continue with
 *   next sibling
 * - `visit.BREAK`: Terminate traversal completely
 * - `visit.REMOVE`: Remove the current node, then continue with the next one
 * - `Node`: Replace the current node, then continue by visiting it
 * - `number`: While iterating the items of a sequence or map, set the index
 *   of the next step. This is useful especially if the index of the current
 *   node has changed.
 *
 * If `visitor` is a single function, it will be called with all values
 * encountered in the tree, including e.g. `null` values. Alternatively,
 * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
 * `Alias` and `Scalar` node.
 */
function visit(node, visitor) {
    if (!isDocument(node)) {
        _visit(null, node, visitor, Object.freeze([]));
        return;
    }
    // For documents, walk the contents with the document as the root ancestor.
    const cd = _visit(null, node.contents, visitor, Object.freeze([node]));
    if (cd === REMOVE)
        node.contents = null;
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit.BREAK = BREAK;
/** Do not visit the children of the current node */
visit.SKIP = SKIP;
/** Remove the current node */
visit.REMOVE = REMOVE;
// Recursive worker for visit(): calls the visitor for `node`, applies the
// returned control value (replace / break / skip / remove / re-index), then
// recurses into the node's children.
function _visit(key, node, visitor, path) {
    let ctrl = undefined;
    // A single-function visitor sees every value; an object visitor is
    // dispatched by node kind.
    if (typeof visitor === 'function')
        ctrl = visitor(key, node, path);
    else if (isMap(node)) {
        if (visitor.Map)
            ctrl = visitor.Map(key, node, path);
    }
    else if (isSeq(node)) {
        if (visitor.Seq)
            ctrl = visitor.Seq(key, node, path);
    }
    else if (isPair(node)) {
        if (visitor.Pair)
            ctrl = visitor.Pair(key, node, path);
    }
    else if (isScalar(node)) {
        if (visitor.Scalar)
            ctrl = visitor.Scalar(key, node, path);
    }
    else if (isAlias(node)) {
        if (visitor.Alias)
            ctrl = visitor.Alias(key, node, path);
    }
    // A node/pair return value replaces the current node in its parent, then
    // traversal restarts from the replacement.
    if (isNode(ctrl) || isPair(ctrl)) {
        const parent = path[path.length - 1];
        if (isCollection(parent)) {
            parent.items[key] = ctrl;
        }
        else if (isPair(parent)) {
            if (key === 'key')
                parent.key = ctrl;
            else
                parent.value = ctrl;
        }
        else if (isDocument(parent)) {
            parent.contents = ctrl;
        }
        else {
            const pt = isAlias(parent) ? 'alias' : 'scalar';
            throw new Error(`Cannot replace node with ${pt} parent`);
        }
        return _visit(key, ctrl, visitor, path);
    }
    // Any symbol control value (BREAK/SKIP/REMOVE) stops descent here;
    // otherwise walk the children with `node` appended to the ancestry.
    if (typeof ctrl !== 'symbol') {
        if (isCollection(node)) {
            path = Object.freeze(path.concat(node));
            for (let i = 0; i < node.items.length; ++i) {
                const ci = _visit(i, node.items[i], visitor, path);
                // A numeric return sets the index of the next item to visit.
                if (typeof ci === 'number')
                    i = ci - 1;
                else if (ci === BREAK)
                    return BREAK;
                else if (ci === REMOVE) {
                    node.items.splice(i, 1);
                    i -= 1;
                }
            }
        }
        else if (isPair(node)) {
            path = Object.freeze(path.concat(node));
            const ck = _visit('key', node.key, visitor, path);
            if (ck === BREAK)
                return BREAK;
            else if (ck === REMOVE)
                node.key = null;
            const cv = _visit('value', node.value, visitor, path);
            if (cv === BREAK)
                return BREAK;
            else if (cv === REMOVE)
                node.value = null;
        }
    }
    return ctrl;
}
// Percent-escapes for characters that may not appear unescaped in a yaml
// tag shorthand.
const escapeChars = {
    '!': '%21',
    ',': '%2C',
    '[': '%5B',
    ']': '%5D',
    '{': '%7B',
    '}': '%7D'
};
/** Percent-escape the tag-special characters in a tag name. */
const escapeTagName = (tn) => {
    let escaped = '';
    for (const ch of tn) {
        escaped += escapeChars[ch] !== undefined ? escapeChars[ch] : ch;
    }
    return escaped;
};
// Tracks the %YAML / %TAG directives in effect for a document, plus the
// `---` doc-start marker state, and resolves/stringifies tags accordingly.
class Directives {
    constructor(yaml, tags) {
        /**
         * The directives-end/doc-start marker `---`. If `null`, a marker may still be
         * included in the document's stringified representation.
         */
        this.marker = null;
        // Merge caller overrides over the library defaults
        // (yaml 1.2, '!!' -> the core-schema tag prefix).
        this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
        this.tags = Object.assign({}, Directives.defaultTags, tags);
    }
    /**
     * During parsing, get a Directives instance for the current document and
     * update the stream state according to the current version's spec.
     */
    atDocument() {
        const res = new Directives(this.yaml, this.tags);
        switch (this.yaml.version) {
            case '1.1':
                // 1.1 state carries over; reset lazily on the next add().
                this.atNextDocument = true;
                break;
            case '1.2':
                // 1.2 directives apply to a single document only.
                this.atNextDocument = false;
                this.yaml = {
                    explicit: Directives.defaultYaml.explicit,
                    version: '1.2'
                };
                this.tags = Object.assign({}, Directives.defaultTags);
                break;
        }
        return res;
    }
    /**
     * Parse one `%...` directive line into this instance's state.
     * @param onError - May be called even if the action was successful
     * @returns `true` on success
     */
    add(line, onError) {
        if (this.atNextDocument) {
            this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
            this.tags = Object.assign({}, Directives.defaultTags);
            this.atNextDocument = false;
        }
        const parts = line.trim().split(/[ \t]+/);
        const name = parts.shift();
        switch (name) {
            case '%TAG': {
                if (parts.length !== 2) {
                    onError(0, '%TAG directive should contain exactly two parts');
                    // With extra parts the first two are still used below.
                    if (parts.length < 2)
                        return false;
                }
                const [handle, prefix] = parts;
                this.tags[handle] = prefix;
                return true;
            }
            case '%YAML': {
                this.yaml.explicit = true;
                if (parts.length < 1) {
                    onError(0, '%YAML directive should contain exactly one part');
                    return false;
                }
                const [version] = parts;
                if (version === '1.1' || version === '1.2') {
                    this.yaml.version = version;
                    return true;
                }
                else {
                    onError(6, `Unsupported YAML version ${version}`, true);
                    return false;
                }
            }
            default:
                onError(0, `Unknown directive ${name}`, true);
                return false;
        }
    }
    /**
     * Resolves a tag, matching handles to those defined in %TAG directives.
     *
     * @returns Resolved tag, which may also be the non-specific tag `'!'` or a
     * `'!local'` tag, or `null` if unresolvable.
     */
    tagName(source, onError) {
        if (source === '!')
            return '!'; // non-specific tag
        if (source[0] !== '!') {
            onError(`Not a valid tag: ${source}`);
            return null;
        }
        // Verbatim form: !<tag>
        if (source[1] === '<') {
            const verbatim = source.slice(2, -1);
            if (verbatim === '!' || verbatim === '!!') {
                onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
                return null;
            }
            if (source[source.length - 1] !== '>')
                onError('Verbatim tags must end with a >');
            return verbatim;
        }
        // Shorthand form: split into a handle (ends with '!') and suffix.
        const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);
        if (!suffix)
            onError(`The ${source} tag has no suffix`);
        const prefix = this.tags[handle];
        if (prefix)
            return prefix + decodeURIComponent(suffix);
        if (handle === '!')
            return source; // local tag
        onError(`Could not resolve tag: ${source}`);
        return null;
    }
    /**
     * Given a fully resolved tag, returns its printable string form,
     * taking into account current tag prefixes and defaults.
     */
    tagString(tag) {
        for (const [handle, prefix] of Object.entries(this.tags)) {
            if (tag.startsWith(prefix))
                return handle + escapeTagName(tag.substring(prefix.length));
        }
        // No matching prefix: local tags print as-is, others verbatim.
        return tag[0] === '!' ? tag : `!<${tag}>`;
    }
    // Stringify the %YAML / %TAG directive lines for `doc`, emitting only
    // those %TAG lines whose prefix is actually used in the document.
    toString(doc) {
        const lines = this.yaml.explicit
            ? [`%YAML ${this.yaml.version || '1.2'}`]
            : [];
        const tagEntries = Object.entries(this.tags);
        let tagNames;
        if (doc && tagEntries.length > 0 && isNode(doc.contents)) {
            // Collect every tag present anywhere in the document.
            const tags = {};
            visit(doc.contents, (_key, node) => {
                if (isNode(node) && node.tag)
                    tags[node.tag] = true;
            });
            tagNames = Object.keys(tags);
        }
        else
            tagNames = [];
        for (const [handle, prefix] of tagEntries) {
            // The default '!!' handle needs no declaration.
            if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
                continue;
            if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
                lines.push(`%TAG ${handle} ${prefix}`);
        }
        return lines.join('\n');
    }
}
// Library defaults, merged into every instance by the constructor.
Directives.defaultYaml = { explicit: false, version: '1.2' };
Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
/**
 * Recursively convert any node or its contents to native JavaScript
 *
 * @param value - The input value
 * @param arg - If `value` defines a `toJSON()` method, use this
 *   as its first argument
 * @param ctx - Conversion context, originally set in Document#toJS(). If
 *   `{ keep: true }` is not set, output should be suitable for JSON
 *   stringification.
 */
function toJS(value, arg, ctx) {
    if (Array.isArray(value)) {
        return value.map((v, i) => toJS(v, String(i), ctx));
    }
    if (value && typeof value.toJSON === 'function') {
        if (!ctx) {
            return value.toJSON(arg);
        }
        const anchor = ctx.anchors && ctx.anchors.get(value);
        if (anchor) {
            // Let the anchored node report its own JS representation back,
            // so aliases resolved later can reuse it.
            ctx.onCreate = (res) => {
                anchor.res = res;
                delete ctx.onCreate;
            };
        }
        const res = value.toJSON(arg, ctx);
        if (anchor && ctx.onCreate) {
            ctx.onCreate(res);
        }
        return res;
    }
    // BigInt is not JSON-safe; downcast unless the caller opts out.
    const keepBigInt = ctx && ctx.keep;
    if (typeof value === 'bigint' && !keepBigInt) {
        return Number(value);
    }
    return value;
}
// A yaml alias node (`*anchor`): a reference back to a previously
// anchored source node.
class Alias extends NodeBase {
    constructor(source) {
        super(ALIAS);
        // The anchored node this alias refers to.
        this.source = source;
        // Aliases never carry their own tag; fail loudly on any attempt.
        Object.defineProperty(this, 'tag', {
            set() {
                throw new Error('Alias nodes cannot have tags');
            }
        });
    }
    // Resolve the alias to the already-computed JS value of its source node.
    toJSON(arg, ctx) {
        if (!ctx)
            return toJS(this.source, typeof arg === 'string' ? arg : null, ctx);
        const { anchors, maxAliasCount } = ctx;
        const anchor = anchors && anchors.get(this.source);
        /* istanbul ignore if */
        if (!anchor || anchor.res === undefined) {
            const msg = 'This should not happen: Alias anchor was not resolved?';
            throw new ReferenceError(msg);
        }
        // Guard against aliases being used to blow up the output size
        // (billion-laughs style): track how much each alias would expand to.
        if (maxAliasCount >= 0) {
            anchor.count += 1;
            if (anchor.aliasCount === 0)
                anchor.aliasCount = getAliasCount(this.source, anchors);
            if (anchor.count * anchor.aliasCount > maxAliasCount) {
                const msg = 'Excessive alias count indicates a resource exhaustion attack';
                throw new ReferenceError(msg);
            }
        }
        return anchor.res;
    }
    // Only called when stringifying an alias mapping key while constructing
    // Object output.
    toString({ anchors, doc, implicitKey, inStringifyKey }, _onComment, _onChompKeep) {
        let anchor = Object.keys(anchors).find(a => anchors[a] === this.source);
        if (!anchor && inStringifyKey)
            anchor = doc.anchors.getName(this.source) || doc.anchors.newName();
        if (anchor)
            return `*${anchor}${implicitKey ? ' ' : ''}`;
        const msg = doc.anchors.getName(this.source)
            ? 'Alias node must be after source node'
            : 'Source node not found for alias node';
        throw new Error(`${msg} [${this.range}]`);
    }
}
/**
 * Estimate how many nodes an alias to `node` would pull into the output;
 * used by Alias#toJSON for its resource-exhaustion guard.
 */
function getAliasCount(node, anchors) {
    if (isAlias(node)) {
        const anchor = anchors && anchors.get(node.source);
        if (!anchor) {
            return 0;
        }
        return anchor.count * anchor.aliasCount;
    }
    if (isCollection(node)) {
        // A collection costs as much as its most expensive item.
        let max = 0;
        for (const item of node.items) {
            max = Math.max(max, getAliasCount(item, anchors));
        }
        return max;
    }
    if (isPair(node)) {
        const keyCount = getAliasCount(node.key, anchors);
        const valueCount = getAliasCount(node.value, anchors);
        return Math.max(keyCount, valueCount);
    }
    return 1;
}
/**
 * True when `value` can be held directly by a Scalar node: any falsy value
 * or non-object, non-function primitive.
 */
const isScalarValue = (value) => {
    if (!value) {
        return true;
    }
    const t = typeof value;
    return t !== 'function' && t !== 'object';
};
/** A yaml scalar node wrapping a single value. */
class Scalar extends NodeBase {
    constructor(value) {
        super(SCALAR$1);
        this.value = value;
    }
    /** With `ctx.keep` set, returns the raw value; otherwise converts via toJS. */
    toJSON(arg, ctx) {
        if (ctx && ctx.keep) {
            return this.value;
        }
        return toJS(this.value, arg, ctx);
    }
    toString() {
        return String(this.value);
    }
}
// Stringification styles a scalar may carry.
Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
Scalar.PLAIN = 'PLAIN';
Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
// Namespace prefix expanded from the '!!' tag shorthand (e.g. '!!str').
const defaultTagPrefix = 'tag:yaml.org,2002:';
|
||
|
// Resolves the schema tag to use for `value`. With an explicit `tagName`,
// the matching tag is required (preferring the format-free variant); without
// one, the first format-free tag whose identify() accepts the value is used.
function findTagObject(value, tagName, tags) {
    if (tagName) {
        const candidates = tags.filter(t => t.tag === tagName);
        const chosen = candidates.find(t => !t.format) || candidates[0];
        if (!chosen)
            throw new Error(`Tag ${tagName} not found`);
        return chosen;
    }
    return tags.find(t => t.identify && t.identify(value) && !t.format);
}
|
||
|
// Wraps a plain JavaScript value as a YAML node, using the schema carried in
// `ctx`. Already-wrapped nodes pass through unchanged; a bare Pair is wrapped
// in a new map node. Repeated references to the same object are turned into
// aliases via ctx.onAlias, and ctx.prevObjects tracks objects already seen so
// circular structures can resolve.
function createNode(value, tagName, ctx) {
    var _a, _b;
    if (isNode(value))
        return value;
    if (isPair(value)) {
        // A lone Pair gets its own single-entry map container.
        const map = (_b = (_a = ctx.schema.map).createNode) === null || _b === void 0 ? void 0 : _b.call(_a, ctx.schema, null, ctx);
        map.items.push(value);
        return map;
    }
    if (value instanceof String ||
        value instanceof Number ||
        value instanceof Boolean ||
        (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere
    ) {
        // Unwrap boxed primitives to their primitive value, mirroring
        // https://tc39.es/ecma262/#sec-serializejsonproperty
        value = value.valueOf();
    }
    const { onAlias, onTagObj, prevObjects } = ctx;
    const { map, seq, tags } = ctx.schema;
    // Expand the '!!' shorthand into the full default tag namespace.
    if (tagName && tagName.startsWith('!!'))
        tagName = defaultTagPrefix + tagName.slice(2);
    let tagObj = findTagObject(value, tagName, tags);
    if (!tagObj) {
        // No matching tag: honour toJSON(), then fall back to a plain scalar
        // for primitives, or a generic map/seq for objects.
        if (value && typeof value.toJSON === 'function')
            value = value.toJSON();
        if (!value || typeof value !== 'object')
            return new Scalar(value);
        tagObj =
            value instanceof Map ? map : Symbol.iterator in Object(value) ? seq : map;
    }
    if (onTagObj) {
        // One-shot callback reporting which tag was chosen.
        onTagObj(tagObj);
        delete ctx.onTagObj;
    }
    // Detect duplicate references to the same object & use Alias nodes for all
    // after first. The `ref` wrapper allows for circular references to resolve.
    const ref = { value: undefined, node: undefined };
    if (value && typeof value === 'object') {
        const prev = prevObjects.get(value);
        if (prev)
            return onAlias(prev);
        ref.value = value;
        prevObjects.set(value, ref);
    }
    const node = (tagObj === null || tagObj === void 0 ? void 0 : tagObj.createNode)
        ? tagObj.createNode(ctx.schema, value, ctx)
        : new Scalar(value);
    if (tagName)
        node.tag = tagName;
    ref.node = node;
    return node;
}
|
||
|
|
||
|
// Prepends `comment` (possibly multi-line) above `str`, prefixing each
// comment line with '#' and indenting continuation lines with `indent`.
function addCommentBefore(str, indent, comment) {
    if (!comment)
        return str;
    // Insert the indent + '#' prefix at the start of every continuation line.
    const body = comment.replace(/[\s\S]^/gm, `$&${indent}#`);
    return `#${body}\n${indent}${str}`;
}
|
||
|
// Appends `comment` to `str`: multi-line comments go on their own '#'-prefixed
// lines below, a single-line comment is appended after a separating space
// (omitted when `str` already ends with one).
function addComment(str, indent, comment) {
    if (!comment)
        return str;
    if (comment.includes('\n'))
        return `${str}\n` + comment.replace(/^/gm, `${indent || ''}#`);
    const sep = str.endsWith(' ') ? '' : ' ';
    return `${str}${sep}#${comment}`;
}
|
||
|
|
||
|
// Folding modes for foldFlowLines(): they control where line breaks may be
// inserted and whether escape sequences / extra indentation must be respected.
const FOLD_FLOW = 'flow';
const FOLD_BLOCK = 'block';
const FOLD_QUOTED = 'quoted';
|
||
|
/**
 * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
 * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
 * terminated with `\n` and started with `indent`.
 *
 * In 'block' mode, more-indented lines are never folded. In 'quoted' mode,
 * backslash escape sequences are skipped as units, and a fold with no natural
 * split point inserts an escaped newline ('\' at end of line). `onFold` is
 * called once if any folding happens; `onOverflow` is called once if a line
 * could not be kept within `lineWidth`.
 */
function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
    if (!lineWidth || lineWidth < 0)
        return text;
    // Distance from one fold point to the next line's limit.
    const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
    if (text.length <= endStep)
        return text;
    const folds = [];
    // Fold indices that need a trailing '\' (quoted mode only).
    const escapedFolds = {};
    let end = lineWidth - indent.length;
    if (typeof indentAtStart === 'number') {
        if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
            folds.push(0);
        else
            end = lineWidth - indentAtStart;
    }
    let split = undefined;
    let prev = undefined;
    let overflow = false;
    let i = -1;
    let escStart = -1;
    let escEnd = -1;
    if (mode === FOLD_BLOCK) {
        // Skip an initial more-indented block, which must not be folded.
        i = consumeMoreIndentedLines(text, i);
        if (i !== -1)
            end = i + endStep;
    }
    for (let ch; (ch = text[(i += 1)]);) {
        if (mode === FOLD_QUOTED && ch === '\\') {
            // Skip over the whole escape sequence so it is never split.
            escStart = i;
            switch (text[i + 1]) {
                case 'x':
                    i += 3;
                    break;
                case 'u':
                    i += 5;
                    break;
                case 'U':
                    i += 9;
                    break;
                default:
                    i += 1;
            }
            escEnd = i;
        }
        if (ch === '\n') {
            if (mode === FOLD_BLOCK)
                i = consumeMoreIndentedLines(text, i);
            end = i + endStep;
            split = undefined;
        }
        else {
            if (ch === ' ' &&
                prev &&
                prev !== ' ' &&
                prev !== '\n' &&
                prev !== '\t') {
                // space surrounded by non-space can be replaced with newline + indent
                const next = text[i + 1];
                if (next && next !== ' ' && next !== '\n' && next !== '\t')
                    split = i;
            }
            if (i >= end) {
                if (split) {
                    folds.push(split);
                    end = split + endStep;
                    split = undefined;
                }
                else if (mode === FOLD_QUOTED) {
                    // white-space collected at end may stretch past lineWidth
                    while (prev === ' ' || prev === '\t') {
                        prev = ch;
                        ch = text[(i += 1)];
                        overflow = true;
                    }
                    // Account for newline escape, but don't break preceding escape
                    const j = i > escEnd + 1 ? i - 2 : escStart - 1;
                    // Bail out if lineWidth & minContentWidth are shorter than an escape string
                    if (escapedFolds[j])
                        return text;
                    folds.push(j);
                    escapedFolds[j] = true;
                    end = j + endStep;
                    split = undefined;
                }
                else {
                    overflow = true;
                }
            }
        }
        prev = ch;
    }
    if (overflow && onOverflow)
        onOverflow();
    if (folds.length === 0)
        return text;
    if (onFold)
        onFold();
    // Rebuild the text, replacing each fold point with '\n' + indent.
    let res = text.slice(0, folds[0]);
    for (let i = 0; i < folds.length; ++i) {
        const fold = folds[i];
        const end = folds[i + 1] || text.length;
        if (fold === 0)
            res = `\n${indent}${text.slice(0, end)}`;
        else {
            if (mode === FOLD_QUOTED && escapedFolds[fold])
                res += `${text[fold]}\\`;
            res += `\n${indent}${text.slice(fold + 1, end)}`;
        }
    }
    return res;
}
|
||
|
/**
|
||
|
/**
 * Presumes `i + 1` is at the start of a line
 * @returns index of last newline in more-indented block
 */
function consumeMoreIndentedLines(text, i) {
    // Look at the first character of the next line.
    let next = text[i + 1];
    while (next === ' ' || next === '\t') {
        // Advance to this line's terminating newline (or the end of text).
        let ch;
        do {
            i += 1;
            ch = text[i];
        } while (ch && ch !== '\n');
        next = text[i + 1];
    }
    return i;
}
|
||
|
|
||
|
const getFoldOptions = (ctx) => ({
|
||
|
indentAtStart: ctx.indentAtStart,
|
||
|
lineWidth: ctx.options.lineWidth,
|
||
|
minContentWidth: ctx.options.minContentWidth
|
||
|
});
|
||
|
// True if any line of `str` starts with a document marker ('---' or '...').
// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
// presume that's starting a new document.
const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str);
|
||
|
// Returns true when any line of `str` is longer than lineWidth - indentLength.
// A falsy or negative lineWidth disables the check.
function lineLengthOverLimit(str, lineWidth, indentLength) {
    if (!lineWidth || lineWidth < 0)
        return false;
    const limit = lineWidth - indentLength;
    const strLen = str.length;
    if (strLen <= limit)
        return false;
    // Scan line by line; an over-limit string with no newline splitting it
    // into short-enough pieces must contain an over-long line.
    let start = 0;
    for (let i = 0; i < strLen; ++i) {
        if (str[i] !== '\n')
            continue;
        if (i - start > limit)
            return true;
        start = i + 1;
        if (strLen - start <= limit)
            return false;
    }
    return true;
}
|
||
|
// Renders `value` as a double-quoted YAML scalar. Starts from the JSON string
// form, then rewrites JSON \uXXXX escapes to shorter YAML escapes (\0, \a,
// \v, \e, \N, \_, \L, \P, \xXX), expands \n escapes into real line breaks for
// long multi-line strings, and escapes spaces adjacent to newlines so later
// folding cannot swallow them. Finally folds to the configured line width
// unless used as an implicit key.
function doubleQuotedString(value, ctx) {
    const json = JSON.stringify(value);
    if (ctx.options.doubleQuotedAsJSON)
        return json;
    const { implicitKey } = ctx;
    const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
    const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
    let str = '';
    let start = 0;
    for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
        if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
            // space before newline needs to be escaped to not be folded
            str += json.slice(start, i) + '\\ ';
            i += 1;
            start = i;
            ch = '\\';
        }
        if (ch === '\\')
            switch (json[i + 1]) {
                case 'u':
                    {
                        str += json.slice(start, i);
                        const code = json.substr(i + 2, 4);
                        // Translate well-known code points to YAML short escapes.
                        switch (code) {
                            case '0000':
                                str += '\\0';
                                break;
                            case '0007':
                                str += '\\a';
                                break;
                            case '000b':
                                str += '\\v';
                                break;
                            case '001b':
                                str += '\\e';
                                break;
                            case '0085':
                                str += '\\N';
                                break;
                            case '00a0':
                                str += '\\_';
                                break;
                            case '2028':
                                str += '\\L';
                                break;
                            case '2029':
                                str += '\\P';
                                break;
                            default:
                                if (code.substr(0, 2) === '00')
                                    str += '\\x' + code.substr(2);
                                else
                                    str += json.substr(i, 6);
                        }
                        i += 5;
                        start = i + 1;
                    }
                    break;
                case 'n':
                    if (implicitKey ||
                        json[i + 2] === '"' ||
                        json.length < minMultiLineLength) {
                        // Keep the \n escape: keys, trailing newlines, and
                        // short strings stay on a single line.
                        i += 1;
                    }
                    else {
                        // folding will eat first newline
                        str += json.slice(start, i) + '\n\n';
                        while (json[i + 2] === '\\' &&
                            json[i + 3] === 'n' &&
                            json[i + 4] !== '"') {
                            str += '\n';
                            i += 2;
                        }
                        str += indent;
                        // space after newline needs to be escaped to not be folded
                        if (json[i + 2] === ' ')
                            str += '\\';
                        i += 1;
                        start = i + 1;
                    }
                    break;
                default:
                    // Any other escape: skip its second character.
                    i += 1;
            }
    }
    str = start ? str + json.slice(start) : json;
    return implicitKey
        ? str
        : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx));
}
|
||
|
// Renders `value` as a single-quoted YAML scalar, falling back to double
// quotes when single quotes cannot represent it: newlines in implicit keys,
// or whitespace adjacent to a newline anywhere.
function singleQuotedString(value, ctx) {
    const forbidden = ctx.implicitKey
        ? /\n/
        : // single quoted string can't have leading or trailing whitespace around newline
            /[ \t]\n|\n[ \t]/;
    if (forbidden.test(value))
        return doubleQuotedString(value, ctx);
    const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
    // Double up embedded quotes, and indent continuation lines.
    const escaped = value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`);
    const res = `'${escaped}'`;
    if (ctx.implicitKey)
        return res;
    return foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx));
}
|
||
|
// Renders a string as a YAML block scalar ('|' literal or '>' folded),
// computing the block header: style, explicit indentation indicator when the
// content starts with whitespace, and the chomping indicator ('-' strip /
// '+' keep) from the trailing whitespace. Falls back to double quotes when
// block form cannot represent the value.
function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
    // 1. Block can't end in whitespace unless the last line is non-empty.
    // 2. Strings consisting of only whitespace are best rendered explicitly.
    if (/\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
        return doubleQuotedString(value, ctx);
    }
    const indent = ctx.indent ||
        (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');
    const indentSize = indent ? '2' : '1'; // root is at -1
    // Literal unless explicitly folded, or implicitly when no line overflows.
    const literal = type === Scalar.BLOCK_FOLDED
        ? false
        : type === Scalar.BLOCK_LITERAL
            ? true
            : !lineLengthOverLimit(value, ctx.options.lineWidth, indent.length);
    let header = literal ? '|' : '>';
    if (!value)
        return header + '\n';
    let wsStart = '';
    let wsEnd = '';
    // Strip leading/trailing whitespace out of `value`, recording it in
    // wsStart/wsEnd and updating the header's chomping/indent indicators.
    value = value
        .replace(/[\n\t ]*$/, ws => {
        const n = ws.indexOf('\n');
        if (n === -1) {
            header += '-'; // strip
        }
        else if (value === ws || n !== ws.length - 1) {
            header += '+'; // keep
            if (onChompKeep)
                onChompKeep();
        }
        wsEnd = ws.replace(/\n$/, '');
        return '';
    })
        .replace(/^[\n ]*/, ws => {
        if (ws.indexOf(' ') !== -1)
            header += indentSize;
        const m = ws.match(/ +$/);
        if (m) {
            wsStart = ws.slice(0, -m[0].length);
            return m[0];
        }
        else {
            wsStart = ws;
            return '';
        }
    });
    if (wsEnd)
        wsEnd = wsEnd.replace(/\n+(?!\n|$)/g, `$&${indent}`);
    if (wsStart)
        wsStart = wsStart.replace(/\n+/g, `$&${indent}`);
    if (comment) {
        // The node comment rides on the header line, collapsed to one line.
        header += ' #' + comment.replace(/ ?[\r\n]+/g, ' ');
        if (onComment)
            onComment();
    }
    if (!value)
        return `${header}${indentSize}\n${indent}${wsEnd}`;
    if (literal) {
        value = value.replace(/\n+/g, `$&${indent}`);
        return `${header}\n${indent}${wsStart}${value}${wsEnd}`;
    }
    value = value
        .replace(/\n+/g, '\n$&')
        .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
        // ^ ind.line ^ empty ^ capture next empty lines only at end of indent
        .replace(/\n+/g, `$&${indent}`);
    const body = foldFlowLines(`${wsStart}${value}${wsEnd}`, indent, FOLD_BLOCK, getFoldOptions(ctx));
    return `${header}\n${indent}${body}`;
}
|
||
|
// Renders a string as a plain (unquoted) YAML scalar when legal, otherwise
// delegates to the single-/double-quoted or block stringifiers. The choice
// depends on context (implicit key, flow collection, indentation) and on the
// characters the value contains.
function plainString(item, ctx, onComment, onChompKeep) {
    var _a;
    const { comment, type, value } = item;
    const { actualString, implicitKey, indent, inFlow } = ctx;
    // Flow indicators and newlines are never allowed in these positions.
    if ((implicitKey && /[\n[\]{},]/.test(value)) ||
        (inFlow && /[[\]{},]/.test(value))) {
        return doubleQuotedString(value, ctx);
    }
    if (!value ||
        /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
        // Value cannot be plain; pick a quote style that avoids escaping the
        // quote character the value itself contains, when possible.
        const hasDouble = value.indexOf('"') !== -1;
        const hasSingle = value.indexOf("'") !== -1;
        let quotedString;
        if (hasDouble && !hasSingle) {
            quotedString = singleQuotedString;
        }
        else if (hasSingle && !hasDouble) {
            quotedString = doubleQuotedString;
        }
        else if (ctx.options.singleQuote) {
            quotedString = singleQuotedString;
        }
        else {
            quotedString = doubleQuotedString;
        }
        // not allowed:
        // - empty string, '-' or '?'
        // - start with an indicator character (except [?:-]) or /[?-] /
        // - '\n ', ': ' or ' \n' anywhere
        // - '#' not preceded by a non-space char
        // - end with ' ' or ':'
        return implicitKey || inFlow || value.indexOf('\n') === -1
            ? quotedString(value, ctx)
            : blockString(item, ctx, onComment, onChompKeep);
    }
    if (!implicitKey &&
        !inFlow &&
        type !== Scalar.PLAIN &&
        value.indexOf('\n') !== -1) {
        // Where allowed & type not set explicitly, prefer block style for multiline strings
        return blockString(item, ctx, onComment, onChompKeep);
    }
    if (indent === '' && containsDocumentMarker(value)) {
        // At the document root a '---'/'...' line would end the document;
        // force an indented block scalar instead.
        ctx.forceBlockIndent = true;
        return blockString(item, ctx, onComment, onChompKeep);
    }
    const str = value.replace(/\n+/g, `$&\n${indent}`);
    // Verify that output will be parsed as a string, as e.g. plain numbers and
    // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
    // and others in v1.1.
    if (actualString) {
        for (const tag of ctx.doc.schema.tags) {
            if (tag.default &&
                tag.tag !== 'tag:yaml.org,2002:str' &&
                ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(str)))
                return doubleQuotedString(value, ctx);
        }
    }
    const body = implicitKey
        ? str
        : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx));
    if (comment &&
        !inFlow &&
        (body.indexOf('\n') !== -1 || comment.indexOf('\n') !== -1)) {
        // A multi-line body or comment puts the comment above the value.
        if (onComment)
            onComment();
        return addCommentBefore(body, indent, comment);
    }
    return body;
}
|
||
|
// Stringifies a (scalar) string node, dispatching on its presentation `type`.
// Non-string values are first coerced with String(). Control characters and
// unpaired surrogates force double quotes. If the requested type cannot be
// produced, the configured default key/string type is used instead.
function stringifyString(item, ctx, onComment, onChompKeep) {
    const { implicitKey, inFlow } = ctx;
    const ss = typeof item.value === 'string'
        ? item
        : Object.assign({}, item, { value: String(item.value) });
    let { type } = item;
    if (type !== Scalar.QUOTE_DOUBLE) {
        // force double quotes on control characters & unpaired surrogates
        if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value))
            type = Scalar.QUOTE_DOUBLE;
    }
    // Returns null when `_type` is unknown, so a default can be retried below.
    const _stringify = (_type) => {
        switch (_type) {
            case Scalar.BLOCK_FOLDED:
            case Scalar.BLOCK_LITERAL:
                return implicitKey || inFlow
                    ? doubleQuotedString(ss.value, ctx) // blocks are not valid inside flow containers
                    : blockString(ss, ctx, onComment, onChompKeep);
            case Scalar.QUOTE_DOUBLE:
                return doubleQuotedString(ss.value, ctx);
            case Scalar.QUOTE_SINGLE:
                return singleQuotedString(ss.value, ctx);
            case Scalar.PLAIN:
                return plainString(ss, ctx, onComment, onChompKeep);
            default:
                return null;
        }
    };
    let res = _stringify(type);
    if (res === null) {
        const { defaultKeyType, defaultStringType } = ctx.options;
        const t = (implicitKey && defaultKeyType) || defaultStringType;
        res = _stringify(t);
        if (res === null)
            throw new Error(`Unsupported default string type ${t}`);
    }
    return res;
}
|
||
|
|
||
|
// Builds the context object threaded through the stringifiers: a fresh anchor
// registry, the owning document, the root indentation, and the stringify
// options merged over their defaults (user options win).
const createStringifyContext = (doc, options) => {
    const mergedOptions = Object.assign({
        defaultKeyType: null,
        defaultStringType: 'PLAIN',
        directives: null,
        doubleQuotedAsJSON: false,
        doubleQuotedMinMultiLineLength: 40,
        falseStr: 'false',
        indentSeq: true,
        lineWidth: 80,
        minContentWidth: 20,
        nullStr: 'null',
        simpleKeys: false,
        singleQuote: false,
        trueStr: 'true'
    }, options);
    return {
        anchors: Object.create(null),
        doc,
        indent: '',
        indentStep: typeof options.indent === 'number' ? ' '.repeat(options.indent) : ' ',
        options: mergedOptions
    };
};
|
||
|
// Resolves the schema tag for an already-built node. An explicit `item.tag`
// wins (preferring an exact format match); otherwise scalars are matched by
// identify() on their value, collections by nodeClass. Unresolvable nodes
// are an error.
function getTagObject(tags, item) {
    if (item.tag) {
        const byTag = tags.filter(t => t.tag === item.tag);
        if (byTag.length > 0)
            return byTag.find(t => t.format === item.format) || byTag[0];
    }
    let tagObj = undefined;
    let obj;
    if (isScalar(item)) {
        obj = item.value;
        const byValue = tags.filter(t => t.identify && t.identify(obj));
        tagObj =
            byValue.find(t => t.format === item.format) || byValue.find(t => !t.format);
    }
    else {
        obj = item;
        tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
    }
    if (!tagObj) {
        // @ts-ignore
        const name = obj && obj.constructor ? obj.constructor.name : typeof obj;
        throw new Error(`Tag not resolved for ${name} value`);
    }
    return tagObj;
}
|
||
|
// needs to be called before value stringifier to allow for circular anchor refs
// Builds the node's property prefix: its '&anchor' (registering it in
// ctx.anchors so later alias keys can resolve it) and its '!tag' when the
// tag is explicit or not the schema default.
function stringifyProps(node, tagObj, { anchors, doc }) {
    const props = [];
    const anchor = doc.anchors.getName(node);
    if (anchor) {
        anchors[anchor] = node;
        props.push(`&${anchor}`);
    }
    if (node.tag)
        props.push(doc.directives.tagString(node.tag));
    else if (!tagObj.default)
        props.push(doc.directives.tagString(tagObj.tag));
    return props.join(' ');
}
|
||
|
// Stringifies any item (node, pair, or plain value) together with its
// anchor/tag props. Pairs and aliases delegate to their own toString; plain
// values are first wrapped via doc.createNode. Props must be emitted before
// the value so circular anchor references can resolve.
function stringify$1(item, ctx, onComment, onChompKeep) {
    if (isPair(item))
        return item.toString(ctx, onComment, onChompKeep);
    if (isAlias(item))
        return item.toString(ctx);
    let tagObj = undefined;
    const node = isNode(item)
        ? item
        : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
    if (!tagObj)
        tagObj = getTagObject(ctx.doc.schema.tags, node);
    const props = stringifyProps(node, tagObj, ctx);
    // The props prefix occupies space on the first line; widen indentAtStart
    // so folding accounts for it.
    if (props.length > 0)
        ctx.indentAtStart = (ctx.indentAtStart || 0) + props.length + 1;
    const str = typeof tagObj.stringify === 'function'
        ? tagObj.stringify(node, ctx, onComment, onChompKeep)
        : isScalar(node)
            ? stringifyString(node, ctx, onComment, onChompKeep)
            : node.toString(ctx, onComment, onChompKeep);
    if (!props)
        return str;
    // Scalars and flow collections share the props' line; block collections
    // start on the next line.
    return isScalar(node) || str[0] === '{' || str[0] === '['
        ? `${props} ${str}`
        : `${props}\n${ctx.indent}${str}`;
}
|
||
|
|
||
|
function collectionFromPath(schema, path, value) {
|
||
|
let v = value;
|
||
|
for (let i = path.length - 1; i >= 0; --i) {
|
||
|
const k = path[i];
|
||
|
if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
|
||
|
const a = [];
|
||
|
a[k] = v;
|
||
|
v = a;
|
||
|
}
|
||
|
else {
|
||
|
const o = {};
|
||
|
Object.defineProperty(o, typeof k === 'symbol' ? k : String(k), {
|
||
|
value: v,
|
||
|
writable: true,
|
||
|
enumerable: true,
|
||
|
configurable: true
|
||
|
});
|
||
|
v = o;
|
||
|
}
|
||
|
}
|
||
|
return createNode(v, undefined, {
|
||
|
onAlias() {
|
||
|
throw new Error('Repeated objects are not supported here');
|
||
|
},
|
||
|
prevObjects: new Map(),
|
||
|
schema
|
||
|
});
|
||
|
}
|
||
|
// null, undefined, or an empty non-string iterable (e.g. [])
const isEmptyPath = (path) => {
    if (path == null)
        return true;
    return typeof path === 'object' && !!path[Symbol.iterator]().next().done;
};
|
||
|
// Abstract base for YAML collection nodes (maps and sequences). Subclasses
// provide the single-level add/delete/get/has/set methods; the *In variants
// here walk a path of keys through nested collections, creating intermediate
// levels on demand where the schema allows it.
class Collection extends NodeBase {
    constructor(type, schema) {
        super(type);
        // Non-enumerable so the schema is not visited during serialization.
        Object.defineProperty(this, 'schema', {
            value: schema,
            configurable: true,
            enumerable: false,
            writable: true
        });
    }
    /**
     * Adds a value to the collection. For `!!map` and `!!omap` the value must
     * be a Pair instance or a `{ key, value }` object, which may not have a key
     * that already exists in the map.
     */
    addIn(path, value) {
        if (isEmptyPath(path))
            this.add(value);
        else {
            const [key, ...rest] = path;
            const node = this.get(key, true);
            if (isCollection(node))
                node.addIn(rest, value);
            else if (node === undefined && this.schema)
                // Missing intermediate levels are created from the remaining path.
                this.set(key, collectionFromPath(this.schema, rest, value));
            else
                throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
        }
    }
    /**
     * Removes a value from the collection.
     * @returns `true` if the item was found and removed.
     */
    deleteIn([key, ...rest]) {
        if (rest.length === 0)
            return this.delete(key);
        const node = this.get(key, true);
        if (isCollection(node))
            return node.deleteIn(rest);
        else
            throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
    }
    /**
     * Returns item at `key`, or `undefined` if not found. By default unwraps
     * scalar values from their surrounding node; to disable set `keepScalar` to
     * `true` (collections are always returned intact).
     */
    getIn([key, ...rest], keepScalar) {
        const node = this.get(key, true);
        if (rest.length === 0)
            return !keepScalar && isScalar(node) ? node.value : node;
        else
            return isCollection(node) ? node.getIn(rest, keepScalar) : undefined;
    }
    // True when every item has a null-ish value; with `allowScalar`, a
    // comment- and tag-free null Scalar also counts.
    hasAllNullValues(allowScalar) {
        return this.items.every(node => {
            if (!node || isNode(node))
                return false;
            const n = node.value;
            return (n == null ||
                (allowScalar &&
                    isScalar(n) &&
                    n.value == null &&
                    !n.commentBefore &&
                    !n.comment &&
                    !n.tag));
        });
    }
    /**
     * Checks if the collection includes a value with the key `key`.
     */
    hasIn([key, ...rest]) {
        if (rest.length === 0)
            return this.has(key);
        const node = this.get(key, true);
        return isCollection(node) ? node.hasIn(rest) : false;
    }
    /**
     * Sets a value in this collection. For `!!set`, `value` needs to be a
     * boolean to add/remove the item from the set.
     */
    setIn([key, ...rest], value) {
        if (rest.length === 0) {
            this.set(key, value);
        }
        else {
            const node = this.get(key, true);
            if (isCollection(node))
                node.setIn(rest, value);
            else if (node === undefined && this.schema)
                this.set(key, collectionFromPath(this.schema, rest, value));
            else
                throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
        }
    }
    // Shared stringifier for maps and sequences. `blockItem` renders one item
    // for block style, `flowChars` supplies the '{}'/'[]' delimiters, and
    // `itemIndent` is the indentation applied to child items.
    _toString(ctx, { blockItem, flowChars, itemIndent }, onComment, onChompKeep) {
        const { indent, indentStep } = ctx;
        const inFlow = this.flow || ctx.inFlow;
        if (inFlow)
            itemIndent += indentStep;
        ctx = Object.assign({}, ctx, { indent: itemIndent, inFlow, type: null });
        let chompKeep = false;
        let hasItemWithNewLine = false;
        // First pass: stringify each item (plus any leading comments) into a
        // flat list of { type, str } entries.
        const nodes = this.items.reduce((nodes, item, i) => {
            let comment = null;
            if (isNode(item) || isPair(item)) {
                if (!chompKeep && item.spaceBefore)
                    nodes.push({ type: 'comment', str: '' });
                if (item.commentBefore) {
                    // This match will always succeed on a non-empty string
                    for (const line of item.commentBefore.match(/^.*$/gm))
                        nodes.push({ type: 'comment', str: `#${line}` });
                }
                if (item.comment)
                    comment = item.comment;
                const pair = item; // Apply guards manually in the following
                // Any comment inside a flow collection forces multi-line output.
                if (inFlow &&
                    ((!chompKeep && item.spaceBefore) ||
                        item.commentBefore ||
                        item.comment ||
                        (pair.key && (pair.key.commentBefore || pair.key.comment)) ||
                        (pair.value && (pair.value.commentBefore || pair.value.comment))))
                    hasItemWithNewLine = true;
            }
            chompKeep = false;
            let str = stringify$1(item, ctx, () => (comment = null), () => (chompKeep = true));
            if (inFlow && !hasItemWithNewLine && str.includes('\n'))
                hasItemWithNewLine = true;
            if (inFlow && i < this.items.length - 1)
                str += ',';
            str = addComment(str, itemIndent, comment);
            if (chompKeep && (comment || inFlow))
                chompKeep = false;
            nodes.push({ type: 'item', str });
            return nodes;
        }, []);
        // Second pass: join the entries according to flow/block style.
        let str;
        if (nodes.length === 0) {
            str = flowChars.start + flowChars.end;
        }
        else if (inFlow) {
            const { start, end } = flowChars;
            const strings = nodes.map(n => n.str);
            // Short, comment-free flow collections stay on a single line.
            if (hasItemWithNewLine ||
                strings.reduce((sum, str) => sum + str.length + 2, 2) >
                    Collection.maxFlowStringSingleLineLength) {
                str = start;
                for (const s of strings) {
                    str += s ? `\n${indentStep}${indent}${s}` : '\n';
                }
                str += `\n${indent}${end}`;
            }
            else {
                str = `${start} ${strings.join(' ')} ${end}`;
            }
        }
        else {
            const strings = nodes.map(blockItem);
            str = strings.shift() || '';
            for (const s of strings)
                str += s ? `\n${indent}${s}` : '\n';
        }
        if (this.comment) {
            str += '\n' + this.comment.replace(/^/gm, `${indent}#`);
            if (onComment)
                onComment();
        }
        else if (chompKeep && onChompKeep)
            onChompKeep();
        return str;
    }
}
// Character-length threshold above which a flow collection is broken onto
// multiple lines.
Collection.maxFlowStringSingleLineLength = 60;
|
||
|
|
||
|
// Emits `warning` via process.emitWarning when available (Node), otherwise
// console.warn. Suppressed unless logLevel is 'debug' or 'warn'.
function warn(logLevel, warning) {
    if (logLevel !== 'debug' && logLevel !== 'warn')
        return;
    if (typeof process !== 'undefined' && process.emitWarning)
        process.emitWarning(warning);
    else
        console.warn(warning);
}
|
||
|
|
||
|
function createPair(key, value, ctx) {
|
||
|
const k = createNode(key, undefined, ctx);
|
||
|
const v = createNode(value, undefined, ctx);
|
||
|
return new Pair(k, v);
|
||
|
}
|
||
|
// True when `key` is the merge-key sentinel itself, or a plain (untyped or
// PLAIN-typed) scalar whose value equals it.
const isMergeKey = (key) => {
    if (key === Pair.MERGE_KEY)
        return true;
    return (isScalar(key) &&
        key.value === Pair.MERGE_KEY &&
        (!key.type || key.type === Scalar.PLAIN));
};
|
||
|
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
function mergeToJSMap(ctx, map, value) {
    if (!isAlias(value) || !isMap(value.source))
        throw new Error('Merge sources must be map aliases');
    const srcMap = value.source.toJSON(null, ctx, Map);
    for (const [srcKey, srcValue] of srcMap) {
        // Earlier entries win: only keys not yet present are added.
        if (map instanceof Map) {
            if (!map.has(srcKey))
                map.set(srcKey, srcValue);
        }
        else if (map instanceof Set) {
            map.add(srcKey);
        }
        else if (!Object.prototype.hasOwnProperty.call(map, srcKey)) {
            Object.defineProperty(map, srcKey, {
                value: srcValue,
                writable: true,
                enumerable: true,
                configurable: true
            });
        }
    }
    return map;
}
|
||
|
class Pair extends NodeBase {
|
||
|
// A key/value pair within a YAML mapping. `value` defaults to null for keys
// written without a value.
constructor(key, value = null) {
    super(PAIR);
    this.key = key;
    this.value = value;
}
|
||
|
// @ts-ignore This is fine.
// Alias for this.key.commentBefore; undefined when the key is not a Node.
get commentBefore() {
    return isNode(this.key) ? this.key.commentBefore : undefined;
}
|
||
|
// Stores the comment on the key node; a null key is promoted to an empty
// Scalar so the comment has somewhere to live. Non-Node keys cannot carry
// comments and raise an error.
set commentBefore(cb) {
    if (this.key == null)
        this.key = new Scalar(null); // FIXME
    if (isNode(this.key))
        this.key.commentBefore = cb;
    else {
        const msg = 'Pair.commentBefore is an alias for Pair.key.commentBefore. To set it, the key must be a Node.';
        throw new Error(msg);
    }
}
|
||
|
// @ts-ignore This is fine.
|
||
|
get spaceBefore() {
|
||
|
return isNode(this.key) ? this.key.spaceBefore : undefined;
|
||
|
}
|
||
|
// Stores the flag on the key node; a null key is promoted to an empty Scalar
// first. Non-Node keys cannot carry the flag and raise an error.
set spaceBefore(sb) {
    if (this.key == null)
        this.key = new Scalar(null); // FIXME
    if (isNode(this.key))
        this.key.spaceBefore = sb;
    else {
        const msg = 'Pair.spaceBefore is an alias for Pair.key.spaceBefore. To set it, the key must be a Node.';
        throw new Error(msg);
    }
}
|
||
|
// Adds this pair's key/value to `map` (a JS Map, Set, or plain object),
// expanding '<<' merge keys when the schema enables merging. Returns `map`.
addToJSMap(ctx, map) {
    if (ctx && ctx.doc.schema.merge && isMergeKey(this.key)) {
        // The merge value may be a single alias, a YAML sequence of aliases,
        // or a plain array of aliases; merge each source map in order.
        if (isSeq(this.value))
            for (const it of this.value.items)
                mergeToJSMap(ctx, map, it);
        else if (Array.isArray(this.value))
            for (const it of this.value)
                mergeToJSMap(ctx, map, it);
        else
            mergeToJSMap(ctx, map, this.value);
    }
    else {
        const key = toJS(this.key, '', ctx);
        if (map instanceof Map) {
            const value = toJS(this.value, key, ctx);
            map.set(key, value);
        }
        else if (map instanceof Set) {
            map.add(key);
        }
        else {
            const stringKey = stringifyKey(this.key, key, ctx);
            const value = toJS(this.value, stringKey, ctx);
            // defineProperty for existing keys — presumably to avoid invoking
            // setters reachable through the prototype chain; TODO confirm.
            if (stringKey in map)
                Object.defineProperty(map, stringKey, {
                    value,
                    writable: true,
                    enumerable: true,
                    configurable: true
                });
            else
                map[stringKey] = value;
        }
    }
    return map;
}
|
||
|
// Serializes the pair as a single-entry Map or plain object, depending on
// the `mapAsMap` option in ctx.
toJSON(_, ctx) {
    const pair = ctx && ctx.mapAsMap ? new Map() : {};
    return this.addToJSMap(ctx, pair);
}
|
||
|
/**
 * Stringify this pair as YAML ("key: value", or "? key\n: value" for
 * explicit keys), honouring comments and indentation from ctx.
 * Falls back to JSON when no stringify context is available.
 */
toString(ctx, onComment, onChompKeep) {
    if (!ctx || !ctx.doc)
        return JSON.stringify(this);
    const { allNullValues, doc, indent, indentStep, options: { indentSeq, simpleKeys } } = ctx;
    let { key, value } = this;
    let keyComment = (isNode(key) && key.comment) || null;
    // simpleKeys (YAML "simple key" mode) forbids comments and collections
    // in key position.
    if (simpleKeys) {
        if (keyComment) {
            throw new Error('With simple keys, key nodes cannot have comments');
        }
        if (isCollection(key)) {
            const msg = 'With simple keys, collection cannot be used as a key value';
            throw new Error(msg);
        }
    }
    // An explicit key ("? key") is needed for empty/collection/block-scalar
    // keys, and for commented keys with no value.
    let explicitKey = !simpleKeys &&
        (!key ||
            (keyComment && value == null) ||
            isCollection(key) ||
            (isScalar(key)
                ? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL
                : typeof key === 'object'));
    ctx = Object.assign({}, ctx, {
        allNullValues: false,
        implicitKey: !explicitKey && (simpleKeys || !allNullValues),
        indent: indent + indentStep
    });
    let chompKeep = false;
    let str = stringify$1(key, ctx, () => (keyComment = null), () => (chompKeep = true));
    // YAML limits implicit keys to 1024 characters.
    if (!explicitKey && !ctx.inFlow && str.length > 1024) {
        if (simpleKeys)
            throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
        explicitKey = true;
    }
    // Key-only output: null values in maps that are all-null, or in
    // flow/explicit contexts.
    if ((allNullValues && (!simpleKeys || ctx.inFlow)) ||
        (value == null && (explicitKey || ctx.inFlow))) {
        str = addComment(str, ctx.indent, keyComment);
        if (this.comment) {
            if (keyComment && !this.comment.includes('\n'))
                str += `\n${ctx.indent || ''}#${this.comment}`;
            else
                str = addComment(str, ctx.indent, this.comment);
            if (onComment)
                onComment();
        }
        else if (chompKeep && !keyComment && onChompKeep)
            onChompKeep();
        return ctx.inFlow && !explicitKey ? str : `? ${str}`;
    }
    str = explicitKey
        ? `? ${addComment(str, ctx.indent, keyComment)}\n${indent}:`
        : addComment(`${str}:`, ctx.indent, keyComment);
    if (this.comment) {
        if (keyComment && !explicitKey && !this.comment.includes('\n'))
            str += `\n${ctx.indent || ''}#${this.comment}`;
        else
            str = addComment(str, ctx.indent, this.comment);
        if (onComment)
            onComment();
    }
    // vcb: whitespace/comment block emitted before the value.
    let vcb = '';
    let valueComment = null;
    if (isNode(value)) {
        if (value.spaceBefore)
            vcb = '\n';
        if (value.commentBefore) {
            const cs = value.commentBefore.replace(/^/gm, `${ctx.indent}#`);
            vcb += `\n${cs}`;
        }
        valueComment = value.comment;
    }
    else if (value && typeof value === 'object') {
        // Wrap raw JS values so they can be stringified as nodes.
        value = doc.createNode(value);
    }
    ctx.implicitKey = false;
    if (!explicitKey && !keyComment && !this.comment && isScalar(value))
        ctx.indentAtStart = str.length + 1;
    chompKeep = false;
    if (!indentSeq &&
        indentStep.length >= 2 &&
        !ctx.inFlow &&
        !explicitKey &&
        isSeq(value) &&
        !value.flow &&
        !value.tag &&
        !doc.anchors.getName(value)) {
        // If indentSeq === false, consider '- ' as part of indentation where possible
        ctx.indent = ctx.indent.substr(2);
    }
    const valueStr = stringify$1(value, ctx, () => (valueComment = null), () => (chompKeep = true));
    // ws: separator between "key:" and the value (space, newline, or none).
    let ws = ' ';
    if (vcb || keyComment || this.comment) {
        ws = `${vcb}\n${ctx.indent}`;
    }
    else if (!explicitKey && isCollection(value)) {
        // Block collections (and multi-line flow ones) start on a new line.
        const flow = valueStr[0] === '[' || valueStr[0] === '{';
        if (!flow || valueStr.includes('\n'))
            ws = `\n${ctx.indent}`;
    }
    else if (valueStr[0] === '\n')
        ws = '';
    if (chompKeep && !valueComment && onChompKeep)
        onChompKeep();
    return addComment(str + ws + valueStr, ctx.indent, valueComment);
}
}
|
||
|
// The key string that marks a pair as a YAML merge key.
Pair.MERGE_KEY = '<<';
/**
 * Convert a map key to the string used for a plain JS object property.
 * Null keys become '', primitives go through String(); node keys with
 * object JS values are stringified as YAML (with a one-time warning).
 */
function stringifyKey(key, jsKey, ctx) {
    if (jsKey === null)
        return '';
    if (typeof jsKey !== 'object')
        return String(jsKey);
    if (!isNode(key) || !ctx || !ctx.doc)
        return JSON.stringify(jsKey);
    const keyCtx = createStringifyContext(ctx.doc, {});
    keyCtx.inFlow = true;
    keyCtx.inStringifyKey = true;
    const stringified = key.toString(keyCtx);
    if (!ctx.mapKeyWarned) {
        // Warn once per conversion, truncating long keys for readability.
        let display = JSON.stringify(stringified);
        if (display.length > 40)
            display = display.substring(0, 36) + '..."';
        warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${display}. Set mapAsMap: true to use object keys.`);
        ctx.mapKeyWarned = true;
    }
    return stringified;
}
/**
 * `yaml` defines document-specific options in three places: as an argument of
 * parse, create and stringify calls, in the values of `YAML.defaultOptions`,
 * and in the version-dependent `YAML.Document.defaults` object. Values set in
 * `YAML.defaultOptions` override version-dependent defaults, and argument
 * options override both.
 */
const defaultOptions = {
    anchorPrefix: 'a', // generated anchor names are a1, a2, ...
    intAsBigInt: false, // parse integers as JS numbers, not BigInt
    keepUndefined: false, // drop map entries whose value is undefined
    logLevel: 'warn',
    prettyErrors: true,
    strict: true,
    version: '1.2' // default YAML spec version
};
/** A node class representing a YAML block or flow sequence. */
class YAMLSeq extends Collection {
    constructor(schema) {
        super(SEQ, schema);
        this.items = [];
    }
    static get tagName() {
        return 'tag:yaml.org,2002:seq';
    }
    /** Appends `value` to the end of the sequence. */
    add(value) {
        this.items.push(value);
    }
    /**
     * Removes a value from the collection.
     *
     * `key` must contain a representation of an integer for this to succeed;
     * it may be wrapped in a `Scalar`.
     *
     * @returns `true` if the item was found and removed.
     */
    delete(key) {
        const index = asItemIndex(key);
        if (typeof index !== 'number')
            return false;
        return this.items.splice(index, 1).length > 0;
    }
    /**
     * Returns the item at `key`, or `undefined` if not found. Scalar items
     * are unwrapped to their value unless `keepScalar` is true; collections
     * are always returned intact.
     */
    get(key, keepScalar) {
        const index = asItemIndex(key);
        if (typeof index !== 'number')
            return undefined;
        const item = this.items[index];
        if (!keepScalar && isScalar(item))
            return item.value;
        return item;
    }
    /**
     * Checks if the collection includes a value at index `key` (which may
     * be wrapped in a `Scalar`).
     */
    has(key) {
        const index = asItemIndex(key);
        return typeof index === 'number' && index < this.items.length;
    }
    /**
     * Sets a value in this collection; throws if `key` is not a valid
     * integer index. Existing scalar nodes are updated in place so their
     * comments and anchors survive.
     */
    set(key, value) {
        const index = asItemIndex(key);
        if (typeof index !== 'number')
            throw new Error(`Expected a valid index, not ${key}.`);
        const prev = this.items[index];
        if (isScalar(prev) && isScalarValue(value))
            prev.value = value;
        else
            this.items[index] = value;
    }
    toJSON(_, ctx) {
        const seq = [];
        if (ctx && ctx.onCreate)
            ctx.onCreate(seq);
        let index = 0;
        for (const item of this.items)
            seq.push(toJS(item, String(index++), ctx));
        return seq;
    }
    toString(ctx, onComment, onChompKeep) {
        if (!ctx)
            return JSON.stringify(this);
        return super._toString(ctx, {
            blockItem: n => (n.type === 'comment' ? n.str : `- ${n.str}`),
            flowChars: { start: '[', end: ']' },
            itemIndent: (ctx.indent || '') + ' '
        }, onComment, onChompKeep);
    }
}
/**
 * Coerce a sequence key (possibly a Scalar wrapper, possibly a numeric
 * string) to a non-negative integer index, or null when it is not one.
 */
function asItemIndex(key) {
    let index = isScalar(key) ? key.value : key;
    if (index && typeof index === 'string')
        index = Number(index);
    if (typeof index === 'number' && Number.isInteger(index) && index >= 0)
        return index;
    return null;
}
/** Manages the anchor names and aliased nodes of a document. */
class Anchors {
    constructor(prefix) {
        // anchor name -> node; null prototype avoids inherited keys
        this.map = Object.create(null);
        this.prefix = prefix;
    }
    /**
     * Create a new `Alias` node, adding the required anchor for `node`.
     * If `name` is empty, a new anchor name will be generated.
     */
    createAlias(node, name) {
        this.setAnchor(node, name);
        return new Alias(node);
    }
    /**
     * Create a new merge `Pair` with the given source nodes.
     * Non-`Alias` sources will be automatically wrapped.
     */
    createMergePair(...sources) {
        const key = new Scalar(Pair.MERGE_KEY);
        const items = sources.map(s => {
            if (isAlias(s)) {
                if (isMap(s.source))
                    return s;
            }
            else if (isMap(s)) {
                // Wrapping also assigns the map a generated anchor name.
                return this.createAlias(s);
            }
            throw new Error('Merge sources must be Map nodes or their Aliases');
        });
        // A single source stays bare; multiple sources become a sequence.
        if (items.length === 1)
            return new Pair(key, items[0]);
        const seq = new YAMLSeq();
        seq.items = items;
        return new Pair(key, seq);
    }
    /** The anchor name associated with `node`, if set. */
    getName(node) {
        return Object.keys(this.map).find(a => this.map[a] === node);
    }
    /** List of all defined anchor names. */
    getNames() {
        return Object.keys(this.map);
    }
    /** The node associated with the anchor `name`, if set. */
    getNode(name) {
        return this.map[name];
    }
    /**
     * Find an available anchor name with the given `prefix` and a
     * numerical suffix.
     */
    newName(prefix) {
        if (!prefix)
            prefix = this.prefix;
        const names = Object.keys(this.map);
        // Count up from 1 until an unused `${prefix}${i}` is found.
        for (let i = 1; true; ++i) {
            const name = `${prefix}${i}`;
            if (!names.includes(name))
                return name;
        }
    }
    /**
     * Associate an anchor with `node`. If `name` is empty, a new name will be generated.
     * To remove an anchor, use `setAnchor(null, name)`.
     */
    setAnchor(node, name) {
        const { map } = this;
        if (!node) {
            // Removal mode: delete the named anchor, if any.
            if (!name)
                return null;
            delete map[name];
            return name;
        }
        if (!isScalar(node) && !isCollection(node))
            throw new Error('Anchors may only be set for Scalar, Seq and Map nodes');
        if (name) {
            // NOTE(review): the range ends at \x19, so control chars
            // \x1A-\x1F are accepted — presumably \x00-\x1f was intended;
            // confirm against upstream before changing.
            if (/[\x00-\x19\s,[\]{}]/.test(name))
                throw new Error('Anchor names must not contain whitespace or control characters');
            // If the name is already taken by a different node, move that
            // node to a freshly generated name instead of dropping it.
            const prevNode = map[name];
            if (prevNode && prevNode !== node)
                map[this.newName(name)] = prevNode;
        }
        // A node holds at most one anchor; drop or reuse any previous one.
        const prevName = Object.keys(map).find(a => map[a] === node);
        if (prevName) {
            if (!name || prevName === name)
                return prevName;
            delete map[prevName];
        }
        else if (!name)
            name = this.newName();
        map[name] = node;
        return name;
    }
}
/**
 * Stringify a numeric scalar as YAML. BigInts stringify exactly;
 * NaN/Infinity use YAML's .nan/.inf forms; floats may be zero-padded
 * to minFractionDigits so trailing zeros round-trip.
 */
function stringifyNumber({ format, minFractionDigits, tag, value }) {
    if (typeof value === 'bigint')
        return String(value);
    const num = typeof value === 'number' ? value : Number(value);
    if (!isFinite(num)) {
        if (isNaN(num))
            return '.nan';
        return num < 0 ? '-.inf' : '.inf';
    }
    let text = JSON.stringify(value);
    const mayPad = !format &&
        minFractionDigits &&
        (!tag || tag === 'tag:yaml.org,2002:float') &&
        /^\d/.test(text);
    if (mayPad) {
        // Pad the fraction so at least minFractionDigits digits remain.
        let dot = text.indexOf('.');
        if (dot < 0) {
            dot = text.length;
            text += '.';
        }
        const missing = minFractionDigits - (text.length - dot - 1);
        if (missing > 0)
            text += '0'.repeat(missing);
    }
    return text;
}
/**
 * Find the Pair in `items` whose key matches `key`, comparing both by
 * node identity and by unwrapped scalar value. Returns undefined when
 * no pair matches.
 */
function findPair(items, key) {
    const wanted = isScalar(key) ? key.value : key;
    for (const item of items) {
        if (!isPair(item))
            continue;
        if (item.key === key || item.key === wanted)
            return item;
        if (isScalar(item.key) && item.key.value === wanted)
            return item;
    }
    return undefined;
}
/** A node class representing a YAML block or flow mapping. */
class YAMLMap extends Collection {
    constructor(schema) {
        super(MAP, schema);
        this.items = [];
    }
    static get tagName() {
        return 'tag:yaml.org,2002:map';
    }
    /**
     * Adds a value to the collection.
     *
     * @param overwrite - If not set `true`, using a key that is already in the
     *   collection will throw. Otherwise, overwrites the previous value.
     */
    add(pair, overwrite) {
        let _pair;
        if (isPair(pair))
            _pair = pair;
        else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
            // In TypeScript, this never happens.
            // Bug fix: `pair` may be null/undefined in this branch; reading
            // `pair.value` unguarded threw a TypeError instead of adding a
            // null-keyed pair.
            _pair = new Pair(pair, pair == null ? undefined : pair.value);
        }
        else
            _pair = new Pair(pair.key, pair.value);
        const prev = findPair(this.items, _pair.key);
        const sortEntries = this.schema && this.schema.sortMapEntries;
        if (prev) {
            if (!overwrite)
                throw new Error(`Key ${_pair.key} already set`);
            // For scalars, keep the old node & its comments and anchors
            if (isScalar(prev.value) && isScalarValue(_pair.value))
                prev.value.value = _pair.value;
            else
                prev.value = _pair.value;
        }
        else if (sortEntries) {
            // Insert at the first position that keeps the map sorted.
            const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
            if (i === -1)
                this.items.push(_pair);
            else
                this.items.splice(i, 0, _pair);
        }
        else {
            this.items.push(_pair);
        }
    }
    /** Removes the pair matching `key`; returns true if one was found. */
    delete(key) {
        const it = findPair(this.items, key);
        if (!it)
            return false;
        const del = this.items.splice(this.items.indexOf(it), 1);
        return del.length > 0;
    }
    /**
     * Returns the value at `key`, or `undefined` if not found. Scalar
     * values are unwrapped unless `keepScalar` is true.
     */
    get(key, keepScalar) {
        const it = findPair(this.items, key);
        const node = it && it.value;
        return !keepScalar && isScalar(node) ? node.value : node;
    }
    /** Checks if the collection includes a pair with the key `key`. */
    has(key) {
        return !!findPair(this.items, key);
    }
    /** Sets `value` for `key`, overwriting any previous value. */
    set(key, value) {
        this.add(new Pair(key, value), true);
    }
    /**
     * @param ctx - Conversion context, originally set in Document#toJS()
     * @param {Class} Type - If set, forces the returned collection type
     * @returns Instance of Type, Map, or Object
     */
    toJSON(_, ctx, Type) {
        const map = Type ? new Type() : ctx && ctx.mapAsMap ? new Map() : {};
        if (ctx && ctx.onCreate)
            ctx.onCreate(map);
        for (const item of this.items)
            item.addToJSMap(ctx, map);
        return map;
    }
    toString(ctx, onComment, onChompKeep) {
        if (!ctx)
            return JSON.stringify(this);
        // Every item must be a Pair for map stringification.
        for (const item of this.items) {
            if (!isPair(item))
                throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
        }
        if (!ctx.allNullValues && this.hasAllNullValues(false))
            ctx = Object.assign({}, ctx, { allNullValues: true });
        return super._toString(ctx, {
            blockItem: n => n.str,
            flowChars: { start: '{', end: '}' },
            itemIndent: ctx.indent || ''
        }, onComment, onChompKeep);
    }
}
/**
 * Build a YAMLMap node from a JS Map or plain object, applying a
 * JSON.stringify-style replacer and the schema's entry sort order.
 */
function createMap(schema, obj, ctx) {
    const { keepUndefined, replacer } = ctx;
    const map = new YAMLMap(schema);
    // Run the replacer, then append the pair unless its value is
    // undefined (and keepUndefined is off).
    const addEntry = (key, value) => {
        if (typeof replacer === 'function')
            value = replacer.call(obj, key, value);
        else if (Array.isArray(replacer) && !replacer.includes(key))
            return;
        if (value !== undefined || keepUndefined)
            map.items.push(createPair(key, value, ctx));
    };
    if (obj instanceof Map) {
        for (const [key, value] of obj)
            addEntry(key, value);
    }
    else if (obj && typeof obj === 'object') {
        for (const key of Object.keys(obj))
            addEntry(key, obj[key]);
    }
    if (typeof schema.sortMapEntries === 'function')
        map.items.sort(schema.sortMapEntries);
    return map;
}
// Tag definition for the default YAML mapping collection (!!map).
const map = {
    collection: 'map',
    createNode: createMap,
    default: true,
    nodeClass: YAMLMap,
    tag: 'tag:yaml.org,2002:map',
    resolve(map, onError) {
        // Report (but tolerate) a non-map node carrying this tag.
        if (!isMap(map))
            onError('Expected a mapping for this tag');
        return map;
    }
};
/**
 * Build a YAMLSeq node from any iterable, applying a JSON.stringify-style
 * replacer. Non-iterable input yields an empty sequence.
 */
function createSeq(schema, obj, ctx) {
    const { replacer } = ctx;
    const seq = new YAMLSeq(schema);
    if (obj && Symbol.iterator in Object(obj)) {
        let index = 0;
        for (let value of obj) {
            if (typeof replacer === 'function') {
                // Sets use the member itself as the replacer key;
                // other iterables use the running index.
                const key = obj instanceof Set ? value : String(index++);
                value = replacer.call(obj, key, value);
            }
            seq.items.push(createNode(value, undefined, ctx));
        }
    }
    return seq;
}
// Tag definition for the default YAML sequence collection (!!seq).
const seq = {
    collection: 'seq',
    createNode: createSeq,
    default: true,
    nodeClass: YAMLSeq,
    tag: 'tag:yaml.org,2002:seq',
    resolve(seq, onError) {
        // Report (but tolerate) a non-seq node carrying this tag.
        if (!isSeq(seq))
            onError('Expected a sequence for this tag');
        return seq;
    }
};
|
||
|
// Tag definition for plain strings (!!str).
const string = {
    identify: value => typeof value === 'string',
    default: true,
    tag: 'tag:yaml.org,2002:str',
    resolve: str => str,
    stringify(item, ctx, onComment, onChompKeep) {
        // actualString tells the stringifier that string quoting rules apply.
        ctx = Object.assign({ actualString: true }, ctx);
        return stringifyString(item, ctx, onComment, onChompKeep);
    }
};

// The YAML 1.2 failsafe schema: only maps, seqs and strings.
const failsafe = [map, seq, string];
// True for values representable as YAML integers (number or bigint).
const intIdentify$2 = (value) => typeof value === 'bigint' || Number.isInteger(value);

// Parse an integer token; `offset` skips a radix prefix such as "0x"/"0o".
// BigInt() understands those prefixes natively, so it gets the raw string.
const intResolve$1 = (str, offset, radix, { intAsBigInt }) =>
    intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix);

// Stringify a non-negative integer scalar with the given radix prefix;
// anything else falls back to the generic number stringifier.
function intStringify$1(node, radix, prefix) {
    const { value } = node;
    return intIdentify$2(value) && value >= 0
        ? prefix + value.toString(radix)
        : stringifyNumber(node);
}
// Scalar tag definitions for the YAML 1.2 core schema.

// !!null — matches ~, null/Null/NULL and the empty string.
const nullObj$1 = {
    identify: value => value == null,
    createNode: () => new Scalar(null),
    default: true,
    tag: 'tag:yaml.org,2002:null',
    test: /^(?:~|[Nn]ull|NULL)?$/,
    resolve: () => new Scalar(null),
    // Keep the source spelling when it is still a valid null; otherwise
    // use the configured null string.
    stringify: ({ source }, ctx) => source && nullObj$1.test.test(source) ? source : ctx.options.nullStr
};
// !!bool — true/True/TRUE and false/False/FALSE.
const boolObj = {
    identify: value => typeof value === 'boolean',
    default: true,
    tag: 'tag:yaml.org,2002:bool',
    test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
    resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'),
    stringify({ source, value }, ctx) {
        // Reuse the original spelling only if it still matches the value.
        if (source && boolObj.test.test(source)) {
            const sv = source[0] === 't' || source[0] === 'T';
            if (value === sv)
                return source;
        }
        return value ? ctx.options.trueStr : ctx.options.falseStr;
    }
};
// !!int, octal notation (e.g. 0o17).
const octObj = {
    identify: value => intIdentify$2(value) && value >= 0,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    format: 'OCT',
    test: /^0o[0-7]+$/,
    resolve: (str, _onError, opt) => intResolve$1(str, 2, 8, opt),
    stringify: node => intStringify$1(node, 8, '0o')
};
// !!int, decimal notation.
const intObj = {
    identify: intIdentify$2,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    test: /^[-+]?[0-9]+$/,
    resolve: (str, _onError, opt) => intResolve$1(str, 0, 10, opt),
    stringify: stringifyNumber
};
// !!int, hexadecimal notation (e.g. 0x2A).
const hexObj = {
    identify: value => intIdentify$2(value) && value >= 0,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    format: 'HEX',
    test: /^0x[0-9a-fA-F]+$/,
    resolve: (str, _onError, opt) => intResolve$1(str, 2, 16, opt),
    stringify: node => intStringify$1(node, 16, '0x')
};
// !!float, special values: .nan, .inf and -.inf.
const nanObj = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/,
    resolve: str => str.slice(-3).toLowerCase() === 'nan'
        ? NaN
        : str[0] === '-'
            ? Number.NEGATIVE_INFINITY
            : Number.POSITIVE_INFINITY,
    stringify: stringifyNumber
};
// !!float, scientific (exponential) notation.
const expObj = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    format: 'EXP',
    test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
    resolve: str => parseFloat(str),
    stringify: ({ value }) => Number(value).toExponential()
};
// !!float, plain decimal notation.
const floatObj = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
    resolve(str) {
        const node = new Scalar(parseFloat(str));
        const dot = str.indexOf('.');
        // Remember trailing zeros ("1.20") so they round-trip on output.
        if (dot !== -1 && str[str.length - 1] === '0')
            node.minFractionDigits = str.length - dot - 1;
        return node;
    },
    stringify: stringifyNumber
};
// The core schema: failsafe plus the null/bool/int/float tags above.
const core = failsafe.concat([
    nullObj$1,
    boolObj,
    octObj,
    intObj,
    hexObj,
    nanObj,
    expObj,
    floatObj
]);
/* global BigInt */
// True for values representable as YAML integers (number or bigint).
function intIdentify$1(value) {
    return typeof value === 'bigint' || Number.isInteger(value);
}
// JSON-schema scalars always stringify via JSON.stringify.
const stringifyJSON = ({ value }) => JSON.stringify(value);
// Scalar tags for the JSON schema; each test matches JSON's own grammar.
const jsonScalars = [
    {
        identify: value => typeof value === 'string',
        default: true,
        tag: 'tag:yaml.org,2002:str',
        resolve: str => str,
        stringify: stringifyJSON
    },
    {
        identify: value => value == null,
        createNode: () => new Scalar(null),
        default: true,
        tag: 'tag:yaml.org,2002:null',
        test: /^null$/,
        resolve: () => null,
        stringify: stringifyJSON
    },
    {
        identify: value => typeof value === 'boolean',
        default: true,
        tag: 'tag:yaml.org,2002:bool',
        // Bug fix: /^true|false$/ parses as (^true)|(false$) due to
        // alternation precedence, so any scalar *ending* in "false" (or
        // starting with "true") was resolved as a boolean. Anchor both
        // alternatives.
        test: /^true$|^false$/,
        resolve: str => str === 'true',
        stringify: stringifyJSON
    },
    {
        identify: intIdentify$1,
        default: true,
        tag: 'tag:yaml.org,2002:int',
        test: /^-?(?:0|[1-9][0-9]*)$/,
        resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
        // BigInt has no JSON representation, so integers go via toString().
        stringify: ({ value }) => intIdentify$1(value) ? value.toString() : JSON.stringify(value)
    },
    {
        identify: value => typeof value === 'number',
        default: true,
        tag: 'tag:yaml.org,2002:float',
        test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
        resolve: str => parseFloat(str),
        stringify: stringifyJSON
    }
];
// Catch-all: any other plain scalar is an error in the JSON schema.
const jsonError = {
    default: true,
    tag: '',
    test: /^/,
    resolve(str, onError) {
        onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
        return str;
    }
};
const json = [map, seq].concat(jsonScalars, jsonError);
// !!binary — base64-encoded byte sequences (Uint8Array / Buffer values).
const binary = {
    identify: value => value instanceof Uint8Array,
    default: false,
    tag: 'tag:yaml.org,2002:binary',
    /**
     * Returns a Buffer in node and an Uint8Array in browsers
     *
     * To use the resulting buffer as an image, you'll want to do something like:
     *
     *   const blob = new Blob([buffer], { type: 'image/jpeg' })
     *   document.querySelector('#photo').src = URL.createObjectURL(blob)
     */
    resolve(src, onError) {
        if (typeof Buffer === 'function') {
            return Buffer.from(src, 'base64');
        }
        else if (typeof atob === 'function') {
            // On IE 11, atob() can't handle newlines
            const str = atob(src.replace(/[\n\r]/g, ''));
            const buffer = new Uint8Array(str.length);
            for (let i = 0; i < str.length; ++i)
                buffer[i] = str.charCodeAt(i);
            return buffer;
        }
        else {
            onError('This environment does not support reading binary tags; either Buffer or atob is required');
            return src;
        }
    },
    stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
        const buf = value; // checked earlier by binary.identify()
        let str;
        // Base64-encode via Buffer (node) or btoa (browsers).
        if (typeof Buffer === 'function') {
            str =
                buf instanceof Buffer
                    ? buf.toString('base64')
                    : Buffer.from(buf.buffer).toString('base64');
        }
        else if (typeof btoa === 'function') {
            let s = '';
            for (let i = 0; i < buf.length; ++i)
                s += String.fromCharCode(buf[i]);
            str = btoa(s);
        }
        else {
            throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
        }
        // Default to a literal block scalar for multi-line base64 output.
        if (!type)
            type = Scalar.BLOCK_LITERAL;
        if (type !== Scalar.QUOTE_DOUBLE) {
            // Wrap the base64 text to the configured line width.
            const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
            const n = Math.ceil(str.length / lineWidth);
            const lines = new Array(n);
            for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
                lines[i] = str.substr(o, lineWidth);
            }
            str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' ');
        }
        return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
    }
};
/**
 * Normalize a !!pairs sequence so every item is a Pair: single-entry
 * maps are unwrapped (keeping their comments), bare values are wrapped.
 */
function resolvePairs(seq, onError) {
    if (!isSeq(seq)) {
        onError('Expected a sequence for this tag');
        return seq;
    }
    for (let i = 0; i < seq.items.length; ++i) {
        let item = seq.items[i];
        if (isPair(item))
            continue;
        if (isMap(item)) {
            if (item.items.length > 1)
                onError('Each pair must have its own sequence indicator');
            const pair = item.items[0] || new Pair(null);
            // Carry the wrapping map's comments over onto the pair.
            if (item.commentBefore)
                pair.commentBefore = pair.commentBefore
                    ? `${item.commentBefore}\n${pair.commentBefore}`
                    : item.commentBefore;
            if (item.comment)
                pair.comment = pair.comment
                    ? `${item.comment}\n${pair.comment}`
                    : item.comment;
            item = pair;
        }
        seq.items[i] = isPair(item) ? item : new Pair(item);
    }
    return seq;
}
/**
 * Build a !!pairs sequence node from an iterable of [key, value] tuples
 * or single-entry objects; bare values become null-valued pairs.
 */
function createPairs(schema, iterable, ctx) {
    const { replacer } = ctx;
    const pairs = new YAMLSeq(schema);
    pairs.tag = 'tag:yaml.org,2002:pairs';
    let i = 0;
    if (iterable && Symbol.iterator in Object(iterable)) {
        for (let it of iterable) {
            if (typeof replacer === 'function')
                it = replacer.call(iterable, String(i++), it);
            let key;
            let value;
            if (Array.isArray(it)) {
                if (it.length !== 2)
                    throw new TypeError(`Expected [key, value] tuple: ${it}`);
                key = it[0];
                value = it[1];
            }
            else if (it && it instanceof Object) {
                const keys = Object.keys(it);
                if (keys.length !== 1)
                    throw new TypeError(`Expected { key: value } tuple: ${it}`);
                key = keys[0];
                value = it[key];
            }
            else {
                // Primitive items become keys with no value.
                key = it;
            }
            pairs.items.push(createPair(key, value, ctx));
        }
    }
    return pairs;
}
// !!pairs — a sequence of key/value pairs that allows duplicate keys.
const pairs = {
    collection: 'seq',
    default: false,
    tag: 'tag:yaml.org,2002:pairs',
    resolve: resolvePairs,
    createNode: createPairs
};
/** An ordered map (!!omap): a seq of single-entry pairs with unique keys. */
class YAMLOMap extends YAMLSeq {
    constructor() {
        super();
        // Stored as a seq of pairs, but exposes YAMLMap's entry accessors.
        this.add = YAMLMap.prototype.add.bind(this);
        this.delete = YAMLMap.prototype.delete.bind(this);
        this.get = YAMLMap.prototype.get.bind(this);
        this.has = YAMLMap.prototype.has.bind(this);
        this.set = YAMLMap.prototype.set.bind(this);
        this.tag = YAMLOMap.tag;
    }
    /**
     * If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
     * but TypeScript won't allow widening the signature of a child method.
     */
    toJSON(_, ctx) {
        if (!ctx)
            return super.toJSON(_);
        const map = new Map();
        if (ctx && ctx.onCreate)
            ctx.onCreate(map);
        for (const pair of this.items) {
            let key, value;
            if (isPair(pair)) {
                key = toJS(pair.key, '', ctx);
                value = toJS(pair.value, key, ctx);
            }
            else {
                // Non-pair items become keys with an undefined value.
                key = toJS(pair, '', ctx);
            }
            if (map.has(key))
                throw new Error('Ordered maps must not include duplicate keys');
            map.set(key, value);
        }
        return map;
    }
}
YAMLOMap.tag = 'tag:yaml.org,2002:omap';
// !!omap — an ordered mapping, represented as a seq of single pairs.
const omap = {
    collection: 'seq',
    identify: value => value instanceof Map,
    nodeClass: YAMLOMap,
    default: false,
    tag: 'tag:yaml.org,2002:omap',
    resolve(seq, onError) {
        const pairs = resolvePairs(seq, onError);
        // Scalar keys must be unique within an ordered map.
        const seen = [];
        for (const { key } of pairs.items) {
            if (!isScalar(key))
                continue;
            if (seen.includes(key.value))
                onError(`Ordered maps must not include duplicate keys: ${key.value}`);
            else
                seen.push(key.value);
        }
        return Object.assign(new YAMLOMap(), pairs);
    },
    createNode(schema, iterable, ctx) {
        const pairs = createPairs(schema, iterable, ctx);
        const omap = new YAMLOMap();
        omap.items = pairs.items;
        return omap;
    }
};
/** A !!set node: a mapping whose values are all null; keys form the set. */
class YAMLSet extends YAMLMap {
    constructor(schema) {
        super(schema);
        this.tag = YAMLSet.tag;
    }
    /** Adds `key` to the set unless an equal key is already present. */
    add(key) {
        let pair;
        if (isPair(key))
            pair = key;
        // Bug fix: `typeof null === 'object'`, so the unguarded
        // `'key' in key` test threw a TypeError for add(null); the
        // `key != null` guard routes null keys to the default case below.
        else if (key != null &&
            typeof key === 'object' &&
            'key' in key &&
            'value' in key &&
            key.value === null)
            pair = new Pair(key.key, null);
        else
            pair = new Pair(key, null);
        const prev = findPair(this.items, pair.key);
        if (!prev)
            this.items.push(pair);
    }
    /**
     * Returns the key matching `key` (unwrapped from its Scalar), or the
     * whole Pair when `keepPair` is true; undefined when absent.
     */
    get(key, keepPair) {
        const pair = findPair(this.items, key);
        return !keepPair && isPair(pair)
            ? isScalar(pair.key)
                ? pair.key.value
                : pair.key
            : pair;
    }
    /**
     * Adds or removes `key`; `value` must be a boolean indicating whether
     * the key should be a member of the set.
     */
    set(key, value) {
        if (typeof value !== 'boolean')
            throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
        const prev = findPair(this.items, key);
        if (prev && !value) {
            this.items.splice(this.items.indexOf(prev), 1);
        }
        else if (!prev && value) {
            this.items.push(new Pair(key));
        }
    }
    toJSON(_, ctx) {
        // Force the JS representation to a Set.
        return super.toJSON(_, ctx, Set);
    }
    toString(ctx, onComment, onChompKeep) {
        if (!ctx)
            return JSON.stringify(this);
        if (this.hasAllNullValues(true))
            return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
        else
            throw new Error('Set items must all have null values');
    }
}
YAMLSet.tag = 'tag:yaml.org,2002:set';
// !!set — a mapping with all-null values whose keys form the set.
const set = {
    collection: 'map',
    identify: value => value instanceof Set,
    nodeClass: YAMLSet,
    default: false,
    tag: 'tag:yaml.org,2002:set',
    resolve(map, onError) {
        if (isMap(map)) {
            // Only all-null-valued maps qualify as sets.
            if (map.hasAllNullValues(true))
                return Object.assign(new YAMLSet(), map);
            else
                onError('Set items must all have null values');
        }
        else
            onError('Expected a mapping for this tag');
        return map;
    },
    createNode(schema, iterable, ctx) {
        const { replacer } = ctx;
        const set = new YAMLSet(schema);
        if (iterable && Symbol.iterator in Object(iterable))
            for (let value of iterable) {
                if (typeof replacer === 'function')
                    // For sets, the replacer key is the member itself.
                    value = replacer.call(iterable, value, value);
                set.items.push(createPair(value, null, ctx));
            }
        return set;
    }
};
/**
 * Parse a sign-prefixed base-60 string ("1:30:00", underscores allowed)
 * into a number, or a BigInt when `asBigInt` is true.
 * Internal types handle bigint as number, because TS can't figure it out.
 */
function parseSexagesimal(str, asBigInt) {
    const sign = str[0];
    const body = sign === '-' || sign === '+' ? str.substring(1) : str;
    const num = (n) => (asBigInt ? BigInt(n) : Number(n));
    let total = num(0);
    for (const part of body.replace(/_/g, '').split(':'))
        total = total * num(60) + num(part);
    return sign === '-' ? num(-1) * total : total;
}
/**
 * hhhh:mm:ss.sss
 *
 * Stringify a (non-negative-capable) number or bigint as sexagesimal.
 * NaN / infinite values are delegated to stringifyNumber.
 *
 * Internal types handle bigint as number, because TS can't figure it out.
 */
function stringifySexagesimal(node) {
    let { value } = node;
    let num = (n) => n;
    if (typeof value === 'bigint')
        num = n => BigInt(n);
    else if (isNaN(value) || !isFinite(value))
        return stringifyNumber(node);
    let sign = '';
    if (value < 0) {
        sign = '-';
        value *= num(-1);
    }
    const sixty = num(60);
    const seconds = value % sixty; // seconds, including ms
    const fields = [seconds];
    if (value < 60) {
        fields.unshift(0); // at least one : is required
    }
    else {
        value = (value - seconds) / sixty;
        const minutes = value % sixty;
        fields.unshift(minutes);
        if (value >= 60) {
            fields.unshift((value - minutes) / sixty); // hours
        }
    }
    const pad = n => (n < 10 ? '0' + String(n) : String(n));
    return (sign +
        fields
            .map(pad)
            .join(':')
            .replace(/000000\d*$/, '') // % 60 may introduce error
    );
}
|
||
|
/** Sexagesimal integers, e.g. 190:20:30 (YAML 1.1 `!!int` with TIME format). */
const intTime = {
    tag: 'tag:yaml.org,2002:int',
    format: 'TIME',
    default: true,
    identify: value => Number.isInteger(value) || typeof value === 'bigint',
    test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
    resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
    stringify: stringifySexagesimal
};
|
||
|
/** Sexagesimal floats, e.g. 190:20:30.15 (YAML 1.1 `!!float` with TIME format). */
const floatTime = {
    tag: 'tag:yaml.org,2002:float',
    format: 'TIME',
    default: true,
    identify: value => typeof value === 'number',
    test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
    resolve: str => parseSexagesimal(str, false),
    stringify: stringifySexagesimal
};
|
||
|
/** Tag definition for YAML 1.1 `!!timestamp` values, resolved to JS Date. */
const timestamp = {
    identify: value => value instanceof Date,
    default: true,
    tag: 'tag:yaml.org,2002:timestamp',
    // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
    // may be omitted altogether, resulting in a date format. In such a case, the time part is
    // assumed to be 00:00:00Z (start of day, UTC).
    test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
        '(?:' + // time is optional
        '(?:t|T|[ \\t]+)' + // t | T | whitespace
        '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
        '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
        ')?$'),
    resolve(str) {
        const match = str.match(timestamp.test);
        if (!match)
            throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
        const [, year, month, day, hour, minute, second] = match.map(Number);
        // Fractional seconds, padded/truncated to whole milliseconds.
        const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
        let epoch = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
        const zone = match[8];
        if (zone && zone !== 'Z') {
            let minutes = parseSexagesimal(zone, false);
            // A bare small offset such as "-5" means hours, not minutes.
            if (Math.abs(minutes) < 30)
                minutes *= 60;
            epoch -= 60000 * minutes;
        }
        return new Date(epoch);
    },
    stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
};
|
||
|
|
||
|
/** Tag definition for YAML 1.1 `!!null` (~, null, Null, NULL or empty). */
const nullObj = {
    tag: 'tag:yaml.org,2002:null',
    default: true,
    test: /^(?:~|[Nn]ull|NULL)?$/,
    identify: value => value == null,
    createNode: () => new Scalar(null),
    resolve: () => new Scalar(null),
    // Keep the original source text when it is a valid null form;
    // otherwise fall back to the configured null string.
    stringify: ({ source }, ctx) => source && nullObj.test.test(source) ? source : ctx.options.nullStr
};
|
||
|
/**
 * Stringify a boolean scalar, preferring its original source text when it
 * still parses as the same boolean value; otherwise use the configured
 * trueStr / falseStr option.
 */
function boolStringify({ value, source }, ctx) {
    const sourceMatches = source && (value ? trueObj : falseObj).test.test(source);
    if (sourceMatches)
        return source;
    return value ? ctx.options.trueStr : ctx.options.falseStr;
}
|
||
|
/** Tag definition for YAML 1.1 `!!bool` true values (y, yes, true, on, ...). */
const trueObj = {
    tag: 'tag:yaml.org,2002:bool',
    default: true,
    identify: value => value === true,
    test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
    resolve: () => new Scalar(true),
    stringify: boolStringify
};
|
||
|
/**
 * Tag definition for YAML 1.1 `!!bool` false values (n, no, false, off, ...).
 *
 * The test regexp is case-sensitive, listing exactly the spellings YAML 1.1
 * allows; the previous `i` flag also matched forms such as "nO" or "ofF",
 * which are not valid YAML 1.1 booleans, and was inconsistent with trueObj.
 */
const falseObj = {
    identify: value => value === false,
    default: true,
    tag: 'tag:yaml.org,2002:bool',
    test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/,
    resolve: () => new Scalar(false),
    stringify: boolStringify
};
|
||
|
const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
|
||
|
/**
 * Parse the integer value of `str`, skipping `offset` leading prefix
 * characters (after any sign) and using the given `radix`. Underscores
 * are ignored; with `intAsBigInt` the result is a BigInt.
 */
function intResolve(str, offset, radix, { intAsBigInt }) {
    const sign = str[0];
    if (sign === '-' || sign === '+')
        offset += 1;
    str = str.substring(offset).replace(/_/g, '');
    if (intAsBigInt) {
        // BigInt() takes no radix parameter, so restore the base prefix.
        const prefix = { 2: '0b', 8: '0o', 16: '0x' }[radix];
        if (prefix)
            str = prefix + str;
        const n = BigInt(str);
        return sign === '-' ? BigInt(-1) * n : n;
    }
    const n = parseInt(str, radix);
    return sign === '-' ? -1 * n : n;
}
|
||
|
/**
 * Stringify an integer in the given radix with its format prefix
 * (e.g. '0x'), keeping the sign in front of the prefix. Non-integer
 * values are delegated to stringifyNumber.
 */
function intStringify(node, radix, prefix) {
    const { value } = node;
    if (!intIdentify(value))
        return stringifyNumber(node);
    const digits = value.toString(radix);
    return value < 0 ? '-' + prefix + digits.substr(1) : prefix + digits;
}
|
||
|
// The YAML 1.1 schema: failsafe plus 1.1-style nulls, booleans, integers
// in several bases, floats, and the binary/omap/pairs/set/time tags.
const yaml11 = failsafe.concat([
    nullObj,
    trueObj,
    falseObj,
    {
        // Binary integers, e.g. 0b1010
        identify: intIdentify,
        default: true,
        tag: 'tag:yaml.org,2002:int',
        format: 'BIN',
        test: /^[-+]?0b[0-1_]+$/,
        resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
        stringify: node => intStringify(node, 2, '0b')
    },
    {
        // Octal integers with a bare leading zero (YAML 1.1 style), e.g. 0644
        identify: intIdentify,
        default: true,
        tag: 'tag:yaml.org,2002:int',
        format: 'OCT',
        test: /^[-+]?0[0-7_]+$/,
        resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
        stringify: node => intStringify(node, 8, '0')
    },
    {
        // Decimal integers
        identify: intIdentify,
        default: true,
        tag: 'tag:yaml.org,2002:int',
        test: /^[-+]?[0-9][0-9_]*$/,
        resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
        stringify: stringifyNumber
    },
    {
        // Hexadecimal integers, e.g. 0xff
        identify: intIdentify,
        default: true,
        tag: 'tag:yaml.org,2002:int',
        format: 'HEX',
        test: /^[-+]?0x[0-9a-fA-F_]+$/,
        resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
        stringify: node => intStringify(node, 16, '0x')
    },
    {
        // Not-a-number and signed infinities: .nan, .inf, -.inf, ...
        identify: value => typeof value === 'number',
        default: true,
        tag: 'tag:yaml.org,2002:float',
        test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
        resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
            ? NaN
            : str[0] === '-'
                ? Number.NEGATIVE_INFINITY
                : Number.POSITIVE_INFINITY,
        stringify: stringifyNumber
    },
    {
        // Floats in exponent notation
        identify: value => typeof value === 'number',
        default: true,
        tag: 'tag:yaml.org,2002:float',
        format: 'EXP',
        test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
        resolve: (str) => parseFloat(str.replace(/_/g, '')),
        stringify: ({ value }) => Number(value).toExponential()
    },
    {
        // Plain decimal floats
        identify: value => typeof value === 'number',
        default: true,
        tag: 'tag:yaml.org,2002:float',
        test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
        resolve(str) {
            const node = new Scalar(parseFloat(str.replace(/_/g, '')));
            const dot = str.indexOf('.');
            if (dot !== -1) {
                const f = str.substring(dot + 1).replace(/_/g, '');
                // Remember the written precision when it ends in a zero,
                // so that e.g. 1.20 round-trips unchanged.
                if (f[f.length - 1] === '0')
                    node.minFractionDigits = f.length;
            }
            return node;
        },
        stringify: stringifyNumber
    }
], binary, omap, pairs, set, intTime, floatTime, timestamp);
|
||
|
|
||
|
/** Available schema definitions, keyed by sanitised name ('yaml-1.1' -> 'yaml11'). */
const schemas = { core, failsafe, json, yaml11 };
/**
 * Individual tag definitions, so that they may be referenced by name via
 * the customTags option and by coreKnownTags below.
 */
const tags = {
    binary,
    bool: boolObj,
    float: floatObj,
    floatExp: expObj,
    floatNaN: nanObj,
    floatTime,
    int: intObj,
    intHex: hexObj,
    intOct: octObj,
    intTime,
    map,
    // NOTE(review): nullObj$1 is a rollup-renamed binding from elsewhere in
    // the bundle (this chunk's own nullObj is the yaml-1.1 variant) —
    // presumably the core-schema null tag; confirm against upstream yaml.
    null: nullObj$1,
    omap,
    pairs,
    seq,
    set,
    timestamp
};
|
||
|
|
||
|
/**
 * Resolve the tag list for a named schema, optionally extended or replaced
 * by custom tags; string entries are looked up in `knownTags`.
 *
 * @param {Object} schemas - Available schemas, keyed by sanitised name.
 * @param {Object} knownTags - Tag definitions that may be referenced by name.
 * @param {Array|Function|undefined} customTags - Extra tags to append, or a
 *   function mapping the schema's tags to a replacement list.
 * @param {string} schemaName - e.g. 'core' or 'yaml-1.1'.
 * @throws {Error} On an unknown schema or an unknown custom tag name.
 */
function getSchemaTags(schemas, knownTags, customTags, schemaName) {
    const quotedKeys = obj => Object.keys(obj).map(key => JSON.stringify(key)).join(', ');
    const schemaId = schemaName.replace(/\W/g, ''); // 'yaml-1.1' -> 'yaml11'
    let tags = schemas[schemaId];
    if (!tags)
        throw new Error(`Unknown schema "${schemaName}"; use one of ${quotedKeys(schemas)}`);
    if (Array.isArray(customTags)) {
        for (const tag of customTags)
            tags = tags.concat(tag);
    }
    else if (typeof customTags === 'function') {
        tags = customTags(tags.slice());
    }
    return tags.map(tag => {
        if (typeof tag !== 'string')
            return tag;
        const tagObj = knownTags[tag];
        if (!tagObj)
            throw new Error(`Unknown custom tag "${tag}"; use one of ${quotedKeys(knownTags)}`);
        return tagObj;
    });
}
|
||
|
|
||
|
const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
|
||
|
/**
 * Tags resolvable by their full URI when the resolveKnownTags option is
 * set (used as the Schema's knownTags map for the core schema).
 */
const coreKnownTags = {
    'tag:yaml.org,2002:binary': tags.binary,
    'tag:yaml.org,2002:omap': tags.omap,
    'tag:yaml.org,2002:pairs': tags.pairs,
    'tag:yaml.org,2002:set': tags.set,
    'tag:yaml.org,2002:timestamp': tags.timestamp
};
|
||
|
/**
 * Resolved schema configuration for a document: its tag list plus the
 * merge / known-tags / map-sorting options derived from the constructor
 * options.
 */
class Schema {
    constructor({ customTags, merge, resolveKnownTags, schema, sortMapEntries }) {
        // Used by createNode(), to avoid circular dependencies
        this.map = tags.map;
        this.seq = tags.seq;
        this.merge = Boolean(merge);
        this.name = schema || 'core';
        this.knownTags = resolveKnownTags ? coreKnownTags : {};
        this.tags = getSchemaTags(schemas, tags, customTags, this.name);
        // Used by createMap(); `true` selects the default by-key comparator.
        if (sortMapEntries === true)
            this.sortMapEntries = sortMapEntriesByKey;
        else
            this.sortMapEntries = sortMapEntries || null;
    }
}
|
||
|
|
||
|
/**
 * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
 * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
 * 2021 edition: https://tc39.es/ecma262/#sec-json.parse
 *
 * Includes extensions for handling Map and Set objects.
 */
function applyReviver(reviver, obj, key, val) {
    // Non-objects have no children; revive the value directly.
    if (!val || typeof val !== 'object')
        return reviver.call(obj, key, val);
    if (Array.isArray(val)) {
        // Cache the length: entries appended by the reviver are not visited.
        const len = val.length;
        for (let i = 0; i < len; i += 1) {
            const prev = val[i];
            const next = applyReviver(reviver, val, String(i), prev);
            if (next === undefined)
                delete val[i];
            else if (next !== prev)
                val[i] = next;
        }
    }
    else if (val instanceof Map) {
        for (const k of Array.from(val.keys())) {
            const prev = val.get(k);
            const next = applyReviver(reviver, val, k, prev);
            if (next === undefined)
                val.delete(k);
            else if (next !== prev)
                val.set(k, next);
        }
    }
    else if (val instanceof Set) {
        for (const prev of Array.from(val)) {
            // For a Set, the member doubles as its own key.
            const next = applyReviver(reviver, val, prev, prev);
            if (next === undefined)
                val.delete(prev);
            else if (next !== prev) {
                val.delete(prev);
                val.add(next);
            }
        }
    }
    else {
        for (const [k, prev] of Object.entries(val)) {
            const next = applyReviver(reviver, val, k, prev);
            if (next === undefined)
                delete val[k];
            else if (next !== prev)
                val[k] = next;
        }
    }
    return reviver.call(obj, key, val);
}
|
||
|
|
||
|
/**
 * A YAML document: directives, an optional contents node, and any errors
 * or warnings collected while parsing. Also the entry point for turning
 * JS values into YAML nodes (createNode / createPair) and back (toJS).
 */
class Document {
    /**
     * @param value - Initial contents; `undefined` leaves contents null.
     * @param replacer - JSON.stringify-style replacer function or array;
     *   when options is undefined and this is neither, it is treated as
     *   the options object instead (two-argument call form).
     * @param options - Options merged over defaultOptions.
     */
    constructor(value, replacer, options) {
        /** A comment before this Document */
        this.commentBefore = null;
        /** A comment immediately after this Document */
        this.comment = null;
        /** Errors encountered during parsing. */
        this.errors = [];
        /** Warnings encountered during parsing. */
        this.warnings = [];
        // Brand this object as a document node for type-guard helpers.
        Object.defineProperty(this, NODE_TYPE, { value: DOC });
        let _replacer = undefined;
        if (typeof replacer === 'function' || Array.isArray(replacer)) {
            _replacer = replacer;
        }
        else if (options === undefined && replacer) {
            // Two-argument call: new Document(value, options)
            options = replacer;
            replacer = undefined;
        }
        const opt = Object.assign({}, defaultOptions, options);
        this.options = opt;
        this.anchors = new Anchors(this.options.anchorPrefix);
        let { version } = opt;
        // Transpiled optional chain: options?.directives
        if (options === null || options === void 0 ? void 0 : options.directives) {
            this.directives = options.directives.atDocument();
            // An explicit %YAML directive overrides the version option.
            if (this.directives.yaml.explicit)
                version = this.directives.yaml.version;
        }
        else
            this.directives = new Directives({ version });
        this.setSchema(version, options);
        this.contents =
            value === undefined
                ? null
                : this.createNode(value, { replacer: _replacer });
    }
    /** Adds a value to the document. */
    add(value) {
        if (assertCollection(this.contents))
            this.contents.add(value);
    }
    /** Adds a value to the document. */
    addIn(path, value) {
        if (assertCollection(this.contents))
            this.contents.addIn(path, value);
    }
    /**
     * Convert any value into a `Node` using the current schema, recursively
     * turning objects into collections.
     */
    createNode(value, { flow, keepUndefined, onTagObj, replacer, tag } = {}) {
        if (typeof replacer === 'function')
            value = replacer.call({ '': value }, '', value);
        else if (Array.isArray(replacer)) {
            // Like JSON.stringify, accept number/String/Number keys in a
            // replacer array by also including their string forms.
            const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
            const asStr = replacer.filter(keyToStr).map(String);
            if (asStr.length > 0)
                replacer = replacer.concat(asStr);
        }
        if (typeof keepUndefined !== 'boolean')
            keepUndefined = !!this.options.keepUndefined;
        const aliasNodes = [];
        const ctx = {
            keepUndefined,
            onAlias(source) {
                // These get fixed later in createNode()
                const alias = new Alias(source);
                aliasNodes.push(alias);
                return alias;
            },
            onTagObj,
            prevObjects: new Map(),
            replacer,
            schema: this.schema
        };
        const node = createNode(value, tag, ctx);
        for (const alias of aliasNodes) {
            // With circular references, the source node is only resolved after all of
            // its child nodes are. This is why anchors are set only after all of the
            // nodes have been created.
            alias.source = alias.source.node;
            let name = this.anchors.getName(alias.source);
            if (!name) {
                name = this.anchors.newName();
                this.anchors.map[name] = alias.source;
            }
        }
        if (flow && isCollection(node))
            node.flow = true;
        return node;
    }
    /**
     * Convert a key and a value into a `Pair` using the current schema,
     * recursively wrapping all values as `Scalar` or `Collection` nodes.
     */
    createPair(key, value, options = {}) {
        const k = this.createNode(key, options);
        const v = this.createNode(value, options);
        return new Pair(k, v);
    }
    /**
     * Removes a value from the document.
     * @returns `true` if the item was found and removed.
     */
    delete(key) {
        return assertCollection(this.contents) ? this.contents.delete(key) : false;
    }
    /**
     * Removes a value from the document.
     * @returns `true` if the item was found and removed.
     */
    deleteIn(path) {
        if (isEmptyPath(path)) {
            // An empty path deletes the document contents themselves.
            if (this.contents == null)
                return false;
            this.contents = null;
            return true;
        }
        return assertCollection(this.contents)
            ? this.contents.deleteIn(path)
            : false;
    }
    /**
     * Returns item at `key`, or `undefined` if not found. By default unwraps
     * scalar values from their surrounding node; to disable set `keepScalar` to
     * `true` (collections are always returned intact).
     */
    get(key, keepScalar) {
        return isCollection(this.contents)
            ? this.contents.get(key, keepScalar)
            : undefined;
    }
    /**
     * Returns item at `path`, or `undefined` if not found. By default unwraps
     * scalar values from their surrounding node; to disable set `keepScalar` to
     * `true` (collections are always returned intact).
     */
    getIn(path, keepScalar) {
        if (isEmptyPath(path))
            return !keepScalar && isScalar(this.contents)
                ? this.contents.value
                : this.contents;
        return isCollection(this.contents)
            ? this.contents.getIn(path, keepScalar)
            : undefined;
    }
    /**
     * Checks if the document includes a value with the key `key`.
     */
    has(key) {
        return isCollection(this.contents) ? this.contents.has(key) : false;
    }
    /**
     * Checks if the document includes a value at `path`.
     */
    hasIn(path) {
        if (isEmptyPath(path))
            return this.contents !== undefined;
        return isCollection(this.contents) ? this.contents.hasIn(path) : false;
    }
    /**
     * Sets a value in this document. For `!!set`, `value` needs to be a
     * boolean to add/remove the item from the set.
     */
    set(key, value) {
        if (this.contents == null) {
            // No contents yet: create a collection holding just this entry.
            this.contents = collectionFromPath(this.schema, [key], value);
        }
        else if (assertCollection(this.contents)) {
            this.contents.set(key, value);
        }
    }
    /**
     * Sets a value in this document. For `!!set`, `value` needs to be a
     * boolean to add/remove the item from the set.
     */
    setIn(path, value) {
        if (isEmptyPath(path))
            this.contents = value;
        else if (this.contents == null) {
            this.contents = collectionFromPath(this.schema, Array.from(path), value);
        }
        else if (assertCollection(this.contents)) {
            this.contents.setIn(path, value);
        }
    }
    /**
     * Change the YAML version and schema used by the document.
     *
     * Overrides all previously set schema options
     * @throws {Error} If `version` is not '1.1' or '1.2'.
     */
    setSchema(version, options) {
        let _options;
        switch (String(version)) {
            case '1.1':
                this.directives.yaml.version = '1.1';
                _options = Object.assign({ merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }, options);
                break;
            case '1.2':
                this.directives.yaml.version = '1.2';
                _options = Object.assign({ merge: false, resolveKnownTags: true, schema: 'core' }, options);
                break;
            default: {
                const sv = JSON.stringify(version);
                throw new Error(`Expected '1.1' or '1.2' as version, but found: ${sv}`);
            }
        }
        this.schema = new Schema(_options);
    }
    // json & jsonArg are only used from toJSON()
    toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
        // Track each anchored node so alias expansion can be counted
        // (and capped by maxAliasCount).
        const anchorNodes = Object.values(this.anchors.map).map(node => [node, { alias: [], aliasCount: 0, count: 1 }]);
        const anchors = anchorNodes.length > 0 ? new Map(anchorNodes) : null;
        const ctx = {
            anchors,
            doc: this,
            keep: !json,
            mapAsMap: mapAsMap === true,
            mapKeyWarned: false,
            maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,
            stringify: stringify$1
        };
        const res = toJS(this.contents, jsonArg || '', ctx);
        if (typeof onAnchor === 'function' && anchors)
            // Note: the destructured `res` deliberately shadows the outer result.
            for (const { count, res } of anchors.values())
                onAnchor(res, count);
        return typeof reviver === 'function'
            ? applyReviver(reviver, { '': res }, '', res)
            : res;
    }
    /**
     * A JSON representation of the document `contents`.
     *
     * @param jsonArg Used by `JSON.stringify` to indicate the array index or
     * property name.
     */
    toJSON(jsonArg, onAnchor) {
        return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
    }
    /** A YAML representation of the document. */
    toString(options = {}) {
        if (this.errors.length > 0)
            throw new Error('Document with errors cannot be stringified');
        if ('indent' in options &&
            (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
            const s = JSON.stringify(options.indent);
            throw new Error(`"indent" option must be a positive integer, not ${s}`);
        }
        const lines = [];
        let hasDirectives = options.directives === true;
        if (options.directives !== false) {
            const dir = this.directives.toString(this);
            if (dir) {
                lines.push(dir);
                hasDirectives = true;
            }
            else if (this.directives.marker)
                hasDirectives = true;
        }
        if (hasDirectives)
            lines.push('---');
        if (this.commentBefore) {
            // Separate the comment from the directives with a blank line.
            if (lines.length !== 1)
                lines.unshift('');
            lines.unshift(this.commentBefore.replace(/^/gm, '#'));
        }
        const ctx = createStringifyContext(this, options);
        let chompKeep = false;
        let contentComment = null;
        if (this.contents) {
            if (isNode(this.contents)) {
                if (this.contents.spaceBefore && hasDirectives)
                    lines.push('');
                if (this.contents.commentBefore)
                    lines.push(this.contents.commentBefore.replace(/^/gm, '#'));
                // top-level block scalars need to be indented if followed by a comment
                ctx.forceBlockIndent = !!this.comment;
                contentComment = this.contents.comment;
            }
            const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
            let body = stringify$1(this.contents, ctx, () => (contentComment = null), onChompKeep);
            if (contentComment)
                body = addComment(body, '', contentComment);
            if ((body[0] === '|' || body[0] === '>') &&
                lines[lines.length - 1] === '---') {
                // Top-level block scalars with a preceding doc marker ought to use the
                // same line for their header.
                lines[lines.length - 1] = `--- ${body}`;
            }
            else
                lines.push(body);
        }
        else {
            lines.push(stringify$1(this.contents, ctx));
        }
        if (this.comment) {
            if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
                lines.push('');
            lines.push(this.comment.replace(/^/gm, '#'));
        }
        return lines.join('\n') + '\n';
    }
}
|
||
|
/**
 * Ensure that document contents are a YAML collection before mutating them.
 * @returns {boolean} Always `true` when it returns.
 * @throws {Error} When `contents` is not a collection node.
 */
function assertCollection(contents) {
    if (!isCollection(contents))
        throw new Error('Expected a YAML collection as document contents');
    return true;
}
|
||
|
|
||
|
/**
 * Base class for YAML parse errors and warnings, carrying the source
 * offset at which the problem was found.
 */
class YAMLError extends Error {
    constructor(name, offset, message) {
        // A message is required; the name alone is not informative.
        if (!message)
            throw new Error(`Invalid arguments for new ${name}`);
        super();
        this.offset = offset;
        this.message = message;
        this.name = name;
    }
}
|
||
|
/** An error raised while parsing YAML source, located by source offset. */
class YAMLParseError extends YAMLError {
    constructor(offset, message) {
        super('YAMLParseError', offset, message);
    }
}
|
||
|
/** A non-fatal warning raised while parsing YAML source. */
class YAMLWarning extends YAMLError {
    constructor(offset, message) {
        super('YAMLWarning', offset, message);
    }
}
|
||
|
/**
 * Returns a callback that decorates an error with its line/column position
 * in `src` (via the line counter `lc`) and appends a caret-marked excerpt
 * of the offending line. Errors with offset -1 are left unchanged.
 */
const prettifyError = (src, lc) => (error) => {
    if (error.offset === -1)
        return;
    error.linePos = lc.linePos(error.offset);
    const { line, col } = error.linePos;
    error.message += ` at line ${line}, column ${col}`;
    let caret = col - 1;
    let snippet = src
        .substring(lc.lineStarts[line - 1], lc.lineStarts[line])
        .replace(/[\n\r]+$/, '');
    // Trim to max 80 chars, keeping col position near the middle
    if (caret >= 60 && snippet.length > 80) {
        const cut = Math.min(caret - 39, snippet.length - 79);
        snippet = '…' + snippet.substring(cut);
        caret -= cut - 1;
    }
    if (snippet.length > 80)
        snippet = snippet.substring(0, 79) + '…';
    // Include previous line in context if pointing at line start
    if (line > 1 && /^ *$/.test(snippet.substring(0, caret))) {
        // Regexp won't match if start is trimmed
        let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
        if (prev.length > 80)
            prev = prev.substring(0, 79) + '…\n';
        snippet = prev + snippet;
    }
    if (/[^ ]/.test(snippet)) {
        const marker = ' '.repeat(caret) + '^';
        error.message += `:\n\n${snippet}\n${marker}\n`;
    }
};
|
||
|
|
||
|
/**
 * Scan the leading tokens of a node for its properties: anchor, tag,
 * comments, blank lines, and the `indicator` token (e.g. 'map-value-ind')
 * if present. Returns the accumulated info plus the total source length
 * consumed, so that the caller can advance its offset.
 */
function resolveProps(doc, tokens, startOnNewline, indicator, offset, onError) {
    let length = 0;
    let spaceBefore = false;
    let atNewline = startOnNewline;
    let hasSpace = startOnNewline;
    let comment = '';
    let hasComment = false;
    let hasNewline = false;
    let sep = '';
    let anchor = '';
    let tagName = '';
    let found = null;
    let start = null;
    for (const token of tokens) {
        switch (token.type) {
            case 'space':
                // At the doc level, tabs at line start may be parsed as leading
                // white space rather than indentation.
                if (atNewline && indicator !== 'doc-start' && token.source[0] === '\t')
                    onError(offset + length, 'Tabs are not allowed as indentation');
                hasSpace = true;
                break;
            case 'comment': {
                if (doc.options.strict && !hasSpace)
                    onError(offset + length, 'Comments must be separated from other tokens by white space characters');
                const cb = token.source.substring(1);
                if (!hasComment)
                    comment = cb;
                else
                    comment += sep + cb;
                hasComment = true;
                sep = '';
                break;
            }
            case 'newline':
                // A newline while already at a line start (and before any
                // comment) marks an empty line before the node.
                if (atNewline && !hasComment)
                    spaceBefore = true;
                atNewline = true;
                hasNewline = true;
                hasSpace = true;
                sep += token.source;
                break;
            case 'anchor':
                if (anchor)
                    onError(offset + length, 'A node can have at most one anchor');
                anchor = token.source.substring(1);
                if (start === null)
                    start = offset + length;
                atNewline = false;
                hasSpace = false;
                break;
            case 'tag': {
                if (tagName)
                    onError(offset + length, 'A node can have at most one tag');
                const tn = doc.directives.tagName(token.source, msg => onError(offset, msg));
                if (tn)
                    tagName = tn;
                if (start === null)
                    start = offset + length;
                atNewline = false;
                hasSpace = false;
                break;
            }
            case indicator:
                // Could here handle preceding comments differently
                found = { indent: token.indent, offset: offset + length };
                atNewline = false;
                hasSpace = false;
                break;
            default:
                onError(offset + length, `Unexpected ${token.type} token`);
                atNewline = false;
                hasSpace = false;
        }
        /* istanbul ignore else should not happen */
        if (token.source)
            length += token.source.length;
    }
    return {
        found,
        spaceBefore,
        comment,
        hasNewline,
        anchor,
        tagName,
        length,
        start: start !== null && start !== void 0 ? start : offset + length
    };
}
|
||
|
|
||
|
/**
 * Reports whether a token's source spans more than one line; returns null
 * for a missing token. Flow collections are checked recursively; other
 * (block) token types are assumed to contain a newline.
 */
function containsNewline(key) {
    if (!key)
        return null;
    switch (key.type) {
        case 'alias':
        case 'scalar':
        case 'double-quoted-scalar':
        case 'single-quoted-scalar':
            return key.source.includes('\n');
        case 'flow-collection':
            return key.items.some(token => {
                switch (token.type) {
                    case 'newline':
                        return true;
                    case 'alias':
                    case 'scalar':
                    case 'double-quoted-scalar':
                    case 'single-quoted-scalar':
                    case 'flow-collection':
                        return Boolean(containsNewline(token));
                    default:
                        return false;
                }
            });
        default:
            return true;
    }
}
|
||
|
|
||
|
const startColMsg = 'All mapping items must start at the same column';
/**
 * Compose a block mapping from parsed CST items, reporting structural
 * problems (bad indentation, nested compact mappings, over-long implicit
 * keys, ...) via onError. Returns a YAMLMap with its range set.
 */
function resolveBlockMap({ composeNode, composeEmptyNode }, doc, { indent, items, offset }, anchor, onError) {
    var _a;
    const start = offset;
    const map = new YAMLMap(doc.schema);
    if (anchor)
        doc.anchors.setAnchor(map, anchor);
    for (const { start, key, sep, value } of items) {
        // key properties
        const keyProps = resolveProps(doc, start, true, 'explicit-key-ind', offset, onError);
        const implicitKey = !keyProps.found;
        if (implicitKey) {
            if (key) {
                if (key.type === 'block-seq')
                    onError(offset, 'A block sequence may not be used as an implicit map key');
                else if ('indent' in key && key.indent !== indent)
                    onError(offset, startColMsg);
            }
            // Item with no key token and no properties: only a comment line.
            if (!keyProps.anchor && !keyProps.tagName && !sep) {
                // TODO: assert being at last item?
                if (keyProps.comment) {
                    if (map.comment)
                        map.comment += '\n' + keyProps.comment;
                    else
                        map.comment = keyProps.comment;
                }
                continue;
            }
        }
        else if (((_a = keyProps.found) === null || _a === void 0 ? void 0 : _a.indent) !== indent)
            onError(offset, startColMsg);
        offset += keyProps.length;
        if (implicitKey && containsNewline(key))
            onError(offset, 'Implicit keys need to be on a single line');
        // key value
        const keyStart = offset;
        const keyNode = key
            ? composeNode(doc, key, keyProps, onError)
            : composeEmptyNode(doc, offset, start, null, keyProps, onError);
        offset = keyNode.range[1];
        // value properties
        const valueProps = resolveProps(doc, sep || [], !key || key.type === 'block-scalar', 'map-value-ind', offset, onError);
        offset += valueProps.length;
        if (valueProps.found) {
            if (implicitKey) {
                if ((value === null || value === void 0 ? void 0 : value.type) === 'block-map' && !valueProps.hasNewline)
                    onError(offset, 'Nested mappings are not allowed in compact mappings');
                if (doc.options.strict &&
                    keyProps.start < valueProps.found.offset - 1024)
                    onError(offset, 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
            }
            // value value
            const valueNode = value
                ? composeNode(doc, value, valueProps, onError)
                : composeEmptyNode(doc, offset, sep, null, valueProps, onError);
            offset = valueNode.range[1];
            map.items.push(new Pair(keyNode, valueNode));
        }
        else {
            // key with no value
            if (implicitKey)
                onError(keyStart, 'Implicit map keys need to be followed by map values');
            if (valueProps.comment) {
                if (keyNode.comment)
                    keyNode.comment += '\n' + valueProps.comment;
                else
                    keyNode.comment = valueProps.comment;
            }
            map.items.push(new Pair(keyNode));
        }
    }
    map.range = [start, offset];
    return map;
}
|
||
|
|
||
|
/**
 * Compose a block sequence (`- item` lines) from its CST representation.
 *
 * Each item's leading props (anchor, tag, `-` indicator, comments) are
 * resolved first; items without a `-` indicator are reported as errors
 * unless they are pure trailing comments/whitespace.
 */
function resolveBlockSeq({ composeNode, composeEmptyNode }, doc, { items, offset }, anchor, onError) {
    const rangeStart = offset;
    const seq = new YAMLSeq(doc.schema);
    if (anchor) {
        doc.anchors.setAnchor(seq, anchor);
    }
    for (const { start, value } of items) {
        const props = resolveProps(doc, start, true, 'seq-item-ind', offset, onError);
        offset += props.length;
        if (!props.found) {
            const hasContent = Boolean(props.anchor || props.tagName || value);
            if (!hasContent) {
                // TODO: assert being at last item?
                if (props.comment) {
                    seq.comment = props.comment;
                }
                continue;
            }
            onError(offset, value && value.type === 'block-seq'
                ? 'All sequence items must start at the same column'
                : 'Sequence item without - indicator');
        }
        const node = value
            ? composeNode(doc, value, props, onError)
            : composeEmptyNode(doc, offset, start, null, props, onError);
        offset = node.range[1];
        seq.items.push(node);
    }
    seq.range = [rangeStart, offset];
    return seq;
}
|
||
|
|
||
|
/**
 * Resolve the trailing tokens (spaces, comments, newlines) after a node,
 * combining any comments into a single string.
 *
 * @param {Array|null|undefined} end - Trailing source tokens, if any.
 * @param {number} offset - Offset at which `end` begins.
 * @param {boolean} reqSpace - If true, a comment not preceded by whitespace
 *   is reported via `onError`.
 * @param {Function} onError - Error callback `(offset, message)`.
 * @returns {{ comment: string, offset: number }} The combined comment and
 *   the offset just past the consumed tokens.
 */
function resolveEnd(end, offset, reqSpace, onError) {
    let comment = '';
    if (end) {
        let sawSpace = false;
        let sawComment = false;
        let pendingNewlines = '';
        for (const token of end) {
            const { source, type } = token;
            if (type === 'space') {
                sawSpace = true;
            }
            else if (type === 'comment') {
                if (reqSpace && !sawSpace)
                    onError(offset, 'Comments must be separated from other tokens by white space characters');
                const body = source.substring(1);
                // Newlines between comments are preserved inside the
                // combined comment string.
                comment = sawComment ? comment + pendingNewlines + body : body;
                sawComment = true;
                pendingNewlines = '';
            }
            else if (type === 'newline') {
                if (sawComment)
                    pendingNewlines += source;
                sawSpace = true;
            }
            else {
                onError(offset, `Unexpected ${type} at node end`);
            }
            offset += source.length;
        }
    }
    return { comment, offset };
}
|
||
|
|
||
|
/**
 * Compose a flow collection (`{...}` map or `[...]` sequence) from its CST
 * token, handling per-item anchors, tags, comments, explicit/implicit keys
 * and separator commas.
 *
 * Fixes a defect in the original: the comment following the closing
 * bracket was assigned from the already-consumed local `comment` variable
 * instead of `end.comment`, so trailing comments were lost (or stale text
 * was attached). See the `ee.length > 0` branch below.
 */
function resolveFlowCollection({ composeNode, composeEmptyNode }, doc, fc, _anchor, onError) {
    const isMap = fc.start.source === '{';
    const coll = isMap ? new YAMLMap(doc.schema) : new YAMLSeq(doc.schema);
    coll.flow = true;
    if (_anchor)
        doc.anchors.setAnchor(coll, _anchor);
    // Pending state for the item currently being assembled.
    let key = null;
    let value = null;
    let spaceBefore = false;
    let comment = '';
    let hasSpace = false;
    let hasComment = false;
    let newlines = '';
    let anchor = '';
    let tagName = '';
    let offset = fc.offset + 1; // past the opening { or [
    let atLineStart = false;
    let atExplicitKey = false;
    let atValueEnd = false;
    let nlAfterValueInSeq = false;
    let seqKeyToken = null;
    // Collect & reset the pending node properties.
    function getProps() {
        const props = { spaceBefore, comment, anchor, tagName };
        spaceBefore = false;
        comment = '';
        hasComment = false;
        newlines = '';
        anchor = '';
        tagName = '';
        return props;
    }
    // Push the pending key/value as a collection item; `pos` locates an
    // empty value node if none was parsed.
    function addItem(pos) {
        if (value) {
            if (hasComment)
                value.comment = comment;
        }
        else {
            value = composeEmptyNode(doc, offset, fc.items, pos, getProps(), onError);
        }
        if (isMap || atExplicitKey) {
            coll.items.push(key ? new Pair(key, value) : new Pair(value));
        }
        else {
            const seq = coll;
            if (key) {
                // A `key: value` inside a flow seq becomes a single-pair map.
                const map = new YAMLMap(doc.schema);
                map.flow = true;
                map.items.push(new Pair(key, value));
                seq.items.push(map);
            }
            else
                seq.items.push(value);
        }
    }
    for (let i = 0; i < fc.items.length; ++i) {
        const token = fc.items[i];
        let isSourceToken = true;
        switch (token.type) {
            case 'space':
                hasSpace = true;
                break;
            case 'comment': {
                if (doc.options.strict && !hasSpace)
                    onError(offset, 'Comments must be separated from other tokens by white space characters');
                const cb = token.source.substring(1);
                if (!hasComment)
                    comment = cb;
                else
                    comment += newlines + cb;
                atLineStart = false;
                hasComment = true;
                newlines = '';
                break;
            }
            case 'newline':
                if (atLineStart && !hasComment)
                    spaceBefore = true;
                if (atValueEnd) {
                    // A comment after a comma attaches to the previous item.
                    if (hasComment) {
                        let node = coll.items[coll.items.length - 1];
                        if (isPair(node))
                            node = node.value || node.key;
                        /* istanbul ignore else should not happen */
                        if (isNode(node))
                            node.comment = comment;
                        else
                            onError(offset, 'Error adding trailing comment to node');
                        comment = '';
                        hasComment = false;
                    }
                    atValueEnd = false;
                }
                else {
                    newlines += token.source;
                    if (!isMap && !key && value)
                        nlAfterValueInSeq = true;
                }
                atLineStart = true;
                hasSpace = true;
                break;
            case 'anchor':
                if (anchor)
                    onError(offset, 'A node can have at most one anchor');
                anchor = token.source.substring(1);
                atLineStart = false;
                atValueEnd = false;
                hasSpace = false;
                break;
            case 'tag': {
                if (tagName)
                    onError(offset, 'A node can have at most one tag');
                const tn = doc.directives.tagName(token.source, m => onError(offset, m));
                if (tn)
                    tagName = tn;
                atLineStart = false;
                atValueEnd = false;
                hasSpace = false;
                break;
            }
            case 'explicit-key-ind':
                if (anchor || tagName)
                    onError(offset, 'Anchors and tags must be after the ? indicator');
                atExplicitKey = true;
                atLineStart = false;
                atValueEnd = false;
                hasSpace = false;
                break;
            case 'map-value-ind': {
                if (key) {
                    if (value) {
                        // `a: b: c` — wrap the existing pair as the new key.
                        onError(offset, 'Missing {} around pair used as mapping key');
                        const map = new YAMLMap(doc.schema);
                        map.flow = true;
                        map.items.push(new Pair(key, value));
                        map.range = [key.range[0], value.range[1]];
                        key = map;
                        value = null;
                    } // else explicit key
                }
                else if (value) {
                    if (doc.options.strict) {
                        const slMsg = 'Implicit keys of flow sequence pairs need to be on a single line';
                        if (nlAfterValueInSeq)
                            onError(offset, slMsg);
                        else if (seqKeyToken) {
                            if (containsNewline(seqKeyToken))
                                onError(offset, slMsg);
                            const start = 'offset' in seqKeyToken && seqKeyToken.offset;
                            if (typeof start === 'number' && start < offset - 1024)
                                onError(offset, 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
                            seqKeyToken = null;
                        }
                    }
                    key = value;
                    value = null;
                }
                else {
                    key = composeEmptyNode(doc, offset, fc.items, i, getProps(), onError);
                }
                if (hasComment) {
                    key.comment = comment;
                    comment = '';
                    hasComment = false;
                }
                atExplicitKey = false;
                atValueEnd = false;
                hasSpace = false;
                break;
            }
            case 'comma':
                if (key || value || anchor || tagName || atExplicitKey)
                    addItem(i);
                else
                    onError(offset, `Unexpected , in flow ${isMap ? 'map' : 'sequence'}`);
                key = null;
                value = null;
                atExplicitKey = false;
                atValueEnd = true;
                hasSpace = false;
                nlAfterValueInSeq = false;
                seqKeyToken = null;
                break;
            case 'block-map':
            case 'block-seq':
                onError(offset, 'Block collections are not allowed within flow collections');
            // fallthrough
            default: {
                if (value)
                    onError(offset, 'Missing , between flow collection items');
                if (!isMap && !key && !atExplicitKey)
                    seqKeyToken = token;
                value = composeNode(doc, token, getProps(), onError);
                offset = value.range[1];
                atLineStart = false;
                isSourceToken = false;
                atValueEnd = false;
                hasSpace = false;
            }
        }
        if (isSourceToken)
            offset += token.source.length;
    }
    // Flush the final (possibly comma-less) item.
    if (key || value || anchor || tagName || atExplicitKey)
        addItem(fc.items.length);
    const expectedEnd = isMap ? '}' : ']';
    const [ce, ...ee] = fc.end;
    if (!ce || ce.source !== expectedEnd) {
        const cs = isMap ? 'map' : 'sequence';
        onError(offset, `Expected flow ${cs} to end with ${expectedEnd}`);
    }
    if (ce)
        offset += ce.source.length;
    if (ee.length > 0) {
        const end = resolveEnd(ee, offset, doc.options.strict, onError);
        if (end.comment)
            coll.comment = end.comment; // FIX: was `comment` (stale local)
        offset = end.offset;
    }
    coll.range = [fc.offset, offset];
    return coll;
}
|
||
|
|
||
|
/**
 * Compose a collection node (block map, block seq, or flow collection) and
 * apply any explicit tag to it, resolving through the schema's collection
 * tags or known tags.
 */
function composeCollection(CN, doc, token, anchor, tagName, onError) {
    let coll;
    if (token.type === 'block-map')
        coll = resolveBlockMap(CN, doc, token, anchor, onError);
    else if (token.type === 'block-seq')
        coll = resolveBlockSeq(CN, doc, token, anchor, onError);
    else if (token.type === 'flow-collection')
        coll = resolveFlowCollection(CN, doc, token, anchor, onError);
    if (!tagName)
        return coll;
    // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841
    const Coll = coll.constructor;
    if (tagName === '!' || tagName === Coll.tagName) {
        coll.tag = Coll.tagName;
        return coll;
    }
    const expType = isMap(coll) ? 'map' : 'seq';
    let tag = doc.schema.tags.find(t => t.collection === expType && t.tag === tagName);
    if (!tag) {
        const kt = doc.schema.knownTags[tagName];
        if (!kt || kt.collection !== expType) {
            // Unknown tag: keep the collection as-is, warn only.
            onError(coll.range[0], `Unresolved tag: ${tagName}`, true);
            coll.tag = tagName;
            return coll;
        }
        // Make the known tag available for stringifying, but not as a default.
        doc.schema.tags.push(Object.assign({}, kt, { default: false }));
        tag = kt;
    }
    const res = tag.resolve(coll, msg => onError(coll.range[0], msg), doc.options);
    const node = isNode(res) ? res : new Scalar(res);
    node.range = coll.range;
    node.tag = tagName;
    if (tag != null && tag.format)
        node.format = tag.format;
    return node;
}
|
||
|
|
||
|
/**
 * Resolve the value of a block scalar (`|` literal or `>` folded),
 * applying the header's indentation and chomping indicators.
 *
 * @returns The decoded string value, the Scalar node type, any header
 *   comment, and the total source length consumed.
 */
function resolveBlockScalar(scalar, strict, onError) {
    const header = parseBlockScalarHeader(scalar, strict, onError);
    if (!header)
        return { value: '', type: null, comment: '', length: 0 };
    const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
    const lines = scalar.source ? splitLines(scalar.source) : [];
    // determine the end of content & start of chomping
    let chompStart = lines.length;
    for (let i = lines.length - 1; i >= 0; --i) {
        const content = lines[i][1];
        if (content === '' || content === '\r')
            chompStart = i;
        else
            break;
    }
    // shortcut for empty contents
    if (!scalar.source || chompStart === 0) {
        // With `+` (keep) chomping, trailing empty lines are retained.
        const value = header.chomp === '+' ? lines.map(line => line[0]).join('\n') : '';
        let length = header.length;
        if (scalar.source)
            length += scalar.source.length;
        return { value, type, comment: header.comment, length };
    }
    // find the indentation level to trim from start
    let trimIndent = scalar.indent + header.indent;
    let offset = scalar.offset + header.length;
    let contentStart = 0;
    for (let i = 0; i < chompStart; ++i) {
        const [indent, content] = lines[i];
        if (content === '' || content === '\r') {
            // Leading empty lines may widen the auto-detected indent.
            if (header.indent === 0 && indent.length > trimIndent)
                trimIndent = indent.length;
        }
        else {
            if (indent.length < trimIndent) {
                const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
                onError(offset + indent.length, message);
            }
            // Without an explicit indicator, the first content line fixes the indent.
            if (header.indent === 0)
                trimIndent = indent.length;
            contentStart = i;
            break;
        }
        offset += indent.length + content.length + 1;
    }
    let value = '';
    let sep = '';
    let prevMoreIndented = false;
    // leading whitespace is kept intact
    for (let i = 0; i < contentStart; ++i)
        value += lines[i][0].slice(trimIndent) + '\n';
    for (let i = contentStart; i < chompStart; ++i) {
        let [indent, content] = lines[i];
        offset += indent.length + content.length + 1;
        const crlf = content[content.length - 1] === '\r';
        if (crlf)
            content = content.slice(0, -1);
        /* istanbul ignore if already caught in lexer */
        if (content && indent.length < trimIndent) {
            const src = header.indent
                ? 'explicit indentation indicator'
                : 'first line';
            const message = `Block scalar lines must not be less indented than their ${src}`;
            onError(offset - content.length - (crlf ? 2 : 1), message);
            indent = '';
        }
        if (type === Scalar.BLOCK_LITERAL) {
            // Literal style: every line break is kept.
            value += sep + indent.slice(trimIndent) + content;
            sep = '\n';
        }
        else if (indent.length > trimIndent || content[0] === '\t') {
            // more-indented content within a folded block
            if (sep === ' ')
                sep = '\n';
            else if (!prevMoreIndented && sep === '\n')
                sep = '\n\n';
            value += sep + indent.slice(trimIndent) + content;
            sep = '\n';
            prevMoreIndented = true;
        }
        else if (content === '') {
            // empty line
            if (sep === '\n')
                value += '\n';
            else
                sep = '\n';
        }
        else {
            // Folded style: join adjacent lines with a space.
            value += sep + content;
            sep = ' ';
            prevMoreIndented = false;
        }
    }
    // Apply the chomping indicator to trailing line breaks.
    switch (header.chomp) {
        case '-':
            // strip: no trailing newline
            break;
        case '+':
            // keep: retain all trailing empty lines
            for (let i = chompStart; i < lines.length; ++i)
                value += '\n' + lines[i][0].slice(trimIndent);
            if (value[value.length - 1] !== '\n')
                value += '\n';
            break;
        default:
            // clip (default): a single trailing newline
            value += '\n';
    }
    return {
        value,
        type,
        comment: header.comment,
        length: header.length + scalar.source.length
    };
}
|
||
|
/**
 * Parse the header line of a block scalar: the `|`/`>` mode indicator,
 * optional chomping (`-`/`+`) and indentation (1-9) indicators, plus any
 * trailing whitespace/comment tokens before the scalar body.
 *
 * @returns `{ mode, indent, chomp, comment, length }` or `null` if the
 *   first prop token is not a block-scalar-header.
 */
function parseBlockScalarHeader({ offset, props }, strict, onError) {
    /* istanbul ignore if should not happen */
    if (props[0].type !== 'block-scalar-header') {
        onError(offset, 'Block scalar header not found');
        return null;
    }
    const { source } = props[0];
    const mode = source[0];
    let indent = 0;
    let chomp = '';
    let badCharAt = -1;
    for (let i = 1; i < source.length; ++i) {
        const ch = source[i];
        if (!chomp && (ch === '-' || ch === '+')) {
            chomp = ch;
        }
        else {
            const digit = Number(ch);
            if (!indent && digit)
                indent = digit;
            else if (badCharAt === -1)
                badCharAt = offset + i; // remember only the first bad char
        }
    }
    if (badCharAt !== -1)
        onError(badCharAt, `Block scalar header includes extra characters: ${source}`);
    let sawSpace = false;
    let comment = '';
    let length = source.length;
    for (let i = 1; i < props.length; ++i) {
        const token = props[i];
        switch (token.type) {
            case 'space':
                sawSpace = true;
            // fallthrough
            case 'newline':
                length += token.source.length;
                break;
            case 'comment':
                if (strict && !sawSpace) {
                    onError(offset + length, 'Comments must be separated from other tokens by white space characters');
                }
                length += token.source.length;
                comment = token.source.substring(1);
                break;
            case 'error':
                onError(offset + length, token.message);
                length += token.source.length;
                break;
            /* istanbul ignore next should not happen */
            default: {
                onError(offset + length, `Unexpected token in block scalar header: ${token.type}`);
                const ts = token.source;
                if (ts && typeof ts === 'string')
                    length += ts.length;
            }
        }
    }
    return { mode, indent, chomp, comment, length };
}
|
||
|
/** Split `source` into lines, each represented as an `[indent, content]` pair. */
function splitLines(source) {
    // The capturing group makes each line's leading spaces its own entry.
    const parts = source.split(/\n( *)/);
    const head = parts[0];
    const headIndent = head.match(/^( *)/);
    const lines = [];
    if (headIndent && headIndent[1])
        lines.push([headIndent[1], head.slice(headIndent[1].length)]);
    else
        lines.push(['', head]);
    for (let i = 1; i < parts.length; i += 2)
        lines.push([parts[i], parts[i + 1]]);
    return lines;
}
|
||
|
|
||
|
/**
 * Resolve the value of a flow scalar token (plain, 'single-quoted' or
 * "double-quoted"), together with any trailing comment.
 */
function resolveFlowScalar({ offset, type, source, end }, strict, onError) {
    const onRelativeError = (rel, msg) => onError(offset + rel, msg);
    let nodeType;
    let value;
    switch (type) {
        case 'scalar':
            nodeType = Scalar.PLAIN;
            value = plainValue(source, onRelativeError);
            break;
        case 'single-quoted-scalar':
            nodeType = Scalar.QUOTE_SINGLE;
            value = singleQuotedValue(source, onRelativeError);
            break;
        case 'double-quoted-scalar':
            nodeType = Scalar.QUOTE_DOUBLE;
            value = doubleQuotedValue(source, onRelativeError);
            break;
        /* istanbul ignore next should not happen */
        default:
            onError(offset, `Expected a flow scalar value, but found: ${type}`);
            return {
                value: '',
                type: null,
                comment: '',
                length: source.length
            };
    }
    const trailing = resolveEnd(end, 0, strict, onRelativeError);
    return {
        value,
        type: nodeType,
        comment: trailing.comment,
        length: source.length + trailing.offset
    };
}
|
||
|
/**
 * Resolve a plain (unquoted) scalar: validate its first character and fold
 * line breaks per YAML folding rules.
 */
function plainValue(source, onError) {
    const first = source[0];
    /* istanbul ignore if should not happen */
    if (first === '\t') {
        onError(0, 'Plain value cannot start with a tab character');
    }
    else if (first === '|' || first === '>') {
        onError(0, `Plain value cannot start with block scalar indicator ${first}`);
    }
    else if (first === '@' || first === '`') {
        onError(0, `Plain value cannot start with reserved character ${first}`);
    }
    return foldLines(source.trim());
}
|
||
|
/**
 * Resolve a 'single-quoted' scalar: fold line breaks and un-escape the
 * doubled-quote (`''`) sequences.
 */
function singleQuotedValue(source, onError) {
    const hasClosingQuote = source[source.length - 1] === "'" && source.length > 1;
    if (!hasClosingQuote)
        onError(source.length, "Missing closing 'quote");
    return foldLines(source.slice(1, -1)).replace(/''/g, "'");
}
|
||
|
/**
 * YAML line folding: a single line break becomes a space; N consecutive
 * breaks become N-1 newlines. Whitespace around each break is trimmed.
 */
function foldLines(source) {
    const lines = source.split(/[ \t]*\r?\n[ \t]*/);
    if (lines.length === 1)
        return lines[0];
    let folded = lines[0];
    let joiner = ' ';
    for (let i = 1; i < lines.length - 1; ++i) {
        const line = lines[i];
        if (line !== '') {
            folded += joiner + line;
            joiner = ' ';
        }
        else if (joiner === '\n') {
            // Second or later consecutive empty line: emit a newline.
            folded += '\n';
        }
        else {
            // First empty line in a run: upgrade the joiner to a newline.
            joiner = '\n';
        }
    }
    return folded + joiner + lines[lines.length - 1];
}
|
||
|
/**
 * Resolve a "double-quoted" scalar: decode backslash escape sequences,
 * fold unescaped line breaks, and trim whitespace around breaks.
 *
 * NOTE(review): the loop mutates `i` inside several branches to skip
 * consumed characters; statement order is significant throughout.
 */
function doubleQuotedValue(source, onError) {
    let res = '';
    // Start at 1 / stop before the last char to skip the surrounding quotes.
    for (let i = 1; i < source.length - 1; ++i) {
        const ch = source[i];
        // CRLF is treated as a single \n (handled on the next iteration).
        if (ch === '\r' && source[i + 1] === '\n')
            continue;
        if (ch === '\n') {
            const { fold, offset } = foldNewline(source, i);
            res += fold;
            i = offset;
        }
        else if (ch === '\\') {
            let next = source[++i];
            const cc = escapeCodes[next];
            if (cc)
                res += cc;
            else if (next === '\n') {
                // skip escaped newlines, but still trim the following line
                next = source[i + 1];
                while (next === ' ' || next === '\t')
                    next = source[++i + 1];
            }
            else if (next === 'x' || next === 'u' || next === 'U') {
                // Hex escapes consume 2, 4 or 8 hex digits respectively.
                const length = { x: 2, u: 4, U: 8 }[next];
                res += parseCharCode(source, i + 1, length, onError);
                i += length;
            }
            else {
                // Unknown escape: report it and keep the raw text.
                const raw = source.substr(i - 1, 2);
                onError(i - 1, `Invalid escape sequence ${raw}`);
                res += raw;
            }
        }
        else if (ch === ' ' || ch === '\t') {
            // trim trailing whitespace
            const wsStart = i;
            let next = source[i + 1];
            while (next === ' ' || next === '\t')
                next = source[++i + 1];
            // Whitespace is kept only if it is not followed by a line break.
            if (next !== '\n')
                res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
        }
        else {
            res += ch;
        }
    }
    if (source[source.length - 1] !== '"' || source.length === 1)
        onError(source.length, 'Missing closing "quote');
    return res;
}
|
||
|
/**
 * Fold a single newline into a space and multiple newlines into N - 1
 * newlines. Presumes `source[offset] === '\n'`.
 *
 * @returns The folded text and the offset of the last consumed character.
 */
function foldNewline(source, offset) {
    let fold = '';
    for (;;) {
        const next = source[offset + 1];
        if (next !== ' ' && next !== '\t' && next !== '\n' && next !== '\r')
            break;
        // A lone \r not followed by \n is not a line break; stop folding.
        if (next === '\r' && source[offset + 2] !== '\n')
            break;
        if (next === '\n')
            fold += '\n';
        offset += 1;
    }
    return { fold: fold || ' ', offset };
}
|
||
|
// Double-quoted scalar escape sequences: maps the character following a
// `\` to the character it decodes to (YAML 1.2 section 5.7).
const escapeCodes = {
    '0': '\0',
    a: '\x07',
    b: '\b',
    e: '\x1b',
    f: '\f',
    n: '\n',
    r: '\r',
    t: '\t',
    v: '\v',
    N: '\u0085', // next line (NEL)
    _: '\u00a0', // non-breaking space
    L: '\u2028', // line separator
    P: '\u2029', // paragraph separator
    ' ': ' ',
    '"': '"',
    '/': '/',
    '\\': '\\',
    '\t': '\t'
};
|
||
|
/**
 * Decode a `\xNN`, `\uNNNN` or `\UNNNNNNNN` escape. `offset` points at the
 * first hex digit; `length` is the expected digit count. On invalid input
 * the raw escape text (including the leading `\x`/`\u`/`\U`) is reported
 * via `onError` and returned unchanged.
 */
function parseCharCode(source, offset, length, onError) {
    const hex = source.substr(offset, length);
    const isValid = hex.length === length && /^[0-9a-fA-F]+$/.test(hex);
    if (!isValid) {
        const raw = source.substr(offset - 2, length + 2);
        onError(offset - 2, `Invalid escape sequence ${raw}`);
        return raw;
    }
    return String.fromCodePoint(parseInt(hex, 16));
}
|
||
|
|
||
|
/**
 * Compose a Scalar node from a scalar CST token, resolving its value via
 * the matching schema tag (explicit by name, or matched by test) and
 * attaching range, type, tag, format, comment and anchor information.
 */
function composeScalar(doc, token, anchor, tagName, onError) {
    const { offset } = token;
    const resolved = token.type === 'block-scalar'
        ? resolveBlockScalar(token, doc.options.strict, onError)
        : resolveFlowScalar(token, doc.options.strict, onError);
    const { value, type, comment, length } = resolved;
    const tag = tagName
        ? findScalarTagByName(doc.schema, value, tagName, onError)
        : findScalarTagByTest(doc.schema, value, token.type === 'scalar');
    let scalar;
    try {
        const res = tag
            ? tag.resolve(value, msg => onError(offset, msg), doc.options)
            : value;
        scalar = isScalar(res) ? res : new Scalar(res);
    }
    catch (error) {
        // A throwing tag resolver degrades to a plain string scalar.
        onError(offset, error.message);
        scalar = new Scalar(value);
    }
    scalar.range = [offset, offset + length];
    scalar.source = value;
    if (type)
        scalar.type = type;
    if (tagName)
        scalar.tag = tagName;
    if (tag != null && tag.format)
        scalar.format = tag.format;
    if (comment)
        scalar.comment = comment;
    if (anchor)
        doc.anchors.setAnchor(scalar, anchor);
    return scalar;
}
|
||
|
/** The schema's plain-string tag, used when no other scalar tag applies. */
const defaultScalarTag = (schema) => {
    return schema.tags.find(tag => !tag.collection && tag.tag === 'tag:yaml.org,2002:str');
};
|
||
|
/**
 * Find the scalar tag matching an explicit `tagName`. Prefers a direct
 * non-default match; falls back to default tags whose `test` accepts the
 * value, then to the schema's known tags, and finally to the plain-string
 * tag (reporting the tag as unresolved).
 */
function findScalarTagByName(schema, value, tagName, onError) {
    if (tagName === '!')
        return defaultScalarTag(schema); // non-specific tag
    const matchWithTest = [];
    for (const tag of schema.tags) {
        if (tag.collection || tag.tag !== tagName)
            continue;
        if (tag.default && tag.test)
            matchWithTest.push(tag);
        else
            return tag;
    }
    for (const tag of matchWithTest) {
        if (tag.test != null && tag.test.test(value))
            return tag;
    }
    const kt = schema.knownTags[tagName];
    if (kt && !kt.collection) {
        // Ensure that the known tag is available for stringifying,
        // but does not get used by default.
        schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
        return kt;
    }
    onError(0, `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
    return defaultScalarTag(schema);
}
|
||
|
/**
 * Pick the first default tag whose `test` matches `value`; used for plain
 * scalars only (`apply` is false for quoted/block scalars, which always
 * resolve as strings).
 */
function findScalarTagByTest(schema, value, apply) {
    if (apply) {
        for (const tag of schema.tags) {
            const matches = tag.default && tag.test != null && tag.test.test(value);
            if (matches)
                return tag;
        }
    }
    return defaultScalarTag(schema);
}
|
||
|
|
||
|
/**
 * Compute the offset at which an empty scalar should be positioned:
 * immediately after the last non-whitespace/non-comment token before
 * `pos`, but after any spaces that directly follow that token.
 */
function emptyScalarPosition(offset, before, pos) {
    if (!before)
        return offset;
    if (pos === null)
        pos = before.length;
    for (let i = pos - 1; i >= 0; --i) {
        let st = before[i];
        const type = st.type;
        if (type === 'space' || type === 'comment' || type === 'newline') {
            offset -= st.source.length;
            continue;
        }
        // Technically, an empty scalar is immediately after the last non-empty
        // node, but it's more useful to place it after any whitespace.
        st = before[++i];
        while (st != null && st.type === 'space') {
            offset += st.source.length;
            st = before[++i];
        }
        break;
    }
    return offset;
}
|
||
|
|
||
|
const CN = { composeNode, composeEmptyNode };
/**
 * Compose a document node from a CST token, dispatching on the token type
 * (alias, scalar variants, or collection variants), then attach any
 * leading space/comment properties.
 *
 * Fixes: corrected the typo in the unsupported-token error message
 * ("Unsupporten") and removed a stray debug `console.log(token)`.
 */
function composeNode(doc, token, props, onError) {
    const { spaceBefore, comment, anchor, tagName } = props;
    let node;
    switch (token.type) {
        case 'alias':
            node = composeAlias(doc, token, onError);
            if (anchor || tagName)
                onError(token.offset, 'An alias node must not specify any properties');
            break;
        case 'scalar':
        case 'single-quoted-scalar':
        case 'double-quoted-scalar':
        case 'block-scalar':
            node = composeScalar(doc, token, anchor, tagName, onError);
            break;
        case 'block-map':
        case 'block-seq':
        case 'flow-collection':
            node = composeCollection(CN, doc, token, anchor, tagName, onError);
            break;
        default:
            throw new Error(`Unsupported token type: ${token.type}`);
    }
    if (spaceBefore)
        node.spaceBefore = true;
    if (comment) {
        // A comment on an empty scalar belongs to the node itself;
        // otherwise it precedes the node.
        if (token.type === 'scalar' && token.source === '')
            node.comment = comment;
        else
            node.commentBefore = comment;
    }
    return node;
}
|
||
|
/**
 * Compose a node for a position with no source content (e.g. an implicit
 * null value): an empty plain scalar placed by `emptyScalarPosition`.
 */
function composeEmptyNode(doc, offset, before, pos, { spaceBefore, comment, anchor, tagName }, onError) {
    const emptyToken = {
        type: 'scalar',
        offset: emptyScalarPosition(offset, before, pos),
        indent: -1,
        source: ''
    };
    const node = composeScalar(doc, emptyToken, anchor, tagName, onError);
    if (spaceBefore) {
        node.spaceBefore = true;
    }
    if (comment) {
        node.comment = comment;
    }
    return node;
}
|
||
|
/**
 * Compose an `*alias` node; reports an error if the referenced anchor has
 * not been defined earlier in the document.
 */
function composeAlias(doc, { offset, source, end }, onError) {
    const anchorName = source.substring(1);
    const anchorNode = doc.anchors.getNode(anchorName);
    if (!anchorNode)
        onError(offset, `Aliased anchor not found: ${anchorName}`);
    const alias = new Alias(anchorNode);
    const trailing = resolveEnd(end, offset + source.length, doc.options.strict, onError);
    alias.range = [offset, trailing.offset];
    if (trailing.comment)
        alias.comment = trailing.comment;
    return alias;
}
|
||
|
|
||
|
/**
 * Compose a full Document from a CST document token: resolve leading
 * props, compose the contents node (or an empty scalar), and attach any
 * trailing comment and the document range.
 */
function composeDoc(options, directives, { offset, start, value, end }, onError) {
    const docOptions = Object.assign({ directives }, options);
    const doc = new Document(undefined, docOptions);
    const contentProps = resolveProps(doc, start, true, 'doc-start', offset, onError);
    if (contentProps.found)
        doc.directives.marker = true; // explicit --- marker was seen
    doc.contents = value
        ? composeNode(doc, value, contentProps, onError)
        : composeEmptyNode(doc, offset + contentProps.length, start, null, contentProps, onError);
    const docEnd = resolveEnd(end, doc.contents.range[1], false, onError);
    if (docEnd.comment)
        doc.comment = docEnd.comment;
    doc.range = [offset, docEnd.offset];
    return doc;
}
|
||
|
|
||
|
/**
 * Combine the lines collected before a document (comments, directives,
 * newlines) into a single comment string.
 *
 * Fixes a crash in the original: `prelude[i + 1][0]` threw a TypeError
 * when a `%` directive was the last prelude entry (no trailing newline).
 *
 * @param {string[]} prelude - Raw line sources: '#...' comments, '%...'
 *   directives, or newline/whitespace sources.
 * @returns {{ comment: string, afterEmptyLine: boolean }} The combined
 *   comment text and whether it was followed by an empty line.
 */
function parsePrelude(prelude) {
    let comment = '';
    let atComment = false;
    let afterEmptyLine = false;
    for (let i = 0; i < prelude.length; ++i) {
        const source = prelude[i];
        switch (source[0]) {
            case '#':
                // Blank-line-separated comments keep a blank line between them.
                comment +=
                    (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
                    source.substring(1);
                atComment = true;
                afterEmptyLine = false;
                break;
            case '%': {
                // Skip the directive's own trailing newline entry, unless the
                // next entry is a comment. Guard against the directive being
                // the final entry (previously crashed with a TypeError).
                const next = prelude[i + 1];
                if (next === undefined || next[0] !== '#')
                    i += 1;
                atComment = false;
                break;
            }
            default:
                // This may be wrong after doc-end, but in that case it doesn't matter
                if (!atComment)
                    afterEmptyLine = true;
                atComment = false;
        }
    }
    return { comment, afterEmptyLine };
}
|
||
|
/**
 * Compose a stream of CST nodes into a stream of YAML Documents.
 *
 * ```ts
 * const options: Options = { ... }
 * const docs: Document.Parsed[] = []
 * const composer = new Composer(doc => docs.push(doc), options)
 * const parser = new Parser(composer.next)
 * parser.parse(source)
 * composer.end()
 * ```
 */
class Composer {
    constructor(onDocument, options = {}) {
        // The document currently being built; emitted via onDocument when
        // the next document (or end of stream) is reached.
        this.doc = null;
        // True while consuming %-directives before a document starts.
        this.atDirectives = false;
        // Comment/directive/newline sources collected before the next doc.
        this.prelude = [];
        // Errors/warnings collected before they can be attached to a doc.
        this.errors = [];
        this.warnings = [];
        this.onError = (offset, message, warning) => {
            if (warning)
                this.warnings.push(new YAMLWarning(offset, message));
            else
                this.errors.push(new YAMLParseError(offset, message));
        };
        /**
         * Advance the composer by one CST token. Bound to the Composer
         * instance, so may be used directly as a callback function.
         */
        this.next = (token) => {
            switch (token.type) {
                case 'directive':
                    this.directives.add(token.source, this.onError);
                    this.prelude.push(token.source);
                    this.atDirectives = true;
                    break;
                case 'document': {
                    const doc = composeDoc(this.options, this.directives, token, this.onError);
                    this.decorate(doc, false);
                    // Emit the previous document before holding on to this one.
                    if (this.doc)
                        this.onDocument(this.doc);
                    this.doc = doc;
                    this.atDirectives = false;
                    break;
                }
                case 'byte-order-mark':
                case 'space':
                    break;
                case 'comment':
                case 'newline':
                    this.prelude.push(token.source);
                    break;
                case 'error': {
                    const msg = token.source
                        ? `${token.message}: ${JSON.stringify(token.source)}`
                        : token.message;
                    const error = new YAMLParseError(-1, msg);
                    if (this.atDirectives || !this.doc)
                        this.errors.push(error);
                    else
                        this.doc.errors.push(error);
                    break;
                }
                case 'doc-end': {
                    if (!this.doc) {
                        const msg = 'Unexpected doc-end without preceding document';
                        this.errors.push(new YAMLParseError(token.offset, msg));
                        break;
                    }
                    const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
                    this.decorate(this.doc, true);
                    if (end.comment) {
                        const dc = this.doc.comment;
                        this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
                    }
                    // Extend the document range to cover the `...` marker.
                    this.doc.range[1] = end.offset;
                    break;
                }
                default:
                    this.errors.push(new YAMLParseError(-1, `Unsupported token ${token.type}`));
            }
        };
        this.directives = new Directives({
            version: (options === null || options === void 0 ? void 0 : options.version) || defaultOptions.version
        });
        this.onDocument = onDocument;
        this.options = options;
    }
    /**
     * Attach the collected prelude comments and pending errors/warnings to
     * `doc`. With `afterDoc`, the prelude follows the document (e.g. after
     * a `...` marker) rather than preceding it.
     */
    decorate(doc, afterDoc) {
        const { comment, afterEmptyLine } = parsePrelude(this.prelude);
        //console.log({ dc: doc.comment, prelude, comment })
        if (comment) {
            const dc = doc.contents;
            if (afterDoc) {
                doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
            }
            else if (afterEmptyLine || doc.directives.marker || !dc) {
                doc.commentBefore = comment;
            }
            else if (isCollection(dc) && !dc.flow && dc.items.length > 0) {
                // Attach to the collection's first item, ahead of any
                // comment it already carries.
                const it = dc.items[0];
                const cb = it.commentBefore;
                it.commentBefore = cb ? `${comment}\n${cb}` : comment;
            }
            else {
                const cb = dc.commentBefore;
                dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
            }
        }
        if (afterDoc) {
            // The doc already owns its arrays; append the pending entries.
            Array.prototype.push.apply(doc.errors, this.errors);
            Array.prototype.push.apply(doc.warnings, this.warnings);
        }
        else {
            doc.errors = this.errors;
            doc.warnings = this.warnings;
        }
        this.prelude = [];
        this.errors = [];
        this.warnings = [];
    }
    /**
     * Current stream status information.
     *
     * Mostly useful at the end of input for an empty stream.
     */
    streamInfo() {
        return {
            comment: parsePrelude(this.prelude).comment,
            directives: this.directives,
            errors: this.errors,
            warnings: this.warnings
        };
    }
    /**
     * Flush the final document at the end of input. With `forceDoc`, an
     * empty document is emitted even if no content was seen; `offset`
     * is used as its end position.
     */
    end(forceDoc = false, offset = -1) {
        if (this.doc) {
            this.decorate(this.doc, true);
            this.onDocument(this.doc);
            this.doc = null;
        }
        else if (forceDoc) {
            const opts = Object.assign({ directives: this.directives }, this.options);
            const doc = new Document(undefined, opts);
            if (this.atDirectives)
                this.onError(offset, 'Missing directives-end indicator line');
            doc.range = [0, offset];
            this.decorate(doc, false);
            this.onDocument(doc);
        }
    }
}
|
||
|
|
||
|
// Control characters emitted by the Lexer alongside slices of the input;
// `tokenType()` maps each back to a token type name.
/** The byte order mark */
const BOM = '\u{FEFF}';
/** Start of doc-mode */
const DOCUMENT = '\x02'; // C0: Start of Text
/** Unexpected end of flow-mode */
const FLOW_END = '\x18'; // C0: Cancel
/** Next token is a scalar value */
const SCALAR = '\x1f'; // C0: Unit Separator
|
||
|
/**
 * Identify the type of a lexer token. May return `null` for unknown tokens.
 *
 * Whole-string matches (control characters, document markers, indicators)
 * are checked first; otherwise the token is classified by its first
 * character.
 */
function tokenType(source) {
    switch (source) {
        case BOM: return 'byte-order-mark';
        case DOCUMENT: return 'doc-mode';
        case FLOW_END: return 'flow-error-end';
        case SCALAR: return 'scalar';
        case '---': return 'doc-start';
        case '...': return 'doc-end';
        case '':
        case '\n':
        case '\r\n':
            return 'newline';
        case '-': return 'seq-item-ind';
        case '?': return 'explicit-key-ind';
        case ':': return 'map-value-ind';
        case '{': return 'flow-map-start';
        case '}': return 'flow-map-end';
        case '[': return 'flow-seq-start';
        case ']': return 'flow-seq-end';
        case ',': return 'comma';
    }
    const first = source[0];
    if (first === ' ' || first === '\t')
        return 'space';
    if (first === '#')
        return 'comment';
    if (first === '%')
        return 'directive-line';
    if (first === '*')
        return 'alias';
    if (first === '&')
        return 'anchor';
    if (first === '!')
        return 'tag';
    if (first === "'")
        return 'single-quoted-scalar';
    if (first === '"')
        return 'double-quoted-scalar';
    if (first === '|' || first === '>')
        return 'block-scalar-header';
    return null;
}
|
||
|
|
||
|
/*
|
||
|
START -> stream
|
||
|
|
||
|
stream
|
||
|
directive -> line-end -> stream
|
||
|
indent + line-end -> stream
|
||
|
[else] -> line-start
|
||
|
|
||
|
line-end
|
||
|
comment -> line-end
|
||
|
newline -> .
|
||
|
input-end -> END
|
||
|
|
||
|
line-start
|
||
|
doc-start -> doc
|
||
|
doc-end -> stream
|
||
|
[else] -> indent -> block-start
|
||
|
|
||
|
block-start
|
||
|
seq-item-start -> block-start
|
||
|
explicit-key-start -> block-start
|
||
|
map-value-start -> block-start
|
||
|
[else] -> doc
|
||
|
|
||
|
doc
|
||
|
line-end -> line-start
|
||
|
spaces -> doc
|
||
|
anchor -> doc
|
||
|
tag -> doc
|
||
|
flow-start -> flow -> doc
|
||
|
flow-end -> error -> doc
|
||
|
seq-item-start -> error -> doc
|
||
|
explicit-key-start -> error -> doc
|
||
|
map-value-start -> doc
|
||
|
alias -> doc
|
||
|
quote-start -> quoted-scalar -> doc
|
||
|
block-scalar-header -> line-end -> block-scalar(min) -> line-start
|
||
|
[else] -> plain-scalar(false, min) -> doc
|
||
|
|
||
|
flow
|
||
|
line-end -> flow
|
||
|
spaces -> flow
|
||
|
anchor -> flow
|
||
|
tag -> flow
|
||
|
flow-start -> flow -> flow
|
||
|
flow-end -> .
|
||
|
seq-item-start -> error -> flow
|
||
|
explicit-key-start -> flow
|
||
|
map-value-start -> flow
|
||
|
alias -> flow
|
||
|
quote-start -> quoted-scalar -> flow
|
||
|
comma -> flow
|
||
|
[else] -> plain-scalar(true, 0) -> flow
|
||
|
|
||
|
quoted-scalar
|
||
|
quote-end -> .
|
||
|
[else] -> quoted-scalar
|
||
|
|
||
|
block-scalar(min)
|
||
|
newline + peek(indent < min) -> .
|
||
|
[else] -> block-scalar(min)
|
||
|
|
||
|
plain-scalar(is-flow, min)
|
||
|
scalar-end(is-flow) -> .
|
||
|
peek(newline + (indent < min)) -> .
|
||
|
[else] -> plain-scalar(min)
|
||
|
*/
|
||
|
/**
 * True for the characters that terminate or separate YAML tokens:
 * end-of-input (`undefined`), space, newline, carriage return, or tab.
 */
function isEmpty(ch) {
    return (ch === undefined ||
        ch === ' ' ||
        ch === '\n' ||
        ch === '\r' ||
        ch === '\t');
}
|
||
|
// Characters that end a plain (unquoted) scalar inside a flow collection.
const invalidFlowScalarChars = [',', '[', ']', '{', '}'];
// Characters that terminate an anchor or alias identifier.
const invalidIdentifierChars = [' ', ',', '[', ']', '{', '}', '\n', '\r', '\t'];
// True at end-of-input or on any character that cannot be part of an identifier.
const isNotIdentifierChar = (ch) => !ch || invalidIdentifierChars.includes(ch);
|
||
|
/**
 * Splits an input string into lexical tokens, i.e. smaller strings that are
 * easily identifiable by `tokens.tokenType()`.
 *
 * Lexing starts always in a "stream" context. Incomplete input may be buffered
 * until a complete token can be emitted.
 *
 * In addition to slices of the original input, the following control characters
 * may also be emitted:
 *
 * - `\x02` (Start of Text): A document starts with the next token
 * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
 * - `\x1f` (Unit Separator): Next token is a scalar value
 * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
 */
class Lexer {
    /**
     * Define/initialise a YAML lexer. `push` will be called separately with each
     * token when `lex()` is passed an input string.
     *
     * @public
     */
    constructor(push) {
        /**
         * Flag indicating whether the end of the current buffer marks the end of
         * all input
         */
        this.atEnd = false;
        /**
         * Explicit indent set in block scalar header, as an offset from the current
         * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
         * explicitly set.
         */
        this.blockScalarIndent = -1;
        /**
         * Block scalars that include a + (keep) chomping indicator in their header
         * include trailing empty lines, which are otherwise excluded from the
         * scalar's contents.
         */
        this.blockScalarKeep = false;
        /** Current input */
        this.buffer = '';
        /**
         * Flag noting whether the map value indicator : can immediately follow this
         * node within a flow context.
         */
        this.flowKey = false;
        /** Count of surrounding flow collection levels. */
        this.flowLevel = 0;
        /**
         * Minimum level of indentation required for next lines to be parsed as a
         * part of the current scalar value.
         */
        this.indentNext = 0;
        /** Indentation level of the current line. */
        this.indentValue = 0;
        /** Stores the state of the lexer if reaching the end of incomplete input */
        this.next = null;
        /** A pointer to `buffer`; the current position of the lexer. */
        this.pos = 0;
        this.push = push;
    }
    /**
     * Read YAML tokens from the `source` string, calling the callback
     * defined in the constructor for each one. If `incomplete`, a part
     * of the last line may be left as a buffer for the next call.
     *
     * @public
     */
    lex(source, incomplete) {
        if (source)
            this.buffer = this.buffer ? this.buffer + source : source;
        this.atEnd = !incomplete;
        // Resume from any state saved by setNext() on the previous call.
        let next = this.next || 'stream';
        while (next && (incomplete || this.hasChars(1)))
            next = this.parseNext(next);
    }
    /** True if only spaces/tabs remain before a comment, newline or end of buffer. */
    atLineEnd() {
        let i = this.pos;
        let ch = this.buffer[i];
        while (ch === ' ' || ch === '\t')
            ch = this.buffer[++i];
        if (!ch || ch === '#' || ch === '\n')
            return true;
        if (ch === '\r')
            return this.buffer[i + 1] === '\n';
        return false;
    }
    /** Character at offset `n` from the current position (may be undefined). */
    charAt(n) {
        return this.buffer[this.pos + n];
    }
    /**
     * If the line starting at `offset` continues the current scalar value,
     * returns the position just past its indentation; otherwise returns -1.
     */
    continueScalar(offset) {
        let ch = this.buffer[offset];
        if (this.indentNext > 0) {
            // Inside an indented scalar: count leading spaces on the new line.
            let indent = 0;
            while (ch === ' ')
                ch = this.buffer[++indent + offset];
            if (ch === '\r') {
                const next = this.buffer[indent + offset + 1];
                if (next === '\n' || (!next && !this.atEnd))
                    return offset + indent + 1;
            }
            // An empty line, sufficient indent, or incomplete input continues the scalar.
            return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
                ? offset + indent
                : -1;
        }
        if (ch === '-' || ch === '.') {
            // At column 0: a document marker always ends the scalar.
            const dt = this.buffer.substr(offset, 3);
            if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
                return -1;
        }
        return offset;
    }
    /**
     * The current line without its trailing newline, or null when the line
     * is incomplete and more input is still expected.
     */
    getLine() {
        let end = this.buffer.indexOf('\n', this.pos);
        if (end === -1)
            return this.atEnd ? this.buffer.substring(this.pos) : null;
        if (this.buffer[end - 1] === '\r')
            end -= 1;
        return this.buffer.substring(this.pos, end);
    }
    /** True if at least `n` characters remain in the buffer. */
    hasChars(n) {
        return this.pos + n <= this.buffer.length;
    }
    /**
     * Save `state` to resume from when more input arrives, dropping the
     * already-consumed part of the buffer. Always returns null to end the
     * parse loop.
     */
    setNext(state) {
        this.buffer = this.buffer.substring(this.pos);
        this.pos = 0;
        this.next = state;
        return null;
    }
    /** The next `n` characters, without consuming them. */
    peek(n) {
        return this.buffer.substr(this.pos, n);
    }
    /** Dispatch to the handler for the current lexer state. */
    parseNext(next) {
        switch (next) {
            case 'stream':
                return this.parseStream();
            case 'line-start':
                return this.parseLineStart();
            case 'block-start':
                return this.parseBlockStart();
            case 'doc':
                return this.parseDocument();
            case 'flow':
                return this.parseFlowCollection();
            case 'quoted-scalar':
                return this.parseQuotedScalar();
            case 'block-scalar':
                return this.parseBlockScalar();
            case 'plain-scalar':
                return this.parsePlainScalar();
        }
    }
    /** Top-level state: handles BOM, %directives, and blank/comment lines. */
    parseStream() {
        let line = this.getLine();
        if (line === null)
            return this.setNext('stream');
        if (line[0] === BOM) {
            this.pushCount(1);
            line = line.substring(1);
        }
        if (line[0] === '%') {
            // Directive line; any trailing " #comment" is emitted separately.
            let dirEnd = line.length;
            const cs = line.indexOf('#');
            if (cs !== -1) {
                const ch = line[cs - 1];
                if (ch === ' ' || ch === '\t')
                    dirEnd = cs - 1;
            }
            // Trim trailing whitespace from the directive itself.
            while (true) {
                const ch = line[dirEnd - 1];
                if (ch === ' ' || ch === '\t')
                    dirEnd -= 1;
                else
                    break;
            }
            const n = this.pushCount(dirEnd) + this.pushSpaces(true);
            this.pushCount(line.length - n); // possible comment
            this.pushNewline();
            return 'stream';
        }
        if (this.atLineEnd()) {
            const sp = this.pushSpaces(true);
            this.pushCount(line.length - sp);
            this.pushNewline();
            return 'stream';
        }
        // Anything else starts a document.
        this.push(DOCUMENT);
        return this.parseLineStart();
    }
    /** Start-of-line state: doc markers, indentation, then block structure. */
    parseLineStart() {
        const ch = this.charAt(0);
        if (!ch && !this.atEnd)
            return this.setNext('line-start');
        if (ch === '-' || ch === '.') {
            // Need 4 chars to distinguish "---"/"..." followed by an empty char.
            if (!this.atEnd && !this.hasChars(4))
                return this.setNext('line-start');
            const s = this.peek(3);
            if (s === '---' && isEmpty(this.charAt(3))) {
                this.pushCount(3);
                this.indentValue = 0;
                this.indentNext = 0;
                return 'doc';
            }
            else if (s === '...' && isEmpty(this.charAt(3))) {
                this.pushCount(3);
                return 'stream';
            }
        }
        this.indentValue = this.pushSpaces(false);
        // NOTE(review): this inspects charAt(1) rather than charAt(0) when
        // shrinking indentNext — matches the bundled upstream source; verify
        // against upstream yaml if indent handling ever misbehaves.
        if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
            this.indentNext = this.indentValue;
        return this.parseBlockStart();
    }
    /** Consume leading "- ", "? " or ": " indicators, tracking indentation. */
    parseBlockStart() {
        const [ch0, ch1] = this.peek(2);
        if (!ch1 && !this.atEnd)
            return this.setNext('block-start');
        if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
            const n = this.pushCount(1) + this.pushSpaces(true);
            this.indentNext = this.indentValue + 1;
            this.indentValue += n;
            return this.parseBlockStart();
        }
        return 'doc';
    }
    /** In-document state: classify the next node by its first character. */
    parseDocument() {
        this.pushSpaces(true);
        const line = this.getLine();
        if (line === null)
            return this.setNext('doc');
        let n = this.pushIndicators();
        switch (line[n]) {
            case '#':
                this.pushCount(line.length - n);
            // fallthrough
            case undefined:
                this.pushNewline();
                return this.parseLineStart();
            case '{':
            case '[':
                this.pushCount(1);
                this.flowKey = false;
                this.flowLevel = 1;
                return 'flow';
            case '}':
            case ']':
                // this is an error
                this.pushCount(1);
                return 'doc';
            case '*':
                this.pushUntil(isNotIdentifierChar);
                return 'doc';
            case '"':
            case "'":
                return this.parseQuotedScalar();
            case '|':
            case '>':
                n += this.parseBlockScalarHeader();
                n += this.pushSpaces(true);
                this.pushCount(line.length - n);
                this.pushNewline();
                return this.parseBlockScalar();
            default:
                return this.parsePlainScalar();
        }
    }
    /** Inside a flow collection: track nesting and classify the next token. */
    parseFlowCollection() {
        let nl, sp;
        let indent = -1;
        // Skip any run of newlines and spaces, tracking the latest indent.
        do {
            nl = this.pushNewline();
            sp = this.pushSpaces(true);
            if (nl > 0)
                this.indentValue = indent = sp;
        } while (nl + sp > 0);
        const line = this.getLine();
        if (line === null)
            return this.setNext('flow');
        if ((indent !== -1 && indent < this.indentNext) ||
            (indent === 0 &&
                (line.startsWith('---') || line.startsWith('...')) &&
                isEmpty(line[3]))) {
            // Allowing for the terminal ] or } at the same (rather than greater)
            // indent level as the initial [ or { is technically invalid, but
            // failing here would be surprising to users.
            const atFlowEndMarker = indent === this.indentNext - 1 &&
                this.flowLevel === 1 &&
                (line[0] === ']' || line[0] === '}');
            if (!atFlowEndMarker) {
                // this is an error
                this.flowLevel = 0;
                this.push(FLOW_END);
                return this.parseLineStart();
            }
        }
        let n = 0;
        while (line[n] === ',')
            n += this.pushCount(1) + this.pushSpaces(true);
        n += this.pushIndicators();
        switch (line[n]) {
            case undefined:
                return 'flow';
            case '#':
                this.pushCount(line.length - n);
                return 'flow';
            case '{':
            case '[':
                this.pushCount(1);
                this.flowKey = false;
                this.flowLevel += 1;
                return 'flow';
            case '}':
            case ']':
                this.pushCount(1);
                this.flowKey = true;
                this.flowLevel -= 1;
                return this.flowLevel ? 'flow' : 'doc';
            case '*':
                this.pushUntil(isNotIdentifierChar);
                return 'flow';
            case '"':
            case "'":
                this.flowKey = true;
                return this.parseQuotedScalar();
            case ':': {
                const next = this.charAt(1);
                if (this.flowKey || isEmpty(next) || next === ',') {
                    this.pushCount(1);
                    this.pushSpaces(true);
                    return 'flow';
                }
            }
            // fallthrough
            default:
                this.flowKey = false;
                return this.parsePlainScalar();
        }
    }
    /** Consume a complete single- or double-quoted scalar. */
    parseQuotedScalar() {
        const quote = this.charAt(0);
        let end = this.buffer.indexOf(quote, this.pos + 1);
        if (quote === "'") {
            // '' is an escaped quote inside a single-quoted scalar.
            while (end !== -1 && this.buffer[end + 1] === "'")
                end = this.buffer.indexOf("'", end + 2);
        }
        else {
            // double-quote
            // A closing quote preceded by an odd number of backslashes is escaped.
            while (end !== -1) {
                let n = 0;
                while (this.buffer[end - 1 - n] === '\\')
                    n += 1;
                if (n % 2 === 0)
                    break;
                end = this.buffer.indexOf('"', end + 1);
            }
        }
        let nl = this.buffer.indexOf('\n', this.pos);
        if (nl !== -1 && nl < end) {
            while (nl !== -1 && nl < end) {
                const cs = this.continueScalar(nl + 1);
                if (cs === -1)
                    break;
                nl = this.buffer.indexOf('\n', cs);
            }
            if (nl !== -1 && nl < end) {
                // this is an error caused by an unexpected unindent
                end = nl - 1;
            }
        }
        if (end === -1) {
            if (!this.atEnd)
                return this.setNext('quoted-scalar');
            end = this.buffer.length;
        }
        this.pushToIndex(end + 1, false);
        return this.flowLevel ? 'flow' : 'doc';
    }
    /** Parse a block scalar's "|"/">" header with its chomping/indent modifiers. */
    parseBlockScalarHeader() {
        this.blockScalarIndent = -1;
        this.blockScalarKeep = false;
        let i = this.pos;
        while (true) {
            const ch = this.buffer[++i];
            if (ch === '+')
                this.blockScalarKeep = true;
            else if (ch > '0' && ch <= '9')
                this.blockScalarIndent = Number(ch) - 1;
            else if (ch !== '-')
                break;
        }
        return this.pushUntil(ch => isEmpty(ch) || ch === '#');
    }
    /** Consume the body of a block scalar, honouring indent and chomping. */
    parseBlockScalar() {
        let nl = this.pos - 1; // may be -1 if this.pos === 0
        let indent = 0;
        let ch;
        // Skip leading empty lines, tracking the last newline seen.
        loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
            switch (ch) {
                case ' ':
                    indent += 1;
                    break;
                case '\n':
                    nl = i;
                    indent = 0;
                    break;
                case '\r': {
                    const next = this.buffer[i + 1];
                    if (!next && !this.atEnd)
                        return this.setNext('block-scalar');
                    if (next === '\n')
                        break;
                } // fallthrough
                default:
                    break loop;
            }
        }
        if (!ch && !this.atEnd)
            return this.setNext('block-scalar');
        if (indent >= this.indentNext) {
            // Establish the scalar's content indent from the first non-empty line
            // (or the header's explicit indent), then consume continuing lines.
            if (this.blockScalarIndent === -1)
                this.indentNext = indent;
            else
                this.indentNext += this.blockScalarIndent;
            do {
                const cs = this.continueScalar(nl + 1);
                if (cs === -1)
                    break;
                nl = this.buffer.indexOf('\n', cs);
            } while (nl !== -1);
            if (nl === -1) {
                if (!this.atEnd)
                    return this.setNext('block-scalar');
                nl = this.buffer.length;
            }
        }
        if (!this.blockScalarKeep) {
            // Without the keep (+) indicator, drop trailing empty lines.
            do {
                let i = nl - 1;
                let ch = this.buffer[i];
                if (ch === '\r')
                    ch = this.buffer[--i];
                while (ch === ' ' || ch === '\t')
                    ch = this.buffer[--i];
                if (ch === '\n' && i >= this.pos)
                    nl = i;
                else
                    break;
            } while (true);
        }
        this.push(SCALAR);
        this.pushToIndex(nl + 1, true);
        return this.parseLineStart();
    }
    /** Consume a plain (unquoted) scalar, which may span multiple lines. */
    parsePlainScalar() {
        const inFlow = this.flowLevel > 0;
        let end = this.pos - 1;
        let i = this.pos - 1;
        let ch;
        while ((ch = this.buffer[++i])) {
            if (ch === ':') {
                // ": " (or ",," in flow) ends the scalar; a bare ':' is content.
                const next = this.buffer[i + 1];
                if (isEmpty(next) || (inFlow && next === ','))
                    break;
                end = i;
            }
            else if (isEmpty(ch)) {
                const next = this.buffer[i + 1];
                if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
                    break;
                if (ch === '\r') {
                    if (next === '\n') {
                        i += 1;
                        ch = '\n';
                    }
                    else
                        end = i;
                }
                if (ch === '\n') {
                    const cs = this.continueScalar(i + 1);
                    if (cs === -1)
                        break;
                    i = Math.max(i, cs - 2); // to advance, but still account for ' #'
                }
            }
            else {
                if (inFlow && invalidFlowScalarChars.includes(ch))
                    break;
                end = i;
            }
        }
        if (!ch && !this.atEnd)
            return this.setNext('plain-scalar');
        this.push(SCALAR);
        this.pushToIndex(end + 1, true);
        return inFlow ? 'flow' : 'doc';
    }
    /** Emit the next `n` characters as one token; returns the count emitted. */
    pushCount(n) {
        if (n > 0) {
            this.push(this.buffer.substr(this.pos, n));
            this.pos += n;
            return n;
        }
        return 0;
    }
    /** Emit everything up to buffer index `i`; optionally emit '' when empty. */
    pushToIndex(i, allowEmpty) {
        const s = this.buffer.slice(this.pos, i);
        if (s) {
            this.push(s);
            this.pos += s.length;
            return s.length;
        }
        else if (allowEmpty)
            this.push('');
        return 0;
    }
    /** Emit any leading node properties (tags, anchors) and structure indicators. */
    pushIndicators() {
        switch (this.charAt(0)) {
            case '!':
                if (this.charAt(1) === '<')
                    return (this.pushVerbatimTag() +
                        this.pushSpaces(true) +
                        this.pushIndicators());
            // fallthrough
            case '&':
                return (this.pushUntil(isNotIdentifierChar) +
                    this.pushSpaces(true) +
                    this.pushIndicators());
            case ':':
            case '?': // this is an error outside flow collections
            case '-': // this is an error
                if (isEmpty(this.charAt(1))) {
                    if (this.flowLevel === 0)
                        this.indentNext = this.indentValue + 1;
                    return (this.pushCount(1) + this.pushSpaces(true) + this.pushIndicators());
                }
        }
        return 0;
    }
    /** Emit a verbatim `!<...>` tag token. */
    pushVerbatimTag() {
        let i = this.pos + 2;
        let ch = this.buffer[i];
        while (!isEmpty(ch) && ch !== '>')
            ch = this.buffer[++i];
        return this.pushToIndex(ch === '>' ? i + 1 : i, false);
    }
    /** Emit a "\n" or "\r\n" token if one is next; returns its length. */
    pushNewline() {
        const ch = this.buffer[this.pos];
        if (ch === '\n')
            return this.pushCount(1);
        else if (ch === '\r' && this.charAt(1) === '\n')
            return this.pushCount(2);
        else
            return 0;
    }
    /** Emit a run of spaces (and optionally tabs); returns its length. */
    pushSpaces(allowTabs) {
        let i = this.pos - 1;
        let ch;
        do {
            ch = this.buffer[++i];
        } while (ch === ' ' || (allowTabs && ch === '\t'));
        const n = i - this.pos;
        if (n > 0) {
            this.push(this.buffer.substr(this.pos, n));
            this.pos = i;
        }
        return n;
    }
    /** Emit characters up to (but excluding) the first for which `test` is true. */
    pushUntil(test) {
        let i = this.pos;
        let ch = this.buffer[i];
        while (!test(ch))
            ch = this.buffer[++i];
        return this.pushToIndex(i, false);
    }
}
|
||
|
|
||
|
/**
 * Tracks newlines during parsing in order to provide an efficient API for
 * determining the one-indexed `{ line, col }` position for any offset
 * within the input.
 */
class LineCounter {
    constructor() {
        /** Offsets at which each tracked line begins. */
        this.lineStarts = [];
        /**
         * Should be called in ascending order. Otherwise, call
         * `lineCounter.lineStarts.sort()` before calling `linePos()`.
         *
         * Bound to the instance, so usable directly as a callback.
         */
        this.addNewLine = (offset) => this.lineStarts.push(offset);
        /**
         * Performs a binary search and returns the 1-indexed { line, col }
         * position of `offset`. If `line === 0`, `addNewLine` has never been
         * called or `offset` is before the first known newline.
         */
        this.linePos = (offset) => {
            // Binary search for the first line start that is >= offset.
            let lo = 0;
            let hi = this.lineStarts.length;
            while (lo < hi) {
                const mid = (lo + hi) >> 1; // Math.floor((lo + hi) / 2)
                if (this.lineStarts[mid] < offset)
                    lo = mid + 1;
                else
                    hi = mid;
            }
            // Exactly at a line start: first column of that line.
            if (this.lineStarts[lo] === offset)
                return { line: lo + 1, col: 1 };
            // Before any known line start.
            if (lo === 0)
                return { line: 0, col: offset };
            const lineStart = this.lineStarts[lo - 1];
            return { line: lo, col: offset - lineStart + 1 };
        };
    }
}
|
||
|
|
||
|
/** True if any token in `list` has the given `type`. */
function includesToken(list, type) {
    return list.some(token => token.type === type);
}
|
||
|
/** True if `list` holds any token other than space, comment or newline. */
function includesNonEmpty(list) {
    const emptyTypes = ['space', 'comment', 'newline'];
    return list.some(token => !emptyTypes.includes(token.type));
}
|
||
|
/**
 * True if `start` consists only of spaces, comments and newlines, every
 * newline comes after at least one comment, and the run ends with a
 * newline (ignoring trailing spaces) — i.e. the position is at the first
 * empty line following a comment block.
 */
function atFirstEmptyLineAfterComments(start) {
    let sawComment = false;
    for (const token of start) {
        if (token.type === 'space')
            continue;
        if (token.type === 'comment') {
            sawComment = true;
            continue;
        }
        if (token.type === 'newline') {
            // A newline before any comment disqualifies the run.
            if (!sawComment)
                return false;
            continue;
        }
        // Any other token type disqualifies the run.
        return false;
    }
    if (!sawComment)
        return false;
    // The last non-space token must be a newline.
    for (let i = start.length - 1; i >= 0; --i) {
        const type = start[i].type;
        if (type === 'space')
            continue;
        return type === 'newline';
    }
    return false;
}
|
||
|
/** True if `token` is a node usable inside a flow context (alias, scalar, or flow collection). */
function isFlowToken(token) {
    if (!token)
        return false;
    const flowTypes = [
        'alias',
        'scalar',
        'single-quoted-scalar',
        'double-quoted-scalar',
        'flow-collection'
    ];
    return flowTypes.includes(token.type);
}
|
||
|
/** The props (start/sep token list) preceding the position where a new node would attach to `parent`. */
function getPrevProps(parent) {
    if (parent.type === 'document')
        return parent.start;
    if (parent.type === 'block-map') {
        const lastItem = parent.items[parent.items.length - 1];
        return lastItem.sep || lastItem.start;
    }
    if (parent.type === 'block-seq')
        return parent.items[parent.items.length - 1].start;
    /* istanbul ignore next should not happen */
    return [];
}
|
||
|
/**
 * Splice off and return the props belonging to the first key: everything
 * after the last structural indicator or newline in `prev`, skipping any
 * spaces that immediately follow it.
 *
 * Note: May modify input array
 */
function getFirstKeyStartProps(prev) {
    if (prev.length === 0)
        return [];
    const stopTypes = ['explicit-key-ind', 'map-value-ind', 'seq-item-ind', 'newline'];
    // Locate the last indicator/newline token (or -1 if none).
    let from = prev.length;
    while (--from >= 0) {
        if (stopTypes.includes(prev[from].type))
            break;
    }
    // Skip past it and any spaces that directly follow.
    from += 1;
    while (from < prev.length && prev[from].type === 'space')
        from += 1;
    return prev.splice(from, prev.length);
}
|
||
|
/**
|
||
|
* A YAML concrete syntax tree (CST) parser
|
||
|
*
|
||
|
* While the `parse()` method provides an API for parsing a source string
|
||
|
* directly, the parser may also be used with a user-provided lexer:
|
||
|
*
|
||
|
* ```ts
|
||
|
* const cst: Token[] = []
|
||
|
* const parser = new Parser(tok => cst.push(tok))
|
||
|
* const src: string = ...
|
||
|
*
|
||
|
* // The following would be equivalent to `parser.parse(src, false)`
|
||
|
* const lexer = new Lexer(parser.next)
|
||
|
* lexer.lex(src, false)
|
||
|
* parser.end()
|
||
|
* ```
|
||
|
*/
|
||
|
class Parser {
|
||
|
/**
|
||
|
* @param push - Called separately with each parsed token
|
||
|
* @param onNewLine - If defined, called separately with the start position of
|
||
|
* each new line (in `parse()`, including the start of input).
|
||
|
* @public
|
||
|
*/
|
||
|
constructor(push, onNewLine) {
|
||
|
/** If true, space and sequence indicators count as indentation */
|
||
|
this.atNewLine = true;
|
||
|
/** If true, next token is a scalar value */
|
||
|
this.atScalar = false;
|
||
|
/** Current indentation level */
|
||
|
this.indent = 0;
|
||
|
/** Current offset since the start of parsing */
|
||
|
this.offset = 0;
|
||
|
/** On the same line with a block map key */
|
||
|
this.onKeyLine = false;
|
||
|
/** Top indicates the node that's currently being built */
|
||
|
this.stack = [];
|
||
|
/** The source of the current token, set in parse() */
|
||
|
this.source = '';
|
||
|
/** The type of the current token, set in parse() */
|
||
|
this.type = '';
|
||
|
/**
|
||
|
* Advance the parser by the `source` of one lexical token. Bound to the
|
||
|
* Parser instance, so may be used directly as a callback function.
|
||
|
*/
|
||
|
this.next = (source) => {
|
||
|
this.source = source;
|
||
|
if (this.atScalar) {
|
||
|
this.atScalar = false;
|
||
|
this.step();
|
||
|
this.offset += source.length;
|
||
|
return;
|
||
|
}
|
||
|
const type = tokenType(source);
|
||
|
if (!type) {
|
||
|
const message = `Not a YAML token: ${source}`;
|
||
|
this.pop({ type: 'error', offset: this.offset, message, source });
|
||
|
this.offset += source.length;
|
||
|
}
|
||
|
else if (type === 'scalar') {
|
||
|
this.atNewLine = false;
|
||
|
this.atScalar = true;
|
||
|
this.type = 'scalar';
|
||
|
}
|
||
|
else {
|
||
|
this.type = type;
|
||
|
this.step();
|
||
|
switch (type) {
|
||
|
case 'newline':
|
||
|
this.atNewLine = true;
|
||
|
this.indent = 0;
|
||
|
if (this.onNewLine)
|
||
|
this.onNewLine(this.offset + source.length);
|
||
|
break;
|
||
|
case 'space':
|
||
|
if (this.atNewLine && source[0] === ' ')
|
||
|
this.indent += source.length;
|
||
|
break;
|
||
|
case 'explicit-key-ind':
|
||
|
case 'map-value-ind':
|
||
|
case 'seq-item-ind':
|
||
|
if (this.atNewLine)
|
||
|
this.indent += source.length;
|
||
|
break;
|
||
|
case 'doc-mode':
|
||
|
return;
|
||
|
default:
|
||
|
this.atNewLine = false;
|
||
|
}
|
||
|
this.offset += source.length;
|
||
|
}
|
||
|
};
|
||
|
// Must be defined after `next()`
|
||
|
this.lexer = new Lexer(this.next);
|
||
|
this.push = push;
|
||
|
this.onNewLine = onNewLine;
|
||
|
}
|
||
|
/**
|
||
|
* Parse `source` as a YAML stream, calling `push` with each directive,
|
||
|
* document and other structure as it is completely parsed. If `incomplete`,
|
||
|
* a part of the last line may be left as a buffer for the next call.
|
||
|
*
|
||
|
* Errors are not thrown, but pushed out as `{ type: 'error', message }` tokens.
|
||
|
* @public
|
||
|
*/
|
||
|
parse(source, incomplete = false) {
|
||
|
if (this.onNewLine && this.offset === 0)
|
||
|
this.onNewLine(0);
|
||
|
this.lexer.lex(source, incomplete);
|
||
|
if (!incomplete)
|
||
|
this.end();
|
||
|
}
|
||
|
/** Call at end of input to push out any remaining constructions */
|
||
|
end() {
|
||
|
while (this.stack.length > 0)
|
||
|
this.pop();
|
||
|
}
|
||
|
get sourceToken() {
|
||
|
return {
|
||
|
type: this.type,
|
||
|
indent: this.indent,
|
||
|
source: this.source
|
||
|
};
|
||
|
}
|
||
|
step() {
|
||
|
const top = this.peek(1);
|
||
|
if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
|
||
|
while (this.stack.length > 0)
|
||
|
this.pop();
|
||
|
this.stack.push({
|
||
|
type: 'doc-end',
|
||
|
offset: this.offset,
|
||
|
source: this.source
|
||
|
});
|
||
|
return;
|
||
|
}
|
||
|
if (!top)
|
||
|
return this.stream();
|
||
|
switch (top.type) {
|
||
|
case 'document':
|
||
|
return this.document(top);
|
||
|
case 'alias':
|
||
|
case 'scalar':
|
||
|
case 'single-quoted-scalar':
|
||
|
case 'double-quoted-scalar':
|
||
|
return this.scalar(top);
|
||
|
case 'block-scalar':
|
||
|
return this.blockScalar(top);
|
||
|
case 'block-map':
|
||
|
return this.blockMap(top);
|
||
|
case 'block-seq':
|
||
|
return this.blockSequence(top);
|
||
|
case 'flow-collection':
|
||
|
return this.flowCollection(top);
|
||
|
case 'doc-end':
|
||
|
return this.documentEnd(top);
|
||
|
}
|
||
|
/* istanbul ignore next should not happen */
|
||
|
this.pop();
|
||
|
}
|
||
|
peek(n) {
|
||
|
return this.stack[this.stack.length - n];
|
||
|
}
|
||
|
/**
 * Finish the topmost construction (or the given `error` token) and attach
 * it to its parent on the stack; when the stack becomes empty, push the
 * finished token out to the consumer instead.
 */
pop(error) {
    const token = error || this.stack.pop();
    /* istanbul ignore if should not happen */
    if (!token) {
        const message = 'Tried to pop an empty stack';
        this.push({ type: 'error', source: '', message });
    }
    else if (this.stack.length === 0) {
        // Nothing left to attach to: emit the completed top-level token.
        this.push(token);
    }
    else {
        const top = this.peek(1);
        // For these, parent indent is needed instead of own
        if (token.type === 'block-scalar' || token.type === 'flow-collection')
            token.indent = 'indent' in top ? top.indent : -1;
        // Attach the finished token to the appropriate slot of its parent.
        switch (top.type) {
            case 'document':
                top.value = token;
                break;
            case 'block-scalar':
                top.props.push(token); // error
                break;
            case 'block-map': {
                const it = top.items[top.items.length - 1];
                if (it.value) {
                    // Last pair is complete: token starts a new key.
                    top.items.push({ start: [], key: token, sep: [] });
                    this.onKeyLine = true;
                    return;
                }
                else if (it.sep) {
                    // Separator seen: token is the pending value.
                    it.value = token;
                }
                else {
                    Object.assign(it, { key: token, sep: [] });
                    this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
                    return;
                }
                break;
            }
            case 'block-seq': {
                const it = top.items[top.items.length - 1];
                if (it.value)
                    top.items.push({ start: [], value: token });
                else
                    it.value = token;
                break;
            }
            case 'flow-collection':
                top.items.push(token);
                break;
            /* istanbul ignore next should not happen */
            default:
                this.pop();
                this.pop(token);
        }
        // If the child collection ends with an item that has only start props
        // (no key/value), those props belong to the parent: move them up.
        if ((top.type === 'document' ||
            top.type === 'block-map' ||
            top.type === 'block-seq') &&
            (token.type === 'block-map' || token.type === 'block-seq')) {
            const last = token.items[token.items.length - 1];
            if (last &&
                !last.sep &&
                !last.value &&
                last.start.length > 0 &&
                !includesNonEmpty(last.start) &&
                (token.indent === 0 ||
                    last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
                if (top.type === 'document')
                    top.end = last.start;
                else
                    top.items.push({ start: last.start });
                token.items.splice(-1, 1);
            }
        }
    }
}
|
||
|
/**
 * Handle a token at stream level (no open construction): directives and
 * trivia are pushed straight out; a document start opens a new document
 * construction; anything else is an error token.
 */
stream() {
    switch (this.type) {
        case 'directive-line':
            this.push({ type: 'directive', source: this.source });
            return;
        case 'byte-order-mark':
        case 'space':
        case 'comment':
        case 'newline':
            // Stream-level trivia passes through unchanged.
            this.push(this.sourceToken);
            return;
        case 'doc-mode':
        case 'doc-start': {
            const doc = {
                type: 'document',
                offset: this.offset,
                start: []
            };
            // Only an explicit `---` contributes a start token; bare
            // doc-mode just opens the document.
            if (this.type === 'doc-start')
                doc.start.push(this.sourceToken);
            this.stack.push(doc);
            return;
        }
    }
    this.push({
        type: 'error',
        offset: this.offset,
        message: `Unexpected ${this.type} token in YAML stream`,
        source: this.source
    });
}
|
||
|
/**
 * Handle a token while a document is the top construction: collect
 * prefix props, start the document's block value, or report an error.
 */
document(doc) {
    // Once the document has a value, remaining tokens are its line end.
    if (doc.value)
        return this.lineEnd(doc);
    switch (this.type) {
        case 'doc-start': {
            // A second `---` after non-trivial props closes this document
            // and starts the next one.
            if (includesNonEmpty(doc.start)) {
                this.pop();
                this.step();
            }
            else
                doc.start.push(this.sourceToken);
            return;
        }
        case 'anchor':
        case 'tag':
        case 'space':
        case 'comment':
        case 'newline':
            doc.start.push(this.sourceToken);
            return;
    }
    const bv = this.startBlockValue(doc);
    if (bv)
        this.stack.push(bv);
    else {
        this.push({
            type: 'error',
            offset: this.offset,
            message: `Unexpected ${this.type} token in YAML document`,
            source: this.source
        });
    }
}
|
||
|
/**
 * Handle a token while a flow scalar is the top construction. A following
 * `:` means the scalar was actually a block-map key: replace the scalar on
 * the stack with a new block-map using it as the first key. Anything else
 * is treated as the scalar's line end.
 */
scalar(scalar) {
    if (this.type === 'map-value-ind') {
        const prev = getPrevProps(this.peek(2));
        const start = getFirstKeyStartProps(prev);
        let sep;
        if (scalar.end) {
            // Trivia already collected after the scalar becomes part of
            // the key/value separator.
            sep = scalar.end;
            sep.push(this.sourceToken);
            delete scalar.end;
        }
        else
            sep = [this.sourceToken];
        const map = {
            type: 'block-map',
            offset: scalar.offset,
            indent: scalar.indent,
            items: [{ start, key: scalar, sep }]
        };
        this.onKeyLine = true;
        // Replace the scalar in place rather than push/pop.
        this.stack[this.stack.length - 1] = map;
    }
    else
        this.lineEnd(scalar);
}
|
||
|
/**
 * Handle a token while a block scalar (`|` / `>`) is the top construction:
 * trivia extends its props; the scalar body completes it.
 */
blockScalar(scalar) {
    switch (this.type) {
        case 'space':
        case 'comment':
        case 'newline':
            scalar.props.push(this.sourceToken);
            return;
        case 'scalar':
            scalar.source = this.source;
            // block-scalar source includes trailing newline
            this.atNewLine = true;
            this.indent = 0;
            if (this.onNewLine) {
                // Report the offset just past each newline in the body.
                let nl = this.source.indexOf('\n') + 1;
                while (nl !== 0) {
                    this.onNewLine(this.offset + nl);
                    nl = this.source.indexOf('\n', nl) + 1;
                }
            }
            this.pop();
            break;
        /* istanbul ignore next should not happen */
        default:
            this.pop();
            this.step();
    }
}
|
||
|
/**
 * Handle a token while a block map is the top construction: extend the
 * current key/value pair, start a new pair, open a nested construction,
 * or close the map when indentation drops below its own.
 */
blockMap(map) {
    var _a;
    const it = map.items[map.items.length - 1];
    // it.sep is true-ish if pair already has key or : separator
    switch (this.type) {
        case 'newline':
            this.onKeyLine = false;
            if (!it.sep && atFirstEmptyLineAfterComments(it.start)) {
                // Trailing comments followed by an empty line belong to the
                // previous item's value, not to the next key.
                const prev = map.items[map.items.length - 2];
                const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
                if (Array.isArray(end)) {
                    Array.prototype.push.apply(end, it.start);
                    it.start = [this.sourceToken];
                    return;
                }
            }
        // fallthrough
        case 'space':
        case 'comment':
            if (it.value)
                map.items.push({ start: [this.sourceToken] });
            else if (it.sep)
                it.sep.push(this.sourceToken);
            else
                it.start.push(this.sourceToken);
            return;
    }
    if (this.indent >= map.indent) {
        // A token at exactly the map's indent on a fresh line starts the
        // next item when the current pair is already under way.
        const atNextItem = !this.onKeyLine &&
            this.indent === map.indent &&
            (it.sep || includesNonEmpty(it.start));
        switch (this.type) {
            case 'anchor':
            case 'tag':
                if (atNextItem || it.value) {
                    map.items.push({ start: [this.sourceToken] });
                    this.onKeyLine = true;
                }
                else if (it.sep)
                    it.sep.push(this.sourceToken);
                else
                    it.start.push(this.sourceToken);
                return;
            case 'explicit-key-ind':
                if (!it.sep && !includesToken(it.start, 'explicit-key-ind'))
                    it.start.push(this.sourceToken);
                else if (atNextItem || it.value)
                    map.items.push({ start: [this.sourceToken] });
                else
                    // `?` deeper than the current pair opens a nested map.
                    this.stack.push({
                        type: 'block-map',
                        offset: this.offset,
                        indent: this.indent,
                        items: [{ start: [this.sourceToken] }]
                    });
                this.onKeyLine = true;
                return;
            case 'map-value-ind':
                if (!it.sep)
                    // `:` with no key yet -> empty (null) key.
                    Object.assign(it, { key: null, sep: [this.sourceToken] });
                else if (it.value ||
                    (atNextItem && !includesToken(it.start, 'explicit-key-ind')))
                    map.items.push({ start: [], key: null, sep: [this.sourceToken] });
                else if (includesToken(it.sep, 'map-value-ind'))
                    // Second `:` on the same pair opens a nested map.
                    this.stack.push({
                        type: 'block-map',
                        offset: this.offset,
                        indent: this.indent,
                        items: [{ start: [], key: null, sep: [this.sourceToken] }]
                    });
                else if (includesToken(it.start, 'explicit-key-ind') &&
                    isFlowToken(it.key) &&
                    !includesToken(it.sep, 'newline')) {
                    // `? key : value` on one line: move the pair into a
                    // nested map under the explicit-key item.
                    const start = getFirstKeyStartProps(it.start);
                    const key = it.key;
                    const sep = it.sep;
                    sep.push(this.sourceToken);
                    // @ts-ignore type guard is wrong here
                    delete it.key, delete it.sep;
                    this.stack.push({
                        type: 'block-map',
                        offset: this.offset,
                        indent: this.indent,
                        items: [{ start, key, sep }]
                    });
                }
                else
                    it.sep.push(this.sourceToken);
                this.onKeyLine = true;
                return;
            case 'alias':
            case 'scalar':
            case 'single-quoted-scalar':
            case 'double-quoted-scalar': {
                const fs = this.flowScalar(this.type);
                if (atNextItem || it.value) {
                    map.items.push({ start: [], key: fs, sep: [] });
                    this.onKeyLine = true;
                }
                else if (it.sep) {
                    // Separator already present: scalar is a value; push it
                    // so a following `:` can reinterpret it as a key.
                    this.stack.push(fs);
                }
                else {
                    Object.assign(it, { key: fs, sep: [] });
                    this.onKeyLine = true;
                }
                return;
            }
            default: {
                const bv = this.startBlockValue(map);
                if (bv) {
                    if (atNextItem &&
                        bv.type !== 'block-seq' &&
                        includesToken(it.start, 'explicit-key-ind'))
                        map.items.push({ start: [] });
                    this.stack.push(bv);
                    return;
                }
            }
        }
    }
    // Indent dropped below the map, or token not handled: close the map
    // and re-dispatch the current token.
    this.pop();
    this.step();
}
|
||
|
/**
 * Handle a token while a block sequence is the top construction: extend
 * the current item, start a new `-` item, open a nested value, or close
 * the sequence when indentation no longer matches.
 */
blockSequence(seq) {
    var _a;
    const it = seq.items[seq.items.length - 1];
    switch (this.type) {
        case 'newline':
            if (!it.value && atFirstEmptyLineAfterComments(it.start)) {
                // Trailing comments followed by an empty line belong to the
                // previous item's value, not to the next item.
                const prev = seq.items[seq.items.length - 2];
                const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
                if (Array.isArray(end)) {
                    Array.prototype.push.apply(end, it.start);
                    it.start = [this.sourceToken];
                    return;
                }
            }
        // fallthrough
        case 'space':
        case 'comment':
            if (it.value)
                seq.items.push({ start: [this.sourceToken] });
            else
                it.start.push(this.sourceToken);
            return;
        case 'anchor':
        case 'tag':
            // Props must be indented deeper than the sequence marker.
            if (it.value || this.indent <= seq.indent)
                break;
            it.start.push(this.sourceToken);
            return;
        case 'seq-item-ind':
            if (this.indent !== seq.indent)
                break;
            if (it.value || includesToken(it.start, 'seq-item-ind'))
                seq.items.push({ start: [this.sourceToken] });
            else
                it.start.push(this.sourceToken);
            return;
    }
    if (this.indent > seq.indent) {
        const bv = this.startBlockValue(seq);
        if (bv)
            return this.stack.push(bv);
    }
    // Not part of this sequence: close it and re-dispatch the token.
    this.pop();
    this.step();
}
|
||
|
/**
 * Handle a token while a flow collection (`{...}` / `[...]`) is the top
 * construction. Before `fc.end` is set, tokens extend the collection;
 * after it, a `:` may turn the whole collection into a block-map key.
 */
flowCollection(fc) {
    if (this.type === 'flow-error-end') {
        // Unwind every nested flow collection on the stack.
        let top;
        do {
            this.pop();
            top = this.peek(1);
        } while (top && top.type === 'flow-collection');
    }
    else if (fc.end.length === 0) {
        // Collection still open: collect its contents.
        switch (this.type) {
            case 'space':
            case 'comment':
            case 'newline':
            case 'comma':
            case 'explicit-key-ind':
            case 'map-value-ind':
            case 'anchor':
            case 'tag':
                fc.items.push(this.sourceToken);
                return;
            case 'alias':
            case 'scalar':
            case 'single-quoted-scalar':
            case 'double-quoted-scalar':
                fc.items.push(this.flowScalar(this.type));
                return;
            case 'flow-map-end':
            case 'flow-seq-end':
                fc.end.push(this.sourceToken);
                return;
        }
        const bv = this.startBlockValue(fc);
        /* istanbul ignore else should not happen */
        if (bv)
            return this.stack.push(bv);
        else {
            this.pop();
            this.step();
        }
    }
    else {
        // Collection already closed: decide what the trailing token means.
        const parent = this.peek(2);
        if (parent.type === 'block-map' &&
            (this.type === 'map-value-ind' ||
                (this.type === 'newline' &&
                    !parent.items[parent.items.length - 1].sep))) {
            // Let the parent block-map consume the token.
            this.pop();
            this.step();
        }
        else if (this.type === 'map-value-ind' &&
            parent.type !== 'flow-collection') {
            // `{...} :` makes the collection a block-map key; keep the
            // first end token with the collection, move the rest into sep.
            const prev = getPrevProps(parent);
            const start = getFirstKeyStartProps(prev);
            const sep = fc.end.splice(1, fc.end.length);
            sep.push(this.sourceToken);
            const map = {
                type: 'block-map',
                offset: fc.offset,
                indent: fc.indent,
                items: [{ start, key: fc, sep }]
            };
            this.onKeyLine = true;
            this.stack[this.stack.length - 1] = map;
        }
        else {
            this.lineEnd(fc);
        }
    }
}
|
||
|
flowScalar(type) {
|
||
|
if (this.onNewLine) {
|
||
|
let nl = this.source.indexOf('\n') + 1;
|
||
|
while (nl !== 0) {
|
||
|
this.onNewLine(this.offset + nl);
|
||
|
nl = this.source.indexOf('\n', nl) + 1;
|
||
|
}
|
||
|
}
|
||
|
return {
|
||
|
type,
|
||
|
offset: this.offset,
|
||
|
indent: this.indent,
|
||
|
source: this.source
|
||
|
};
|
||
|
}
|
||
|
/**
 * If the current token can begin a block-level value, build and return
 * the corresponding construction (flow scalar, block scalar, flow
 * collection, block seq, or block map); otherwise return null.
 */
startBlockValue(parent) {
    switch (this.type) {
        case 'alias':
        case 'scalar':
        case 'single-quoted-scalar':
        case 'double-quoted-scalar':
            return this.flowScalar(this.type);
        case 'block-scalar-header':
            return {
                type: 'block-scalar',
                offset: this.offset,
                indent: this.indent,
                props: [this.sourceToken]
            };
        case 'flow-map-start':
        case 'flow-seq-start':
            return {
                type: 'flow-collection',
                offset: this.offset,
                indent: this.indent,
                start: this.sourceToken,
                items: [],
                end: []
            };
        case 'seq-item-ind':
            return {
                type: 'block-seq',
                offset: this.offset,
                indent: this.indent,
                items: [{ start: [this.sourceToken] }]
            };
        case 'explicit-key-ind':
            this.onKeyLine = true;
            return {
                type: 'block-map',
                offset: this.offset,
                indent: this.indent,
                items: [{ start: [this.sourceToken] }]
            };
        case 'map-value-ind': {
            this.onKeyLine = true;
            // A bare `:` starts a map with a null key, inheriting any key
            // props already collected on the parent.
            const prev = getPrevProps(parent);
            const start = getFirstKeyStartProps(prev);
            return {
                type: 'block-map',
                offset: this.offset,
                indent: this.indent,
                items: [{ start, key: null, sep: [this.sourceToken] }]
            };
        }
    }
    return null;
}
|
||
|
documentEnd(docEnd) {
|
||
|
if (this.type !== 'doc-mode') {
|
||
|
if (docEnd.end)
|
||
|
docEnd.end.push(this.sourceToken);
|
||
|
else
|
||
|
docEnd.end = [this.sourceToken];
|
||
|
if (this.type === 'newline')
|
||
|
this.pop();
|
||
|
}
|
||
|
}
|
||
|
/**
 * Handle tokens after a construction's value is complete: structural
 * tokens close it and re-dispatch; everything else is appended to its
 * `end` trivia, with a newline also closing the construction.
 */
lineEnd(token) {
    switch (this.type) {
        case 'comma':
        case 'doc-start':
        case 'doc-end':
        case 'flow-seq-end':
        case 'flow-map-end':
        case 'map-value-ind':
            this.pop();
            this.step();
            break;
        case 'newline':
            this.onKeyLine = false;
        // fallthrough
        case 'space':
        case 'comment':
        default:
            // all other values are errors
            if (token.end)
                token.end.push(this.sourceToken);
            else
                token.end = [this.sourceToken];
            if (this.type === 'newline')
                this.pop();
    }
}
|
||
|
}
|
||
|
|
||
|
/**
 * Normalise parser options: `prettyErrors` defaults to true, and a
 * LineCounter is created automatically when pretty errors are wanted
 * but no counter was supplied.
 */
function parseOptions(options) {
    const prettyErrors = !options || options.prettyErrors !== false;
    let lineCounter = options ? options.lineCounter : null;
    if (!lineCounter)
        lineCounter = prettyErrors ? new LineCounter() : null;
    return { lineCounter, prettyErrors };
}
|
||
|
/** Parse an input string into a single YAML.Document */
function parseDocument(source, options) {
    const { lineCounter, prettyErrors } = parseOptions(options);
    // `doc` is always set by compose.end(true) at the very latest
    let doc = null;
    const composer = new Composer(_doc => {
        // Keep only the first document; report any further ones as errors
        // on it (unless logging is silenced).
        if (!doc)
            doc = _doc;
        else if (doc.options.logLevel !== 'silent') {
            const errMsg = 'Source contains multiple documents; please use YAML.parseAllDocuments()';
            doc.errors.push(new YAMLParseError(_doc.range[0], errMsg));
        }
    }, options);
    const parser = new Parser(composer.next, lineCounter === null || lineCounter === void 0 ? void 0 : lineCounter.addNewLine);
    parser.parse(source);
    composer.end(true, source.length);
    if (prettyErrors && lineCounter) {
        // Rewrite error/warning messages with line:col positions.
        doc.errors.forEach(prettifyError(source, lineCounter));
        doc.warnings.forEach(prettifyError(source, lineCounter));
    }
    return doc;
}
|
||
|
/**
 * Parse a YAML string into a plain JavaScript value. The optional
 * `reviver` behaves like JSON.parse's; passing an object as the second
 * argument treats it as `options` instead.
 */
function parse$1(src, reviver, options) {
    let _reviver = undefined;
    if (typeof reviver === 'function') {
        _reviver = reviver;
    }
    else if (reviver && typeof reviver === 'object' && options === undefined) {
        // Second argument was actually the options object.
        options = reviver;
    }
    const doc = parseDocument(src, options);
    if (!doc)
        return null;
    doc.warnings.forEach(warning => warn(doc.options.logLevel, warning));
    if (doc.errors.length > 0) {
        if (doc.options.logLevel === 'silent')
            doc.errors = [];
        else
            throw doc.errors[0];
    }
    return doc.toJS(Object.assign({ reviver: _reviver }, options));
}
|
||
|
/**
 * Stringify a JavaScript value as YAML. `replacer` behaves like
 * JSON.stringify's; a string or number `options` is shorthand for the
 * indent width (clamped to 1..8).
 */
function stringify(value, replacer, options) {
    let _replacer = null;
    const replacerUsable = typeof replacer === 'function' || Array.isArray(replacer);
    if (replacerUsable) {
        _replacer = replacer;
    }
    else if (replacer && options === undefined) {
        options = replacer;
    }
    // String shorthand: its length is the indent width.
    if (typeof options === 'string')
        options = options.length;
    if (typeof options === 'number') {
        const indent = Math.round(options);
        if (indent < 1)
            options = undefined;
        else
            options = { indent: indent > 8 ? 8 : indent };
    }
    if (value === undefined) {
        const { keepUndefined } = options || replacer || {};
        if (!keepUndefined)
            return undefined;
    }
    return new Document(value, _replacer, options).toString(options);
}
|
||
|
|
||
|
// Copyright Joyent, Inc. and other Node contributors.
|
||
|
//
|
||
|
// Permission is hereby granted, free of charge, to any person obtaining a
|
||
|
// copy of this software and associated documentation files (the
|
||
|
// "Software"), to deal in the Software without restriction, including
|
||
|
// without limitation the rights to use, copy, modify, merge, publish,
|
||
|
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||
|
// persons to whom the Software is furnished to do so, subject to the
|
||
|
// following conditions:
|
||
|
//
|
||
|
// The above copyright notice and this permission notice shall be included
|
||
|
// in all copies or substantial portions of the Software.
|
||
|
//
|
||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||
|
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||
|
|
||
|
|
||
|
// True when running on Windows; selects platform-specific path regexes below.
var isWindows$1 = process.platform === 'win32';

// JavaScript implementation of realpath, ported from node pre-v6

// Debug tracing is enabled via NODE_DEBUG=fs (any value matching /fs/).
var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
|
||
|
|
||
|
// Build a fallback callback for fs calls made without one: in debug mode it
// rethrows with a backtrace captured at call time; otherwise it warns (or
// throws/ignores per the process deprecation flags).
function rethrow() {
  // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
  // is fairly slow to generate.
  var callback;
  if (DEBUG) {
    // Captured eagerly so the stack points at the original call site;
    // debugCallback closes over it.
    var backtrace = new Error;
    callback = debugCallback;
  } else
    callback = missingCallback;

  return callback;

  function debugCallback(err) {
    if (err) {
      // Re-use the eagerly captured stack, keeping the real error message.
      backtrace.message = err.message;
      err = backtrace;
      missingCallback(err);
    }
  }

  function missingCallback(err) {
    if (err) {
      if (process.throwDeprecation)
        throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
      else if (!process.noDeprecation) {
        var msg = 'fs: missing callback ' + (err.stack || err.message);
        if (process.traceDeprecation)
          console.trace(msg);
        else
          console.error(msg);
      }
    }
  }
}
|
||
|
|
||
|
// Return `cb` when it is callable; otherwise substitute the rethrow()
// fallback so errors are still surfaced.
function maybeCallback(cb) {
  if (typeof cb === 'function')
    return cb;
  return rethrow();
}
|
||
|
|
||
|
// NOTE(review): bare property access with no effect — likely a leftover from
// the original module's `var normalize = path.normalize` binding.
path__default['default'].normalize;

// Regexp that finds the next portion of a (partial) path
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
if (isWindows$1) {
  // Windows accepts both / and \ as separators.
  var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
} else {
  var nextPartRe = /(.*?)(?:[\/]+|$)/g;
}

// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
if (isWindows$1) {
  // Matches drive letters (c:\) and UNC roots (\\host\share\).
  var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
} else {
  var splitRootRe = /^[\/]*/;
}
|
||
|
|
||
|
// Synchronous realpath: resolve `p` to an absolute path with every symlink
// expanded, walking the path one segment at a time. `cache` (optional) maps
// already-resolved paths; entries where cache[base] === base mark known
// non-symlinks.
var realpathSync$1 = function realpathSync(p, cache) {
  // make p is absolute
  p = path__default['default'].resolve(p);

  if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
    return cache[p];
  }

  var original = p,
      seenLinks = {},   // dev:ino -> link target, to break symlink cycles
      knownHard = {};   // paths known not to be symlinks

  // current character position in p
  var pos;
  // the partial path so far, including a trailing slash if any
  var current;
  // the partial path without a trailing slash (except when pointing at a root)
  var base;
  // the partial path scanned in the previous round, with slash
  var previous;

  start();

  function start() {
    // Skip over roots
    var m = splitRootRe.exec(p);
    pos = m[0].length;
    current = m[0];
    base = m[0];
    previous = '';

    // On windows, check that the root exists. On unix there is no need.
    if (isWindows$1 && !knownHard[base]) {
      fs__default['default'].lstatSync(base);
      knownHard[base] = true;
    }
  }

  // walk down the path, swapping out linked pathparts for their real
  // values
  // NB: p.length changes.
  while (pos < p.length) {
    // find the next part
    nextPartRe.lastIndex = pos;
    var result = nextPartRe.exec(p);
    previous = current;
    current += result[0];
    base = previous + result[1];
    pos = nextPartRe.lastIndex;

    // continue if not a symlink
    if (knownHard[base] || (cache && cache[base] === base)) {
      continue;
    }

    var resolvedLink;
    if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
      // some known symbolic link. no need to stat again.
      resolvedLink = cache[base];
    } else {
      var stat = fs__default['default'].lstatSync(base);
      if (!stat.isSymbolicLink()) {
        knownHard[base] = true;
        if (cache) cache[base] = base;
        continue;
      }

      // read the link if it wasn't read before
      // dev/ino always return 0 on windows, so skip the check.
      var linkTarget = null;
      if (!isWindows$1) {
        var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
        if (seenLinks.hasOwnProperty(id)) {
          linkTarget = seenLinks[id];
        }
      }
      if (linkTarget === null) {
        // statSync first so a dangling symlink throws ENOENT here.
        fs__default['default'].statSync(base);
        linkTarget = fs__default['default'].readlinkSync(base);
      }
      resolvedLink = path__default['default'].resolve(previous, linkTarget);
      // track this, if given a cache.
      if (cache) cache[base] = resolvedLink;
      if (!isWindows$1) seenLinks[id] = linkTarget;
    }

    // resolve the link, then start over
    p = path__default['default'].resolve(resolvedLink, p.slice(pos));
    start();
  }

  if (cache) cache[original] = p;

  return p;
};
|
||
|
|
||
|
|
||
|
// Asynchronous counterpart of realpathSync$1: resolve `p` with all symlinks
// expanded, calling cb(err, resolvedPath). `cache` is optional and may be
// passed in the second position.
var realpath$1 = function realpath(p, cache, cb) {
  if (typeof cb !== 'function') {
    // cache was omitted; second argument is (or substitutes for) the callback.
    cb = maybeCallback(cache);
    cache = null;
  }

  // make p is absolute
  p = path__default['default'].resolve(p);

  if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
    return process.nextTick(cb.bind(null, null, cache[p]));
  }

  var original = p,
      seenLinks = {},   // dev:ino -> link target, to break symlink cycles
      knownHard = {};   // paths known not to be symlinks

  // current character position in p
  var pos;
  // the partial path so far, including a trailing slash if any
  var current;
  // the partial path without a trailing slash (except when pointing at a root)
  var base;
  // the partial path scanned in the previous round, with slash
  var previous;

  start();

  function start() {
    // Skip over roots
    var m = splitRootRe.exec(p);
    pos = m[0].length;
    current = m[0];
    base = m[0];
    previous = '';

    // On windows, check that the root exists. On unix there is no need.
    if (isWindows$1 && !knownHard[base]) {
      fs__default['default'].lstat(base, function(err) {
        if (err) return cb(err);
        knownHard[base] = true;
        LOOP();
      });
    } else {
      process.nextTick(LOOP);
    }
  }

  // walk down the path, swapping out linked pathparts for their real
  // values
  function LOOP() {
    // stop if scanned past end of path
    if (pos >= p.length) {
      if (cache) cache[original] = p;
      return cb(null, p);
    }

    // find the next part
    nextPartRe.lastIndex = pos;
    var result = nextPartRe.exec(p);
    previous = current;
    current += result[0];
    base = previous + result[1];
    pos = nextPartRe.lastIndex;

    // continue if not a symlink
    if (knownHard[base] || (cache && cache[base] === base)) {
      return process.nextTick(LOOP);
    }

    if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
      // known symbolic link. no need to stat again.
      return gotResolvedLink(cache[base]);
    }

    return fs__default['default'].lstat(base, gotStat);
  }

  function gotStat(err, stat) {
    if (err) return cb(err);

    // if not a symlink, skip to the next path part
    if (!stat.isSymbolicLink()) {
      knownHard[base] = true;
      if (cache) cache[base] = base;
      return process.nextTick(LOOP);
    }

    // stat & read the link if not read before
    // call gotTarget as soon as the link target is known
    // dev/ino always return 0 on windows, so skip the check.
    if (!isWindows$1) {
      var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
      if (seenLinks.hasOwnProperty(id)) {
        return gotTarget(null, seenLinks[id], base);
      }
    }
    fs__default['default'].stat(base, function(err) {
      if (err) return cb(err);

      fs__default['default'].readlink(base, function(err, target) {
        if (!isWindows$1) seenLinks[id] = target;
        // NOTE(review): `base` is not passed here, so gotTarget's `base`
        // parameter is undefined and the cache write below keys on
        // `undefined`. Quirk inherited from Node's pre-v6 implementation —
        // verify against upstream before changing.
        gotTarget(err, target);
      });
    });
  }

  function gotTarget(err, target, base) {
    if (err) return cb(err);

    var resolvedLink = path__default['default'].resolve(previous, target);
    if (cache) cache[base] = resolvedLink;
    gotResolvedLink(resolvedLink);
  }

  function gotResolvedLink(resolvedLink) {
    // resolve the link, then start over
    p = path__default['default'].resolve(resolvedLink, p.slice(pos));
    start();
  }
};
|
||
|
|
||
|
// Legacy JS implementations, used as a fallback by the wrappers below when
// the native fs.realpath fails with ELOOP/ENOMEM/ENAMETOOLONG.
var old = {
  realpathSync: realpathSync$1,
  realpath: realpath$1
};
|
||
|
|
||
|
// Module surface of fs.realpath: the wrapped realpath plus its variants and
// the (un)monkeypatch helpers.
var fs_realpath = realpath;
realpath.realpath = realpath;
realpath.sync = realpathSync;
realpath.realpathSync = realpathSync;
realpath.monkeypatch = monkeypatch;
realpath.unmonkeypatch = unmonkeypatch;

// Originals saved so unmonkeypatch() can restore them.
var origRealpath = fs__default['default'].realpath;
var origRealpathSync = fs__default['default'].realpathSync;

var version = process.version;
// Node v6+ fixed the long-path errors, so the native functions are used as-is.
var ok = /^v[0-5]\./.test(version);
|
||
|
|
||
|
|
||
|
// Detect realpath failures introduced by newer Node versions
// (ELOOP/ENOMEM/ENAMETOOLONG) that warrant falling back to the old
// JS implementation.
function newError (er) {
  if (!er) return er;
  if (er.syscall !== 'realpath') return false;
  var code = er.code;
  return code === 'ELOOP' || code === 'ENOMEM' || code === 'ENAMETOOLONG';
}
|
||
|
|
||
|
// Async realpath wrapper: use the native implementation, falling back to
// the old JS implementation for the error codes newer Node introduced.
function realpath (p, cache, cb) {
  if (ok) {
    return origRealpath(p, cache, cb)
  }

  if (typeof cache === 'function') {
    // cache omitted; shift arguments.
    cb = cache;
    cache = null;
  }
  origRealpath(p, cache, function (er, result) {
    if (!newError(er)) {
      cb(er, result);
      return;
    }
    old.realpath(p, cache, cb);
  });
}
|
||
|
|
||
|
// Sync realpath wrapper: native first, old JS implementation only for the
// error codes newer Node introduced; other errors propagate unchanged.
function realpathSync (p, cache) {
  if (ok) {
    return origRealpathSync(p, cache)
  }

  try {
    return origRealpathSync(p, cache)
  } catch (er) {
    if (!newError(er)) {
      throw er
    }
    return old.realpathSync(p, cache)
  }
}
|
||
|
|
||
|
// Replace fs.realpath / fs.realpathSync globally with the wrapped versions.
function monkeypatch () {
  var target = fs__default['default'];
  target.realpath = realpath;
  target.realpathSync = realpathSync;
}
|
||
|
|
||
|
// Restore the original fs.realpath / fs.realpathSync saved at load time.
function unmonkeypatch () {
  var target = fs__default['default'];
  target.realpath = origRealpath;
  target.realpathSync = origRealpathSync;
}
|
||
|
|
||
|
// Map fn over xs (receiving value and index) and concatenate the results:
// array results are flattened one level, everything else is appended as-is.
var concatMap = function (xs, fn) {
  var out = [];
  for (var i = 0, len = xs.length; i < len; i++) {
    var mapped = fn(xs[i], i);
    if (isArray(mapped)) {
      out.push.apply(out, mapped);
    } else {
      out.push(mapped);
    }
  }
  return out;
};
|
||
|
|
||
|
// Array.isArray with a toString-tag fallback for very old engines.
var isArray = Array.isArray || function (xs) {
  var tag = Object.prototype.toString.call(xs);
  return tag === '[object Array]';
};
|
||
|
|
||
|
var balancedMatch = balanced;

// Find the first balanced `a`...`b` pair in `str`. Regexes are reduced to
// their first match first. Returns { start, end, pre, body, post } or a
// falsy value when no balanced pair exists.
function balanced(a, b, str) {
  var open = a instanceof RegExp ? maybeMatch(a, str) : a;
  var close = b instanceof RegExp ? maybeMatch(b, str) : b;

  var r = range(open, close, str);
  if (!r)
    return r;

  return {
    start: r[0],
    end: r[1],
    pre: str.slice(0, r[0]),
    body: str.slice(r[0] + open.length, r[1]),
    post: str.slice(r[1] + close.length)
  };
}
|
||
|
|
||
|
// First match of `reg` in `str`, or null when there is none.
function maybeMatch(reg, str) {
  var found = str.match(reg);
  if (!found)
    return null;
  return found[0];
}
|
||
|
|
||
|
balanced.range = range;
// Return [openIndex, closeIndex] of the first balanced `a`...`b` pair in
// `str`, or undefined when none exists. Scans open/close markers with two
// cursors, tracking unmatched opens in `begs`.
function range(a, b, str) {
  var begs, beg, left, right, result;
  var ai = str.indexOf(a);
  var bi = str.indexOf(b, ai + 1);
  var i = ai;

  if (ai >= 0 && bi > 0) {
    if(a===b) {
      // Identical markers cannot nest; first pair wins.
      return [ai, bi];
    }
    begs = [];
    left = str.length;

    while (i >= 0 && !result) {
      if (i == ai) {
        // At an opener: remember it and advance to the next opener.
        begs.push(i);
        ai = str.indexOf(a, i + 1);
      } else if (begs.length == 1) {
        // Closer matching the only outstanding opener: balanced pair found.
        result = [ begs.pop(), bi ];
      } else {
        // Closer for a nested opener: keep the leftmost candidate pair.
        beg = begs.pop();
        if (beg < left) {
          left = beg;
          right = bi;
        }

        bi = str.indexOf(b, i + 1);
      }

      // Next marker position: whichever of opener/closer comes first.
      i = ai < bi && ai >= 0 ? ai : bi;
    }

    if (begs.length) {
      // Unclosed openers remain: fall back to the best nested pair seen.
      result = [ left, right ];
    }
  }

  return result;
}
|
||
|
|
||
|
// Public entry point of the brace-expansion module.
var braceExpansion = expandTop;

// Unique placeholder strings for escaped characters, so escaped \\, \{, \},
// \, and \. survive the split/join passes of expansion. Math.random() makes
// collisions with user input effectively impossible.
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
|
||
|
|
||
|
// Interpret `str` for range expansion: its decimal value when it is a
// numeric string (loose `==` keeps e.g. "08" numeric), otherwise the char
// code of its first character.
function numeric(str) {
  var asInt = parseInt(str, 10);
  if (asInt == str)
    return asInt;
  return str.charCodeAt(0);
}
|
||
|
|
||
|
// Replace backslash-escaped metacharacters with unique placeholders so the
// expansion passes never see them ('\\\\' must be handled first).
function escapeBraces(str) {
  var out = str;
  var table = [
    ['\\\\', escSlash],
    ['\\{', escOpen],
    ['\\}', escClose],
    ['\\,', escComma],
    ['\\.', escPeriod]
  ];
  for (var i = 0; i < table.length; i++) {
    out = out.split(table[i][0]).join(table[i][1]);
  }
  return out;
}
|
||
|
|
||
|
/**
 * Reverse escapeBraces(): turn each sentinel token back into the
 * literal character it stands for (without the escaping backslash).
 */
function unescapeBraces(str) {
  var substitutions = [
    [escSlash, '\\'],
    [escOpen, '{'],
    [escClose, '}'],
    [escComma, ','],
    [escPeriod, '.']
  ];
  return substitutions.reduce(function (acc, pair) {
    return acc.split(pair[0]).join(pair[1]);
  }, str);
}
|
||
|
|
||
|
|
||
|
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
  if (!str) return [''];

  var m = balancedMatch('{', '}', str);

  // No balanced braces at all: a plain comma split is enough.
  if (!m) return str.split(',');

  // Split everything before the braced section on commas, then glue the
  // whole braced section back onto the last piece so it stays intact.
  var leading = m.pre.split(',');
  leading[leading.length - 1] += '{' + m.body + '}';

  // Recurse on the remainder; its first piece continues the member that
  // the braced section is part of.
  var trailing = parseCommaParts(m.post);
  if (m.post.length) {
    leading[leading.length - 1] += trailing.shift();
    leading.push.apply(leading, trailing);
  }

  return leading;
}
|
||
|
|
||
|
/**
 * Expand a brace pattern into the full list of strings it denotes.
 * Escaped specials are hidden behind sentinel tokens for the duration
 * of the expansion and restored afterwards.
 */
function expandTop(str) {
  if (!str) return [];

  // I don't know why Bash 4.3 does this, but it does.
  // Anything starting with {} will have the first two bytes preserved
  // but *only* at the top level, so {},a}b will not expand to anything,
  // but a{},b}c will be expanded to [a}c,abc].
  // One could argue that this is a bug in Bash, but since the goal of
  // this module is to match Bash's rules, we escape a leading {}
  if (str.slice(0, 2) === '{}') {
    str = '\\{\\}' + str.slice(2);
  }

  return expand(escapeBraces(str), true).map(unescapeBraces);
}
|
||
|
|
||
|
// Wrap a string back in literal braces, e.g. "a,b" -> "{a,b}".
function embrace(str) {
  return ['{', str, '}'].join('');
}
|
||
|
// True when a sequence endpoint is written with a leading zero
// (e.g. "01" or "-007"), which requests zero-padded output.
function isPadded(el) {
  var leadingZero = /^-?0\d/;
  return leadingZero.test(el);
}
|
||
|
|
||
|
// Inclusive bound check used when stepping a sequence upward.
function lte(value, limit) {
  return value <= limit;
}
|
||
|
// Inclusive bound check used when stepping a sequence downward.
function gte(value, limit) {
  return value >= limit;
}
|
||
|
|
||
|
/**
 * Core recursive brace expansion.
 *
 * @param {string} str - pattern whose escapes were hidden by escapeBraces()
 * @param {boolean} [isTop] - true only for the outermost call; at the top
 *   level, empty non-sequence expansions are dropped from the result.
 * @returns {string[]} every expansion of the first balanced brace set,
 *   combined with the (recursively expanded) remainder of the string.
 */
function expand(str, isTop) {
  var expansions = [];

  var m = balancedMatch('{', '}', str);
  // No brace set, or the set is a "${...}" shell expression: literal.
  if (!m || /\$$/.test(m.pre)) return [str];

  var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
  var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
  var isSequence = isNumericSequence || isAlphaSequence;
  var isOptions = m.body.indexOf(',') >= 0;
  if (!isSequence && !isOptions) {
    // {a},b}
    // Not expandable by itself; hide this close brace and retry so a
    // later comma set (if any) can still expand.
    if (m.post.match(/,.*\}/)) {
      str = m.pre + '{' + m.body + escClose + m.post;
      // NOTE: recurses without isTop, so the merged pattern is treated
      // as a sub-expansion.
      return expand(str);
    }
    return [str];
  }

  var n;
  if (isSequence) {
    n = m.body.split(/\.\./);
  } else {
    n = parseCommaParts(m.body);
    if (n.length === 1) {
      // x{{a,b}}y ==> x{a}y x{b}y
      n = expand(n[0], false).map(embrace);
      if (n.length === 1) {
        var post = m.post.length
          ? expand(m.post, false)
          : [''];
        return post.map(function(p) {
          return m.pre + n[0] + p;
        });
      }
    }
  }

  // at this point, n is the parts, and we know it's not a comma set
  // with a single entry.

  // no need to expand pre, since it is guaranteed to be free of brace-sets
  var pre = m.pre;
  var post = m.post.length
    ? expand(m.post, false)
    : [''];

  var N;

  if (isSequence) {
    // Numeric or alpha range: {x..y} or {x..y..incr}.
    var x = numeric(n[0]);
    var y = numeric(n[1]);
    var width = Math.max(n[0].length, n[1].length);
    var incr = n.length == 3
      ? Math.abs(numeric(n[2]))
      : 1;
    var test = lte;
    var reverse = y < x;
    if (reverse) {
      incr *= -1;
      test = gte;
    }
    var pad = n.some(isPadded);

    N = [];

    for (var i = x; test(i, y); i += incr) {
      var c;
      if (isAlphaSequence) {
        c = String.fromCharCode(i);
        // Skip the backslash character inside alpha ranges.
        if (c === '\\')
          c = '';
      } else {
        c = String(i);
        if (pad) {
          // Zero-pad to the widest endpoint, keeping a leading minus.
          var need = width - c.length;
          if (need > 0) {
            var z = new Array(need + 1).join('0');
            if (i < 0)
              c = '-' + z + c.slice(1);
            else
              c = z + c;
          }
        }
      }
      N.push(c);
    }
  } else {
    // Comma set: expand each member recursively and flatten.
    N = concatMap(n, function(el) { return expand(el, false) });
  }

  // Cross product: pre + each member + each expansion of the remainder.
  for (var j = 0; j < N.length; j++) {
    for (var k = 0; k < post.length; k++) {
      var expansion = pre + N[j] + post[k];
      if (!isTop || isSequence || expansion)
        expansions.push(expansion);
    }
  }

  return expansions;
}
|
||
|
|
||
|
// Module export of the vendored minimatch matcher.
var minimatch_1 = minimatch;
minimatch.Minimatch = Minimatch$1;

// Prefer the real Node path module; fall back to a minimal shim with
// just the separator when it cannot be loaded.
var path = { sep: '/' };
try {
  path = path__default['default'];
} catch (er) {}

// Unique sentinel object marking a "**" segment in a compiled pattern set.
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch$1.GLOBSTAR = {};
|
||
|
|
||
|
|
||
|
// Regexp fragments used to translate each extglob qualifier
// ("!(a|b)", "?(x)", "+(y)", "*(z)", "@(w)") into JavaScript regexp text.
var plTypes = {
  '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
  '?': { open: '(?:', close: ')?' },
  '+': { open: '(?:', close: ')+' },
  '*': { open: '(?:', close: ')*' },
  '@': { open: '(?:', close: ')' }
};

// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]';

// * => any number of characters
var star = qmark + '*?';

// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?';

// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?';

// characters that need to be escaped in RegExp.
var reSpecials = charSet('().*{}+?[]^$\\!');
|
||
|
|
||
|
// "abc" -> { a:true, b:true, c:true }
|
||
|
function charSet (s) {
|
||
|
return s.split('').reduce(function (set, c) {
|
||
|
set[c] = true;
|
||
|
return set
|
||
|
}, {})
|
||
|
}
|
||
|
|
||
|
// normalizes slashes: splitting on this collapses any run of "/"
// into a single path-segment boundary.
var slashSplit = /\/+/;
|
||
|
|
||
|
minimatch.filter = filter;
/**
 * Build an Array#filter-compatible predicate that keeps entries
 * matching `pattern` under the given options.
 */
function filter (pattern, options) {
  options = options || {};
  return function (entry, index, arr) {
    return minimatch(entry, pattern, options)
  }
}
|
||
|
|
||
|
/**
 * Shallow-merge two option objects, with keys from `a` taking
 * precedence over keys from `b`. Neither input is mutated and either
 * may be null/undefined.
 *
 * @param {Object} [a] - high-priority options
 * @param {Object} [b] - low-priority (default) options
 * @returns {Object} a fresh merged object
 */
function ext (a, b) {
  // Object.assign copies sources left-to-right, so `a` (last) wins —
  // same precedence as the original hand-rolled two-loop merge.
  return Object.assign({}, b || {}, a || {})
}
|
||
|
|
||
|
/**
 * Return a minimatch-like function with `def` merged into every call's
 * options (per-call options still win). With no defaults, the original
 * matcher is returned unchanged.
 */
minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return minimatch

  var orig = minimatch;

  var m = function minimatch (p, pattern, options) {
    // BUG FIX: `orig` is the matcher function itself; nothing ever
    // assigns a `.minimatch` property to it, so the previous
    // `orig.minimatch(...)` threw a TypeError whenever the wrapper
    // was invoked. Call the function directly instead.
    return orig(p, pattern, ext(def, options))
  };

  m.Minimatch = function Minimatch (pattern, options) {
    return new orig.Minimatch(pattern, ext(def, options))
  };

  return m
};
|
||
|
|
||
|
// Class-level variant of minimatch.defaults(): returns a Minimatch
// constructor whose instances bake in the given default options.
Minimatch$1.defaults = function (def) {
  if (!def || !Object.keys(def).length) return Minimatch$1
  return minimatch.defaults(def).Minimatch
};
|
||
|
|
||
|
/**
 * Test whether path `p` matches glob `pattern`.
 *
 * @param {string} p - path to test
 * @param {string} pattern - glob pattern
 * @param {Object} [options] - matching flags (nocase, dot, nonegate, ...)
 * @returns {boolean} true when the path matches
 * @throws {TypeError} when `pattern` is not a string
 */
function minimatch (p, pattern, options) {
  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  options = options || {};

  // shortcut: comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    return false
  }

  // "" only matches ""
  if (pattern.trim() === '') return p === ''

  return new Minimatch$1(pattern, options).match(p)
}
|
||
|
|
||
|
/**
 * Compiled glob pattern. Parses `pattern` once at construction time so
 * repeated match() calls are cheap.
 *
 * @constructor
 * @param {string} pattern - glob pattern to compile
 * @param {Object} [options] - matching flags
 * @throws {TypeError} when `pattern` is not a string
 */
function Minimatch$1 (pattern, options) {
  // Allow calling without `new`.
  if (!(this instanceof Minimatch$1)) {
    return new Minimatch$1(pattern, options)
  }

  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  if (!options) options = {};
  pattern = pattern.trim();

  // windows support: need to use /, not \
  if (path.sep !== '/') {
    pattern = pattern.split(path.sep).join('/');
  }

  this.options = options;
  this.set = [];          // compiled alternatives (filled in by make())
  this.pattern = pattern;
  this.regexp = null;     // lazy cache for makeRe()
  this.negate = false;    // leading "!" (set by parseNegate)
  this.comment = false;   // pattern starts with "#"
  this.empty = false;     // pattern was ""

  // make the set of regexps etc.
  this.make();
}
|
||
|
|
||
|
// Per-instance debug hook; replaced with console.error when
// options.debug is set (see make()).
Minimatch$1.prototype.debug = function () {};

Minimatch$1.prototype.make = make;
/**
 * Compile this.pattern into this.set: an array of alternatives (one per
 * brace expansion), each an array of per-path-segment matchers — a
 * regexp, a literal string, or the GLOBSTAR sentinel for "**".
 */
function make () {
  // don't do it more than once.
  if (this._made) return

  var pattern = this.pattern;
  var options = this.options;

  // empty patterns and comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    this.comment = true;
    return
  }
  if (!pattern) {
    this.empty = true;
    return
  }

  // step 1: figure out negation, etc.
  this.parseNegate();

  // step 2: expand braces
  var set = this.globSet = this.braceExpand();

  if (options.debug) this.debug = console.error;

  this.debug(this.pattern, set);

  // step 3: now we have a set, so turn each one into a series of path-portion
  // matching patterns.
  // These will be regexps, except in the case of "**", which is
  // set to the GLOBSTAR object for globstar behavior,
  // and will not contain any / characters
  set = this.globParts = set.map(function (s) {
    return s.split(slashSplit)
  });

  this.debug(this.pattern, set);

  // glob --> regexps
  set = set.map(function (s, si, set) {
    return s.map(this.parse, this)
  }, this);

  this.debug(this.pattern, set);

  // filter out everything that didn't compile properly.
  set = set.filter(function (s) {
    return s.indexOf(false) === -1
  });

  this.debug(this.pattern, set);

  this.set = set;
}
|
||
|
|
||
|
Minimatch$1.prototype.parseNegate = parseNegate;
/**
 * Count the leading "!" characters on this.pattern, strip them, and
 * record whether the overall pattern is negated (odd count).
 * Respects options.nonegate, which disables negation entirely.
 */
function parseNegate () {
  if (this.options.nonegate) return

  var pattern = this.pattern;
  var bangCount = 0;

  while (bangCount < pattern.length && pattern.charAt(bangCount) === '!') {
    bangCount++;
  }

  if (bangCount) this.pattern = pattern.substr(bangCount);
  this.negate = bangCount % 2 === 1;
}
|
||
|
|
||
|
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch.braceExpand = function (pattern, options) {
  return braceExpand(pattern, options)
};

Minimatch$1.prototype.braceExpand = braceExpand;

/**
 * Expand a brace pattern into its list of alternatives. Usable both as
 * a free function and as a Minimatch method, in which case the
 * instance's pattern/options serve as fallbacks.
 */
function braceExpand (pattern, options) {
  if (!options) {
    options = this instanceof Minimatch$1 ? this.options : {};
  }

  pattern = typeof pattern === 'undefined' ? this.pattern : pattern;

  if (typeof pattern === 'undefined') {
    throw new TypeError('undefined pattern')
  }

  // shortcut: no expansion when disabled, or when nothing in the
  // pattern even looks like a brace set.
  if (options.nobrace || !pattern.match(/\{.*\}/)) {
    return [pattern]
  }

  return braceExpansion(pattern)
}
|
||
|
|
||
|
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch$1.prototype.parse = parse;
// Sentinel second argument marking a recursive sub-parse (used when
// re-parsing the contents of an invalid [...] class).
var SUBPARSE = {};
/**
 * Compile one slash-free pattern component. Returns a RegExp for magic
 * patterns, a plain (unescaped) string for literal ones, the GLOBSTAR
 * sentinel for "**", false for an invalid component, or — when isSub is
 * SUBPARSE — a [regexSource, hasMagic] pair.
 */
function parse (pattern, isSub) {
  if (pattern.length > 1024 * 64) {
    throw new TypeError('pattern is too long')
  }

  var options = this.options;

  // shortcuts
  if (!options.noglobstar && pattern === '**') return GLOBSTAR
  if (pattern === '') return ''

  var re = '';
  var hasMagic = !!options.nocase;
  var escaping = false;
  // ? => one single character
  var patternListStack = [];
  var negativeLists = [];
  var stateChar;
  var inClass = false;
  var reClassStart = -1;
  var classStart = -1;
  // . and .. never match anything that doesn't start with .,
  // even when options.dot is set.
  var patternStart = pattern.charAt(0) === '.' ? '' // anything
  // not (start or / followed by . or .. followed by / or end)
  : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
  : '(?!\\.)';
  var self = this;

  // Flush a pending extglob qualifier that turned out not to start a
  // pattern list (i.e. was not followed by "(").
  function clearStateChar () {
    if (stateChar) {
      // we had some state-tracking character
      // that wasn't consumed by this pass.
      switch (stateChar) {
        case '*':
          re += star;
          hasMagic = true;
          break
        case '?':
          re += qmark;
          hasMagic = true;
          break
        default:
          re += '\\' + stateChar;
          break
      }
      self.debug('clearStateChar %j %j', stateChar, re);
      stateChar = false;
    }
  }

  for (var i = 0, len = pattern.length, c
    ; (i < len) && (c = pattern.charAt(i))
    ; i++) {
    this.debug('%s\t%s %s %j', pattern, i, re, c);

    // skip over any that are escaped.
    if (escaping && reSpecials[c]) {
      re += '\\' + c;
      escaping = false;
      continue
    }

    switch (c) {
      case '/':
        // completely not allowed, even escaped.
        // Should already be path-split by now.
        return false

      case '\\':
        clearStateChar();
        escaping = true;
        continue

      // the various stateChar values
      // for the "extglob" stuff.
      case '?':
      case '*':
      case '+':
      case '@':
      case '!':
        this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c);

        // all of those are literals inside a class, except that
        // the glob [!a] means [^a] in regexp
        if (inClass) {
          this.debug(' in class');
          if (c === '!' && i === classStart + 1) c = '^';
          re += c;
          continue
        }

        // if we already have a stateChar, then it means
        // that there was something like ** or +? in there.
        // Handle the stateChar, then proceed with this one.
        self.debug('call clearStateChar %j', stateChar);
        clearStateChar();
        stateChar = c;
        // if extglob is disabled, then +(asdf|foo) isn't a thing.
        // just clear the statechar *now*, rather than even diving into
        // the patternList stuff.
        if (options.noext) clearStateChar();
        continue

      case '(':
        if (inClass) {
          re += '(';
          continue
        }

        if (!stateChar) {
          re += '\\(';
          continue
        }

        // Open an extglob pattern list, e.g. "!(", "+(", "*(" ...
        patternListStack.push({
          type: stateChar,
          start: i - 1,
          reStart: re.length,
          open: plTypes[stateChar].open,
          close: plTypes[stateChar].close
        });
        // negation is (?:(?!js)[^/]*)
        re += stateChar === '!' ? '(?:(?!(?:' : '(?:';
        this.debug('plType %j %j', stateChar, re);
        stateChar = false;
        continue

      case ')':
        if (inClass || !patternListStack.length) {
          re += '\\)';
          continue
        }

        clearStateChar();
        hasMagic = true;
        var pl = patternListStack.pop();
        // negation is (?:(?!js)[^/]*)
        // The others are (?:<pattern>)<type>
        re += pl.close;
        if (pl.type === '!') {
          negativeLists.push(pl);
        }
        pl.reEnd = re.length;
        continue

      case '|':
        if (inClass || !patternListStack.length || escaping) {
          re += '\\|';
          escaping = false;
          continue
        }

        clearStateChar();
        re += '|';
        continue

      // these are mostly the same in regexp and glob
      case '[':
        // swallow any state-tracking char before the [
        clearStateChar();

        if (inClass) {
          re += '\\' + c;
          continue
        }

        inClass = true;
        classStart = i;
        reClassStart = re.length;
        re += c;
        continue

      case ']':
        // a right bracket shall lose its special
        // meaning and represent itself in
        // a bracket expression if it occurs
        // first in the list. -- POSIX.2 2.8.3.2
        if (i === classStart + 1 || !inClass) {
          re += '\\' + c;
          escaping = false;
          continue
        }

        // handle the case where we left a class open.
        // "[z-a]" is valid, equivalent to "\[z-a\]"
        if (inClass) {
          // split where the last [ was, make sure we don't have
          // an invalid re. if so, re-walk the contents of the
          // would-be class to re-translate any characters that
          // were passed through as-is
          // TODO: It would probably be faster to determine this
          // without a try/catch and a new RegExp, but it's tricky
          // to do safely. For now, this is safe and works.
          var cs = pattern.substring(classStart + 1, i);
          try {
            RegExp('[' + cs + ']');
          } catch (er) {
            // not a valid class!
            var sp = this.parse(cs, SUBPARSE);
            re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]';
            hasMagic = hasMagic || sp[1];
            inClass = false;
            continue
          }
        }

        // finish up the class.
        hasMagic = true;
        inClass = false;
        re += c;
        continue

      default:
        // swallow any state char that wasn't consumed
        clearStateChar();

        if (escaping) {
          // no need
          escaping = false;
        } else if (reSpecials[c]
          && !(c === '^' && inClass)) {
          re += '\\';
        }

        re += c;

    } // switch
  } // for

  // handle the case where we left a class open.
  // "[abc" is valid, equivalent to "\[abc"
  if (inClass) {
    // split where the last [ was, and escape it
    // this is a huge pita. We now have to re-walk
    // the contents of the would-be class to re-translate
    // any characters that were passed through as-is
    cs = pattern.substr(classStart + 1);
    sp = this.parse(cs, SUBPARSE);
    re = re.substr(0, reClassStart) + '\\[' + sp[0];
    hasMagic = hasMagic || sp[1];
  }

  // handle the case where we had a +( thing at the *end*
  // of the pattern.
  // each pattern list stack adds 3 chars, and we need to go through
  // and escape any | chars that were passed through as-is for the regexp.
  // Go through and escape them, taking care not to double-escape any
  // | chars that were already escaped.
  for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
    var tail = re.slice(pl.reStart + pl.open.length);
    this.debug('setting tail', re, pl);
    // maybe some even number of \, then maybe 1 \, followed by a |
    tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
      if (!$2) {
        // the | isn't already escaped, so escape it.
        $2 = '\\';
      }

      // need to escape all those slashes *again*, without escaping the
      // one that we need for escaping the | character. As it works out,
      // escaping an even number of slashes can be done by simply repeating
      // it exactly after itself. That's why this trick works.
      //
      // I am sorry that you have to see this.
      return $1 + $1 + $2 + '|'
    });

    this.debug('tail=%j\n %s', tail, tail, pl, re);
    var t = pl.type === '*' ? star
      : pl.type === '?' ? qmark
      : '\\' + pl.type;

    hasMagic = true;
    re = re.slice(0, pl.reStart) + t + '\\(' + tail;
  }

  // handle trailing things that only matter at the very end.
  clearStateChar();
  if (escaping) {
    // trailing \\
    re += '\\\\';
  }

  // only need to apply the nodot start if the re starts with
  // something that could conceivably capture a dot
  var addPatternStart = false;
  switch (re.charAt(0)) {
    case '.':
    case '[':
    case '(': addPatternStart = true;
  }

  // Hack to work around lack of negative lookbehind in JS
  // A pattern like: *.!(x).!(y|z) needs to ensure that a name
  // like 'a.xyz.yz' doesn't match. So, the first negative
  // lookahead, has to look ALL the way ahead, to the end of
  // the pattern.
  for (var n = negativeLists.length - 1; n > -1; n--) {
    var nl = negativeLists[n];

    var nlBefore = re.slice(0, nl.reStart);
    var nlFirst = re.slice(nl.reStart, nl.reEnd - 8);
    var nlLast = re.slice(nl.reEnd - 8, nl.reEnd);
    var nlAfter = re.slice(nl.reEnd);

    nlLast += nlAfter;

    // Handle nested stuff like *(*.js|!(*.json)), where open parens
    // mean that we should *not* include the ) in the bit that is considered
    // "after" the negated section.
    var openParensBefore = nlBefore.split('(').length - 1;
    var cleanAfter = nlAfter;
    for (i = 0; i < openParensBefore; i++) {
      cleanAfter = cleanAfter.replace(/\)[+*?]?/, '');
    }
    nlAfter = cleanAfter;

    var dollar = '';
    if (nlAfter === '' && isSub !== SUBPARSE) {
      dollar = '$';
    }
    var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast;
    re = newRe;
  }

  // if the re is not "" at this point, then we need to make sure
  // it doesn't match against an empty path part.
  // Otherwise a/* will match a/, which it should not.
  if (re !== '' && hasMagic) {
    re = '(?=.)' + re;
  }

  if (addPatternStart) {
    re = patternStart + re;
  }

  // parsing just a piece of a larger pattern.
  if (isSub === SUBPARSE) {
    return [re, hasMagic]
  }

  // skip the regexp for non-magical patterns
  // unescape anything in it, though, so that it'll be
  // an exact match against a file etc.
  if (!hasMagic) {
    return globUnescape(pattern)
  }

  var flags = options.nocase ? 'i' : '';
  try {
    var regExp = new RegExp('^' + re + '$', flags);
  } catch (er) {
    // If it was an invalid regular expression, then it can't match
    // anything. This trick looks for a character after the end of
    // the string, which is of course impossible, except in multi-line
    // mode, but it's not a /m regex.
    return new RegExp('$.')
  }

  regExp._glob = pattern;
  regExp._src = re;

  return regExp
}
|
||
|
|
||
|
// One-shot helper: compile a pattern straight to a RegExp (or false).
minimatch.makeRe = function (pattern, options) {
  return new Minimatch$1(pattern, options || {}).makeRe()
};

Minimatch$1.prototype.makeRe = makeRe;
/**
 * Build (and cache) a single RegExp matching the entire pattern.
 * Prefer .match() in normal use; this exists for callers that want a
 * plain regex. Returns false when the pattern cannot be compiled.
 */
function makeRe () {
  // Cached result; false is a valid "cannot compile" cache value.
  if (this.regexp || this.regexp === false) return this.regexp

  // at this point, this.set is a 2d array of partial pattern strings,
  // compiled regexps, or the GLOBSTAR sentinel.
  var set = this.set;

  if (!set.length) {
    this.regexp = false;
    return this.regexp
  }
  var options = this.options;

  // Pick the "**" translation based on the active options.
  var twoStar = options.noglobstar ? star
    : options.dot ? twoStarDot
    : twoStarNoDot;
  var flags = options.nocase ? 'i' : '';

  // Join every segment of every alternative with an escaped "/", and
  // the alternatives themselves with "|".
  var source = set.map(function (alternative) {
    return alternative.map(function (piece) {
      if (piece === GLOBSTAR) return twoStar
      if (typeof piece === 'string') return regExpEscape(piece)
      return piece._src
    }).join('\\\/')
  }).join('|');

  // must match entire pattern
  // ending in a * or ** will make it less strict.
  source = '^(?:' + source + ')$';

  // can match anything, as long as it's not this.
  if (this.negate) source = '^(?!' + source + ').*$';

  try {
    this.regexp = new RegExp(source, flags);
  } catch (ex) {
    this.regexp = false;
  }
  return this.regexp
}
|
||
|
|
||
|
/**
 * Filter `list` down to the entries matching `pattern`. With
 * options.nonull set, a pattern that matches nothing yields [pattern]
 * itself (mirroring shell behavior) instead of an empty array.
 */
minimatch.match = function (list, pattern, options) {
  var mm = new Minimatch$1(pattern, options || {});
  var matched = list.filter(function (f) {
    return mm.match(f)
  });
  if (mm.options.nonull && !matched.length) {
    matched.push(pattern);
  }
  return matched
};
|
||
|
|
||
|
Minimatch$1.prototype.match = match;
/**
 * Test a path against this compiled pattern.
 *
 * @param {string} f - path to test
 * @param {boolean} [partial] - allow the path to run out before the
 *   pattern does (used during directory traversal)
 * @returns {boolean}
 */
function match (f, partial) {
  this.debug('match', f, this.pattern);

  // short-circuit in the case of busted things.
  // comments, etc.
  if (this.comment) return false
  if (this.empty) return f === ''

  if (f === '/' && partial) return true

  var options = this.options;

  // windows: need to use /, not \
  if (path.sep !== '/') {
    f = f.split(path.sep).join('/');
  }

  // treat the test path as a set of pathparts.
  f = f.split(slashSplit);
  this.debug(this.pattern, 'split', f);

  // just ONE of the pattern sets in this.set needs to match in order
  // for the whole thing to be valid. If negating, a single hit means
  // failure instead. Either way, return on the first hit.
  var set = this.set;
  this.debug(this.pattern, 'set', set);

  // Find the basename of the path by looking for the last non-empty segment
  var filename;
  for (var k = f.length - 1; k >= 0; k--) {
    filename = f[k];
    if (filename) break
  }

  for (var i = 0; i < set.length; i++) {
    var pattern = set[i];
    var file = f;
    // matchBase: a single-segment pattern matches against the basename.
    if (options.matchBase && pattern.length === 1) {
      file = [filename];
    }
    if (this.matchOne(file, pattern, partial)) {
      if (options.flipNegate) return true
      return !this.negate
    }
  }

  // didn't get any hits. this is success if it's a negative
  // pattern, failure otherwise.
  if (options.flipNegate) return false
  return this.negate
}
|
||
|
|
||
|
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
/**
 * Match one split path (`file`, array of segments) against one compiled
 * alternative (`pattern`, array of string/RegExp/GLOBSTAR matchers).
 * Recurses to implement "**" backtracking.
 */
Minimatch$1.prototype.matchOne = function (file, pattern, partial) {
  var options = this.options;

  this.debug('matchOne',
    { 'this': this, file: file, pattern: pattern });

  this.debug('matchOne', file.length, pattern.length);

  // Walk file segments and pattern parts in lockstep.
  for (var fi = 0,
      pi = 0,
      fl = file.length,
      pl = pattern.length
    ; (fi < fl) && (pi < pl)
    ; fi++, pi++) {
    this.debug('matchOne loop');
    var p = pattern[pi];
    var f = file[fi];

    this.debug(pattern, p, f);

    // should be impossible.
    // some invalid regexp stuff in the set.
    if (p === false) return false

    if (p === GLOBSTAR) {
      this.debug('GLOBSTAR', [pattern, p, f]);

      // "**"
      // a/**/b/**/c would match the following:
      // a/b/x/y/z/c
      // a/x/y/z/b/c
      // a/b/x/b/x/c
      // a/b/c
      // To do this, take the rest of the pattern after
      // the **, and see if it would match the file remainder.
      // If so, return success.
      // If not, the ** "swallows" a segment, and try again.
      // This is recursively awful.
      //
      // a/**/b/**/c matching a/b/x/y/z/c
      // - a matches a
      // - doublestar
      //   - matchOne(b/x/y/z/c, b/**/c)
      //     - b matches b
      //     - doublestar
      //       - matchOne(x/y/z/c, c) -> no
      //       - matchOne(y/z/c, c) -> no
      //       - matchOne(z/c, c) -> no
      //       - matchOne(c, c) yes, hit
      var fr = fi;
      var pr = pi + 1;
      if (pr === pl) {
        this.debug('** at the end');
        // a ** at the end will just swallow the rest.
        // We have found a match.
        // however, it will not swallow /.x, unless
        // options.dot is set.
        // . and .. are *never* matched by **, for explosively
        // exponential reasons.
        for (; fi < fl; fi++) {
          if (file[fi] === '.' || file[fi] === '..' ||
            (!options.dot && file[fi].charAt(0) === '.')) return false
        }
        return true
      }

      // ok, let's see if we can swallow whatever we can.
      while (fr < fl) {
        var swallowee = file[fr];

        this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);

        // XXX remove this slice. Just pass the start index.
        if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
          this.debug('globstar found match!', fr, fl, swallowee);
          // found a match.
          return true
        } else {
          // can't swallow "." or ".." ever.
          // can only swallow ".foo" when explicitly asked.
          if (swallowee === '.' || swallowee === '..' ||
            (!options.dot && swallowee.charAt(0) === '.')) {
            this.debug('dot detected!', file, fr, pattern, pr);
            break
          }

          // ** swallows a segment, and continue.
          this.debug('globstar swallow a segment, and continue');
          fr++;
        }
      }

      // no match was found.
      // However, in partial mode, we can't say this is necessarily over.
      // If there's more *pattern* left, then
      if (partial) {
        // ran out of file
        this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
        if (fr === fl) return true
      }
      return false
    }

    // something other than **
    // non-magic patterns just have to match exactly
    // patterns with magic have been turned into regexps.
    var hit;
    if (typeof p === 'string') {
      if (options.nocase) {
        hit = f.toLowerCase() === p.toLowerCase();
      } else {
        hit = f === p;
      }
      this.debug('string match', p, f, hit);
    } else {
      hit = f.match(p);
      this.debug('pattern match', p, f, hit);
    }

    if (!hit) return false
  }

  // Note: ending in / means that we'll get a final ""
  // at the end of the pattern. This can only match a
  // corresponding "" at the end of the file.
  // If the file ends in /, then it can only match a
  // a pattern that ends in /, unless the pattern just
  // doesn't have any more for it. But, a/b/ should *not*
  // match "a/b/*", even though "" matches against the
  // [^/]*? pattern, except in partial mode, where it might
  // simply not be reached yet.
  // However, a/b/ should still satisfy a/*

  // now either we fell off the end of the pattern, or we're done.
  if (fi === fl && pi === pl) {
    // ran out of pattern and filename at the same time.
    // an exact hit!
    return true
  } else if (fi === fl) {
    // ran out of file, but still had pattern left.
    // this is ok if we're doing the match as part of
    // a glob fs traversal.
    return partial
  } else if (pi === pl) {
    // ran out of pattern, still have file left.
    // this is only acceptable if we're on the very last
    // empty segment of a file with a trailing slash.
    // a/* should match a/b/
    var emptyFileEnd = (fi === fl - 1) && (file[fi] === '');
    return emptyFileEnd
  }

  // should be unreachable.
  throw new Error('wtf?')
};
|
||
|
|
||
|
// replace stuff like \* with *
|
||
|
// Undo glob escaping: every backslash-escaped character ("\*", "\?", …)
// is replaced by the character itself.
function globUnescape (s) {
  var unescaped = s.replace(/\\(.)/g, '$1');
  return unescaped
}
|
||
|
|
||
|
// Escape every regex metacharacter in `s` so it can be embedded in a
// RegExp source string as a literal.
function regExpEscape (s) {
  var escaped = s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
  return escaped
}
|
||
|
|
||
|
var inherits_browser = createCommonjsModule(function (module) {
|
||
|
if (typeof Object.create === 'function') {
|
||
|
// implementation from standard node.js 'util' module
|
||
|
module.exports = function inherits(ctor, superCtor) {
|
||
|
if (superCtor) {
|
||
|
ctor.super_ = superCtor;
|
||
|
ctor.prototype = Object.create(superCtor.prototype, {
|
||
|
constructor: {
|
||
|
value: ctor,
|
||
|
enumerable: false,
|
||
|
writable: true,
|
||
|
configurable: true
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
};
|
||
|
} else {
|
||
|
// old school shim for old browsers
|
||
|
module.exports = function inherits(ctor, superCtor) {
|
||
|
if (superCtor) {
|
||
|
ctor.super_ = superCtor;
|
||
|
var TempCtor = function () {};
|
||
|
TempCtor.prototype = superCtor.prototype;
|
||
|
ctor.prototype = new TempCtor();
|
||
|
ctor.prototype.constructor = ctor;
|
||
|
}
|
||
|
};
|
||
|
}
|
||
|
});
|
||
|
|
||
|
// Prefer Node's util.inherits when available; otherwise fall back to the
// bundled browser implementation above.
var inherits = createCommonjsModule(function (module) {
try {
  var util = require$$0__default['default'];
  /* istanbul ignore next */
  if (typeof util.inherits !== 'function') throw '';
  module.exports = util.inherits;
} catch (e) {
  /* istanbul ignore next */
  module.exports = inherits_browser;
}
});
|
||
|
|
||
|
// On POSIX systems a path is absolute exactly when it starts with "/".
function posix(path) {
  var first = path.charAt(0);
  return first === '/';
}
|
||
|
|
||
|
// Windows absolute-path test: absolute when the path is UNC
// ("\\server\share…") or has a root separator after an optional drive.
function win32(path) {
  // Device / UNC split, mirroring node's own parser:
  // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
  var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
  var parts = splitDeviceRe.exec(path);
  var device = parts[1] || '';
  // A device portion whose second char is not ":" is a UNC prefix.
  var isUnc = Boolean(device && device.charAt(1) !== ':');

  // UNC paths are always absolute; otherwise require a root separator.
  return Boolean(parts[2] || isUnc);
}
|
||
|
|
||
|
// Export the platform-appropriate absolute-path test; both flavors are
// also attached as properties for callers that need a specific one.
var pathIsAbsolute = process.platform === 'win32' ? win32 : posix;
var posix_1 = posix;
var win32_1 = win32;
pathIsAbsolute.posix = posix_1;
pathIsAbsolute.win32 = win32_1;
|
||
|
|
||
|
// Public surface of the shared glob "common" helper module (aliases for
// the function declarations below; function hoisting makes this safe).
var alphasort_1 = alphasort;
var alphasorti_1 = alphasorti;
var setopts_1 = setopts$2;
var ownProp_1 = ownProp$2;
var makeAbs_1 = makeAbs;
var finish_1 = finish;
var mark_1 = mark;
var isIgnored_1 = isIgnored$2;
var childrenIgnored_1 = childrenIgnored$2;
||
|
// Own-property check that stays correct for null-prototype objects and
// objects that shadow hasOwnProperty.
function ownProp$2 (obj, field) {
  var hasOwn = Object.prototype.hasOwnProperty;
  return hasOwn.call(obj, field)
}
|
||
|
|
||
|
|
||
|
|
||
|
|
||
|
// Local handle on the bundled minimatch constructor, used to compile patterns.
var Minimatch = minimatch_1.Minimatch;
|
||
|
|
||
|
// Case-insensitive, locale-aware comparator for sorting match lists.
function alphasorti (a, b) {
  var la = a.toLowerCase();
  var lb = b.toLowerCase();
  return la.localeCompare(lb)
}
|
||
|
|
||
|
// Case-sensitive, locale-aware comparator for sorting match lists.
function alphasort (a, b) {
  var cmp = a.localeCompare(b);
  return cmp
}
|
||
|
|
||
|
// Normalize options.ignore onto self.ignore: always an array, with each
// pattern compiled into a { matcher, gmatcher } record via ignoreMap.
function setupIgnores (self, options) {
  self.ignore = options.ignore || [];

  if (!Array.isArray(self.ignore)) {
    self.ignore = [self.ignore];
  }

  if (self.ignore.length) {
    self.ignore = self.ignore.map(ignoreMap);
  }
}
|
||
|
|
||
|
// Compile one ignore pattern. Ignore matching always runs in dot:true mode.
// A pattern ending in "/**" also gets a parent-directory matcher (gmatcher)
// so e.g. "node_modules/**" ignores node_modules itself as well as its
// contents.
function ignoreMap (pattern) {
  var gmatcher = null;
  if (pattern.slice(-3) === '/**') {
    var gpattern = pattern.replace(/(\/\*\*)+$/, '');
    gmatcher = new Minimatch(gpattern, { dot: true });
  }

  return {
    matcher: new Minimatch(pattern, { dot: true }),
    gmatcher: gmatcher
  }
}
|
||
|
|
||
|
// Normalize user options onto `self` (a Glob/GlobSync instance) and
// compile the pattern with minimatch. Mutates both `self` and `options`
// (negation and comments are force-disabled on the minimatch options).
function setopts$2 (self, pattern, options) {
  if (!options)
    options = {};

  // base-matching: just use globstar for that.
  if (options.matchBase && -1 === pattern.indexOf("/")) {
    if (options.noglobstar) {
      throw new Error("base matching requires globstar")
    }
    pattern = "**/" + pattern;
  }

  // Boolean flags, all defaulting to false except strict (default true).
  self.silent = !!options.silent;
  self.pattern = pattern;
  self.strict = options.strict !== false;
  self.realpath = !!options.realpath;
  self.realpathCache = options.realpathCache || Object.create(null);
  self.follow = !!options.follow;
  self.dot = !!options.dot;
  self.mark = !!options.mark;
  self.nodir = !!options.nodir;
  if (self.nodir)
    self.mark = true;  // nodir needs the trailing-slash marking to filter
  self.sync = !!options.sync;
  self.nounique = !!options.nounique;
  self.nonull = !!options.nonull;
  self.nosort = !!options.nosort;
  self.nocase = !!options.nocase;
  self.stat = !!options.stat;
  self.noprocess = !!options.noprocess;
  self.absolute = !!options.absolute;

  // Shared caches (may be passed in to share work across instances).
  self.maxLength = options.maxLength || Infinity;
  self.cache = options.cache || Object.create(null);
  self.statCache = options.statCache || Object.create(null);
  self.symlinks = options.symlinks || Object.create(null);

  setupIgnores(self, options);

  self.changedCwd = false;
  var cwd = process.cwd();
  if (!ownProp$2(options, "cwd"))
    self.cwd = cwd;
  else {
    self.cwd = path__default['default'].resolve(options.cwd);
    self.changedCwd = self.cwd !== cwd;
  }

  self.root = options.root || path__default['default'].resolve(self.cwd, "/");
  self.root = path__default['default'].resolve(self.root);
  if (process.platform === "win32")
    self.root = self.root.replace(/\\/g, "/");

  // TODO: is an absolute `cwd` supposed to be resolved against `root`?
  // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
  self.cwdAbs = pathIsAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd);
  if (process.platform === "win32")
    self.cwdAbs = self.cwdAbs.replace(/\\/g, "/");
  self.nomount = !!options.nomount;

  // disable comments and negation in Minimatch.
  // Note that they are not supported in Glob itself anyway.
  options.nonegate = true;
  options.nocomment = true;

  self.minimatch = new Minimatch(pattern, options);
  self.options = self.minimatch.options;
}
|
||
|
|
||
|
// Collapse the per-pattern match sets on `self.matches` into the final
// `self.found` array: uniq (unless nounique), sort (unless nosort), apply
// mark/nodir filtering, then drop ignored entries.
function finish (self) {
  var nou = self.nounique;
  // With uniquing, accumulate keys in a bare object; otherwise a plain list.
  var all = nou ? [] : Object.create(null);

  for (var i = 0, l = self.matches.length; i < l; i ++) {
    var matches = self.matches[i];
    if (!matches || Object.keys(matches).length === 0) {
      if (self.nonull) {
        // do like the shell, and spit out the literal glob
        var literal = self.minimatch.globSet[i];
        if (nou)
          all.push(literal);
        else
          all[literal] = true;
      }
    } else {
      // had matches
      var m = Object.keys(matches);
      if (nou)
        all.push.apply(all, m);
      else
        m.forEach(function (m) {
          all[m] = true;
        });
    }
  }

  if (!nou)
    all = Object.keys(all);

  if (!self.nosort)
    all = all.sort(self.nocase ? alphasorti : alphasort);

  // at *some* point we statted all of these
  if (self.mark) {
    for (var i = 0; i < all.length; i++) {
      all[i] = self._mark(all[i]);
    }
    if (self.nodir) {
      // keep only entries that are not directories per mark + cache
      all = all.filter(function (e) {
        var notDir = !(/\/$/.test(e));
        var c = self.cache[e] || self.cache[makeAbs(self, e)];
        if (notDir && c)
          notDir = c !== 'DIR' && !Array.isArray(c);
        return notDir
      });
    }
  }

  if (self.ignore.length)
    all = all.filter(function(m) {
      return !isIgnored$2(self, m)
    });

  self.found = all;
}
|
||
|
|
||
|
// Apply the trailing-slash convention implied by the stat cache: append
// "/" for directories, strip it for non-directories, and mirror the
// cache/statCache entries under the adjusted key.
function mark (self, p) {
  var abs = makeAbs(self, p);
  var c = self.cache[abs];
  var m = p;
  if (c) {
    var isDir = c === 'DIR' || Array.isArray(c);
    var slash = p.slice(-1) === '/';

    if (isDir && !slash)
      m += '/';
    else if (!isDir && slash)
      m = m.slice(0, -1);

    if (m !== p) {
      // keep the caches consistent under the renamed key
      var mabs = makeAbs(self, m);
      self.statCache[mabs] = self.statCache[abs];
      self.cache[mabs] = self.cache[abs];
    }
  }

  return m
}
|
||
|
|
||
|
// lotta situps...
// Resolve `f` to an absolute path: root-slash paths are joined onto
// self.root, already-absolute (or empty) paths pass through, and relative
// paths resolve against self.cwd (or process cwd). Windows output uses "/".
function makeAbs (self, f) {
  var abs = f;
  if (f.charAt(0) === '/') {
    abs = path__default['default'].join(self.root, f);
  } else if (pathIsAbsolute(f) || f === '') {
    abs = f;
  } else if (self.changedCwd) {
    abs = path__default['default'].resolve(self.cwd, f);
  } else {
    abs = path__default['default'].resolve(f);
  }

  if (process.platform === 'win32')
    abs = abs.replace(/\\/g, '/');

  return abs
}
|
||
|
|
||
|
|
||
|
// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
function isIgnored$2 (self, path) {
  if (!self.ignore.length)
    return false

  for (var i = 0; i < self.ignore.length; i++) {
    var item = self.ignore[i];
    if (item.matcher.match(path))
      return true
    if (item.gmatcher && item.gmatcher.match(path))
      return true
  }
  return false
}
|
||
|
|
||
|
// True when `path` is a directory whose children are all ignored — i.e.
// some ignore record's parent-directory matcher (gmatcher) matches it.
function childrenIgnored$2 (self, path) {
  if (!self.ignore.length)
    return false

  for (var i = 0; i < self.ignore.length; i++) {
    var item = self.ignore[i];
    if (item.gmatcher && item.gmatcher.match(path))
      return true
  }
  return false
}
|
||
|
|
||
|
// Shared helper bundle used by both the sync and async glob walkers.
var common = {
  alphasort: alphasort_1,
  alphasorti: alphasorti_1,
  setopts: setopts_1,
  ownProp: ownProp_1,
  makeAbs: makeAbs_1,
  finish: finish_1,
  mark: mark_1,
  isIgnored: isIgnored_1,
  childrenIgnored: childrenIgnored_1
};
|
||
|
|
||
|
// Synchronous glob module surface, plus local aliases for the shared helpers.
var sync = globSync;
globSync.GlobSync = GlobSync$1;
var setopts$1 = common.setopts;
var ownProp$1 = common.ownProp;
var childrenIgnored$1 = common.childrenIgnored;
var isIgnored$1 = common.isIgnored;
|
||
|
|
||
|
// Synchronous glob entry point: resolves `pattern` immediately and returns
// the array of matched paths. Passing a callback is a usage error.
function globSync (pattern, options) {
  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  var walker = new GlobSync$1(pattern, options);
  return walker.found
}
|
||
|
|
||
|
// Synchronous glob walker. Processes every pattern in the compiled
// minimatch set immediately; results land on this.found via _finish().
function GlobSync$1 (pattern, options) {
  if (!pattern)
    throw new Error('must provide pattern')

  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  // allow calling without `new`
  if (!(this instanceof GlobSync$1))
    return new GlobSync$1(pattern, options)

  setopts$1(this, pattern, options);

  if (this.noprocess)
    return this

  var n = this.minimatch.set.length;
  this.matches = new Array(n);
  for (var i = 0; i < n; i ++) {
    this._process(this.minimatch.set[i], i, false);
  }
  this._finish();
}
|
||
|
|
||
|
// Optionally resolve every match to its realpath, then delegate to the
// shared common.finish() to produce this.found.
GlobSync$1.prototype._finish = function () {
  assert__default['default'](this instanceof GlobSync$1);
  if (this.realpath) {
    var self = this;
    this.matches.forEach(function (matchset, index) {
      var set = self.matches[index] = Object.create(null);
      for (var p in matchset) {
        try {
          p = self._makeAbs(p);
          var real = fs_realpath.realpathSync(p, self.realpathCache);
          set[real] = true;
        } catch (er) {
          // stat failures keep the un-resolved path; anything else is fatal
          if (er.syscall === 'stat')
            set[self._makeAbs(p)] = true;
          else
            throw er
        }
      }
    });
  }
  common.finish(this);
};
|
||
|
|
||
|
|
||
|
// Process one pattern (an array of string/regex/GLOBSTAR segments) for the
// match set at `index`, dispatching to the simple / readdir / globstar
// strategies depending on where the first non-string segment sits.
GlobSync$1.prototype._process = function (pattern, index, inGlobStar) {
  assert__default['default'](this instanceof GlobSync$1);

  // Get the first [n] parts of pattern that are all strings.
  var n = 0;
  while (typeof pattern[n] === 'string') {
    n ++;
  }
  // now n is the index of the first one that is *not* a string.

  // See if there's anything else
  var prefix;
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index);
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null;
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/');
      break
  }

  var remain = pattern.slice(n);

  // get the list of entries.
  var read;
  if (prefix === null)
    read = '.';
  else if (pathIsAbsolute(prefix) || pathIsAbsolute(pattern.join('/'))) {
    if (!prefix || !pathIsAbsolute(prefix))
      prefix = '/' + prefix;
    read = prefix;
  } else
    read = prefix;

  var abs = this._makeAbs(read);

  //if ignored, skip processing
  if (childrenIgnored$1(this, read))
    return

  var isGlobStar = remain[0] === minimatch_1.GLOBSTAR;
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar);
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar);
};
|
||
|
|
||
|
|
||
|
// Non-globstar step: read the directory at `abs`, match its entries
// against the head of `remain`, then either emit matches (last segment)
// or recurse with each matched entry substituted in.
GlobSync$1.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
  var entries = this._readdir(abs, inGlobStar);

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return

  // It will only match dot entries if it starts with a dot, or if
  // dot is set. Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0];
  var negate = !!this.minimatch.negate;
  var rawGlob = pn._glob;
  var dotOk = this.dot || rawGlob.charAt(0) === '.';

  var matchedEntries = [];
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i];
    if (e.charAt(0) !== '.' || dotOk) {
      var m;
      if (negate && !prefix) {
        m = !e.match(pn);
      } else {
        m = e.match(pn);
      }
      if (m)
        matchedEntries.push(e);
    }
  }

  var len = matchedEntries.length;
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly. We know they exist, since readdir returned
  // them.
  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null);

    for (var i = 0; i < len; i ++) {
      var e = matchedEntries[i];
      if (prefix) {
        if (prefix.slice(-1) !== '/')
          e = prefix + '/' + e;
        else
          e = prefix + e;
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path__default['default'].join(this.root, e);
      }
      this._emitMatch(index, e);
    }
    // This was the last one, and no stats were needed
    return
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift();
  for (var i = 0; i < len; i ++) {
    var e = matchedEntries[i];
    var newPattern;
    if (prefix)
      newPattern = [prefix, e];
    else
      newPattern = [e];
    this._process(newPattern.concat(remain), index, inGlobStar);
  }
};
|
||
|
|
||
|
|
||
|
// Record a single match for pattern `index`, honoring ignore, mark,
// absolute and nodir settings; the matches object de-duplicates entries.
GlobSync$1.prototype._emitMatch = function (index, e) {
  if (isIgnored$1(this, e))
    return

  var abs = this._makeAbs(e);

  if (this.mark)
    e = this._mark(e);

  if (this.absolute) {
    e = abs;
  }

  // already recorded
  if (this.matches[index][e])
    return

  if (this.nodir) {
    var c = this.cache[abs];
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true;

  if (this.stat)
    this._stat(e);
};
|
||
|
|
||
|
|
||
|
// Globstar-aware readdir: lstat first so symlinks are noted (and not
// descended into again later), then readdir only when it could be a dir.
GlobSync$1.prototype._readdirInGlobStar = function (abs) {
  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false)

  var entries;
  var lstat;
  try {
    lstat = fs__default['default'].lstatSync(abs);
  } catch (er) {
    if (er.code === 'ENOENT') {
      // lstat failed, doesn't exist
      return null
    }
    // NOTE(review): other lstat errors fall through with lstat undefined,
    // matching upstream node-glob behavior.
  }

  var isSym = lstat && lstat.isSymbolicLink();
  this.symlinks[abs] = isSym;

  // If it's not a symlink or a dir, then it's definitely a regular file.
  // don't bother doing a readdir in that case.
  if (!isSym && lstat && !lstat.isDirectory())
    this.cache[abs] = 'FILE';
  else
    entries = this._readdir(abs, false);

  return entries
};
|
||
|
|
||
|
// Read directory entries for `abs`, consulting the cache first. During a
// globstar walk, unseen paths route through _readdirInGlobStar so symlink
// state is recorded before descending.
GlobSync$1.prototype._readdir = function (abs, inGlobStar) {
  if (inGlobStar && !ownProp$1(this.symlinks, abs))
    return this._readdirInGlobStar(abs)

  if (ownProp$1(this.cache, abs)) {
    var c = this.cache[abs];
    if (!c || c === 'FILE')
      return null

    if (Array.isArray(c))
      return c
  }

  try {
    return this._readdirEntries(abs, fs__default['default'].readdirSync(abs))
  } catch (er) {
    this._readdirError(abs, er);
    return null
  }
};
|
||
|
|
||
|
// Cache the entry list for `abs` and (unless mark/stat will re-check
// anyway) pre-seed existence-cache entries for each child path.
GlobSync$1.prototype._readdirEntries = function (abs, entries) {
  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i ++) {
      var e = entries[i];
      if (abs === '/')
        e = abs + e;
      else
        e = abs + '/' + e;
      this.cache[e] = true;
    }
  }

  this.cache[abs] = entries;

  // mark and cache dir-ness
  return entries
};
|
||
|
|
||
|
// Handle a readdir failure for `f`, updating the cache so repeated lookups
// stay cheap. A bad cwd is fatal; other errors throw only in strict mode.
GlobSync$1.prototype._readdirError = function (f, er) {
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f);
      this.cache[abs] = 'FILE';
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd);
        error.path = this.cwd;
        error.code = er.code;
        throw error
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false;
      break

    default: // some unusual error. Treat as failure.
      this.cache[this._makeAbs(f)] = false;
      if (this.strict)
        throw er
      if (!this.silent)
        console.error('glob error', er);
      break
  }
};
|
||
|
|
||
|
// Globstar step: try the pattern with "**" pruned out (empty match), then
// for every non-dot child, both replace the globstar with the child and
// keep the globstar below it.
GlobSync$1.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
  var entries = this._readdir(abs, inGlobStar);

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1);
  var gspref = prefix ? [ prefix ] : [];
  var noGlobStar = gspref.concat(remainWithoutGlobStar);

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false);

  var len = entries.length;
  var isSym = this.symlinks[abs];

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return

  for (var i = 0; i < len; i++) {
    var e = entries[i];
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(e, remainWithoutGlobStar);
    this._process(instead, index, true);

    var below = gspref.concat(e, remain);
    this._process(below, index, true);
  }
};
|
||
|
|
||
|
// All-string pattern: just stat the joined path and emit it if it exists,
// mounting absolute prefixes onto this.root unless nomount is set.
GlobSync$1.prototype._processSimple = function (prefix, index) {
  // XXX review this. Shouldn't it be doing the mounting etc
  // before doing stat? kinda weird?
  var exists = this._stat(prefix);

  if (!this.matches[index])
    this.matches[index] = Object.create(null);

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return

  if (prefix && pathIsAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix);
    if (prefix.charAt(0) === '/') {
      prefix = path__default['default'].join(this.root, prefix);
    } else {
      prefix = path__default['default'].resolve(this.root, prefix);
      if (trail)
        prefix += '/';
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/');

  // Mark this as a match
  this._emitMatch(index, prefix);
};
|
||
|
|
||
|
// Returns either 'DIR', 'FILE', or false
// Stat `f` (with caching): trailing "/" means a directory is required.
// Symlinks are stat'ed through; broken links fall back to the lstat result.
GlobSync$1.prototype._stat = function (f) {
  var abs = this._makeAbs(f);
  var needDir = f.slice(-1) === '/';

  if (f.length > this.maxLength)
    return false

  if (!this.stat && ownProp$1(this.cache, abs)) {
    var c = this.cache[abs];

    if (Array.isArray(c))
      c = 'DIR';

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return c

    if (needDir && c === 'FILE')
      return false

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }
  var stat = this.statCache[abs];
  if (!stat) {
    var lstat;
    try {
      lstat = fs__default['default'].lstatSync(abs);
    } catch (er) {
      if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
        this.statCache[abs] = false;
        return false
      }
    }

    if (lstat && lstat.isSymbolicLink()) {
      // stat through the link; fall back to the link itself if broken
      try {
        stat = fs__default['default'].statSync(abs);
      } catch (er) {
        stat = lstat;
      }
    } else {
      stat = lstat;
    }
  }

  this.statCache[abs] = stat;

  var c = true;
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE';

  this.cache[abs] = this.cache[abs] || c;

  if (needDir && c === 'FILE')
    return false

  return c
};
|
||
|
|
||
|
// Apply directory trailing-slash marking via the shared helper.
GlobSync$1.prototype._mark = function (p) {
  return common.mark(this, p)
};
|
||
|
|
||
|
// Resolve a path to absolute form via the shared helper.
GlobSync$1.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
};
|
||
|
|
||
|
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
var wrappy_1 = wrappy;
function wrappy (fn, cb) {
  // two-arg form: wrap and immediately apply to the callback
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  // copy fn's own properties onto the wrapper
  Object.keys(fn).forEach(function (k) {
    wrapper[k] = fn[k];
  });

  return wrapper

  function wrapper() {
    var args = new Array(arguments.length);
    for (var i = 0; i < args.length; i++) {
      args[i] = arguments[i];
    }
    var ret = fn.apply(this, args);
    var cb = args[args.length-1];
    // if fn returned a new function, carry the callback's decorations over
    if (typeof ret === 'function' && ret !== cb) {
      Object.keys(cb).forEach(function (k) {
        ret[k] = cb[k];
      });
    }
    return ret
  }
}
|
||
|
|
||
|
// Public exports: `once` and its strict variant, both run through wrappy
// so decorations on wrapped callbacks survive.
var once_1 = wrappy_1(once);
var strict = wrappy_1(onceStrict);

// Opt-in prototype patch (itself once-guarded): calling once.proto()
// defines fn.once() / fn.onceStrict() on Function.prototype.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      return once(this)
    },
    configurable: true
  });

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  });
});
|
||
|
|
||
|
// Wrap `fn` so it runs at most once; later calls return the first result.
// The wrapper exposes `called` and (after the first call) `value`.
function once (fn) {
  var f = function () {
    if (f.called) return f.value
    f.called = true;
    return f.value = fn.apply(this, arguments)
  };
  f.called = false;
  return f
}
|
||
|
|
||
|
// Like once(), but a second invocation throws instead of returning the
// cached value. The error message carries the wrapped function's name.
function onceStrict (fn) {
  var f = function () {
    if (f.called)
      throw new Error(f.onceError)
    f.called = true;
    return f.value = fn.apply(this, arguments)
  };
  var name = fn.name || 'Function wrapped with `once`';
  f.onceError = name + " shouldn't be called more than once";
  f.called = false;
  return f
}
|
||
|
// Expose the throwing variant as once.strict on the exported wrapper.
once_1.strict = strict;
|
||
|
|
||
|
// In-flight request registry: key -> array of callbacks awaiting that key.
var reqs = Object.create(null);

// Export through wrappy so callback decorations are preserved.
var inflight_1 = wrappy_1(inflight);
|
||
|
|
||
|
// Register `cb` under `key`. If a request for `key` is already in flight,
// queue the callback and return null; otherwise start the queue and return
// the resolver that will fan results out to every queued callback.
function inflight (key, cb) {
  var pending = reqs[key];
  if (pending) {
    pending.push(cb);
    return null
  }
  reqs[key] = [cb];
  return makeres(key)
}
|
||
|
|
||
|
// Build the once-guarded resolver for `key`: when invoked it calls every
// callback queued so far with the same arguments, then clears the slot
// (or re-schedules for callbacks added during the fan-out).
function makeres (key) {
  return once_1(function RES () {
    var cbs = reqs[key];
    var len = cbs.length;
    var args = slice(arguments);

    // XXX It's somewhat ambiguous whether a new callback added in this
    // pass should be queued for later execution if something in the
    // list of callbacks throws, or if it should just be discarded.
    // However, it's such an edge case that it hardly matters, and either
    // choice is likely as surprising as the other.
    // As it happens, we do go ahead and schedule it for later execution.
    try {
      for (var i = 0; i < len; i++) {
        cbs[i].apply(null, args);
      }
    } finally {
      if (cbs.length > len) {
        // added more in the interim.
        // de-zalgo, just in case, but don't call again.
        cbs.splice(0, len);
        process.nextTick(function () {
          RES.apply(null, args);
        });
      } else {
        delete reqs[key];
      }
    }
  })
}
|
||
|
|
||
|
// Copy an array-like (typically `arguments`) into a real array.
function slice (args) {
  var array = [];
  var length = args.length;
  for (var i = 0; i < length; i++) {
    array.push(args[i]);
  }
  return array
}
|
||
|
|
||
|
// Approach:
|
||
|
//
|
||
|
// 1. Get the minimatch set
|
||
|
// 2. For each pattern in the set, PROCESS(pattern, false)
|
||
|
// 3. Store matches per-set, then uniq them
|
||
|
//
|
||
|
// PROCESS(pattern, inGlobStar)
|
||
|
// Get the first [n] items from pattern that are all strings
|
||
|
// Join these together. This is PREFIX.
|
||
|
// If there is no more remaining, then stat(PREFIX) and
|
||
|
// add to matches if it succeeds. END.
|
||
|
//
|
||
|
// If inGlobStar and PREFIX is symlink and points to dir
|
||
|
// set ENTRIES = []
|
||
|
// else readdir(PREFIX) as ENTRIES
|
||
|
// If fail, END
|
||
|
//
|
||
|
// with ENTRIES
|
||
|
// If pattern[n] is GLOBSTAR
|
||
|
// // handle the case where the globstar match is empty
|
||
|
// // by pruning it out, and testing the resulting pattern
|
||
|
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
|
||
|
// // handle other cases.
|
||
|
// for ENTRY in ENTRIES (not dotfiles)
|
||
|
// // attach globstar + tail onto the entry
|
||
|
// // Mark that this entry is a globstar match
|
||
|
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
|
||
|
//
|
||
|
// else // not globstar
|
||
|
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
|
||
|
// Test ENTRY against pattern[n]
|
||
|
// If fails, continue
|
||
|
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
|
||
|
//
|
||
|
// Caveat:
|
||
|
// Cache all stats and readdirs results to minimize syscall. Since all
|
||
|
// we ever care about is existence and directory-ness, we can just keep
|
||
|
// `true` for files, and [children,...] for directories, or `false` for
|
||
|
// things that don't exist.
|
||
|
|
||
|
// Async glob module surface: the entry point, EventEmitter base, and local
// aliases for the shared helpers.
var glob_1 = glob;

var EE = require$$0__default$1['default'].EventEmitter;
var setopts = common.setopts;
var ownProp = common.ownProp;

var childrenIgnored = common.childrenIgnored;
var isIgnored = common.isIgnored;
|
||
|
|
||
|
|
||
|
|
||
|
// Main glob entry point. With options.sync, delegates to the synchronous
// implementation (a callback is then a usage error); otherwise starts an
// async Glob walk, optionally with a completion callback.
function glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options;
    options = {};
  }
  if (!options) options = {};

  if (options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return sync(pattern, options)
  }

  return new Glob(pattern, options, cb)
}
|
||
|
|
||
|
// Attach the synchronous implementation to the async entry point.
glob.sync = sync;
var GlobSync = glob.GlobSync = sync.GlobSync;

// old api surface
glob.glob = glob;
|
||
|
|
||
|
// Shallow-copy `add`'s own enumerable keys onto `origin`. Non-object
// (or null) `add` values are ignored; `origin` is returned either way.
function extend (origin, add) {
  if (add === null || typeof add !== 'object') {
    return origin
  }

  var keys = Object.keys(add);
  for (var i = 0; i < keys.length; i++) {
    var k = keys[i];
    origin[k] = add[k];
  }
  return origin
}
|
||
|
|
||
|
// Return true if the pattern contains any glob magic once compiled:
// brace sets that expand to multiple patterns, or any non-string
// (regex/globstar) segment in the single expansion.
glob.hasMagic = function (pattern, options_) {
  var options = extend({}, options_);
  options.noprocess = true;

  var g = new Glob(pattern, options);
  var set = g.minimatch.set;

  if (!pattern)
    return false

  if (set.length > 1)
    return true

  for (var j = 0; j < set[0].length; j++) {
    if (typeof set[0][j] !== 'string')
      return true
  }

  return false
};
|
||
|
|
||
|
// Expose the async walker class; it emits events, so inherit EventEmitter.
glob.Glob = Glob;
inherits(Glob, EE);
|
||
|
// Glob constructor: parses the pattern, wires the optional callback to
// the 'error'/'end' events, and starts processing each minimatch subset.
// May return early with a GlobSync instance when options.sync is set.
function Glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options;
    options = null;
  }

  if (options && options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return new GlobSync(pattern, options)
  }

  // Allow calling without `new`.
  if (!(this instanceof Glob))
    return new Glob(pattern, options, cb)

  setopts(this, pattern, options);
  // Fix: was `this._didRealPath` (capital P), a field nothing ever read;
  // _finish() and _realpath() check `this._didRealpath`. Initializing the
  // correctly-cased field is behavior-neutral (undefined and false are
  // both falsy) and removes the latent inconsistency.
  this._didRealpath = false;

  // process each pattern in the minimatch set
  var n = this.minimatch.set.length;

  // The matches are stored as {<filename>: true,...} so that
  // duplicates are automagically pruned.
  // Later, we do an Object.keys() on these.
  // Keep them as a list so we can fill in when nonull is set.
  this.matches = new Array(n);

  if (typeof cb === 'function') {
    cb = once_1(cb);
    this.on('error', cb);
    this.on('end', function (matches) {
      cb(null, matches);
    });
  }

  var self = this;
  this._processing = 0;

  this._emitQueue = [];
  this._processQueue = [];
  this.paused = false;

  // noprocess: caller only wanted the parsed pattern (e.g. hasMagic).
  if (this.noprocess)
    return this

  if (n === 0)
    return done()

  // `sync` guards against _finish firing re-entrantly while the
  // constructor is still on the stack; defer via nextTick in that case.
  var sync = true;
  for (var i = 0; i < n; i ++) {
    this._process(this.minimatch.set[i], i, false, done);
  }
  sync = false;

  function done () {
    --self._processing;
    if (self._processing <= 0) {
      if (sync) {
        process.nextTick(function () {
          self._finish();
        });
      } else {
        self._finish();
      }
    }
  }
}
|
||
|
|
||
|
// Finalize the walk: optionally resolve realpaths first, then let
// common.finish build `this.found` and emit 'end'.
Glob.prototype._finish = function () {
  assert__default['default'](this instanceof Glob);
  if (this.aborted)
    return

  // realpath mode: resolve all matches once, then _realpath calls back
  // into _finish with _didRealpath set.
  if (this.realpath && !this._didRealpath)
    return this._realpath()

  common.finish(this);
  this.emit('end', this.found);
};
|
||
|
|
||
|
// Resolve every match set through fs.realpath, then re-run _finish.
// `n` counts outstanding per-set callbacks.
Glob.prototype._realpath = function () {
  if (this._didRealpath)
    return

  this._didRealpath = true;

  var n = this.matches.length;
  if (n === 0)
    return this._finish()

  var self = this;
  for (var i = 0; i < this.matches.length; i++)
    this._realpathSet(i, next);

  function next () {
    if (--n === 0)
      self._finish();
  }
};
|
||
|
|
||
|
// Realpath-resolve one match set (this.matches[index]); invokes cb once
// every entry has been resolved (or immediately for an empty set).
Glob.prototype._realpathSet = function (index, cb) {
  var matchset = this.matches[index];
  if (!matchset)
    return cb()

  var found = Object.keys(matchset);
  var self = this;
  var n = found.length;

  if (n === 0)
    return cb()

  // Rebuild the set keyed by resolved paths; duplicates collapse.
  var set = this.matches[index] = Object.create(null);
  found.forEach(function (p, i) {
    // If there's a problem with the stat, then it means that
    // one or more of the links in the realpath couldn't be
    // resolved.  just return the abs value in that case.
    p = self._makeAbs(p);
    fs_realpath.realpath(p, self.realpathCache, function (er, real) {
      if (!er)
        set[real] = true;
      else if (er.syscall === 'stat')
        set[p] = true;
      else
        self.emit('error', er); // srsly wtf right here

      if (--n === 0) {
        self.matches[index] = set;
        cb();
      }
    });
  });
};
|
||
|
|
||
|
// Append a trailing slash to directory matches (mark option); delegates
// to the shared common.mark helper.
Glob.prototype._mark = function (p) {
  return common.mark(this, p)
};
|
||
|
|
||
|
// Resolve a pattern-relative path against the configured cwd/root.
Glob.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
};
|
||
|
|
||
|
// Stop the walk: every async continuation checks this.aborted and bails.
Glob.prototype.abort = function () {
  this.aborted = true;
  this.emit('abort');
};
|
||
|
|
||
|
// Suspend the walk; further matches/process steps queue up until resume().
// Idempotent: a second pause while already paused does nothing.
Glob.prototype.pause = function () {
  if (this.paused) {
    return;
  }
  this.paused = true;
  this.emit('pause');
};
|
||
|
|
||
|
// Resume a paused walk: clear the flag, then drain first the queued
// match emissions and then the queued _process calls. The queues are
// copied before draining because handlers may pause again mid-drain.
Glob.prototype.resume = function () {
  if (this.paused) {
    this.emit('resume');
    this.paused = false;
    if (this._emitQueue.length) {
      var eq = this._emitQueue.slice(0);
      this._emitQueue.length = 0;
      for (var i = 0; i < eq.length; i ++) {
        var e = eq[i];
        this._emitMatch(e[0], e[1]);
      }
    }
    if (this._processQueue.length) {
      var pq = this._processQueue.slice(0);
      this._processQueue.length = 0;
      for (var i = 0; i < pq.length; i ++) {
        var p = pq[i];
        // _process will re-increment; undo the count from when it was queued.
        this._processing--;
        this._process(p[0], p[1], p[2], p[3]);
      }
    }
  }
};
|
||
|
|
||
|
// Core dispatcher: given one minimatch pattern (array of string segments
// and regexp/GLOBSTAR parts), split off the leading literal prefix and
// route to _processSimple (all-literal), _processGlobStar (** next) or
// _processReaddir (regexp segment next). `cb` fires when this branch of
// the walk is exhausted.
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
  assert__default['default'](this instanceof Glob);
  assert__default['default'](typeof cb === 'function');

  if (this.aborted)
    return

  this._processing++;
  if (this.paused) {
    this._processQueue.push([pattern, index, inGlobStar, cb]);
    return
  }

  //console.error('PROCESS %d', this._processing, pattern)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0;
  while (typeof pattern[n] === 'string') {
    n ++;
  }
  // now n is the index of the first one that is *not* a string.

  // see if there's anything else
  var prefix;
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index, cb);
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null;
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/');
      break
  }

  var remain = pattern.slice(n);

  // get the list of entries.
  var read;
  if (prefix === null)
    read = '.';
  else if (pathIsAbsolute(prefix) || pathIsAbsolute(pattern.join('/'))) {
    if (!prefix || !pathIsAbsolute(prefix))
      prefix = '/' + prefix;
    read = prefix;
  } else
    read = prefix;

  var abs = this._makeAbs(read);

  //if ignored, skip _processing
  if (childrenIgnored(this, read))
    return cb()

  var isGlobStar = remain[0] === minimatch_1.GLOBSTAR;
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb);
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb);
};
|
||
|
|
||
|
// Read the directory, then hand the entry list to _processReaddir2.
// Note: any readdir error is swallowed here; a missing dir shows up as
// `entries === undefined` downstream.
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this;
  this._readdir(abs, inGlobStar, function (er, entries) {
    return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  });
};
|
||
|
|
||
|
// Match directory entries against the first non-literal pattern segment.
// If that segment is the last one, emit matches directly; otherwise
// recurse into _process with each matched entry substituted for it.
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return cb()

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0];
  var negate = !!this.minimatch.negate;
  var rawGlob = pn._glob;
  var dotOk = this.dot || rawGlob.charAt(0) === '.';

  var matchedEntries = [];
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i];
    if (e.charAt(0) !== '.' || dotOk) {
      var m;
      // Negated patterns invert the per-entry match, but only at the
      // top level (no literal prefix consumed yet).
      if (negate && !prefix) {
        m = !e.match(pn);
      } else {
        m = e.match(pn);
      }
      if (m)
        matchedEntries.push(e);
    }
  }

  //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)

  var len = matchedEntries.length;
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return cb()

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.

  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null);

    for (var i = 0; i < len; i ++) {
      var e = matchedEntries[i];
      if (prefix) {
        if (prefix !== '/')
          e = prefix + '/' + e;
        else
          e = prefix + e;
      }

      // Re-mount absolute matches onto the configured root.
      if (e.charAt(0) === '/' && !this.nomount) {
        e = path__default['default'].join(this.root, e);
      }
      this._emitMatch(index, e);
    }
    // This was the last one, and no stats were needed
    return cb()
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift();
  for (var i = 0; i < len; i ++) {
    var e = matchedEntries[i];
    if (prefix) {
      if (prefix !== '/')
        e = prefix + '/' + e;
      else
        e = prefix + e;
    }
    this._process([e].concat(remain), index, inGlobStar, cb);
  }
  cb();
};
|
||
|
|
||
|
// Record a match into this.matches[index] and emit 'match' (and 'stat'
// when a cached stat exists). Honors abort/ignore/pause, mark, absolute
// and nodir options; deduplicates via the matches object.
Glob.prototype._emitMatch = function (index, e) {
  if (this.aborted)
    return

  if (isIgnored(this, e))
    return

  // While paused, matches queue and are replayed by resume().
  if (this.paused) {
    this._emitQueue.push([index, e]);
    return
  }

  var abs = pathIsAbsolute(e) ? e : this._makeAbs(e);

  if (this.mark)
    e = this._mark(e);

  if (this.absolute)
    e = abs;

  // Already recorded — keep matches unique.
  if (this.matches[index][e])
    return

  // nodir: suppress anything the cache knows to be a directory.
  if (this.nodir) {
    var c = this.cache[abs];
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true;

  var st = this.statCache[abs];
  if (st)
    this.emit('stat', e, st);

  this.emit('match', e);
};
|
||
|
|
||
|
// readdir for the `**` case: lstat first so symlinked directories are
// recorded in this.symlinks (globstar must not follow them infinitely),
// unless options.follow disables that protection.
Glob.prototype._readdirInGlobStar = function (abs, cb) {
  if (this.aborted)
    return

  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false, cb)

  var lstatkey = 'lstat\0' + abs;
  var self = this;
  // inflight dedupes concurrent lstats of the same path; a falsy return
  // means another call is already in flight and will invoke us.
  var lstatcb = inflight_1(lstatkey, lstatcb_);

  if (lstatcb)
    fs__default['default'].lstat(abs, lstatcb);

  function lstatcb_ (er, lstat) {
    if (er && er.code === 'ENOENT')
      return cb()

    var isSym = lstat && lstat.isSymbolicLink();
    self.symlinks[abs] = isSym;

    // If it's not a symlink or a dir, then it's definitely a regular file.
    // don't bother doing a readdir in that case.
    if (!isSym && lstat && !lstat.isDirectory()) {
      self.cache[abs] = 'FILE';
      cb();
    } else
      self._readdir(abs, false, cb);
  }
};
|
||
|
|
||
|
// Cached, deduplicated readdir. Cache values: false/'FILE' (not a dir),
// an array of entries (a dir), or absent (never seen).
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
  if (this.aborted)
    return

  // Collapse concurrent readdirs of the same (abs, inGlobStar) pair.
  cb = inflight_1('readdir\0'+abs+'\0'+inGlobStar, cb);
  if (!cb)
    return

  //console.error('RD %j %j', +inGlobStar, abs)
  // In globstar mode we need the symlink answer first.
  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs, cb)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs];
    if (!c || c === 'FILE')
      return cb()

    if (Array.isArray(c))
      return cb(null, c)
  }

  fs__default['default'].readdir(abs, readdirCb(this, abs, cb));
};
|
||
|
|
||
|
// Build the fs.readdir completion handler for `abs`: route errors to
// self._readdirError and successful entry lists to self._readdirEntries.
function readdirCb (self, abs, cb) {
  return function onReaddir (er, entries) {
    if (er) {
      self._readdirError(abs, er, cb);
    } else {
      self._readdirEntries(abs, entries, cb);
    }
  };
}
|
||
|
|
||
|
// Record a successful readdir in the cache. Children are marked `true`
// (exists, type unknown) unless mark/stat will need real stats anyway.
Glob.prototype._readdirEntries = function (abs, entries, cb) {
  if (this.aborted)
    return

  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i ++) {
      var e = entries[i];
      if (abs === '/')
        e = abs + e;
      else
        e = abs + '/' + e;
      this.cache[e] = true;
    }
  }

  this.cache[abs] = entries;
  return cb(null, entries)
};
|
||
|
|
||
|
// Classify a readdir failure, cache the result, and continue the walk.
// Only a bad cwd or (in strict mode) an unexpected error aborts.
Glob.prototype._readdirError = function (f, er, cb) {
  if (this.aborted)
    return

  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f);
      this.cache[abs] = 'FILE';
      // A cwd that is not a directory is fatal for the whole walk.
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd);
        error.path = this.cwd;
        error.code = er.code;
        this.emit('error', error);
        this.abort();
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false;
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false;
      if (this.strict) {
        this.emit('error', er);
        // If the error is handled, then we abort
        // if not, we threw out of here
        this.abort();
      }
      if (!this.silent)
        console.error('glob error', er);
      break
  }

  return cb()
};
|
||
|
|
||
|
// Read the directory, then expand the `**` segment in _processGlobStar2.
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this;
  this._readdir(abs, inGlobStar, function (er, entries) {
    self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb);
  });
};
|
||
|
|
||
|
|
||
|
// Expand a `**` segment: match the pattern with the globstar removed,
// and for every non-dot child, both substitute the child for `**` and
// keep `**` below the child. Symlinked dirs stop recursion (cycle guard).
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  //console.error('pgs2', prefix, remain[0], entries)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return cb()

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1);
  var gspref = prefix ? [ prefix ] : [];
  var noGlobStar = gspref.concat(remainWithoutGlobStar);

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false, cb);

  var isSym = this.symlinks[abs];
  var len = entries.length;

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return cb()

  for (var i = 0; i < len; i++) {
    var e = entries[i];
    // `**` never matches dot entries unless options.dot is set.
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar);
    this._process(instead, index, true, cb);

    var below = gspref.concat(entries[i], remain);
    this._process(below, index, true, cb);
  }

  cb();
};
|
||
|
|
||
|
// Handle an all-literal pattern: a single stat decides whether it matches.
Glob.prototype._processSimple = function (prefix, index, cb) {
  // XXX review this.  Shouldn't it be doing the mounting etc
  // before doing stat?  kinda weird?
  var self = this;
  this._stat(prefix, function (er, exists) {
    self._processSimple2(prefix, index, er, exists, cb);
  });
};
|
||
|
// Complete the literal-pattern case: mount absolute prefixes onto the
// configured root, normalize Windows separators, and emit the match.
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {

  //console.error('ps2', prefix, exists)

  if (!this.matches[index])
    this.matches[index] = Object.create(null);

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return cb()

  if (prefix && pathIsAbsolute(prefix) && !this.nomount) {
    // path.join/resolve drop a trailing slash; remember it to restore.
    var trail = /[\/\\]$/.test(prefix);
    if (prefix.charAt(0) === '/') {
      prefix = path__default['default'].join(this.root, prefix);
    } else {
      prefix = path__default['default'].resolve(this.root, prefix);
      if (trail)
        prefix += '/';
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/');

  // Mark this as a match
  this._emitMatch(index, prefix);
  cb();
};
|
||
|
|
||
|
// Returns either 'DIR', 'FILE', or false
// Answers "does f exist, and is it a dir?" via cb(er, type[, stat]).
// Consults this.cache and this.statCache first; otherwise lstats (and
// stats through symlinks), deduplicating concurrent calls with inflight.
Glob.prototype._stat = function (f, cb) {
  var abs = this._makeAbs(f);
  // A trailing slash means the caller requires a directory.
  var needDir = f.slice(-1) === '/';

  if (f.length > this.maxLength)
    return cb()

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs];

    if (Array.isArray(c))
      c = 'DIR';

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return cb(null, c)

    if (needDir && c === 'FILE')
      return cb()

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }
  var stat = this.statCache[abs];
  if (stat !== undefined) {
    if (stat === false)
      return cb(null, stat)
    else {
      var type = stat.isDirectory() ? 'DIR' : 'FILE';
      if (needDir && type === 'FILE')
        return cb()
      else
        return cb(null, type, stat)
    }
  }

  var self = this;
  var statcb = inflight_1('stat\0' + abs, lstatcb_);
  if (statcb)
    fs__default['default'].lstat(abs, statcb);

  function lstatcb_ (er, lstat) {
    if (lstat && lstat.isSymbolicLink()) {
      // If it's a symlink, then treat it as the target, unless
      // the target does not exist, then treat it as a file.
      return fs__default['default'].stat(abs, function (er, stat) {
        if (er)
          self._stat2(f, abs, null, lstat, cb);
        else
          self._stat2(f, abs, er, stat, cb);
      })
    } else {
      self._stat2(f, abs, er, lstat, cb);
    }
  }
};
|
||
|
|
||
|
// Second half of _stat: interpret the (l)stat result, populate the
// stat/type caches, and report 'DIR'/'FILE'/false back to the caller.
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
  if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
    this.statCache[abs] = false;
    return cb()
  }

  var needDir = f.slice(-1) === '/';
  this.statCache[abs] = stat;

  // Trailing-slash request satisfied by a non-directory: no match.
  if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
    return cb(null, false, stat)

  var c = true;
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE';
  // Don't downgrade an existing cache entry (may already hold entries).
  this.cache[abs] = this.cache[abs] || c;

  if (needDir && c === 'FILE')
    return cb()

  return cb(null, c, stat)
};
|
||
|
|
||
|
// rimraf module state: exports, mode constant, and retry bookkeeping.
var rimraf_1 = rimraf;
rimraf.sync = rimrafSync;

// 0666 file mode used when chmod-ing before retrying on Windows EPERM.
var _0666 = parseInt('666', 8);

var defaultGlobOpts = {
  nosort: true,
  silent: true
};

// for EMFILE handling
var timeout = 0;

var isWindows = (process.platform === "win32");
||
|
// Fill in rimraf option defaults in place: fs operations (async and
// *Sync flavors), retry limits, and glob behavior.
function defaults (options) {
  var methods = ['unlink', 'chmod', 'stat', 'lstat', 'rmdir', 'readdir'];
  for (var i = 0; i < methods.length; i++) {
    var m = methods[i];
    options[m] = options[m] || fs__default['default'][m];
    var ms = m + 'Sync';
    options[ms] = options[ms] || fs__default['default'][ms];
  }

  options.maxBusyTries = options.maxBusyTries || 3;
  options.emfileWait = options.emfileWait || 1000;
  // glob === false is shorthand for disabling globbing entirely.
  if (options.glob === false) {
    options.disableGlob = true;
  }
  options.disableGlob = options.disableGlob || false;
  options.glob = options.glob || defaultGlobOpts;
}
|
||
|
|
||
|
// Asynchronous rm -rf. Expands `p` via glob (unless disabled or the path
// exists literally), removes each result with rimraf_, and retries on
// EBUSY/ENOTEMPTY/EPERM (bounded by maxBusyTries) and EMFILE (bounded by
// emfileWait). cb receives the first error encountered, if any.
function rimraf (p, options, cb) {
  if (typeof options === 'function') {
    cb = options;
    options = {};
  }

  assert__default['default'](p, 'rimraf: missing path');
  assert__default['default'].equal(typeof p, 'string', 'rimraf: path should be a string');
  assert__default['default'].equal(typeof cb, 'function', 'rimraf: callback function required');
  assert__default['default'](options, 'rimraf: invalid options argument provided');
  assert__default['default'].equal(typeof options, 'object', 'rimraf: options should be object');

  defaults(options);

  var busyTries = 0;
  var errState = null;
  var n = 0;

  if (options.disableGlob || !glob_1.hasMagic(p))
    return afterGlob(null, [p])

  // If the path exists as-is, skip globbing even if it looks magic.
  options.lstat(p, function (er, stat) {
    if (!er)
      return afterGlob(null, [p])

    glob_1(p, options.glob, afterGlob);
  });

  // Per-result completion: remember the first error, fire cb when all done.
  function next (er) {
    errState = errState || er;
    if (--n === 0)
      cb(errState);
  }

  function afterGlob (er, results) {
    if (er)
      return cb(er)

    n = results.length;
    if (n === 0)
      return cb()

    results.forEach(function (p) {
      rimraf_(p, options, function CB (er) {
        if (er) {
          if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
              busyTries < options.maxBusyTries) {
            busyTries ++;
            var time = busyTries * 100;
            // try again, with the same exact callback as this one.
            return setTimeout(function () {
              rimraf_(p, options, CB);
            }, time)
          }

          // this one won't happen if graceful-fs is used.
          if (er.code === "EMFILE" && timeout < options.emfileWait) {
            return setTimeout(function () {
              rimraf_(p, options, CB);
            }, timeout ++)
          }

          // already gone
          if (er.code === "ENOENT") er = null;
        }

        timeout = 0;
        next(er);
      });
    });
  }
}
|
||
|
|
||
|
// Two possible strategies.
// 1. Assume it's a file.  unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory.  readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong.  However, there
// are likely far more normal files in the world than directories.  This
// is based on the assumption that a the average number of files per
// directory is >= 1.
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow.  But until then, YAGNI.
function rimraf_ (p, options, cb) {
  assert__default['default'](p);
  assert__default['default'](options);
  assert__default['default'](typeof cb === 'function');

  // sunos lets the root user unlink directories, which is... weird.
  // so we have to lstat here and make sure it's not a dir.
  options.lstat(p, function (er, st) {
    if (er && er.code === "ENOENT")
      return cb(null)

    // Windows can EPERM on stat. Life is suffering.
    // NOTE(review): this branch does not `return`, so on a Windows EPERM
    // the code falls through to the unlink below as well — upstream
    // behavior preserved as-is; cb may be invoked more than once here.
    if (er && er.code === "EPERM" && isWindows)
      fixWinEPERM(p, options, er, cb);

    if (st && st.isDirectory())
      return rmdir(p, options, er, cb)

    options.unlink(p, function (er) {
      if (er) {
        if (er.code === "ENOENT")
          return cb(null)
        if (er.code === "EPERM")
          return (isWindows)
            ? fixWinEPERM(p, options, er, cb)
            : rmdir(p, options, er, cb)
        if (er.code === "EISDIR")
          return rmdir(p, options, er, cb)
      }
      return cb(er)
    });
  });
}
|
||
|
|
||
|
// Windows EPERM workaround: chmod the path writable, re-stat, then retry
// as directory removal or unlink. Reports the *original* error `er` if
// the path still exists and the retry path fails to classify it.
function fixWinEPERM (p, options, er, cb) {
  assert__default['default'](p);
  assert__default['default'](options);
  assert__default['default'](typeof cb === 'function');
  if (er)
    assert__default['default'](er instanceof Error);

  options.chmod(p, _0666, function (er2) {
    if (er2)
      // ENOENT: the path vanished — success; anything else: surface `er`.
      cb(er2.code === "ENOENT" ? null : er);
    else
      options.stat(p, function(er3, stats) {
        if (er3)
          cb(er3.code === "ENOENT" ? null : er);
        else if (stats.isDirectory())
          rmdir(p, options, er, cb);
        else
          options.unlink(p, cb);
      });
  });
}
|
||
|
|
||
|
// Synchronous variant of fixWinEPERM: chmod, stat, then rmdirSync or
// unlinkSync. ENOENT at any step means the path is gone (success);
// other failures rethrow the *original* error `er`.
function fixWinEPERMSync (p, options, er) {
  assert__default['default'](p);
  assert__default['default'](options);
  if (er)
    assert__default['default'](er instanceof Error);

  try {
    options.chmodSync(p, _0666);
  } catch (er2) {
    if (er2.code === "ENOENT")
      return
    else
      throw er
  }

  try {
    var stats = options.statSync(p);
  } catch (er3) {
    if (er3.code === "ENOENT")
      return
    else
      throw er
  }

  if (stats.isDirectory())
    rmdirSync(p, options, er);
  else
    options.unlinkSync(p);
}
|
||
|
|
||
|
// Remove a directory; if it is non-empty (or EPERM), delete its children
// first via rmkids. ENOTDIR means the file-first guess was wrong, so the
// original unlink error is reported instead.
function rmdir (p, options, originalEr, cb) {
  assert__default['default'](p);
  assert__default['default'](options);
  if (originalEr)
    assert__default['default'](originalEr instanceof Error);
  assert__default['default'](typeof cb === 'function');

  // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
  // if we guessed wrong, and it's not a directory, then
  // raise the original error.
  options.rmdir(p, function (er) {
    if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
      rmkids(p, options, cb);
    else if (er && er.code === "ENOTDIR")
      cb(originalEr);
    else
      cb(er);
  });
}
|
||
|
|
||
|
// Recursively rimraf every child of directory `p`, then rmdir `p` itself
// once all children have been removed. First child error wins.
function rmkids(p, options, cb) {
  assert__default['default'](p);
  assert__default['default'](options);
  assert__default['default'](typeof cb === 'function');

  options.readdir(p, function (er, files) {
    if (er)
      return cb(er)
    var n = files.length;
    if (n === 0)
      return options.rmdir(p, cb)
    var errState;
    files.forEach(function (f) {
      rimraf(path__default['default'].join(p, f), options, function (er) {
        // A previous child already failed; its error has been reported.
        if (errState)
          return
        if (er)
          return cb(errState = er)
        if (--n === 0)
          options.rmdir(p, cb);
      });
    });
  });
}
|
||
|
|
||
|
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
function rimrafSync (p, options) {
  options = options || {};
  defaults(options);

  assert__default['default'](p, 'rimraf: missing path');
  assert__default['default'].equal(typeof p, 'string', 'rimraf: path should be a string');
  assert__default['default'](options, 'rimraf: missing options');
  assert__default['default'].equal(typeof options, 'object', 'rimraf: options should be object');

  var results;

  if (options.disableGlob || !glob_1.hasMagic(p)) {
    results = [p];
  } else {
    // Prefer the literal path when it exists; glob only on lstat failure.
    try {
      options.lstatSync(p);
      results = [p];
    } catch (er) {
      results = glob_1.sync(p, options.glob);
    }
  }

  if (!results.length)
    return

  // Note: `p` is deliberately reused as the loop variable below.
  for (var i = 0; i < results.length; i++) {
    var p = results[i];

    try {
      var st = options.lstatSync(p);
    } catch (er) {
      // NOTE(review): `return` here skips the *remaining* results, not
      // just this one — upstream behavior preserved as-is.
      if (er.code === "ENOENT")
        return

      // Windows can EPERM on stat. Life is suffering.
      if (er.code === "EPERM" && isWindows)
        fixWinEPERMSync(p, options, er);
    }

    try {
      // sunos lets the root user unlink directories, which is... weird.
      if (st && st.isDirectory())
        rmdirSync(p, options, null);
      else
        options.unlinkSync(p);
    } catch (er) {
      if (er.code === "ENOENT")
        return
      if (er.code === "EPERM")
        return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
      if (er.code !== "EISDIR")
        throw er

      rmdirSync(p, options, er);
    }
  }
}
|
||
|
|
||
|
// Synchronous directory removal; on non-empty (or EPERM) falls back to
// deleting the children first. ENOENT means already gone; ENOTDIR means
// the earlier guess was wrong, so the original error is rethrown.
function rmdirSync (p, options, originalEr) {
  assert__default['default'](p);
  assert__default['default'](options);
  if (originalEr)
    assert__default['default'](originalEr instanceof Error);

  try {
    options.rmdirSync(p);
  } catch (er) {
    if (er.code === "ENOENT")
      return
    if (er.code === "ENOTDIR")
      throw originalEr
    if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")
      rmkidsSync(p, options);
  }
}
|
||
|
|
||
|
// Synchronously remove every child of `p`, then rmdir `p` itself —
// retrying the final rmdir on Windows, which may briefly hold handles.
function rmkidsSync (p, options) {
  assert__default['default'](p);
  assert__default['default'](options);
  options.readdirSync(p).forEach(function (f) {
    rimrafSync(path__default['default'].join(p, f), options);
  });

  // We only end up here once we got ENOTEMPTY at least once, and
  // at this point, we are guaranteed to have removed all the kids.
  // So, we know that it won't be ENOENT or ENOTDIR or anything else.
  // try really hard to delete stuff on windows, because it has a
  // PROFOUNDLY annoying habit of not closing handles promptly when
  // files are deleted, resulting in spurious ENOTEMPTY errors.
  var retries = isWindows ? 100 : 1;
  var i = 0;
  // The finally/continue trick retries while rmdirSync keeps throwing,
  // then rethrows the last error once retries are exhausted.
  do {
    var threw = true;
    try {
      var ret = options.rmdirSync(p, options);
      threw = false;
      return ret
    } finally {
      if (++i < retries && threw)
        continue
    }
  } while (true)
}
|
||
|
|
||
|
// mkdirp module: default directory mode (0777, pre-umask) and exports.
var _0777 = parseInt('0777', 8);

var mkdirp = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;
|
||
|
|
||
|
// Asynchronous mkdir -p. Creates `p` and any missing ancestors; `made`
// tracks the first directory actually created (recursion accumulator)
// and is passed to cb(err, made). `opts` may be a bare mode value.
function mkdirP (p, opts, f, made) {
  if (typeof opts === 'function') {
    f = opts;
    opts = {};
  }
  else if (!opts || typeof opts !== 'object') {
    opts = { mode: opts };
  }

  var mode = opts.mode;
  var xfs = opts.fs || fs__default['default'];

  if (mode === undefined) {
    mode = _0777;
  }
  if (!made) made = null;

  var cb = f || function () {};
  p = path__default['default'].resolve(p);

  xfs.mkdir(p, mode, function (er) {
    if (!er) {
      made = made || p;
      return cb(null, made);
    }
    switch (er.code) {
      case 'ENOENT':
        // Missing parent: recurse upward, then retry `p` itself.
        // dirname(p) === p means we hit the filesystem root — give up.
        if (path__default['default'].dirname(p) === p) return cb(er);
        mkdirP(path__default['default'].dirname(p), opts, function (er, made) {
          if (er) cb(er, made);
          else mkdirP(p, opts, cb, made);
        });
        break;

      // In the case of any other error, just see if there's a dir
      // there already.  If so, then hooray!  If not, then something
      // is borked.
      default:
        xfs.stat(p, function (er2, stat) {
          // if the stat fails, then that's super weird.
          // let the original error be the failure reason.
          if (er2 || !stat.isDirectory()) cb(er, made);
          else cb(null, made);
        });
        break;
    }
  });
}
|
||
|
|
||
|
// Synchronous mkdir -p. Same contract as mkdirP but returns `made` (the
// first directory actually created, or null) and throws on failure.
mkdirP.sync = function sync (p, opts, made) {
  if (!opts || typeof opts !== 'object') {
    opts = { mode: opts };
  }

  var mode = opts.mode;
  var xfs = opts.fs || fs__default['default'];

  if (mode === undefined) {
    mode = _0777;
  }
  if (!made) made = null;

  p = path__default['default'].resolve(p);

  try {
    xfs.mkdirSync(p, mode);
    made = made || p;
  }
  catch (err0) {
    switch (err0.code) {
      case 'ENOENT' :
        // Missing parent: create ancestors first, then retry `p`.
        made = sync(path__default['default'].dirname(p), opts, made);
        sync(p, opts, made);
        break;

      // In the case of any other error, just see if there's a dir
      // there already.  If so, then hooray!  If not, then something
      // is borked.
      default:
        var stat;
        try {
          stat = xfs.statSync(p);
        }
        catch (err1) {
          // Stat failed too: report the original mkdir error.
          throw err0;
        }
        if (!stat.isDirectory()) throw err0;
        break;
    }
  }

  return made;
};
|
||
|
|
||
|
// Bundled copy of the `temp` npm package: creation of temporary files,
// directories, and write streams, with optional tracking so that everything
// created is deleted when the process exits.
var temp = createCommonjsModule(function (module, exports) {
// Result is discarded — presumably this call exists only to warm/verify
// os.tmpdir() at module load time. NOTE(review): confirm it is intentional.
os__default['default'].tmpdir();

const rimrafSync = rimraf_1.sync;

//== helpers
//
// Default directory that generated temp paths are placed in.
let dir = path__default['default'].resolve(os__default['default'].tmpdir());

// Open flags used for temp files: create exclusively, truncate, read/write.
let RDWR_EXCL = cnst__default['default'].O_CREAT | cnst__default['default'].O_TRUNC | cnst__default['default'].O_RDWR | cnst__default['default'].O_EXCL;

// Dual callback/promise support: given a user callback, returns
// [undefined, callback]; given none, returns [promise, promiseCallback]
// where invoking promiseCallback(err, ...results) settles the promise
// on the next tick.
let promisify = function(callback) {
  if (typeof callback === 'function') {
    return [undefined, callback];
  }

  var promiseCallback;
  var promise = new Promise(function(resolve, reject) {
    promiseCallback = function() {
      var args = Array.from(arguments);
      var err = args.shift();

      process.nextTick(function() {
        if (err) {
          reject(err);
        } else if (args.length === 1) {
          // Single result: resolve with the bare value, not an array.
          resolve(args[0]);
        } else {
          resolve(args);
        }
      });
    };
  });

  return [promise, promiseCallback];
};

// Builds a unique-ish temp path: prefix + date + pid + random base36 + suffix.
// NOTE(review): Math.random() is not cryptographically secure, and
// getMonth() is 0-indexed — both are inherited from the upstream package.
var generateName = function(rawAffixes, defaultPrefix) {
  var affixes = parseAffixes(rawAffixes, defaultPrefix);
  var now = new Date();
  var name = [affixes.prefix,
              now.getFullYear(), now.getMonth(), now.getDate(),
              '-',
              process.pid,
              '-',
              (Math.random() * 0x100000000 + 1).toString(36),
              affixes.suffix].join('');
  return path__default['default'].join(affixes.dir || dir, name);
};

// Normalizes the affixes argument: a string is treated as a prefix, an
// object is used as-is ({prefix, suffix, dir}), anything else throws.
var parseAffixes = function(rawAffixes, defaultPrefix) {
  var affixes = {prefix: null, suffix: null};
  if(rawAffixes) {
    switch (typeof(rawAffixes)) {
    case 'string':
      affixes.prefix = rawAffixes;
      break;
    case 'object':
      affixes = rawAffixes;
      break;
    default:
      // NOTE(review): message interpolates `affixes`, not `rawAffixes` —
      // looks like an upstream slip; left unchanged here.
      throw new Error("Unknown affix declaration: " + affixes);
    }
  } else {
    affixes.prefix = defaultPrefix;
  }
  return affixes;
};

/* -------------------------------------------------------------------------
 * Don't forget to call track() if you want file tracking and exit handlers!
 * -------------------------------------------------------------------------
 * When any temp file or directory is created, it is added to filesToDelete
 * or dirsToDelete. The first time any temp file is created, a listener is
 * added to remove all temp files and directories at exit.
 */
var tracking = false;
// Enable (default) or disable tracking; track(false) turns it off.
var track = function(value) {
  tracking = (value !== false);
  return module.exports; // chainable
};
var exitListenerAttached = false;
var filesToDelete = [];
var dirsToDelete = [];

// Registers a file path for deletion at process exit (no-op unless tracking).
function deleteFileOnExit(filePath) {
  if (!tracking) return false;
  attachExitListener();
  filesToDelete.push(filePath);
}

// Registers a directory path for deletion at process exit (no-op unless tracking).
function deleteDirOnExit(dirPath) {
  if (!tracking) return false;
  attachExitListener();
  dirsToDelete.push(dirPath);
}

// Installs the process 'exit' handler exactly once. The handler must be
// synchronous — async work does not run during 'exit'.
function attachExitListener() {
  if (!tracking) return false;
  if (!exitListenerAttached) {
    process.addListener('exit', function() {
      try {
        cleanupSync();
      } catch(err) {
        console.warn("Fail to clean temporary files on exit : ", err);
        throw err;
      }
    });
    exitListenerAttached = true;
  }
}

// Synchronously deletes all tracked files; returns how many were removed,
// or false when tracking is disabled.
function cleanupFilesSync() {
  if (!tracking) {
    return false;
  }
  var count = 0;
  var toDelete;
  while ((toDelete = filesToDelete.shift()) !== undefined) {
    rimrafSync(toDelete, { maxBusyTries: 6 });
    count++;
  }
  return count;
}

// Asynchronously deletes all tracked files; callback(err, count).
// Returns a promise when no callback is supplied.
function cleanupFiles(callback) {
  var p = promisify(callback);
  var promise = p[0];
  callback = p[1];

  if (!tracking) {
    callback(new Error("not tracking"));
    return promise;
  }
  var count = 0;
  var left = filesToDelete.length;
  if (!left) {
    callback(null, count);
    return promise;
  }
  var toDelete;
  var rimrafCallback = function(err) {
    if (!left) {
      // Prevent processing if aborted
      return;
    }
    if (err) {
      // This shouldn't happen; pass error to callback and abort
      // processing
      callback(err);
      left = 0;
      return;
    } else {
      count++;
    }
    left--;
    if (!left) {
      callback(null, count);
    }
  };
  while ((toDelete = filesToDelete.shift()) !== undefined) {
    rimraf_1(toDelete, { maxBusyTries: 6 }, rimrafCallback);
  }
  return promise;
}

// Synchronously deletes all tracked directories; returns the count removed,
// or false when tracking is disabled.
function cleanupDirsSync() {
  if (!tracking) {
    return false;
  }
  var count = 0;
  var toDelete;
  while ((toDelete = dirsToDelete.shift()) !== undefined) {
    rimrafSync(toDelete, { maxBusyTries: 6 });
    count++;
  }
  return count;
}

// Asynchronously deletes all tracked directories; callback(err, count).
// Returns a promise when no callback is supplied.
function cleanupDirs(callback) {
  var p = promisify(callback);
  var promise = p[0];
  callback = p[1];

  if (!tracking) {
    callback(new Error("not tracking"));
    return promise;
  }
  var count = 0;
  var left = dirsToDelete.length;
  if (!left) {
    callback(null, count);
    return promise;
  }
  var toDelete;
  var rimrafCallback = function (err) {
    if (!left) {
      // Prevent processing if aborted
      return;
    }
    if (err) {
      // rimraf handles most "normal" errors; pass the error to the
      // callback and abort processing
      callback(err, count);
      left = 0;
      return;
    } else {
      count++;
    }
    left--;
    if (!left) {
      callback(null, count);
    }
  };
  while ((toDelete = dirsToDelete.shift()) !== undefined) {
    rimraf_1(toDelete, { maxBusyTries: 6 }, rimrafCallback);
  }
  return promise;
}

// Synchronously deletes everything tracked; returns {files, dirs} counts,
// or false when tracking is disabled. Used by the exit handler.
function cleanupSync() {
  if (!tracking) {
    return false;
  }
  var fileCount = cleanupFilesSync();
  var dirCount = cleanupDirsSync();
  return {files: fileCount, dirs: dirCount};
}

// Asynchronously deletes everything tracked: files first, then directories.
// callback(err, {files, dirs}); returns a promise when no callback given.
function cleanup(callback) {
  var p = promisify(callback);
  var promise = p[0];
  callback = p[1];

  if (!tracking) {
    callback(new Error("not tracking"));
    return promise;
  }
  cleanupFiles(function(fileErr, fileCount) {
    if (fileErr) {
      callback(fileErr, {files: fileCount});
    } else {
      cleanupDirs(function(dirErr, dirCount) {
        callback(dirErr, {files: fileCount, dirs: dirCount});
      });
    }
  });
  return promise;
}

//== directories
//
// Creates a fresh temp directory (mode 0700); callback(err, dirPath).
const mkdir = (affixes, callback) => {
  const p = promisify(callback);
  const promise = p[0];
  callback = p[1];

  let dirPath = generateName(affixes, 'd-');
  mkdirp(dirPath, 0o700, (err) => {
    if (!err) {
      deleteDirOnExit(dirPath);
    }
    callback(err, dirPath);
  });
  return promise;
};

// Synchronous variant of mkdir; returns the created directory path.
const mkdirSync = (affixes) => {
  let dirPath = generateName(affixes, 'd-');
  mkdirp.sync(dirPath, 0o700);
  deleteDirOnExit(dirPath);
  return dirPath;
};

//== files
//
// Creates and opens a fresh temp file (exclusive, mode 0600);
// callback(err, { path, fd }).
const open = (affixes, callback) => {
  const p = promisify(callback);
  const promise = p[0];
  callback = p[1];

  const path = generateName(affixes, 'f-');
  fs__default['default'].open(path, RDWR_EXCL, 0o600, (err, fd) => {
    if (!err) {
      deleteFileOnExit(path);
    }
    callback(err, { path, fd });
  });
  return promise;
};

// Synchronous variant of open; returns { path, fd }.
const openSync = (affixes) => {
  const path = generateName(affixes, 'f-');
  let fd = fs__default['default'].openSync(path, RDWR_EXCL, 0o600);
  deleteFileOnExit(path);
  return { path, fd };
};

// Creates a writable stream backed by a fresh temp file (mode 0600).
const createWriteStream = (affixes) => {
  const path = generateName(affixes, 's-');
  let stream = fs__default['default'].createWriteStream(path, { flags: RDWR_EXCL, mode: 0o600 });
  deleteFileOnExit(path);
  return stream;
};

//== settings
//
exports.dir = dir;
exports.track = track;

//== functions
//
exports.mkdir = mkdir;
exports.mkdirSync = mkdirSync;
exports.open = open;
exports.openSync = openSync;
exports.path = generateName;
exports.cleanup = cleanup;
exports.cleanupSync = cleanupSync;
exports.createWriteStream = createWriteStream;
});
|
||
|
|
||
|
var mathJaxFontCSS = `
|
||
|
@font-face /* 0 */ {
|
||
|
font-family: MJXZERO;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Zero.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 1 */ {
|
||
|
font-family: MJXTEX;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Main-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 2 */ {
|
||
|
font-family: MJXTEX-B;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Main-Bold.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 3 */ {
|
||
|
font-family: MJXTEX-I;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Math-Italic.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 4 */ {
|
||
|
font-family: MJXTEX-MI;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Main-Italic.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 5 */ {
|
||
|
font-family: MJXTEX-BI;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Math-BoldItalic.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 6 */ {
|
||
|
font-family: MJXTEX-S1;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Size1-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 7 */ {
|
||
|
font-family: MJXTEX-S2;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Size2-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 8 */ {
|
||
|
font-family: MJXTEX-S3;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Size3-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 9 */ {
|
||
|
font-family: MJXTEX-S4;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Size4-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 10 */ {
|
||
|
font-family: MJXTEX-A;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_AMS-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 11 */ {
|
||
|
font-family: MJXTEX-C;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Calligraphic-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 12 */ {
|
||
|
font-family: MJXTEX-CB;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Calligraphic-Bold.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 13 */ {
|
||
|
font-family: MJXTEX-FR;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Fraktur-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 14 */ {
|
||
|
font-family: MJXTEX-FRB;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Fraktur-Bold.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 15 */ {
|
||
|
font-family: MJXTEX-SS;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_SansSerif-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 16 */ {
|
||
|
font-family: MJXTEX-SSB;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_SansSerif-Bold.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 17 */ {
|
||
|
font-family: MJXTEX-SSI;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_SansSerif-Italic.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 18 */ {
|
||
|
font-family: MJXTEX-SC;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Script-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 19 */ {
|
||
|
font-family: MJXTEX-T;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Typewriter-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 20 */ {
|
||
|
font-family: MJXTEX-V;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Vector-Regular.woff") format("woff");
|
||
|
}
|
||
|
|
||
|
@font-face /* 21 */ {
|
||
|
font-family: MJXTEX-VB;
|
||
|
src: url("http://cdn.mathjax.org/mathjax/latest/fonts/HTML-CSS/TeX/woff/MathJax_Vector-Bold.woff") format("woff");
|
||
|
}`;
|
||
|
|
||
|
// This CSS is composed of Prism.css and a small amount of Obsidian CSS,
|
||
|
// which is copyrighted by the Obsidian developers.
|
||
|
// I've received permission from @Licat on Discord to include this snippet in the plugin
|
||
|
// and HTML exports from it.
|
||
|
// See https://discord.com/channels/686053708261228577/707816848615407697/830630553883377690
|
||
|
// Returns the CSS custom-property block (:root variables) matching
// Obsidian's built-in light or dark palette. Used both for full HTML
// exports and for styling embedded Mermaid diagrams.
function variables(light = true) {
    const lightVariables = `
:root {
--default-font: 'Inter', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Microsoft YaHei Light", sans-serif;
--font-monospace: 'Source Code Pro', monospace;
--background-primary: #ffffff;
--background-modifier-border: #ddd;
--text-accent: #705dcf;
--text-accent-hover: #7a6ae6;
--text-normal: #2e3338;
--background-secondary: #f2f3f5;
--background-secondary-alt: #e3e5e8;
--text-muted: #888888;
}`;
    const darkVariables = `
:root {
--background-primary: #202020;
--background-modifier-border: #333;
--text-accent: #7f6df2;
--text-accent-hover: #8875ff;
--text-normal: #dcddde;
--background-secondary: #161616;
--background-secondary-alt: #000000;
--text-muted: #999;
}
`;
    return light ? lightVariables : darkVariables;
}
|
||
|
// Builds the full baseline stylesheet for exported HTML: theme variables
// (light or dark) followed by the shared body/Prism CSS.
function appCSS(light = true) {
    const themeVariables = variables(light);
    const sharedRules = body();
    return themeVariables + sharedRules;
}
|
||
|
// Returns the theme-independent CSS shared by all HTML exports: a small
// subset of Obsidian's base styles (typography, links, hr, body) followed
// by the stock PrismJS 1.20.0 "prism" theme for syntax highlighting.
// The string is returned verbatim; colors reference the variables()
// custom properties where applicable.
function body() {
    return `
pre, code {
font-family: var(--font-monospace);
}
h1, h2, h3, h4, h5, h6 {
font-weight: 800;
}
a {
color: var(--text-accent);
outline: none;
}
a:hover {
color: var(--text-accent-hover);
}
audio {
outline: none;
}
hr {
border: none;
border-top: 1px solid;
border-color: var(--background-modifier-border);
margin: 26px 0;
}
* {
box-sizing: border-box;
}
body {
text-rendering: optimizeLegibility;
font-family: var(--default-font);
line-height: 1.5em;
font-size: 16px;
background-color: var(--background-primary);
color: var(--text-normal);
}
ul ul, ol ul, ol ul, ul ol {
list-style-type: disc;
}



/* PrismJS 1.20.0
https://prismjs.com/download.html#themes=prism&languages=markup+css+clike+javascript+abap+abnf+actionscript+ada+al+antlr4+apacheconf+apl+applescript+aql+arduino+arff+asciidoc+asm6502+aspnet+autohotkey+autoit+bash+basic+batch+bbcode+bison+bnf+brainfuck+brightscript+bro+c+concurnas+csharp+cpp+cil+coffeescript+cmake+clojure+crystal+csp+css-extras+d+dart+dax+diff+django+dns-zone-file+docker+ebnf+eiffel+ejs+elixir+elm+etlua+erb+erlang+excel-formula+fsharp+factor+firestore-security-rules+flow+fortran+ftl+gcode+gdscript+gedcom+gherkin+git+glsl+gml+go+graphql+groovy+haml+handlebars+haskell+haxe+hcl+hlsl+http+hpkp+hsts+ichigojam+icon+iecst+inform7+ini+io+j+java+javadoc+javadoclike+javastacktrace+jolie+jq+jsdoc+js-extras+js-templates+json+jsonp+json5+julia+keyman+kotlin+latex+latte+less+lilypond+liquid+lisp+livescript+llvm+lolcode+lua+makefile+markdown+markup-templating+matlab+mel+mizar+monkey+moonscript+n1ql+n4js+nand2tetris-hdl+nasm+neon+nginx+nim+nix+nsis+objectivec+ocaml+opencl+oz+parigp+parser+pascal+pascaligo+pcaxis+peoplecode+perl+php+phpdoc+php-extras+plsql+powerquery+powershell+processing+prolog+properties+protobuf+pug+puppet+pure+purebasic+python+q+qml+qore+r+racket+jsx+tsx+renpy+reason+regex+rest+rip+roboconf+robotframework+ruby+rust+sas+sass+scss+scala+scheme+shell-session+smalltalk+smarty+solidity+solution-file+soy+sparql+splunk-spl+sqf+sql+stylus+swift+tap+tcl+textile+toml+tt2+turtle+twig+typescript+t4-cs+t4-vb+t4-templating+unrealscript+vala+vbnet+velocity+verilog+vhdl+vim+visual-basic+warpscript+wasm+wiki+xeora+xml-doc+xojo+xquery+yaml+zig */
/**
* prism.js default theme for JavaScript, CSS and HTML
* Based on dabblet (http://dabblet.com)
* @author Lea Verou
*/
/* Code blocks */
/* Inline code */
code[class*="language-"],
pre[class*="language-"] {
color: black;
background: none;
text-shadow: 0 1px white;
font-family: var(--font-monospace);
text-align: left;
white-space: pre;
word-spacing: normal;
word-break: normal;
word-wrap: normal;
line-height: 1.5;
-moz-tab-size: 4;
-o-tab-size: 4;
tab-size: 4;
-webkit-hyphens: none;
-moz-hyphens: none;
-ms-hyphens: none;
hyphens: none;
}
pre[class*="language-"]::-moz-selection,
pre[class*="language-"] ::-moz-selection,
code[class*="language-"]::-moz-selection,
code[class*="language-"] ::-moz-selection {
text-shadow: none;
background: #b3d4fc;
}
pre[class*="language-"]::selection,
pre[class*="language-"] ::selection,
code[class*="language-"]::selection,
code[class*="language-"] ::selection {
text-shadow: none;
background: #b3d4fc;
}
@media print {
code[class*="language-"],
pre[class*="language-"] {
text-shadow: none;
}
}
pre[class*="language-"] {
padding: 1em;
margin: 0.5em 0;
overflow: auto;
}
:not(pre) > code[class*="language-"],
pre[class*="language-"] {
background: #f5f2f0;
}
:not(pre) > code[class*="language-"] {
padding: 0.1em;
border-radius: 0.3em;
white-space: normal;
}
.token.comment,
.token.prolog,
.token.doctype,
.token.cdata {
color: slategray;
}
.token.punctuation {
color: #999;
}
.token.namespace {
opacity: 0.7;
}
.token.property,
.token.tag,
.token.boolean,
.token.number,
.token.constant,
.token.symbol,
.token.deleted {
color: #905;
}
.token.selector,
.token.attr-name,
.token.string,
.token.char,
.token.builtin,
.token.inserted {
color: #690;
}
.token.operator,
.token.entity,
.token.url,
.language-css .token.string,
.style .token.string {
color: #9a6e3a;
background: hsla(0, 0%, 100%, 0.5);
}
.token.atrule,
.token.attr-value,
.token.keyword {
color: #07a;
}
.token.function,
.token.class-name {
color: #DD4A68;
}
.token.regex,
.token.important,
.token.variable {
color: #e90;
}
.token.important,
.token.bold {
font-weight: bold;
}
.token.italic {
font-style: italic;
}
.token.entity {
cursor: help;
}

`;
}
|
||
|
|
||
|
/*
|
||
|
* renderer.ts
|
||
|
*
|
||
|
* This module exposes a function that turns an Obsidian markdown string into
|
||
|
* an HTML string with as many inconsistencies ironed out as possible
|
||
|
*
|
||
|
*/
|
||
|
// Note: parentFiles is for internal use (to prevent recursively embedded notes)
|
||
|
// inputFile must be an absolute file path
|
||
|
// Renders an Obsidian markdown note to an HTML string.
//
// Params:
//   plugin       - the plugin instance (settings, app, vaultBasePath())
//   view         - object with a `data` property holding the raw markdown
//   inputFile    - absolute path of the note being rendered
//   outputFormat - target pandoc format (affects image/link post-processing)
//   parentFiles  - chain of notes already being embedded (recursion guard)
// Returns (via promise): { html, metadata } where metadata comes from the
// note's YAML frontmatter, with `title` defaulted to the file's base name.
function render(plugin, view, inputFile, outputFormat, parentFiles = []) {
    var _a;
    return __awaiter(this, void 0, void 0, function* () {
        // Use Obsidian's markdown renderer to render to a hidden <div>
        const markdown = view.data;
        const wrapper = document.createElement('div');
        // NOTE(review): 'hidden' is not a valid CSS `display` value ('none'
        // is) — the wrapper is likely visible during rendering. Changing it
        // may affect SVG size measurement downstream, so confirm before fixing.
        wrapper.style.display = 'hidden';
        document.body.appendChild(wrapper);
        yield obsidian.MarkdownRenderer.renderMarkdown(markdown, wrapper, path__namespace.dirname(inputFile), view);
        // Post-process the HTML in-place
        yield postProcessRenderedHTML(plugin, inputFile, wrapper, outputFormat, parentFiles, yield mermaidCSS(plugin.settings, plugin.vaultBasePath()));
        let html = wrapper.innerHTML;
        document.body.removeChild(wrapper);
        // If it's a top level note, make the HTML a standalone document - inject CSS, a <title>, etc.
        const metadata = getYAMLMetadata(markdown);
        // Default the title to the note's base file name when frontmatter has none.
        (_a = metadata.title) !== null && _a !== void 0 ? _a : (metadata.title = fileBaseName(inputFile));
        if (parentFiles.length === 0) {
            html = yield standaloneHTML(plugin.settings, html, metadata.title, plugin.vaultBasePath());
        }
        return { html, metadata };
    });
}
|
||
|
// Takes any file path like '/home/oliver/zettelkasten/Obsidian.md' and
|
||
|
// takes the base name, in this case 'Obsidian'
|
||
|
// Strips both the directory and the extension from a file path:
// '/home/oliver/zettelkasten/Obsidian.md' -> 'Obsidian'.
function fileBaseName(file) {
    const extension = path__namespace.extname(file);
    return path__namespace.basename(file, extension);
}
|
||
|
// Extracts YAML frontmatter from a markdown string.
// Returns the parsed frontmatter object, or {} when the note has no
// frontmatter fence or the opening '---' is never closed.
// (Previously an unterminated fence fell through to parse$1(''), whose
// null result crashed the `metadata.title` access in render().)
function getYAMLMetadata(markdown) {
    markdown = markdown.trim();
    if (markdown.startsWith('---')) {
        const trailing = markdown.substring(3);
        const closingFence = trailing.indexOf('---');
        // Guard: no closing '---' means there is no valid frontmatter block.
        if (closingFence === -1)
            return {};
        const frontmatter = trailing.substring(0, closingFence).trim();
        return parse$1(frontmatter);
    }
    return {};
}
|
||
|
// Loads the user's custom Pandoc CSS file, if one is configured.
// The path is tried as-is (absolute) first, then relative to the vault root.
// Always resolves to a string — '' when no file is configured or it cannot
// be read — so callers can safely concatenate the result. (Previously the
// "not configured" branch resolved to undefined, which getDesiredCSS would
// concatenate as the literal string "undefined".)
function getCustomCSS(settings, vaultBasePath) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!settings.customCSSFile)
            return '';
        let file = settings.customCSSFile;
        let buffer = null;
        // Try absolute path
        try {
            let test = yield fs__namespace.promises.readFile(file);
            buffer = test;
        }
        catch (e) { }
        // Try relative path
        try {
            let test = yield fs__namespace.promises.readFile(path__namespace.join(vaultBasePath, file));
            buffer = test;
        }
        catch (e) { }
        if (!buffer) {
            // Surface the failure to the user but degrade gracefully.
            new obsidian.Notice('Failed to load custom Pandoc CSS file: ' + settings.customCSSFile);
            return '';
        }
        else {
            return buffer.toString();
        }
    });
}
|
||
|
// Reads and parses the vault's Obsidian configuration file
// (<vault>/.obsidian/config, JSON). Rejects if the file is missing or
// malformed; callers wrap this in try/catch.
function getAppConfig(vaultBasePath) {
    return __awaiter(this, void 0, void 0, function* () {
        return JSON.parse((yield fs__namespace.promises.readFile(path__namespace.join(vaultBasePath, '.obsidian', 'config'))).toString());
    });
}
|
||
|
// Determines whether the vault's current base theme is light.
// Obsidian stores the dark theme as 'obsidian'; anything else is treated
// as light. Defaults to light when the config can't be read.
function currentThemeIsLight(vaultBasePath, config = null) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            // A pre-fetched config can be passed in to avoid a second read.
            if (!config)
                config = yield getAppConfig(vaultBasePath);
            return config.theme !== 'obsidian';
        }
        catch (e) {
            // Fail open: assume light theme if the config is unavailable.
            return true;
        }
    });
}
|
||
|
// Chooses the CSS variables to inject into Mermaid diagrams based on the
// user's injectAppCSS setting ('dark', 'current', or anything else => light).
function mermaidCSS(settings, vaultBasePath) {
    return __awaiter(this, void 0, void 0, function* () {
        // We always inject CSS into Mermaid diagrams, using light theme if the user has requested no CSS
        // otherwise the diagrams look terrible. The output is a PNG either way
        let light = true;
        if (settings.injectAppCSS === 'dark')
            light = false;
        if (settings.injectAppCSS === 'current') {
            // Follow whichever theme the vault is currently using.
            light = yield currentThemeIsLight(vaultBasePath);
        }
        return variables(light);
    });
}
|
||
|
// Gets a small subset of app CSS and 3rd party theme CSS if desired
|
||
|
// Gets a small subset of app CSS and 3rd party theme CSS if desired
// Returns '' when CSS injection is disabled or the vault config can't be
// read; otherwise returns appCSS() for the resolved light/dark choice.
function getThemeCSS(settings, vaultBasePath) {
    return __awaiter(this, void 0, void 0, function* () {
        if (settings.injectAppCSS === 'none')
            return '';
        try {
            const config = yield getAppConfig(vaultBasePath);
            // Start from the vault's current theme, then let an explicit
            // 'light'/'dark' setting override it.
            let light = yield currentThemeIsLight(vaultBasePath, config);
            if (settings.injectAppCSS === 'light')
                light = true;
            if (settings.injectAppCSS === 'dark')
                light = false;
            return appCSS(light);
        }
        catch (e) {
            // Degrade to no injected CSS if the config is unreadable.
            return '';
        }
    });
}
|
||
|
// Assembles the full CSS for an export: theme CSS, the live document's
// <style> contents (themes/snippets), MathJax fonts when needed, and the
// user's custom CSS file.
function getDesiredCSS(settings, html, vaultBasePath) {
    return __awaiter(this, void 0, void 0, function* () {
        let css = yield getThemeCSS(settings, vaultBasePath);
        if (settings.injectAppCSS !== 'none') {
            // Pull in every <style> element currently in the app document
            // (covers installed themes and CSS snippets).
            css += ' ' + Array.from(document.querySelectorAll('style'))
                .map(s => s.innerHTML).join(' ');
        }
        // Inject MathJax font CSS if needed (at this stage embedded notes are
        // already embedded so doesn't duplicate CSS)
        if (html.indexOf('jax="CHTML"') !== -1)
            css += ' ' + mathJaxFontCSS;
        // Inject custom local CSS file if it exists
        css += yield getCustomCSS(settings, vaultBasePath);
        return css;
    });
}
|
||
|
// Turns an HTML fragment into a complete standalone document: wraps it in
// <html>/<head>/<body>, sets the <title>, and inlines the assembled CSS.
function standaloneHTML(settings, html, title, vaultBasePath) {
    return __awaiter(this, void 0, void 0, function* () {
        // Wraps an HTML fragment in a proper document structure
        // and injects the page's CSS
        const css = yield getDesiredCSS(settings, html, vaultBasePath);
        // NOTE(review): `title` and `css` are interpolated without HTML
        // escaping — a title containing '</title>' would break the document.
        return `<!doctype html>\n` +
            `<html>\n` +
            `    <head>\n` +
            `        <title>${title}</title>\n` +
            `        <meta charset='utf-8'/>\n` +
            `        <style>\n${css}\n</style>\n` +
            `    </head>\n` +
            `    <body>\n` +
            `${html}\n` +
            `    </body>\n` +
            `</html>`;
    });
}
|
||
|
// Rewrites the DOM produced by Obsidian's renderer (in place, inside
// `wrapper`) so the HTML survives outside the app: fixes image spans,
// inlines embedded notes, rewrites internal links per settings, resolves
// app:// image URLs, optionally strips frontmatter, and patches Mermaid SVGs.
function postProcessRenderedHTML(plugin, inputFile, wrapper, outputFormat, parentFiles = [], css = '') {
    return __awaiter(this, void 0, void 0, function* () {
        const dirname = path__namespace.dirname(inputFile);
        const adapter = plugin.app.vault.adapter;
        const settings = plugin.settings;
        // Fix <span src="image.png">
        for (let span of Array.from(wrapper.querySelectorAll('span[src$=".png"], span[src$=".jpg"], span[src$=".gif"], span[src$=".jpeg"]'))) {
            span.innerHTML = '';
            // NOTE(review): this global replace also rewrites any occurrence of
            // 'span' inside attribute values (e.g. a src containing 'span') —
            // confirm that's acceptable before tightening.
            span.outerHTML = span.outerHTML.replace(/span/g, 'img');
        }
        // Fix <span class='internal-embed' src='another_note_without_extension'>
        for (let span of Array.from(wrapper.querySelectorAll('span.internal-embed'))) {
            let src = span.getAttribute('src');
            if (src) {
                const subfolder = inputFile.substring(adapter.getBasePath().length); // TODO: this is messy
                const file = plugin.app.metadataCache.getFirstLinkpathDest(src, subfolder);
                try {
                    if (parentFiles.indexOf(file.path) !== -1) {
                        // We've got an infinite recursion on our hands
                        // We should replace the embed with a wikilink
                        // Then our link processing happens afterwards
                        span.outerHTML = `<a href="${file}">${span.innerHTML}</a>`;
                    }
                    else {
                        // Recursively render the embedded note and splice its HTML in.
                        const markdown = yield adapter.read(file.path);
                        const newParentFiles = [...parentFiles];
                        newParentFiles.push(inputFile);
                        // TODO: because of this cast, embedded notes won't be able to handle complex plugins (eg DataView)
                        const html = yield render(plugin, { data: markdown }, file.path, outputFormat, newParentFiles);
                        span.outerHTML = html.html;
                    }
                }
                catch (e) {
                    // Continue if it can't be loaded
                    console.error("Pandoc plugin encountered an error trying to load an embedded note: " + e.toString());
                }
            }
        }
        // Fix <a href="app://obsidian.md/markdown_file_without_extension">
        const prefix = 'app://obsidian.md/';
        for (let a of Array.from(wrapper.querySelectorAll('a'))) {
            if (!a.href.startsWith(prefix))
                continue;
            // This is now an internal link (wikilink)
            if (settings.linkStrippingBehaviour === 'link' || outputFormat === 'html') {
                let href = path__namespace.join(dirname, a.href.substring(prefix.length));
                if (settings.addExtensionsToInternalLinks.length && a.href.startsWith(prefix)) {
                    if (path__namespace.extname(href) === '') {
                        const dir = path__namespace.dirname(href);
                        const base = path__namespace.basename(href);
                        // Be careful to turn [[note#heading]] into note.extension#heading not note#heading.extension
                        const hashIndex = base.indexOf('#');
                        if (hashIndex !== -1) {
                            href = path__namespace.join(dir, base.substring(0, hashIndex) + '.' + settings.addExtensionsToInternalLinks + base.substring(hashIndex));
                        }
                        else {
                            href = path__namespace.join(dir, base + '.' + settings.addExtensionsToInternalLinks);
                        }
                    }
                }
                a.href = href;
            }
            else if (settings.linkStrippingBehaviour === 'strip') {
                // Remove the link and its text entirely.
                a.outerHTML = '';
            }
            else if (settings.linkStrippingBehaviour === 'text') {
                // Keep only the link's visible text.
                a.outerHTML = a.innerText;
            }
            else if (settings.linkStrippingBehaviour === 'unchanged') {
                // Re-wrap as wikilink syntax for downstream processing.
                a.outerHTML = '[[' + a.outerHTML + ']]';
            }
        }
        // Fix <img src="app://obsidian.md/image.png">
        // Note: this will throw errors when Obsidian tries to load images with a (now invalid) src
        // These errors can be safely ignored
        if (outputFormat !== 'html') {
            for (let img of Array.from(wrapper.querySelectorAll('img'))) {
                // data-touched guards against rewriting the same image twice
                // when a note is post-processed again as an embed.
                if (img.src.startsWith(prefix) && img.getAttribute('data-touched') !== 'true') {
                    img.src = adapter.getFullPath(img.src.substring(prefix.length));
                    img.setAttribute('data-touched', 'true');
                }
            }
        }
        // Remove YAML frontmatter from the output if desired
        if (!settings.displayYAMLFrontmatter) {
            // NOTE(review): removeChild throws if a matched element is not a
            // direct child of wrapper — el.remove() would be safer; confirm
            // frontmatter elements are always top-level before changing.
            Array.from(wrapper.querySelectorAll('.frontmatter, .frontmatter-container'))
                .forEach(el => wrapper.removeChild(el));
        }
        // Fix Mermaid.js diagrams
        for (let svg of Array.from(wrapper.querySelectorAll('svg'))) {
            // Insert the CSS variables as a CSS string (even if the user doesn't want CSS injected; Mermaid diagrams look terrible otherwise)
            // TODO: it injects light theme CSS, do we want this?
            let style = svg.querySelector('style') || svg.appendChild(document.createElement('style'));
            style.innerHTML += css;
            // Inject a marker (arrowhead) for Mermaid.js diagrams and use it at the end of paths
            svg.innerHTML += `"<marker id="mermaid_arrowhead" viewBox="0 0 10 10" refX="9" refY="5" markerUnits="strokeWidth" markerWidth="8" markerHeight="6" orient="auto"><path d="M 0 0 L 10 5 L 0 10 z" class="arrowheadPath" style="stroke-width: 1; stroke-dasharray: 1, 0;"></path></marker>"`;
            svg.innerHTML = svg.innerHTML.replace(/app:\/\/obsidian\.md\/index\.html#arrowhead\d*/g, "#mermaid_arrowhead");
            // If the output isn't HTML, replace the SVG with a PNG for compatibility
            if (outputFormat !== 'html') {
                const scale = settings.highDPIDiagrams ? 2 : 1;
                const png = yield convertSVGToPNG(svg, scale);
                svg.parentNode.replaceChild(png, svg);
            }
        }
    });
}
|
||
|
// This creates an unmounted <img> element with a transparent background PNG data URL as the src
// The scale parameter is used for high DPI renders (the <img> element size is the same,
// but the underlying PNG is higher resolution)
|
||
|
function convertSVGToPNG(svg, scale = 1) {
|
||
|
const canvas = document.createElement('canvas');
|
||
|
canvas.width = Math.ceil(svg.width.baseVal.value * scale);
|
||
|
canvas.height = Math.ceil(svg.height.baseVal.value * scale);
|
||
|
const ctx = canvas.getContext('2d');
|
||
|
var svgImg = new Image;
|
||
|
svgImg.src = "data:image/svg+xml;base64," + btoa(svg.outerHTML);
|
||
|
return new Promise((resolve, reject) => {
|
||
|
svgImg.onload = () => {
|
||
|
ctx.drawImage(svgImg, 0, 0, canvas.width, canvas.height);
|
||
|
const pngData = canvas.toDataURL('png');
|
||
|
const img = document.createElement('img');
|
||
|
img.src = pngData;
|
||
|
img.width = Math.ceil(svg.width.baseVal.value);
|
||
|
img.height = Math.ceil(svg.height.baseVal.value);
|
||
|
resolve(img);
|
||
|
};
|
||
|
});
|
||
|
}
|
||
|
|
||
|
/*
 * settings.ts
 *
 * Creates the settings UI
 *
 */
|
||
|
class PandocPluginSettingTab extends obsidian.PluginSettingTab {
|
||
|
constructor(app, plugin) {
|
||
|
super(app, plugin);
|
||
|
this.errorMessages = {
|
||
|
pandoc: "Pandoc is not installed or accessible on your PATH. This plugin's functionality will be limited.",
|
||
|
latex: "LaTeX is not installed or accessible on your PATH. Please install it if you want PDF exports via LaTeX.",
|
||
|
};
|
||
|
this.plugin = plugin;
|
||
|
}
|
||
|
display() {
|
||
|
let { containerEl } = this;
|
||
|
containerEl.empty();
|
||
|
containerEl.createEl('h3', { text: 'Pandoc Plugin' });
|
||
|
const createError = (text) => containerEl.createEl('p', { cls: 'pandoc-plugin-error', text });
|
||
|
for (const binary in this.plugin.features) {
|
||
|
const path = this.plugin.features[binary];
|
||
|
if (path === undefined) {
|
||
|
createError(this.errorMessages[binary]);
|
||
|
}
|
||
|
}
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("Custom CSS file for HTML output")
|
||
|
.setDesc("This local CSS file will be read and injected into HTML exports. Use an absolute path or a path relative to the vault.")
|
||
|
.addText(text => text
|
||
|
.setPlaceholder('File name')
|
||
|
.setValue(this.plugin.settings.customCSSFile)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
if (!value.length)
|
||
|
this.plugin.settings.customCSSFile = null;
|
||
|
else
|
||
|
this.plugin.settings.customCSSFile = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
})));
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("Inject app CSS (HTML output only)")
|
||
|
.setDesc("This applies app & plugin CSS to HTML exports, but the files become a little bigger.")
|
||
|
.addDropdown(dropdown => dropdown
|
||
|
.addOptions({
|
||
|
"current": "Current theme",
|
||
|
"none": "Neither theme",
|
||
|
"light": "Light theme",
|
||
|
"dark": "Dark theme",
|
||
|
})
|
||
|
.setValue(this.plugin.settings.injectAppCSS)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
this.plugin.settings.injectAppCSS = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
})));
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("Internal link processing")
|
||
|
.setDesc("This controls how [[wiki-links]] are formatted. Doesn't affect HTML output.")
|
||
|
.addDropdown(dropdown => dropdown
|
||
|
.addOptions({
|
||
|
"text": "Turn into text",
|
||
|
"link": "Leave as links",
|
||
|
"strip": "Remove links",
|
||
|
"unchanged": "Leave unchanged",
|
||
|
})
|
||
|
.setValue(this.plugin.settings.linkStrippingBehaviour)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
this.plugin.settings.linkStrippingBehaviour = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
})));
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("Export files from HTML or markdown?")
|
||
|
.setDesc("Export from markdown, or from the HTML visible in Obsidian? HTML supports fancy plugin features, markdown supports Pandoc features like citations.")
|
||
|
.addDropdown(dropdown => dropdown
|
||
|
.addOptions({
|
||
|
"html": "HTML",
|
||
|
"md": "Markdown",
|
||
|
})
|
||
|
.setValue(this.plugin.settings.exportFrom)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
this.plugin.settings.exportFrom = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
})));
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("Export folder")
|
||
|
.setDesc("Absolute path to an export folder, like 'C:\Users\Example\Documents' or '/home/user/zettelkasten'. If left blank, files are saved next to where they were exported from.")
|
||
|
.addText(text => text
|
||
|
.setPlaceholder('same as target')
|
||
|
.setValue(this.plugin.settings.outputFolder)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
this.plugin.settings.outputFolder = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
})));
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("Show Pandoc command line interface commands")
|
||
|
.setDesc("Doesn't apply to HTML exports. Using the CLI will have slightly different results due to how this plugin works.")
|
||
|
.addToggle(toggle => toggle
|
||
|
.setValue(this.plugin.settings.showCLICommands)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
this.plugin.settings.showCLICommands = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
})));
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("Pandoc path")
|
||
|
.setDesc("Optional override for Pandoc's path if you have command not found issues. On Mac/Linux use the output of 'which pandoc' in a terminal; on Windows use the output of 'Get-Command pandoc' in powershell.")
|
||
|
.addText(text => text
|
||
|
.setPlaceholder('pandoc')
|
||
|
.setValue(this.plugin.settings.pandoc)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
this.plugin.settings.pandoc = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
})));
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("PDFLaTeX path")
|
||
|
.setDesc("Optional override for pdflatex's path. Same as above but with 'which pdflatex'")
|
||
|
.addText(text => text
|
||
|
.setPlaceholder('pdflatex')
|
||
|
.setValue(this.plugin.settings.pdflatex)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
this.plugin.settings.pdflatex = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
})));
|
||
|
new obsidian.Setting(containerEl)
|
||
|
.setName("Extra Pandoc arguments")
|
||
|
.setDesc("Add extra command line arguments so you can use templates or bibliographies. Newlines are turned into spaces")
|
||
|
.addTextArea(text => text
|
||
|
.setPlaceholder('Example: --bibliography "Zotero Exports\My Library.json" or --template letter')
|
||
|
.setValue(this.plugin.settings.extraArguments)
|
||
|
.onChange((value) => __awaiter(this, void 0, void 0, function* () {
|
||
|
this.plugin.settings.extraArguments = value;
|
||
|
yield this.plugin.saveSettings();
|
||
|
}))
|
||
|
.inputEl.style.minHeight = '150px');
|
||
|
}
|
||
|
}
|
||
|
|
||
|
const DEFAULT_SETTINGS = {
|
||
|
showCLICommands: false,
|
||
|
addExtensionsToInternalLinks: 'html',
|
||
|
injectAppCSS: 'light',
|
||
|
injectThemeCSS: false,
|
||
|
customCSSFile: null,
|
||
|
displayYAMLFrontmatter: false,
|
||
|
linkStrippingBehaviour: 'text',
|
||
|
highDPIDiagrams: true,
|
||
|
pandoc: null,
|
||
|
pdflatex: null,
|
||
|
outputFolder: null,
|
||
|
extraArguments: '',
|
||
|
exportFrom: 'html',
|
||
|
};
|
||
|
function replaceFileExtension(file, ext) {
|
||
|
// Source: https://stackoverflow.com/a/5953384/4642943
|
||
|
let pos = file.lastIndexOf('.');
|
||
|
return file.substr(0, pos < 0 ? file.length : pos) + '.' + ext;
|
||
|
}
|
||
|
|
||
|
/*
 * main.ts
 *
 * Initialises the plugin, adds command palette options, adds the settings UI
 * Markdown processing is done in renderer.ts and Pandoc invocation in pandoc.ts
 *
 */
|
||
|
class PandocPlugin extends obsidian.Plugin {
|
||
|
constructor() {
|
||
|
super(...arguments);
|
||
|
this.features = {};
|
||
|
}
|
||
|
onload() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
console.log('Loading Pandoc plugin');
|
||
|
yield this.loadSettings();
|
||
|
// Check if Pandoc, LaTeX, etc. are installed and in the PATH
|
||
|
this.createBinaryMap();
|
||
|
// Register all of the command palette entries
|
||
|
this.registerCommands();
|
||
|
this.addSettingTab(new PandocPluginSettingTab(this.app, this));
|
||
|
});
|
||
|
}
|
||
|
registerCommands() {
|
||
|
for (let [prettyName, pandocFormat, extension, shortName] of outputFormats) {
|
||
|
const name = 'Export as ' + prettyName;
|
||
|
this.addCommand({
|
||
|
id: 'pandoc-export-' + pandocFormat, name,
|
||
|
checkCallback: (checking) => {
|
||
|
if (!this.app.workspace.activeLeaf)
|
||
|
return false;
|
||
|
if (!this.currentFileCanBeExported(pandocFormat))
|
||
|
return false;
|
||
|
if (!checking) {
|
||
|
this.startPandocExport(this.getCurrentFile(), pandocFormat, extension, shortName);
|
||
|
}
|
||
|
return true;
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
vaultBasePath() {
|
||
|
return this.app.vault.adapter.getBasePath();
|
||
|
}
|
||
|
getCurrentFile() {
|
||
|
const fileData = this.app.workspace.getActiveFile();
|
||
|
if (!fileData)
|
||
|
return null;
|
||
|
const adapter = this.app.vault.adapter;
|
||
|
if (adapter instanceof obsidian.FileSystemAdapter)
|
||
|
return adapter.getFullPath(fileData.path);
|
||
|
return null;
|
||
|
}
|
||
|
currentFileCanBeExported(format) {
|
||
|
// Is it an available output type?
|
||
|
if (needsPandoc(format) && !this.features['pandoc'])
|
||
|
return false;
|
||
|
if (needsLaTeX(format) && !this.features['pdflatex'])
|
||
|
return false;
|
||
|
// Is it a supported input type?
|
||
|
const file = this.getCurrentFile();
|
||
|
if (!file)
|
||
|
return false;
|
||
|
for (const ext of inputExtensions) {
|
||
|
if (file.endsWith(ext))
|
||
|
return true;
|
||
|
}
|
||
|
return false;
|
||
|
}
|
||
|
createBinaryMap() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
this.features['pandoc'] = this.settings.pandoc || (yield lib.lookpath('pandoc'));
|
||
|
this.features['pdflatex'] = this.settings.pdflatex || (yield lib.lookpath('pdflatex'));
|
||
|
});
|
||
|
}
|
||
|
startPandocExport(inputFile, format, extension, shortName) {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
new obsidian.Notice(`Exporting ${inputFile} to ${shortName}`);
|
||
|
// Instead of using Pandoc to process the raw Markdown, we use Obsidian's
|
||
|
// internal markdown renderer, and process the HTML it generates instead.
|
||
|
// This allows us to more easily deal with Obsidian specific Markdown syntax.
|
||
|
// However, we provide an option to use MD instead to use citations
|
||
|
let outputFile = replaceFileExtension(inputFile, extension);
|
||
|
if (this.settings.outputFolder) {
|
||
|
outputFile = path__namespace.join(this.settings.outputFolder, path__namespace.basename(outputFile));
|
||
|
}
|
||
|
const view = this.app.workspace.getActiveViewOfType(obsidian.MarkdownView);
|
||
|
try {
|
||
|
let error, command;
|
||
|
switch (this.settings.exportFrom) {
|
||
|
case 'html': {
|
||
|
const { html, metadata } = yield render(this, view, inputFile, format);
|
||
|
if (format === 'html') {
|
||
|
// Write to HTML file
|
||
|
yield fs__namespace.promises.writeFile(outputFile, html);
|
||
|
new obsidian.Notice('Successfully exported via Pandoc to ' + outputFile);
|
||
|
return;
|
||
|
}
|
||
|
else {
|
||
|
// Spawn Pandoc
|
||
|
const metadataFile = temp.path();
|
||
|
const metadataString = stringify(metadata);
|
||
|
yield fs__namespace.promises.writeFile(metadataFile, metadataString);
|
||
|
const result = yield pandoc({
|
||
|
file: 'STDIN', contents: html, format: 'html', metadataFile,
|
||
|
pandoc: this.settings.pandoc, pdflatex: this.settings.pdflatex
|
||
|
}, { file: outputFile, format }, this.settings.extraArguments.split('\n'));
|
||
|
error = result.error;
|
||
|
command = result.command;
|
||
|
}
|
||
|
break;
|
||
|
}
|
||
|
case 'md': {
|
||
|
const result = yield pandoc({
|
||
|
file: inputFile, format: 'markdown',
|
||
|
pandoc: this.settings.pandoc, pdflatex: this.settings.pdflatex
|
||
|
}, { file: outputFile, format }, this.settings.extraArguments.split('\n'));
|
||
|
error = result.error;
|
||
|
command = result.command;
|
||
|
break;
|
||
|
}
|
||
|
}
|
||
|
if (error.length) {
|
||
|
new obsidian.Notice('Exported via Pandoc to ' + outputFile + ' with warnings');
|
||
|
new obsidian.Notice('Pandoc warnings:' + error, 10000);
|
||
|
}
|
||
|
else {
|
||
|
new obsidian.Notice('Successfully exported via Pandoc to ' + outputFile);
|
||
|
}
|
||
|
if (this.settings.showCLICommands) {
|
||
|
new obsidian.Notice('Pandoc command: ' + command, 10000);
|
||
|
console.log(command);
|
||
|
}
|
||
|
}
|
||
|
catch (e) {
|
||
|
new obsidian.Notice('Pandoc export failed: ' + e.toString(), 15000);
|
||
|
console.error(e);
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
onunload() {
|
||
|
console.log('Unloading Pandoc plugin');
|
||
|
}
|
||
|
loadSettings() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
this.settings = Object.assign({}, DEFAULT_SETTINGS, yield this.loadData());
|
||
|
});
|
||
|
}
|
||
|
saveSettings() {
|
||
|
return __awaiter(this, void 0, void 0, function* () {
|
||
|
yield this.saveData(this.settings);
|
||
|
});
|
||
|
}
|
||
|
}
|
||
|
|
||
|
module.exports = PandocPlugin;
|
||
|
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcyIsInNvdXJjZXMiOlsibm9kZV9tb2R1bGVzL3RzbGliL3RzbGliLmVzNi5qcyIsIm5vZGVfbW9kdWxlcy9sb29rcGF0aC9saWIvaW5kZXguanMiLCJwYW5kb2MudHMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvbm9kZXMvTm9kZS5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC92aXNpdC5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9kb2MvZGlyZWN0aXZlcy5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9ub2Rlcy90b0pTLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L25vZGVzL0FsaWFzLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L25vZGVzL1NjYWxhci5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9kb2MvY3JlYXRlTm9kZS5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9zdHJpbmdpZnkvYWRkQ29tbWVudC5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9zdHJpbmdpZnkvZm9sZEZsb3dMaW5lcy5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9zdHJpbmdpZnkvc3RyaW5naWZ5U3RyaW5nLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3N0cmluZ2lmeS9zdHJpbmdpZnkuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvbm9kZXMvQ29sbGVjdGlvbi5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9sb2cuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvbm9kZXMvUGFpci5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9vcHRpb25zLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L25vZGVzL1lBTUxTZXEuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvZG9jL0FuY2hvcnMuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvc3RyaW5naWZ5L3N0cmluZ2lmeU51bWJlci5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9ub2Rlcy9ZQU1MTWFwLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3RhZ3MvZmFpbHNhZmUvbWFwLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3RhZ3MvZmFpbHNhZmUvc2VxLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3RhZ3MvZmFpbHNhZmUvc3RyaW5nLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3RhZ3MvZmFpbHNhZmUvaW5kZXguanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvdGFncy9jb3JlLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3RhZ3MvanNvbi5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC90YWdzL3lhbWwtMS4xL2JpbmFy
eS5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC90YWdzL3lhbWwtMS4xL3BhaXJzLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3RhZ3MveWFtbC0xLjEvb21hcC5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC90YWdzL3lhbWwtMS4xL3NldC5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC90YWdzL3lhbWwtMS4xL3RpbWVzdGFtcC5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC90YWdzL3lhbWwtMS4xL2luZGV4LmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3RhZ3MvaW5kZXguanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvZG9jL2dldFNjaGVtYVRhZ3MuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvZG9jL1NjaGVtYS5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9kb2MvYXBwbHlSZXZpdmVyLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2RvYy9Eb2N1bWVudC5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9lcnJvcnMuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvY29tcG9zZS9yZXNvbHZlLXByb3BzLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2NvbXBvc2UvdXRpbC1jb250YWlucy1uZXdsaW5lLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2NvbXBvc2UvcmVzb2x2ZS1ibG9jay1tYXAuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvY29tcG9zZS9yZXNvbHZlLWJsb2NrLXNlcS5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9jb21wb3NlL3Jlc29sdmUtZW5kLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2NvbXBvc2UvcmVzb2x2ZS1mbG93LWNvbGxlY3Rpb24uanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvY29tcG9zZS9jb21wb3NlLWNvbGxlY3Rpb24uanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvY29tcG9zZS9yZXNvbHZlLWJsb2NrLXNjYWxhci5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9jb21wb3NlL3Jlc29sdmUtZmxvdy1zY2FsYXIuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvY29tcG9zZS9jb21wb3NlLXNjYWxhci5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9jb21wb3NlL3V0aWwtZW1wdHktc2NhbGFyLXBvc2l0aW9uLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2NvbXBvc2UvY29tcG9zZS1ub2RlLmpzIiwibm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2NvbXBvc2UvY29tcG9zZS1kb2MuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvY29tcG9zZS9jb21wb3Nlci5qcyIsIm5vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9wYXJzZS90b2tlbnMuanMiLCJub2RlX21vZHVs
ZXMveWFtbC9icm93c2VyL2Rpc3QvcGFyc2UvbGV4ZXIuanMiLCJub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3QvcGF
|